/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "internal-fn.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "tree-chkp.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
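
/* Illustrative note (added here, not part of the original source): by the
   test above, names such as "__builtin_memcpy" or "__sync_fetch_and_add"
   are treated as builtin names, while a plain "memcpy" is not.  */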
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned int ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
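
/* Worked example (added for illustration, not in the original source):
   if get_object_alignment_1 reports align == 16 * BITS_PER_UNIT and
   bitpos == 4 * BITS_PER_UNIT, the object sits 4 bytes past a 16-byte
   boundary, so the usable alignment collapses to the lowest set bit of
   bitpos, i.e. 4 bytes.  */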
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
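
/* Illustrative example (added, not in the original source): for the source
   string "hello" with a known constant offset of 2, MAX is 5, PTR points at
   the STRING_CST data, and the host-side strlen (ptr + 2) yields
   ssize_int (3); an offset of 7 would instead trigger the
   "offset outside bounds of constant string" warning above.  */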
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node,
				TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
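
/* Illustrative example (added, not in the original source): on a
   little-endian target with 8-bit units, c_readstr ("abcd", SImode) packs
   the bytes lowest-address-first into the low bits, producing the constant
   0x64636261; on a big-endian target the same call yields 0x61626364.  */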
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

static void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
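
/* Buffer layout sketch (added for illustration, not in the original
   source): word 0 of the __builtin_setjmp buffer receives the frame
   pointer value, word 1 the address of RECEIVER_LABEL, and the remaining
   words starting at offset 2 * GET_MODE_SIZE (Pmode) hold the
   machine-dependent stack save area written by emit_stack_save above.  */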
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
*iter
)
1061 return (iter
->i
< iter
->n
);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
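
/* Usage example (added, not in the original source): the call
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE), as used
   by expand_builtin_nonlocal_goto below, accepts EXP only if it has
   exactly two arguments and both are of pointer type; a trailing 0
   instead of VOID_TYPE would allow any further arguments.  */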
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1197 (not all will be used on all machines) that was passed to __builtin_setjmp.
1198 It updates the stack pointer in that block to the current value. This is
1199 also called directly by the SJLJ exception handling code. */
1202 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1204 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1206 = gen_rtx_MEM (sa_mode
,
1209 plus_constant (Pmode
, buf_addr
,
1210 2 * GET_MODE_SIZE (Pmode
))));
1212 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
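
/* Illustrative note (added, not in the original source): a call such as
   __builtin_prefetch (p, 1, 3) therefore expands with op1 == 1 (prefetch
   for write) and op2 == 3 (high temporal locality), while plain
   __builtin_prefetch (p) uses the defaults 0 and 3 established above.  */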
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size == -1)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
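
/* Layout note (added for illustration, not in the original source): the
   size computed above describes a block of the form
     [ incoming arg pointer ][ struct value address, if passed ]
     [ each FUNCTION_ARG_REGNO_P register, padded to its alignment ]
   which is the order expand_builtin_apply_args_1 fills in below.  */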
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size == -1)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1737 expand_builtin_return (rtx result
)
1739 int size
, align
, regno
;
1742 rtx_insn
*call_fusage
= 0;
1744 result
= convert_memory_address (Pmode
, result
);
1746 apply_result_size ();
1747 result
= gen_rtx_MEM (BLKmode
, result
);
1749 if (targetm
.have_untyped_return ())
1751 rtx vector
= result_vector (0, result
);
1752 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1757 /* Restore the return value and note that each value is used. */
1759 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1760 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1762 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1763 if (size
% align
!= 0)
1764 size
= CEIL (size
, align
) * align
;
1765 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1766 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1768 push_to_sequence (call_fusage
);
1770 call_fusage
= get_insns ();
1772 size
+= GET_MODE_SIZE (mode
);
1775 /* Put the USE insns before the return. */
1776 emit_insn (call_fusage
);
1778 /* Return whatever values was restored by jumping directly to the end
1780 expand_naked_return ();
1783 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1785 static enum type_class
1786 type_to_class (tree type
)
1788 switch (TREE_CODE (type
))
1790 case VOID_TYPE
: return void_type_class
;
1791 case INTEGER_TYPE
: return integer_type_class
;
1792 case ENUMERAL_TYPE
: return enumeral_type_class
;
1793 case BOOLEAN_TYPE
: return boolean_type_class
;
1794 case POINTER_TYPE
: return pointer_type_class
;
1795 case REFERENCE_TYPE
: return reference_type_class
;
1796 case OFFSET_TYPE
: return offset_type_class
;
1797 case REAL_TYPE
: return real_type_class
;
1798 case COMPLEX_TYPE
: return complex_type_class
;
1799 case FUNCTION_TYPE
: return function_type_class
;
1800 case METHOD_TYPE
: return method_type_class
;
1801 case RECORD_TYPE
: return record_type_class
;
1803 case QUAL_UNION_TYPE
: return union_type_class
;
1804 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1805 ? string_type_class
: array_type_class
);
1806 case LANG_TYPE
: return lang_type_class
;
1807 default: return no_type_class
;
1811 /* Expand a call EXP to __builtin_classify_type. */
1814 expand_builtin_classify_type (tree exp
)
1816 if (call_expr_nargs (exp
))
1817 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1818 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
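/* For reference, a sketch of what one use of the macro above expands to
   (illustrative only):

     CASE_MATHFN (BUILT_IN_SIN)
   becomes
     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;
*/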
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)
    default:
      return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
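/* Example (illustrative): given the table above,
   mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the implicitly
   declared sinf, while the same call with long_double_type_node returns
   sinl; for a type whose main variant is none of double, float, or
   long double the result is NULL_TREE.  */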
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	= gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
2006 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2007 Return NULL_RTX if a normal call should be emitted rather than expanding
2008 the function in-line. EXP is the expression that is a call to the builtin
2009 function; if convenient, the result should be placed in TARGET.
2010 SUBTARGET may be used as the target for computing one of EXP's operands. */
2013 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
2015 optab builtin_optab
;
2018 tree fndecl
= get_callee_fndecl (exp
);
2020 bool errno_set
= false;
2021 bool try_widening
= false;
2024 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2027 arg
= CALL_EXPR_ARG (exp
, 0);
2029 switch (DECL_FUNCTION_CODE (fndecl
))
2031 CASE_FLT_FN (BUILT_IN_SQRT
):
2032 errno_set
= ! tree_expr_nonnegative_p (arg
);
2033 try_widening
= true;
2034 builtin_optab
= sqrt_optab
;
2036 CASE_FLT_FN (BUILT_IN_EXP
):
2037 errno_set
= true; builtin_optab
= exp_optab
; break;
2038 CASE_FLT_FN (BUILT_IN_EXP10
):
2039 CASE_FLT_FN (BUILT_IN_POW10
):
2040 errno_set
= true; builtin_optab
= exp10_optab
; break;
2041 CASE_FLT_FN (BUILT_IN_EXP2
):
2042 errno_set
= true; builtin_optab
= exp2_optab
; break;
2043 CASE_FLT_FN (BUILT_IN_EXPM1
):
2044 errno_set
= true; builtin_optab
= expm1_optab
; break;
2045 CASE_FLT_FN (BUILT_IN_LOGB
):
2046 errno_set
= true; builtin_optab
= logb_optab
; break;
2047 CASE_FLT_FN (BUILT_IN_LOG
):
2048 errno_set
= true; builtin_optab
= log_optab
; break;
2049 CASE_FLT_FN (BUILT_IN_LOG10
):
2050 errno_set
= true; builtin_optab
= log10_optab
; break;
2051 CASE_FLT_FN (BUILT_IN_LOG2
):
2052 errno_set
= true; builtin_optab
= log2_optab
; break;
2053 CASE_FLT_FN (BUILT_IN_LOG1P
):
2054 errno_set
= true; builtin_optab
= log1p_optab
; break;
2055 CASE_FLT_FN (BUILT_IN_ASIN
):
2056 builtin_optab
= asin_optab
; break;
2057 CASE_FLT_FN (BUILT_IN_ACOS
):
2058 builtin_optab
= acos_optab
; break;
2059 CASE_FLT_FN (BUILT_IN_TAN
):
2060 builtin_optab
= tan_optab
; break;
2061 CASE_FLT_FN (BUILT_IN_ATAN
):
2062 builtin_optab
= atan_optab
; break;
2063 CASE_FLT_FN (BUILT_IN_FLOOR
):
2064 builtin_optab
= floor_optab
; break;
2065 CASE_FLT_FN (BUILT_IN_CEIL
):
2066 builtin_optab
= ceil_optab
; break;
2067 CASE_FLT_FN (BUILT_IN_TRUNC
):
2068 builtin_optab
= btrunc_optab
; break;
2069 CASE_FLT_FN (BUILT_IN_ROUND
):
2070 builtin_optab
= round_optab
; break;
2071 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2072 builtin_optab
= nearbyint_optab
;
2073 if (flag_trapping_math
)
2075 /* Else fallthrough and expand as rint. */
2076 CASE_FLT_FN (BUILT_IN_RINT
):
2077 builtin_optab
= rint_optab
; break;
2078 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2079 builtin_optab
= significand_optab
; break;
2084 /* Make a suitable register to place result in. */
2085 mode
= TYPE_MODE (TREE_TYPE (exp
));
2087 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2090 /* Before working hard, check whether the instruction is available, but try
2091 to widen the mode for specific operations. */
2092 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2093 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2094 && (!errno_set
|| !optimize_insn_for_size_p ()))
2096 rtx result
= gen_reg_rtx (mode
);
2098 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2099 need to expand the argument again. This way, we will not perform
2100 side-effects more the once. */
2101 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2103 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2107 /* Compute into RESULT.
2108 Set RESULT to wherever the result comes back. */
2109 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2114 expand_errno_check (exp
, result
);
2116 /* Output the entire sequence. */
2117 insns
= get_insns ();
2123 /* If we were unable to expand via the builtin, stop the sequence
2124 (without outputting the insns) and call to the library function
2125 with the stabilized argument list. */
2129 return expand_call (exp
, target
, target
== const0_rtx
);
2132 /* Expand a call to the builtin binary math functions (pow and atan2).
2133 Return NULL_RTX if a normal call should be emitted rather than expanding the
2134 function in-line. EXP is the expression that is a call to the builtin
2135 function; if convenient, the result should be placed in TARGET.
2136 SUBTARGET may be used as the target for computing one of EXP's
2140 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2142 optab builtin_optab
;
2143 rtx op0
, op1
, result
;
2145 int op1_type
= REAL_TYPE
;
2146 tree fndecl
= get_callee_fndecl (exp
);
2149 bool errno_set
= true;
2151 switch (DECL_FUNCTION_CODE (fndecl
))
2153 CASE_FLT_FN (BUILT_IN_SCALBN
):
2154 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2155 CASE_FLT_FN (BUILT_IN_LDEXP
):
2156 op1_type
= INTEGER_TYPE
;
2161 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2164 arg0
= CALL_EXPR_ARG (exp
, 0);
2165 arg1
= CALL_EXPR_ARG (exp
, 1);
2167 switch (DECL_FUNCTION_CODE (fndecl
))
2169 CASE_FLT_FN (BUILT_IN_POW
):
2170 builtin_optab
= pow_optab
; break;
2171 CASE_FLT_FN (BUILT_IN_ATAN2
):
2172 builtin_optab
= atan2_optab
; break;
2173 CASE_FLT_FN (BUILT_IN_SCALB
):
2174 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2176 builtin_optab
= scalb_optab
; break;
2177 CASE_FLT_FN (BUILT_IN_SCALBN
):
2178 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2181 /* Fall through... */
2182 CASE_FLT_FN (BUILT_IN_LDEXP
):
2183 builtin_optab
= ldexp_optab
; break;
2184 CASE_FLT_FN (BUILT_IN_FMOD
):
2185 builtin_optab
= fmod_optab
; break;
2186 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2187 CASE_FLT_FN (BUILT_IN_DREM
):
2188 builtin_optab
= remainder_optab
; break;
2193 /* Make a suitable register to place result in. */
2194 mode
= TYPE_MODE (TREE_TYPE (exp
));
2196 /* Before working hard, check whether the instruction is available. */
2197 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2200 result
= gen_reg_rtx (mode
);
2202 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2205 if (errno_set
&& optimize_insn_for_size_p ())
2208 /* Always stabilize the argument list. */
2209 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2210 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2212 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2213 op1
= expand_normal (arg1
);
2217 /* Compute into RESULT.
2218 Set RESULT to wherever the result comes back. */
2219 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2220 result
, 0, OPTAB_DIRECT
);
2222 /* If we were unable to expand via the builtin, stop the sequence
2223 (without outputting the insns) and call to the library function
2224 with the stabilized argument list. */
2228 return expand_call (exp
, target
, target
== const0_rtx
);
2232 expand_errno_check (exp
, result
);
2234 /* Output the entire sequence. */
2235 insns
= get_insns ();
2242 /* Expand a call to the builtin trinary math functions (fma).
2243 Return NULL_RTX if a normal call should be emitted rather than expanding the
2244 function in-line. EXP is the expression that is a call to the builtin
2245 function; if convenient, the result should be placed in TARGET.
2246 SUBTARGET may be used as the target for computing one of EXP's
2250 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2252 optab builtin_optab
;
2253 rtx op0
, op1
, op2
, result
;
2255 tree fndecl
= get_callee_fndecl (exp
);
2256 tree arg0
, arg1
, arg2
;
2259 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2262 arg0
= CALL_EXPR_ARG (exp
, 0);
2263 arg1
= CALL_EXPR_ARG (exp
, 1);
2264 arg2
= CALL_EXPR_ARG (exp
, 2);
2266 switch (DECL_FUNCTION_CODE (fndecl
))
2268 CASE_FLT_FN (BUILT_IN_FMA
):
2269 builtin_optab
= fma_optab
; break;
2274 /* Make a suitable register to place result in. */
2275 mode
= TYPE_MODE (TREE_TYPE (exp
));
2277 /* Before working hard, check whether the instruction is available. */
2278 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2281 result
= gen_reg_rtx (mode
);
2283 /* Always stabilize the argument list. */
2284 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2285 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2286 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2288 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2289 op1
= expand_normal (arg1
);
2290 op2
= expand_normal (arg2
);
2294 /* Compute into RESULT.
2295 Set RESULT to wherever the result comes back. */
2296 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2299 /* If we were unable to expand via the builtin, stop the sequence
2300 (without outputting the insns) and call to the library function
2301 with the stabilized argument list. */
2305 return expand_call (exp
, target
, target
== const0_rtx
);
2308 /* Output the entire sequence. */
2309 insns
= get_insns ();
2316 /* Expand a call to the builtin sin and cos math functions.
2317 Return NULL_RTX if a normal call should be emitted rather than expanding the
2318 function in-line. EXP is the expression that is a call to the builtin
2319 function; if convenient, the result should be placed in TARGET.
2320 SUBTARGET may be used as the target for computing one of EXP's
2324 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2326 optab builtin_optab
;
2329 tree fndecl
= get_callee_fndecl (exp
);
2333 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2336 arg
= CALL_EXPR_ARG (exp
, 0);
2338 switch (DECL_FUNCTION_CODE (fndecl
))
2340 CASE_FLT_FN (BUILT_IN_SIN
):
2341 CASE_FLT_FN (BUILT_IN_COS
):
2342 builtin_optab
= sincos_optab
; break;
2347 /* Make a suitable register to place result in. */
2348 mode
= TYPE_MODE (TREE_TYPE (exp
));
2350 /* Check if sincos insn is available, otherwise fallback
2351 to sin or cos insn. */
2352 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2353 switch (DECL_FUNCTION_CODE (fndecl
))
2355 CASE_FLT_FN (BUILT_IN_SIN
):
2356 builtin_optab
= sin_optab
; break;
2357 CASE_FLT_FN (BUILT_IN_COS
):
2358 builtin_optab
= cos_optab
; break;
2363 /* Before working hard, check whether the instruction is available. */
2364 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2366 rtx result
= gen_reg_rtx (mode
);
2368 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2369 need to expand the argument again. This way, we will not perform
2370 side-effects more the once. */
2371 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2373 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2377 /* Compute into RESULT.
2378 Set RESULT to wherever the result comes back. */
2379 if (builtin_optab
== sincos_optab
)
2383 switch (DECL_FUNCTION_CODE (fndecl
))
2385 CASE_FLT_FN (BUILT_IN_SIN
):
2386 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2388 CASE_FLT_FN (BUILT_IN_COS
):
2389 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2397 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2401 /* Output the entire sequence. */
2402 insns
= get_insns ();
2408 /* If we were unable to expand via the builtin, stop the sequence
2409 (without outputting the insns) and call to the library function
2410 with the stabilized argument list. */
2414 return expand_call (exp
, target
, target
== const0_rtx
);
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2461 /* Expand a call to one of the builtin math functions that operate on
2462 floating point argument and output an integer result (ilogb, isinf,
2464 Return 0 if a normal call should be emitted rather than expanding the
2465 function in-line. EXP is the expression that is a call to the builtin
2466 function; if convenient, the result should be placed in TARGET. */
2469 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2471 enum insn_code icode
= CODE_FOR_nothing
;
2473 tree fndecl
= get_callee_fndecl (exp
);
2477 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2480 arg
= CALL_EXPR_ARG (exp
, 0);
2481 icode
= interclass_mathfn_icode (arg
, fndecl
);
2482 mode
= TYPE_MODE (TREE_TYPE (arg
));
2484 if (icode
!= CODE_FOR_nothing
)
2486 struct expand_operand ops
[1];
2487 rtx_insn
*last
= get_last_insn ();
2488 tree orig_arg
= arg
;
2490 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2491 need to expand the argument again. This way, we will not perform
2492 side-effects more the once. */
2493 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2495 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2497 if (mode
!= GET_MODE (op0
))
2498 op0
= convert_to_mode (mode
, op0
, 0);
2500 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2501 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2502 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2503 return ops
[0].value
;
2505 delete_insns_since (last
);
2506 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2512 /* Expand a call to the builtin sincos math function.
2513 Return NULL_RTX if a normal call should be emitted rather than expanding the
2514 function in-line. EXP is the expression that is a call to the builtin
2518 expand_builtin_sincos (tree exp
)
2520 rtx op0
, op1
, op2
, target1
, target2
;
2522 tree arg
, sinp
, cosp
;
2524 location_t loc
= EXPR_LOCATION (exp
);
2525 tree alias_type
, alias_off
;
2527 if (!validate_arglist (exp
, REAL_TYPE
,
2528 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2531 arg
= CALL_EXPR_ARG (exp
, 0);
2532 sinp
= CALL_EXPR_ARG (exp
, 1);
2533 cosp
= CALL_EXPR_ARG (exp
, 2);
2535 /* Make a suitable register to place result in. */
2536 mode
= TYPE_MODE (TREE_TYPE (arg
));
2538 /* Check if sincos insn is available, otherwise emit the call. */
2539 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2542 target1
= gen_reg_rtx (mode
);
2543 target2
= gen_reg_rtx (mode
);
2545 op0
= expand_normal (arg
);
2546 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2547 alias_off
= build_int_cst (alias_type
, 0);
2548 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2550 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2553 /* Compute into target1 and target2.
2554 Set TARGET to wherever the result comes back. */
2555 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2556 gcc_assert (result
);
2558 /* Move target1 and target2 to the memory locations indicated
2560 emit_move_insn (op1
, target1
);
2561 emit_move_insn (op2
, target2
);
2566 /* Expand a call to the internal cexpi builtin to the sincos math function.
2567 EXP is the expression that is a call to the builtin function; if convenient,
2568 the result should be placed in TARGET. */
2571 expand_builtin_cexpi (tree exp
, rtx target
)
2573 tree fndecl
= get_callee_fndecl (exp
);
2577 location_t loc
= EXPR_LOCATION (exp
);
2579 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2582 arg
= CALL_EXPR_ARG (exp
, 0);
2583 type
= TREE_TYPE (arg
);
2584 mode
= TYPE_MODE (TREE_TYPE (arg
));
2586 /* Try expanding via a sincos optab, fall back to emitting a libcall
2587 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2588 is only generated from sincos, cexp or if we have either of them. */
2589 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2591 op1
= gen_reg_rtx (mode
);
2592 op2
= gen_reg_rtx (mode
);
2594 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2596 /* Compute into op1 and op2. */
2597 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2599 else if (targetm
.libc_has_function (function_sincos
))
2601 tree call
, fn
= NULL_TREE
;
2605 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2606 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2607 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2608 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2609 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2610 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2614 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2615 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2616 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2617 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2618 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2619 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2621 /* Make sure not to fold the sincos call again. */
2622 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2623 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2624 call
, 3, arg
, top1
, top2
));
2628 tree call
, fn
= NULL_TREE
, narg
;
2629 tree ctype
= build_complex_type (type
);
2631 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2632 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2633 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2634 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2635 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2636 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2640 /* If we don't have a decl for cexp create one. This is the
2641 friendliest fallback if the user calls __builtin_cexpi
2642 without full target C99 function support. */
2643 if (fn
== NULL_TREE
)
2646 const char *name
= NULL
;
2648 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2650 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2652 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2655 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2656 fn
= build_fn_decl (name
, fntype
);
2659 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2660 build_real (type
, dconst0
), arg
);
2662 /* Make sure not to fold the cexp call again. */
2663 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2664 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2665 target
, VOIDmode
, EXPAND_NORMAL
);
2668 /* Now build the proper return type. */
2669 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2670 make_tree (TREE_TYPE (arg
), op2
),
2671 make_tree (TREE_TYPE (arg
), op1
)),
2672 target
, VOIDmode
, EXPAND_NORMAL
);
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
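/* Example (illustrative): later in this file the helper above is used to
   rewrite one builtin call as another, e.g. building an unfolded call to
   strcpy while expanding stpcpy, roughly as

     tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   which is then handed to expand_expr.  */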
2694 /* Expand a call to one of the builtin rounding functions gcc defines
2695 as an extension (lfloor and lceil). As these are gcc extensions we
2696 do not need to worry about setting errno to EDOM.
2697 If expanding via optab fails, lower expression to (int)(floor(x)).
2698 EXP is the expression that is a call to the builtin function;
2699 if convenient, the result should be placed in TARGET. */
2702 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2704 convert_optab builtin_optab
;
2707 tree fndecl
= get_callee_fndecl (exp
);
2708 enum built_in_function fallback_fn
;
2709 tree fallback_fndecl
;
2713 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2716 arg
= CALL_EXPR_ARG (exp
, 0);
2718 switch (DECL_FUNCTION_CODE (fndecl
))
2720 CASE_FLT_FN (BUILT_IN_ICEIL
):
2721 CASE_FLT_FN (BUILT_IN_LCEIL
):
2722 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2723 builtin_optab
= lceil_optab
;
2724 fallback_fn
= BUILT_IN_CEIL
;
2727 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2728 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2729 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2730 builtin_optab
= lfloor_optab
;
2731 fallback_fn
= BUILT_IN_FLOOR
;
2738 /* Make a suitable register to place result in. */
2739 mode
= TYPE_MODE (TREE_TYPE (exp
));
2741 target
= gen_reg_rtx (mode
);
2743 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2744 need to expand the argument again. This way, we will not perform
2745 side-effects more the once. */
2746 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2748 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2752 /* Compute into TARGET. */
2753 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2755 /* Output the entire sequence. */
2756 insns
= get_insns ();
2762 /* If we were unable to expand via the builtin, stop the sequence
2763 (without outputting the insns). */
2766 /* Fall back to floating point rounding optab. */
2767 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2769 /* For non-C99 targets we may end up without a fallback fndecl here
2770 if the user called __builtin_lfloor directly. In this case emit
2771 a call to the floor/ceil variants nevertheless. This should result
2772 in the best user experience for not full C99 targets. */
2773 if (fallback_fndecl
== NULL_TREE
)
2776 const char *name
= NULL
;
2778 switch (DECL_FUNCTION_CODE (fndecl
))
2780 case BUILT_IN_ICEIL
:
2781 case BUILT_IN_LCEIL
:
2782 case BUILT_IN_LLCEIL
:
2785 case BUILT_IN_ICEILF
:
2786 case BUILT_IN_LCEILF
:
2787 case BUILT_IN_LLCEILF
:
2790 case BUILT_IN_ICEILL
:
2791 case BUILT_IN_LCEILL
:
2792 case BUILT_IN_LLCEILL
:
2795 case BUILT_IN_IFLOOR
:
2796 case BUILT_IN_LFLOOR
:
2797 case BUILT_IN_LLFLOOR
:
2800 case BUILT_IN_IFLOORF
:
2801 case BUILT_IN_LFLOORF
:
2802 case BUILT_IN_LLFLOORF
:
2805 case BUILT_IN_IFLOORL
:
2806 case BUILT_IN_LFLOORL
:
2807 case BUILT_IN_LLFLOORL
:
2814 fntype
= build_function_type_list (TREE_TYPE (arg
),
2815 TREE_TYPE (arg
), NULL_TREE
);
2816 fallback_fndecl
= build_fn_decl (name
, fntype
);
2819 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2821 tmp
= expand_normal (exp
);
2822 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2824 /* Truncate the result of floating point optab to integer
2825 via expand_fix (). */
2826 target
= gen_reg_rtx (mode
);
2827 expand_fix (target
, tmp
, 0);
2832 /* Expand a call to one of the builtin math functions doing integer
2834 Return 0 if a normal call should be emitted rather than expanding the
2835 function in-line. EXP is the expression that is a call to the builtin
2836 function; if convenient, the result should be placed in TARGET. */
2839 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2841 convert_optab builtin_optab
;
2844 tree fndecl
= get_callee_fndecl (exp
);
2847 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2849 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2852 arg
= CALL_EXPR_ARG (exp
, 0);
2854 switch (DECL_FUNCTION_CODE (fndecl
))
2856 CASE_FLT_FN (BUILT_IN_IRINT
):
2857 fallback_fn
= BUILT_IN_LRINT
;
2859 CASE_FLT_FN (BUILT_IN_LRINT
):
2860 CASE_FLT_FN (BUILT_IN_LLRINT
):
2861 builtin_optab
= lrint_optab
;
2864 CASE_FLT_FN (BUILT_IN_IROUND
):
2865 fallback_fn
= BUILT_IN_LROUND
;
2867 CASE_FLT_FN (BUILT_IN_LROUND
):
2868 CASE_FLT_FN (BUILT_IN_LLROUND
):
2869 builtin_optab
= lround_optab
;
2876 /* There's no easy way to detect the case we need to set EDOM. */
2877 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2880 /* Make a suitable register to place result in. */
2881 mode
= TYPE_MODE (TREE_TYPE (exp
));
2883 /* There's no easy way to detect the case we need to set EDOM. */
2884 if (!flag_errno_math
)
2886 rtx result
= gen_reg_rtx (mode
);
2888 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2889 need to expand the argument again. This way, we will not perform
2890 side-effects more the once. */
2891 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2893 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2897 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2899 /* Output the entire sequence. */
2900 insns
= get_insns ();
2906 /* If we were unable to expand via the builtin, stop the sequence
2907 (without outputting the insns) and call to the library function
2908 with the stabilized argument list. */
2912 if (fallback_fn
!= BUILT_IN_NONE
)
2914 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2915 targets, (int) round (x) should never be transformed into
2916 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2917 a call to lround in the hope that the target provides at least some
2918 C99 functions. This should result in the best user experience for
2919 not full C99 targets. */
2920 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2923 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2924 fallback_fndecl
, 1, arg
);
2926 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2927 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2928 return convert_to_mode (mode
, target
, 0);
2931 return expand_call (exp
, target
, target
== const0_rtx
);
2934 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2935 a normal call should be emitted rather than expanding the function
2936 in-line. EXP is the expression that is a call to the builtin
2937 function; if convenient, the result should be placed in TARGET. */
2940 expand_builtin_powi (tree exp
, rtx target
)
2947 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2950 arg0
= CALL_EXPR_ARG (exp
, 0);
2951 arg1
= CALL_EXPR_ARG (exp
, 1);
2952 mode
= TYPE_MODE (TREE_TYPE (exp
));
2954 /* Emit a libcall to libgcc. */
2956 /* Mode of the 2nd argument must match that of an int. */
2957 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2959 if (target
== NULL_RTX
)
2960 target
= gen_reg_rtx (mode
);
2962 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2963 if (GET_MODE (op0
) != mode
)
2964 op0
= convert_to_mode (mode
, op0
, 0);
2965 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2966 if (GET_MODE (op1
) != mode2
)
2967 op1
= convert_to_mode (mode2
, op1
, 0);
2969 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2970 target
, LCT_CONST
, mode
, 2,
2971 op0
, mode
, op1
, mode2
);
2976 /* Expand expression EXP which is a call to the strlen builtin. Return
2977 NULL_RTX if we failed the caller should emit a normal call, otherwise
2978 try to get the result in TARGET, if convenient. */
2981 expand_builtin_strlen (tree exp
, rtx target
,
2982 machine_mode target_mode
)
2984 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2988 struct expand_operand ops
[4];
2991 tree src
= CALL_EXPR_ARG (exp
, 0);
2993 rtx_insn
*before_strlen
;
2994 machine_mode insn_mode
= target_mode
;
2995 enum insn_code icode
= CODE_FOR_nothing
;
2998 /* If the length can be computed at compile-time, return it. */
2999 len
= c_strlen (src
, 0);
3001 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3003 /* If the length can be computed at compile-time and is constant
3004 integer, but there are side-effects in src, evaluate
3005 src for side-effects, then return len.
3006 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3007 can be optimized into: i++; x = 3; */
3008 len
= c_strlen (src
, 1);
3009 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3011 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3012 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3015 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
3017 /* If SRC is not a pointer type, don't do this operation inline. */
3021 /* Bail out if we can't compute strlen in the right mode. */
3022 while (insn_mode
!= VOIDmode
)
3024 icode
= optab_handler (strlen_optab
, insn_mode
);
3025 if (icode
!= CODE_FOR_nothing
)
3028 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3030 if (insn_mode
== VOIDmode
)
3033 /* Make a place to hold the source address. We will not expand
3034 the actual source until we are sure that the expansion will
3035 not fail -- there are trees that cannot be expanded twice. */
3036 src_reg
= gen_reg_rtx (Pmode
);
3038 /* Mark the beginning of the strlen sequence so we can emit the
3039 source operand later. */
3040 before_strlen
= get_last_insn ();
3042 create_output_operand (&ops
[0], target
, insn_mode
);
3043 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3044 create_integer_operand (&ops
[2], 0);
3045 create_integer_operand (&ops
[3], align
);
3046 if (!maybe_expand_insn (icode
, 4, ops
))
3049 /* Now that we are assured of success, expand the source. */
3051 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3054 #ifdef POINTERS_EXTEND_UNSIGNED
3055 if (GET_MODE (pat
) != Pmode
)
3056 pat
= convert_to_mode (Pmode
, pat
,
3057 POINTERS_EXTEND_UNSIGNED
);
3059 emit_move_insn (src_reg
, pat
);
3065 emit_insn_after (pat
, before_strlen
);
3067 emit_insn_before (pat
, get_insns ());
3069 /* Return the value in the proper mode for this function. */
3070 if (GET_MODE (ops
[0].value
) == target_mode
)
3071 target
= ops
[0].value
;
3072 else if (target
!= 0)
3073 convert_move (target
, ops
[0].value
, 0);
3075 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
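/* Example (illustrative): if DATA points to the string "abcdef", OFFSET is 2
   and MODE is a 4-byte integer mode, the callback hands store_by_pieces an
   immediate built from the bytes "cdef" via c_readstr, so a copy of a
   constant string can be emitted as stores of constants instead of loads
   from the string.  */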
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size; then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us determine the minimal size as N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
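/* Example (illustrative): for a call memset (p, 0, n) where range info says
   n is in [16, 128], the helper above sets *min_size = 16 and
   *max_size = *probable_max_size = 128; when n is the literal 32, all three
   come back as 32.  These bounds feed the size hints passed to
   emit_block_move_hints and clear_storage_hints below.  */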
3165 /* Helper function to do the actual work for expand_builtin_memcpy. */
3168 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
3170 const char *src_str
;
3171 unsigned int src_align
= get_pointer_alignment (src
);
3172 unsigned int dest_align
= get_pointer_alignment (dest
);
3173 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3174 HOST_WIDE_INT expected_size
= -1;
3175 unsigned int expected_align
= 0;
3176 unsigned HOST_WIDE_INT min_size
;
3177 unsigned HOST_WIDE_INT max_size
;
3178 unsigned HOST_WIDE_INT probable_max_size
;
3180 /* If DEST is not a pointer type, call the normal function. */
3181 if (dest_align
== 0)
3184 /* If either SRC is not a pointer type, don't do this
3185 operation in-line. */
3189 if (currently_expanding_gimple_stmt
)
3190 stringop_block_profile (currently_expanding_gimple_stmt
,
3191 &expected_align
, &expected_size
);
3193 if (expected_align
< dest_align
)
3194 expected_align
= dest_align
;
3195 dest_mem
= get_memory_rtx (dest
, len
);
3196 set_mem_align (dest_mem
, dest_align
);
3197 len_rtx
= expand_normal (len
);
3198 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3199 &probable_max_size
);
3200 src_str
= c_getstr (src
);
3202 /* If SRC is a string constant and block move would be done
3203 by pieces, we can avoid loading the string from memory
3204 and only stored the computed constants. */
3206 && CONST_INT_P (len_rtx
)
3207 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3208 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3209 CONST_CAST (char *, src_str
),
3212 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3213 builtin_memcpy_read_str
,
3214 CONST_CAST (char *, src_str
),
3215 dest_align
, false, 0);
3216 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3217 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3221 src_mem
= get_memory_rtx (src
, len
);
3222 set_mem_align (src_mem
, src_align
);
3224 /* Copy word part most expediently. */
3225 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3226 CALL_EXPR_TAILCALL (exp
)
3227 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3228 expected_align
, expected_size
,
3229 min_size
, max_size
, probable_max_size
);
3233 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3234 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3240 /* Expand a call EXP to the memcpy builtin.
3241 Return NULL_RTX if we failed, the caller should emit a normal call,
3242 otherwise try to get the result in TARGET, if convenient (and in
3243 mode MODE if that's convenient). */
3246 expand_builtin_memcpy (tree exp
, rtx target
)
3248 if (!validate_arglist (exp
,
3249 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3253 tree dest
= CALL_EXPR_ARG (exp
, 0);
3254 tree src
= CALL_EXPR_ARG (exp
, 1);
3255 tree len
= CALL_EXPR_ARG (exp
, 2);
3256 return expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3260 /* Expand an instrumented call EXP to the memcpy builtin.
3261 Return NULL_RTX if we failed, the caller should emit a normal call,
3262 otherwise try to get the result in TARGET, if convenient (and in
3263 mode MODE if that's convenient). */
3266 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3268 if (!validate_arglist (exp
,
3269 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3270 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3271 INTEGER_TYPE
, VOID_TYPE
))
3275 tree dest
= CALL_EXPR_ARG (exp
, 0);
3276 tree src
= CALL_EXPR_ARG (exp
, 2);
3277 tree len
= CALL_EXPR_ARG (exp
, 4);
3278 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3280 /* Return src bounds with the result. */
3283 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3284 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3285 res
= chkp_join_splitted_slot (res
, bnd
);
3291 /* Expand a call EXP to the mempcpy builtin.
3292 Return NULL_RTX if we failed; the caller should emit a normal call,
3293 otherwise try to get the result in TARGET, if convenient (and in
3294 mode MODE if that's convenient). If ENDP is 0 return the
3295 destination pointer, if ENDP is 1 return the end pointer ala
3296 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3300 expand_builtin_mempcpy (tree exp
, rtx target
, machine_mode mode
)
3302 if (!validate_arglist (exp
,
3303 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3307 tree dest
= CALL_EXPR_ARG (exp
, 0);
3308 tree src
= CALL_EXPR_ARG (exp
, 1);
3309 tree len
= CALL_EXPR_ARG (exp
, 2);
3310 return expand_builtin_mempcpy_args (dest
, src
, len
,
3311 target
, mode
, /*endp=*/ 1,
3316 /* Expand an instrumented call EXP to the mempcpy builtin.
3317 Return NULL_RTX if we failed, the caller should emit a normal call,
3318 otherwise try to get the result in TARGET, if convenient (and in
3319 mode MODE if that's convenient). */
3322 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3324 if (!validate_arglist (exp
,
3325 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3326 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3327 INTEGER_TYPE
, VOID_TYPE
))
3331 tree dest
= CALL_EXPR_ARG (exp
, 0);
3332 tree src
= CALL_EXPR_ARG (exp
, 2);
3333 tree len
= CALL_EXPR_ARG (exp
, 4);
3334 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3337 /* Return src bounds with the result. */
3340 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3341 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3342 res
= chkp_join_splitted_slot (res
, bnd
);
3348 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3349 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3350 so that this can also be called without constructing an actual CALL_EXPR.
3351 The other arguments and return value are the same as for
3352 expand_builtin_mempcpy. */
3355 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3356 rtx target
, machine_mode mode
, int endp
,
3359 tree fndecl
= get_callee_fndecl (orig_exp
);
3361 /* If return value is ignored, transform mempcpy into memcpy. */
3362 if (target
== const0_rtx
3363 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3364 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3366 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3367 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3369 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3371 else if (target
== const0_rtx
3372 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3374 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3375 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3377 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3381 const char *src_str
;
3382 unsigned int src_align
= get_pointer_alignment (src
);
3383 unsigned int dest_align
= get_pointer_alignment (dest
);
3384 rtx dest_mem
, src_mem
, len_rtx
;
3386 /* If either SRC or DEST is not a pointer type, don't do this
3387 operation in-line. */
3388 if (dest_align
== 0 || src_align
== 0)
3391 /* If LEN is not constant, call the normal function. */
3392 if (! tree_fits_uhwi_p (len
))
3395 len_rtx
= expand_normal (len
);
3396 src_str
= c_getstr (src
);
3398 /* If SRC is a string constant and block move would be done
3399 by pieces, we can avoid loading the string from memory
3400 and only stored the computed constants. */
3402 && CONST_INT_P (len_rtx
)
3403 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3404 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3405 CONST_CAST (char *, src_str
),
3408 dest_mem
= get_memory_rtx (dest
, len
);
3409 set_mem_align (dest_mem
, dest_align
);
3410 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3411 builtin_memcpy_read_str
,
3412 CONST_CAST (char *, src_str
),
3413 dest_align
, false, endp
);
3414 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3415 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3419 if (CONST_INT_P (len_rtx
)
3420 && can_move_by_pieces (INTVAL (len_rtx
),
3421 MIN (dest_align
, src_align
)))
3423 dest_mem
= get_memory_rtx (dest
, len
);
3424 set_mem_align (dest_mem
, dest_align
);
3425 src_mem
= get_memory_rtx (src
, len
);
3426 set_mem_align (src_mem
, src_align
);
3427 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3428 MIN (dest_align
, src_align
), endp
);
3429 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3430 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3438 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3439 we failed, the caller should emit a normal call, otherwise try to
3440 get the result in TARGET, if convenient. If ENDP is 0 return the
3441 destination pointer, if ENDP is 1 return the end pointer ala
3442 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3446 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3448 struct expand_operand ops
[3];
3452 if (!targetm
.have_movstr ())
3455 dest_mem
= get_memory_rtx (dest
, NULL
);
3456 src_mem
= get_memory_rtx (src
, NULL
);
3459 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3460 dest_mem
= replace_equiv_address (dest_mem
, target
);
3463 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3464 create_fixed_operand (&ops
[1], dest_mem
);
3465 create_fixed_operand (&ops
[2], src_mem
);
3466 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3469 if (endp
&& target
!= const0_rtx
)
3471 target
= ops
[0].value
;
3472 /* movstr is supposed to set end to the address of the NUL
3473 terminator. If the caller requested a mempcpy-like return value,
3477 rtx tem
= plus_constant (GET_MODE (target
),
3478 gen_lowpart (GET_MODE (target
), target
), 1);
3479 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3485 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3486 NULL_RTX if we failed the caller should emit a normal call, otherwise
3487 try to get the result in TARGET, if convenient (and in mode MODE if that's
3491 expand_builtin_strcpy (tree exp
, rtx target
)
3493 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3495 tree dest
= CALL_EXPR_ARG (exp
, 0);
3496 tree src
= CALL_EXPR_ARG (exp
, 1);
3497 return expand_builtin_strcpy_args (dest
, src
, target
);
3502 /* Helper function to do the actual work for expand_builtin_strcpy. The
3503 arguments to the builtin_strcpy call DEST and SRC are broken out
3504 so that this can also be called without constructing an actual CALL_EXPR.
3505 The other arguments and return value are the same as for
3506 expand_builtin_strcpy. */
3509 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3511 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3514 /* Expand a call EXP to the stpcpy builtin.
3515 Return NULL_RTX if we failed the caller should emit a normal call,
3516 otherwise try to get the result in TARGET, if convenient (and in
3517 mode MODE if that's convenient). */
3520 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3523 location_t loc
= EXPR_LOCATION (exp
);
3525 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3528 dst
= CALL_EXPR_ARG (exp
, 0);
3529 src
= CALL_EXPR_ARG (exp
, 1);
3531 /* If return value is ignored, transform stpcpy into strcpy. */
3532 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3534 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3535 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3536 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3543 /* Ensure we get an actual string whose length can be evaluated at
3544 compile-time, not an expression containing a string. This is
3545 because the latter will potentially produce pessimized code
3546 when used to produce the return value. */
3547 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3548 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3550 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3551 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3552 target
, mode
, /*endp=*/2,
3558 if (TREE_CODE (len
) == INTEGER_CST
)
3560 rtx len_rtx
= expand_normal (len
);
3562 if (CONST_INT_P (len_rtx
))
3564 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3570 if (mode
!= VOIDmode
)
3571 target
= gen_reg_rtx (mode
);
3573 target
= gen_reg_rtx (GET_MODE (ret
));
3575 if (GET_MODE (target
) != GET_MODE (ret
))
3576 ret
= gen_lowpart (GET_MODE (target
), ret
);
3578 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3579 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3587 return expand_movstr (dst
, src
, target
, /*endp=*/2);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
3607 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3608 NULL_RTX if we failed the caller should emit a normal call. */
3611 expand_builtin_strncpy (tree exp
, rtx target
)
3613 location_t loc
= EXPR_LOCATION (exp
);
3615 if (validate_arglist (exp
,
3616 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3618 tree dest
= CALL_EXPR_ARG (exp
, 0);
3619 tree src
= CALL_EXPR_ARG (exp
, 1);
3620 tree len
= CALL_EXPR_ARG (exp
, 2);
3621 tree slen
= c_strlen (src
, 1);
3623 /* We must be passed a constant len and src parameter. */
3624 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3627 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3629 /* We're required to pad with trailing zeros if the requested
3630 len is greater than strlen(s2)+1. In that case try to
3631 use store_by_pieces, if it fails, punt. */
3632 if (tree_int_cst_lt (slen
, len
))
3634 unsigned int dest_align
= get_pointer_alignment (dest
);
3635 const char *p
= c_getstr (src
);
3638 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3639 || !can_store_by_pieces (tree_to_uhwi (len
),
3640 builtin_strncpy_read_str
,
3641 CONST_CAST (char *, p
),
3645 dest_mem
= get_memory_rtx (dest
, len
);
3646 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3647 builtin_strncpy_read_str
,
3648 CONST_CAST (char *, p
), dest_align
, false, 0);
3649 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3650 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
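/* Example (illustrative): for a 4-byte integer mode and a register DATA
   holding the runtime fill byte 0xAB, the generator above computes
   0xAB * 0x01010101 = 0xABABABAB in a register, which store_by_pieces can
   then store word by word; builtin_memset_read_str does the same job when
   the fill byte is a compile-time constant.  */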
3699 /* Expand expression EXP, which is a call to the memset builtin. Return
3700 NULL_RTX if we failed the caller should emit a normal call, otherwise
3701 try to get the result in TARGET, if convenient (and in mode MODE if that's
3705 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
3707 if (!validate_arglist (exp
,
3708 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3712 tree dest
= CALL_EXPR_ARG (exp
, 0);
3713 tree val
= CALL_EXPR_ARG (exp
, 1);
3714 tree len
= CALL_EXPR_ARG (exp
, 2);
3715 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3719 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3720 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3721 try to get the result in TARGET, if convenient (and in mode MODE if that's
3725 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3727 if (!validate_arglist (exp
,
3728 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3729 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3733 tree dest
= CALL_EXPR_ARG (exp
, 0);
3734 tree val
= CALL_EXPR_ARG (exp
, 2);
3735 tree len
= CALL_EXPR_ARG (exp
, 3);
3736 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3738 /* Return src bounds with the result. */
3741 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3742 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3743 res
= chkp_join_splitted_slot (res
, bnd
);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
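/* Informal example (not from the original sources): bzero (buf, 16) is
   expanded here exactly like memset (buf, 0, 16); only when inline
   expansion fails does the library call emitted via ORIG_EXP still
   target bzero, as the comment above explains.  */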
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
			  HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
			       TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (arg3_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
      set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
    }

  rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
					 TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  result = target;
  if (! (result != 0
	 && REG_P (result) && GET_MODE (result) == mode
	 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
    result = gen_reg_rtx (mode);

  emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			   TYPE_MODE (integer_type_node), 3,
			   XEXP (arg1_rtx, 0), Pmode,
			   XEXP (arg2_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
					    TYPE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));
  return result;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
				MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)
	{
	  tree len;
	  rtx arg3_rtx;

	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (len && !TREE_SIDE_EFFECTS (len))
	    {
	      arg3_rtx = expand_normal (len);
	      result = expand_cmpstrn_or_cmpmem
		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
		 arg3_rtx, MIN (arg1_align, arg2_align));
	    }
	}

      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant
	 lengths, use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
					 arg2_rtx, TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));
      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
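/* Informal example of the length computation above (not from the original
   sources): for strncmp ("hello", s, 100), c_strlen gives 5 for the first
   argument, so the cmpstrnsi length used is MIN (5 + 1, 100) == 6.  */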
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
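/* Informal example (not from the original sources): on ABIs whose va_list
   is a one-element array of a record, a va_list object passed on to
   another function decays to a pointer to that record; the unwrapping
   above lets both spellings match va_list_type_node.  */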
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
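/* Informal usage example (not from the original sources):
   __builtin_frame_address (0) yields the current function's frame address,
   while any nonzero COUNT, e.g. __builtin_return_address (1), walks up the
   stack and triggers the -Wframe-address warning above, since outer frames
   may not be safely reachable.  */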
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  bool valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
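/* Informal usage example (not from the original sources):
   __builtin_bswap32 (0x11223344) evaluates to 0x44332211; the expansion
   above simply applies bswap_optab in the builtin's own mode.  */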
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
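/* Informal usage example (not from the original sources): in

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_case ();

   the probability hint was already consumed by the tree-level branch
   prediction pass, so at this point the builtin simply expands to its
   first argument (ptr == NULL).  */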
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      gcc_assert (imode != BLKmode);
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
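/* Informal worked example (not from the original sources, assuming IEEE
   single precision with signbit_ro == 31 and a 32-bit result mode): the
   mask path above computes temp & 0x80000000, while the shift path
   computes (temp >> 31) & 1; in either case a nonzero result means the
   sign bit of the argument was set.  */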
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
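/* Informal example (not from the original sources): for the FOO_4 member
   of a group, FCODE_DIFF is 2, so the size is BITS_PER_UNIT << 2 == 32
   bits, which yields the 4-byte integer mode (SImode on typical
   targets).  */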
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
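/* Informal note on the GCC 4.4 semantic change mentioned above (not from
   the original sources): since 4.4, __sync_fetch_and_nand (ptr, val)
   performs *ptr = ~(*ptr & val) (a true NAND) and returns the old value,
   rather than the earlier *ptr = ~*ptr & val behaviour.  */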
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
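/* Informal example (not from the original sources): a call such as
   __atomic_load_n (p, __ATOMIC_CONSUME) reaches this point with
   MEMMODEL_CONSUME and, because of the workaround above, is expanded as
   if __ATOMIC_ACQUIRE had been requested.  */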
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
	       "failure memory model cannot be stronger than success memory "
	       "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid failure memory model for "
	       "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
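/* Informal example of the correction above (not from the original
   sources): if __atomic_add_fetch has to fall back to the external
   __atomic_fetch_add call, the library returns the pre-operation value,
   so the caller-visible result is fixed up as ret + val; for the NAND
   case it is ~(ret & val).  */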
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = val & -val;
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
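/* Informal example (not from the original sources):
   __atomic_always_lock_free (sizeof (int), 0) folds to true here whenever
   a never-failing compare-and-swap pattern exists for the 4-byte integer
   mode and the assumed alignment is sufficient.  */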
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
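/* Illustrative sketch (not part of GCC): the user-visible difference between
   the two queries handled above.  __atomic_always_lock_free must fold to a
   compile-time constant, while __atomic_is_lock_free may fall back to a
   runtime call into libatomic when the answer is not known here.  The struct
   below is hypothetical user code.

     struct pair { long a, b; };

     int known_now = __atomic_always_lock_free (sizeof (long), 0);

     int
     maybe_runtime (struct pair *p)
     {
       return __atomic_is_lock_free (sizeof *p, p);
     }
*/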
/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || !REG_P (target)
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
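/* Illustrative sketch (not part of GCC): a user-level use of the expander
   above.  On targets that provide the optab this compiles to a direct read
   of the thread-pointer register; otherwise the error path above is taken
   at expansion time.

     void *
     current_tcb (void)
     {
       return __builtin_thread_pointer ();
     }
*/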
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure, that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);
  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (!volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));
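/* Illustrative sketch (not part of GCC): the effect of the pure/const
   short-circuit above.  Because strlen is a pure builtin, a call whose
   result is ignored only has its argument evaluated for side effects;
   the hypothetical function below therefore expands to just the call to
   next_string, with no strlen call emitted.

     extern char *next_string (void);

     void
     discard_length (void)
     {
       (void) __builtin_strlen (next_string ());
     }
*/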
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
        return target;
      break;
5969 /* Just do a normal library call if we were unable to fold
5971 CASE_FLT_FN (BUILT_IN_CABS
):
5974 CASE_FLT_FN (BUILT_IN_EXP
):
5975 CASE_FLT_FN (BUILT_IN_EXP10
):
5976 CASE_FLT_FN (BUILT_IN_POW10
):
5977 CASE_FLT_FN (BUILT_IN_EXP2
):
5978 CASE_FLT_FN (BUILT_IN_EXPM1
):
5979 CASE_FLT_FN (BUILT_IN_LOGB
):
5980 CASE_FLT_FN (BUILT_IN_LOG
):
5981 CASE_FLT_FN (BUILT_IN_LOG10
):
5982 CASE_FLT_FN (BUILT_IN_LOG2
):
5983 CASE_FLT_FN (BUILT_IN_LOG1P
):
5984 CASE_FLT_FN (BUILT_IN_TAN
):
5985 CASE_FLT_FN (BUILT_IN_ASIN
):
5986 CASE_FLT_FN (BUILT_IN_ACOS
):
5987 CASE_FLT_FN (BUILT_IN_ATAN
):
5988 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5989 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5990 because of possible accuracy problems. */
5991 if (! flag_unsafe_math_optimizations
)
5993 CASE_FLT_FN (BUILT_IN_SQRT
):
5994 CASE_FLT_FN (BUILT_IN_FLOOR
):
5995 CASE_FLT_FN (BUILT_IN_CEIL
):
5996 CASE_FLT_FN (BUILT_IN_TRUNC
):
5997 CASE_FLT_FN (BUILT_IN_ROUND
):
5998 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5999 CASE_FLT_FN (BUILT_IN_RINT
):
6000 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6005 CASE_FLT_FN (BUILT_IN_FMA
):
6006 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
6011 CASE_FLT_FN (BUILT_IN_ILOGB
):
6012 if (! flag_unsafe_math_optimizations
)
6014 CASE_FLT_FN (BUILT_IN_ISINF
):
6015 CASE_FLT_FN (BUILT_IN_FINITE
):
6016 case BUILT_IN_ISFINITE
:
6017 case BUILT_IN_ISNORMAL
:
6018 target
= expand_builtin_interclass_mathfn (exp
, target
);
6023 CASE_FLT_FN (BUILT_IN_ICEIL
):
6024 CASE_FLT_FN (BUILT_IN_LCEIL
):
6025 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6026 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6027 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6028 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6029 target
= expand_builtin_int_roundingfn (exp
, target
);
6034 CASE_FLT_FN (BUILT_IN_IRINT
):
6035 CASE_FLT_FN (BUILT_IN_LRINT
):
6036 CASE_FLT_FN (BUILT_IN_LLRINT
):
6037 CASE_FLT_FN (BUILT_IN_IROUND
):
6038 CASE_FLT_FN (BUILT_IN_LROUND
):
6039 CASE_FLT_FN (BUILT_IN_LLROUND
):
6040 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6045 CASE_FLT_FN (BUILT_IN_POWI
):
6046 target
= expand_builtin_powi (exp
, target
);
6051 CASE_FLT_FN (BUILT_IN_ATAN2
):
6052 CASE_FLT_FN (BUILT_IN_LDEXP
):
6053 CASE_FLT_FN (BUILT_IN_SCALB
):
6054 CASE_FLT_FN (BUILT_IN_SCALBN
):
6055 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6056 if (! flag_unsafe_math_optimizations
)
6059 CASE_FLT_FN (BUILT_IN_FMOD
):
6060 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6061 CASE_FLT_FN (BUILT_IN_DREM
):
6062 CASE_FLT_FN (BUILT_IN_POW
):
6063 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6068 CASE_FLT_FN (BUILT_IN_CEXPI
):
6069 target
= expand_builtin_cexpi (exp
, target
);
6070 gcc_assert (target
);
6073 CASE_FLT_FN (BUILT_IN_SIN
):
6074 CASE_FLT_FN (BUILT_IN_COS
):
6075 if (! flag_unsafe_math_optimizations
)
6077 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6082 CASE_FLT_FN (BUILT_IN_SINCOS
):
6083 if (! flag_unsafe_math_optimizations
)
6085 target
= expand_builtin_sincos (exp
);
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
                             POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
          && !validate_arglist (exp, REFERENCE_TYPE,
                                POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        return const0_rtx;
      else
        {
          rtx ops[3];

          ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
          ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
          ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();
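      /* Illustrative sketch (not part of GCC): how the three builtins above
         fit together in user code.  The wrapper below forwards whatever
         arguments it received to another function and returns that
         function's return value; the argument-size constant 64 is an
         arbitrary, hypothetical upper bound, and the names are invented.

           extern void worker ();

           void
           forward_to_worker ()
           {
             void *args = __builtin_apply_args ();
             void *result = __builtin_apply ((void (*) ()) worker, args, 64);
             __builtin_return (result);
           }
      */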
6131 case BUILT_IN_VA_ARG_PACK
:
6132 /* All valid uses of __builtin_va_arg_pack () are removed during
6134 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6137 case BUILT_IN_VA_ARG_PACK_LEN
:
6138 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6140 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6143 /* Return the address of the first anonymous stack arg. */
6144 case BUILT_IN_NEXT_ARG
:
6145 if (fold_builtin_next_arg (exp
, false))
6147 return expand_builtin_next_arg ();
6149 case BUILT_IN_CLEAR_CACHE
:
6150 target
= expand_builtin___clear_cache (exp
);
6155 case BUILT_IN_CLASSIFY_TYPE
:
6156 return expand_builtin_classify_type (exp
);
6158 case BUILT_IN_CONSTANT_P
:
6161 case BUILT_IN_FRAME_ADDRESS
:
6162 case BUILT_IN_RETURN_ADDRESS
:
6163 return expand_builtin_frame_address (fndecl
, exp
);
6165 /* Returns the address of the area where the structure is returned.
6167 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6168 if (call_expr_nargs (exp
) != 0
6169 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6170 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6173 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6175 case BUILT_IN_ALLOCA
:
6176 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6177 /* If the allocation stems from the declaration of a variable-sized
6178 object, it cannot accumulate. */
6179 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6184 case BUILT_IN_STACK_SAVE
:
6185 return expand_stack_save ();
6187 case BUILT_IN_STACK_RESTORE
:
6188 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6191 case BUILT_IN_BSWAP16
:
6192 case BUILT_IN_BSWAP32
:
6193 case BUILT_IN_BSWAP64
:
6194 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6199 CASE_INT_FN (BUILT_IN_FFS
):
6200 target
= expand_builtin_unop (target_mode
, exp
, target
,
6201 subtarget
, ffs_optab
);
6206 CASE_INT_FN (BUILT_IN_CLZ
):
6207 target
= expand_builtin_unop (target_mode
, exp
, target
,
6208 subtarget
, clz_optab
);
6213 CASE_INT_FN (BUILT_IN_CTZ
):
6214 target
= expand_builtin_unop (target_mode
, exp
, target
,
6215 subtarget
, ctz_optab
);
6220 CASE_INT_FN (BUILT_IN_CLRSB
):
6221 target
= expand_builtin_unop (target_mode
, exp
, target
,
6222 subtarget
, clrsb_optab
);
6227 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6228 target
= expand_builtin_unop (target_mode
, exp
, target
,
6229 subtarget
, popcount_optab
);
6234 CASE_INT_FN (BUILT_IN_PARITY
):
6235 target
= expand_builtin_unop (target_mode
, exp
, target
,
6236 subtarget
, parity_optab
);
6241 case BUILT_IN_STRLEN
:
6242 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6247 case BUILT_IN_STRCPY
:
6248 target
= expand_builtin_strcpy (exp
, target
);
6253 case BUILT_IN_STRNCPY
:
6254 target
= expand_builtin_strncpy (exp
, target
);
6259 case BUILT_IN_STPCPY
:
6260 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6265 case BUILT_IN_MEMCPY
:
6266 target
= expand_builtin_memcpy (exp
, target
);
6271 case BUILT_IN_MEMPCPY
:
6272 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6277 case BUILT_IN_MEMSET
:
6278 target
= expand_builtin_memset (exp
, target
, mode
);
6283 case BUILT_IN_BZERO
:
6284 target
= expand_builtin_bzero (exp
);
6289 case BUILT_IN_STRCMP
:
6290 target
= expand_builtin_strcmp (exp
, target
);
6295 case BUILT_IN_STRNCMP
:
6296 target
= expand_builtin_strncmp (exp
, target
, mode
);
6302 case BUILT_IN_MEMCMP
:
6303 target
= expand_builtin_memcmp (exp
, target
);
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
         and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
          rtx_insn *label_r = label_rtx (label);

          /* This is copied from the handling of non-local gotos.  */
          expand_builtin_setjmp_setup (buf_addr, label_r);
          nonlocal_goto_handler_labels
            = gen_rtx_INSN_LIST (VOIDmode, label_r,
                                 nonlocal_goto_handler_labels);
          /* ??? Do not let expand_label treat us as such since we would
             not want to be both on the list of non-local labels and on
             the list of forced labels.  */
          FORCED_LABEL (label) = 0;
          return const0_rtx;
        }
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
          rtx_insn *label_r = label_rtx (label);

          expand_builtin_setjmp_receiver (label_r);
          return const0_rtx;
        }
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

          if (value != const1_rtx)
            {
              error ("%<__builtin_longjmp%> second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }
      break;
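      /* Illustrative sketch (not part of GCC): the user-level pairing that
         the cases above implement.  __builtin_setjmp takes a five-word
         buffer (its size is fixed, unlike jmp_buf), and __builtin_longjmp
         may only pass the value 1, which is exactly the constraint that the
         error check above enforces.  The names are hypothetical.

           static void *env[5];

           int
           run_protected (void (*fn) (void))
           {
             if (__builtin_setjmp (env))
               return 1;
             fn ();
             return 0;
           }
      */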
6368 case BUILT_IN_NONLOCAL_GOTO
:
6369 target
= expand_builtin_nonlocal_goto (exp
);
6374 /* This updates the setjmp buffer that is its argument with the value
6375 of the current stack pointer. */
6376 case BUILT_IN_UPDATE_SETJMP_BUF
:
6377 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6380 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6382 expand_builtin_update_setjmp_buf (buf_addr
);
6388 expand_builtin_trap ();
6391 case BUILT_IN_UNREACHABLE
:
6392 expand_builtin_unreachable ();
6395 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6396 case BUILT_IN_SIGNBITD32
:
6397 case BUILT_IN_SIGNBITD64
:
6398 case BUILT_IN_SIGNBITD128
:
6399 target
= expand_builtin_signbit (exp
, target
);
6404 /* Various hooks for the DWARF 2 __throw routine. */
6405 case BUILT_IN_UNWIND_INIT
:
6406 expand_builtin_unwind_init ();
6408 case BUILT_IN_DWARF_CFA
:
6409 return virtual_cfa_rtx
;
6410 #ifdef DWARF2_UNWIND_INFO
6411 case BUILT_IN_DWARF_SP_COLUMN
:
6412 return expand_builtin_dwarf_sp_column ();
6413 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6414 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6417 case BUILT_IN_FROB_RETURN_ADDR
:
6418 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6419 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6420 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6421 case BUILT_IN_EH_RETURN
:
6422 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6423 CALL_EXPR_ARG (exp
, 1));
6425 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6426 return expand_builtin_eh_return_data_regno (exp
);
6427 case BUILT_IN_EXTEND_POINTER
:
6428 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6429 case BUILT_IN_EH_POINTER
:
6430 return expand_builtin_eh_pointer (exp
);
6431 case BUILT_IN_EH_FILTER
:
6432 return expand_builtin_eh_filter (exp
);
6433 case BUILT_IN_EH_COPY_VALUES
:
6434 return expand_builtin_eh_copy_values (exp
);
6436 case BUILT_IN_VA_START
:
6437 return expand_builtin_va_start (exp
);
6438 case BUILT_IN_VA_END
:
6439 return expand_builtin_va_end (exp
);
6440 case BUILT_IN_VA_COPY
:
6441 return expand_builtin_va_copy (exp
);
6442 case BUILT_IN_EXPECT
:
6443 return expand_builtin_expect (exp
, target
);
6444 case BUILT_IN_ASSUME_ALIGNED
:
6445 return expand_builtin_assume_aligned (exp
, target
);
6446 case BUILT_IN_PREFETCH
:
6447 expand_builtin_prefetch (exp
);
6450 case BUILT_IN_INIT_TRAMPOLINE
:
6451 return expand_builtin_init_trampoline (exp
, true);
6452 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6453 return expand_builtin_init_trampoline (exp
, false);
6454 case BUILT_IN_ADJUST_TRAMPOLINE
:
6455 return expand_builtin_adjust_trampoline (exp
);
6458 case BUILT_IN_EXECL
:
6459 case BUILT_IN_EXECV
:
6460 case BUILT_IN_EXECLP
:
6461 case BUILT_IN_EXECLE
:
6462 case BUILT_IN_EXECVP
:
6463 case BUILT_IN_EXECVE
:
6464 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6469 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6470 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6471 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6474 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6475 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6480 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6481 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6482 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6485 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6486 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6491 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6492 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6493 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6494 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6496 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6497 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6502 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6503 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6504 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6505 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6507 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6508 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6513 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6514 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6515 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6518 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6519 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6524 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6525 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6526 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6529 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6530 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6535 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6536 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6537 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6540 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6541 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6546 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6547 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6548 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6551 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6552 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6557 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6558 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6559 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6560 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6562 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6563 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6568 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6569 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6570 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6571 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6573 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6574 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6579 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6580 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6581 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6584 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6585 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6590 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6591 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6592 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6595 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6596 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6601 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6602 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6606 if (mode
== VOIDmode
)
6607 mode
= TYPE_MODE (boolean_type_node
);
6608 if (!target
|| !register_operand (target
, mode
))
6609 target
= gen_reg_rtx (mode
);
6611 mode
= get_builtin_sync_mode
6612 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6613 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6618 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6619 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6623 mode
= get_builtin_sync_mode
6624 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6625 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6630 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6631 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6632 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6635 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6636 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6641 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6642 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6643 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6644 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6646 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6647 expand_builtin_sync_lock_release (mode
, exp
);
6650 case BUILT_IN_SYNC_SYNCHRONIZE
:
6651 expand_builtin_sync_synchronize ();
6654 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6655 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6656 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6657 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6658 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6659 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6660 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
        unsigned int nargs, z;
        vec<tree, va_gc> *vec;

        mode =
          get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
        target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
          return target;

        /* If this is turned into an external library call, the weak parameter
           must be dropped to match the expected parameter list.  */
        nargs = call_expr_nargs (exp);
        vec_alloc (vec, nargs - 1);
        for (z = 0; z < 3; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        /* Skip the boolean weak parameter.  */
        for (z = 4; z < 6; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
        break;
      }
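      /* Illustrative sketch (not part of GCC): the six-argument call this
         case receives.  The fourth argument (index 3) is the "weak" flag;
         when the expansion falls back to an external
         __atomic_compare_exchange_N library routine, that flag is the
         parameter dropped by the loops above.  The names are hypothetical.

           int shared;

           int
           try_update (int expected, int desired)
           {
             return __atomic_compare_exchange_n (&shared, &expected, desired,
                                                 0, __ATOMIC_SEQ_CST,
                                                 __ATOMIC_RELAXED);
           }
      */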
6693 case BUILT_IN_ATOMIC_LOAD_1
:
6694 case BUILT_IN_ATOMIC_LOAD_2
:
6695 case BUILT_IN_ATOMIC_LOAD_4
:
6696 case BUILT_IN_ATOMIC_LOAD_8
:
6697 case BUILT_IN_ATOMIC_LOAD_16
:
6698 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6699 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6704 case BUILT_IN_ATOMIC_STORE_1
:
6705 case BUILT_IN_ATOMIC_STORE_2
:
6706 case BUILT_IN_ATOMIC_STORE_4
:
6707 case BUILT_IN_ATOMIC_STORE_8
:
6708 case BUILT_IN_ATOMIC_STORE_16
:
6709 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6710 target
= expand_builtin_atomic_store (mode
, exp
);
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
                                       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
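      /* Illustrative sketch (not part of GCC): why the case above computes
         LIB.  __atomic_add_fetch returns the updated value, but a target (or
         libatomic) may only provide the fetch-then-add form; in that
         situation the expansion uses the corresponding __atomic_fetch_add
         entry point and re-applies the addition to its result, making the
         two hypothetical user-level functions below equivalent.

           int counter;

           int
           bump (void)
           {
             return __atomic_add_fetch (&counter, 1, __ATOMIC_SEQ_CST);
           }

           int
           bump_alt (void)
           {
             return __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST) + 1;
           }
      */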
6731 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6732 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6733 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6734 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6737 enum built_in_function lib
;
6738 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6739 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6740 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6741 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6747 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6748 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6749 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6750 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6751 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6753 enum built_in_function lib
;
6754 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6755 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6756 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6757 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6763 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6764 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6765 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6766 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6769 enum built_in_function lib
;
6770 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6771 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6772 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6773 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6779 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6780 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6781 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6782 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6785 enum built_in_function lib
;
6786 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6787 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6788 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6789 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6795 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6796 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6797 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6798 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6799 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6801 enum built_in_function lib
;
6802 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6803 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6804 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6805 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6811 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6812 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6813 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6814 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6816 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6817 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6818 ignore
, BUILT_IN_NONE
);
6823 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6824 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6825 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6826 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6828 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6829 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6830 ignore
, BUILT_IN_NONE
);
6835 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6836 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6837 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6838 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6839 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6840 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6841 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6842 ignore
, BUILT_IN_NONE
);
6847 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6848 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6849 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6850 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6852 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6853 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6854 ignore
, BUILT_IN_NONE
);
6859 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6860 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6861 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6862 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6864 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6865 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6866 ignore
, BUILT_IN_NONE
);
6871 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6872 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6873 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6874 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6875 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6876 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6877 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6878 ignore
, BUILT_IN_NONE
);
6883 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6884 return expand_builtin_atomic_test_and_set (exp
, target
);
6886 case BUILT_IN_ATOMIC_CLEAR
:
6887 return expand_builtin_atomic_clear (exp
);
6889 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6890 return expand_builtin_atomic_always_lock_free (exp
);
6892 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6893 target
= expand_builtin_atomic_is_lock_free (exp
);
6898 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6899 expand_builtin_atomic_thread_fence (exp
);
6902 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6903 expand_builtin_atomic_signal_fence (exp
);
6906 case BUILT_IN_OBJECT_SIZE
:
6907 return expand_builtin_object_size (exp
);
6909 case BUILT_IN_MEMCPY_CHK
:
6910 case BUILT_IN_MEMPCPY_CHK
:
6911 case BUILT_IN_MEMMOVE_CHK
:
6912 case BUILT_IN_MEMSET_CHK
:
6913 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6918 case BUILT_IN_STRCPY_CHK
:
6919 case BUILT_IN_STPCPY_CHK
:
6920 case BUILT_IN_STRNCPY_CHK
:
6921 case BUILT_IN_STPNCPY_CHK
:
6922 case BUILT_IN_STRCAT_CHK
:
6923 case BUILT_IN_STRNCAT_CHK
:
6924 case BUILT_IN_SNPRINTF_CHK
:
6925 case BUILT_IN_VSNPRINTF_CHK
:
6926 maybe_emit_chk_warning (exp
, fcode
);
6929 case BUILT_IN_SPRINTF_CHK
:
6930 case BUILT_IN_VSPRINTF_CHK
:
6931 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6935 if (warn_free_nonheap_object
)
6936 maybe_emit_free_warning (exp
);
6939 case BUILT_IN_THREAD_POINTER
:
6940 return expand_builtin_thread_pointer (exp
, target
);
6942 case BUILT_IN_SET_THREAD_POINTER
:
6943 expand_builtin_set_thread_pointer (exp
);
6946 case BUILT_IN_CILK_DETACH
:
6947 expand_builtin_cilk_detach (exp
);
6950 case BUILT_IN_CILK_POP_FRAME
:
6951 expand_builtin_cilk_pop_frame (exp
);
    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
         Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
        {
          if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
            return expand_normal (CALL_EXPR_ARG (exp, 0));
          else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
            return expand_normal (size_zero_node);
          else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
            return expand_normal (size_int (-1));
          else
            return const0_rtx;
        }
      /* FALLTHROUGH */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
         Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
         folding.  */
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
                            rtx subtarget ATTRIBUTE_UNUSED,
                            machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
              && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
        return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
7060 /* Determine whether a tree node represents a call to a built-in
7061 function. If the tree T is a call to a built-in function with
7062 the right number of arguments of the appropriate types, return
7063 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7064 Otherwise the return value is END_BUILTINS. */
7066 enum built_in_function
7067 builtin_mathfn_code (const_tree t
)
7069 const_tree fndecl
, arg
, parmlist
;
7070 const_tree argtype
, parmtype
;
7071 const_call_expr_arg_iterator iter
;
7073 if (TREE_CODE (t
) != CALL_EXPR
7074 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7075 return END_BUILTINS
;
7077 fndecl
= get_callee_fndecl (t
);
7078 if (fndecl
== NULL_TREE
7079 || TREE_CODE (fndecl
) != FUNCTION_DECL
7080 || ! DECL_BUILT_IN (fndecl
)
7081 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7082 return END_BUILTINS
;
7084 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7085 init_const_call_expr_arg_iterator (t
, &iter
);
7086 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7088 /* If a function doesn't take a variable number of arguments,
7089 the last element in the list will have type `void'. */
7090 parmtype
= TREE_VALUE (parmlist
);
7091 if (VOID_TYPE_P (parmtype
))
7093 if (more_const_call_expr_args_p (&iter
))
7094 return END_BUILTINS
;
7095 return DECL_FUNCTION_CODE (fndecl
);
7098 if (! more_const_call_expr_args_p (&iter
))
7099 return END_BUILTINS
;
7101 arg
= next_const_call_expr_arg (&iter
);
7102 argtype
= TREE_TYPE (arg
);
7104 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7106 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7107 return END_BUILTINS
;
7109 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7111 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7112 return END_BUILTINS
;
7114 else if (POINTER_TYPE_P (parmtype
))
7116 if (! POINTER_TYPE_P (argtype
))
7117 return END_BUILTINS
;
7119 else if (INTEGRAL_TYPE_P (parmtype
))
7121 if (! INTEGRAL_TYPE_P (argtype
))
7122 return END_BUILTINS
;
7125 return END_BUILTINS
;
7128 /* Variable-length argument list. */
7129 return DECL_FUNCTION_CODE (fndecl
);
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
          && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
          || (TREE_CODE (op) == ARRAY_REF
              && integer_zerop (TREE_OPERAND (op, 1))
              && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
        return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
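/* Illustrative sketch (not part of GCC): folds that follow directly from the
   rules above.  In the hypothetical function below, a and b fold to 1 (a
   literal constant and a string literal), while c folds to 0 because its
   operand has side effects.

     static int
     constant_p_examples (int n)
     {
       int a = __builtin_constant_p (42);
       int b = __builtin_constant_p ("abc");
       int c = __builtin_constant_p (n++);
       return a + b + c;
     }
*/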
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
                                tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
                                   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
         && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
          && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
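/* Illustrative sketch (not part of GCC): the short-circuit distribution
   performed above.  Conceptually,

     __builtin_expect (a && b, 1)

   is rewritten so the hint reaches both operands, roughly as

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   which is what build_builtin_expect_predicate constructs for each operand
   of the TRUTH_ANDIF_EXPR.  */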
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
        return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
7343 /* Return true if the floating point expression T has an integer value.
7344 We also allow +Inf, -Inf and NaN to be considered integer values. */
7347 integer_valued_real_p (tree t
)
7349 switch (TREE_CODE (t
))
7356 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7361 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7368 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7369 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7372 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7373 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7376 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7380 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7381 if (TREE_CODE (type
) == INTEGER_TYPE
)
7383 if (TREE_CODE (type
) == REAL_TYPE
)
7384 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7389 switch (builtin_mathfn_code (t
))
7391 CASE_FLT_FN (BUILT_IN_CEIL
):
7392 CASE_FLT_FN (BUILT_IN_FLOOR
):
7393 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7394 CASE_FLT_FN (BUILT_IN_RINT
):
7395 CASE_FLT_FN (BUILT_IN_ROUND
):
7396 CASE_FLT_FN (BUILT_IN_TRUNC
):
7399 CASE_FLT_FN (BUILT_IN_FMIN
):
7400 CASE_FLT_FN (BUILT_IN_FMAX
):
7401 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7402 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return fold_convert_loc (loc, ftype,
                                 build_call_expr_loc (loc, decl, 1,
                                                      fold_convert_loc (loc,
                                                                        newtype,
                                                                        arg0)));
    }
  return NULL_TREE;
}
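/* Illustrative sketch (not part of GCC): the transformation performed above.
   With optimization enabled, a call such as

     float f;
     double d = floor ((double) f);

   is narrowed to

     double d = (double) floorf (f);

   because the argument is only a promoted float, so the rounding can be done
   in float and the result widened afterwards without changing its value.  */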
7454 /* FNDECL is assumed to be builtin which can narrow the FP type of
7455 the argument, for instance lround((double)f) -> lroundf (f).
7456 Do the transformation for a call with argument ARG. */
7459 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7461 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7463 if (!validate_arg (arg
, REAL_TYPE
))
7466 /* If argument is already integer valued, and we don't need to worry
7467 about setting errno, there's no need to perform rounding. */
7468 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7469 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7470 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7474 tree ftype
= TREE_TYPE (arg
);
7475 tree arg0
= strip_float_extensions (arg
);
7476 tree newtype
= TREE_TYPE (arg0
);
7479 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7480 && (decl
= mathfn_built_in (newtype
, fcode
)))
7481 return build_call_expr_loc (loc
, decl
, 1,
7482 fold_convert_loc (loc
, newtype
, arg0
));
7485 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7486 sizeof (int) == sizeof (long). */
7487 if (TYPE_PRECISION (integer_type_node
)
7488 == TYPE_PRECISION (long_integer_type_node
))
7490 tree newfn
= NULL_TREE
;
7493 CASE_FLT_FN (BUILT_IN_ICEIL
):
7494 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7497 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7498 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7501 CASE_FLT_FN (BUILT_IN_IROUND
):
7502 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7505 CASE_FLT_FN (BUILT_IN_IRINT
):
7506 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7515 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7516 return fold_convert_loc (loc
,
7517 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7521 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7522 sizeof (long long) == sizeof (long). */
7523 if (TYPE_PRECISION (long_long_integer_type_node
)
7524 == TYPE_PRECISION (long_integer_type_node
))
7526 tree newfn
= NULL_TREE
;
7529 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7530 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7533 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7534 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7537 CASE_FLT_FN (BUILT_IN_LLROUND
):
7538 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7541 CASE_FLT_FN (BUILT_IN_LLRINT
):
7542 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7551 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7552 return fold_convert_loc (loc
,
7553 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7560 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7561 return type. Return NULL_TREE if no simplification can be made. */
7564 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7568 if (!validate_arg (arg
, COMPLEX_TYPE
)
7569 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7572 /* Calculate the result when the argument is a constant. */
7573 if (TREE_CODE (arg
) == COMPLEX_CST
7574 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7578 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7580 tree real
= TREE_OPERAND (arg
, 0);
7581 tree imag
= TREE_OPERAND (arg
, 1);
7583 /* If either part is zero, cabs is fabs of the other. */
7584 if (real_zerop (real
))
7585 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7586 if (real_zerop (imag
))
7587 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7589 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7590 if (flag_unsafe_math_optimizations
7591 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7594 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7595 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7596 build_real_truncate (type
, dconst_sqrt2 ()));
7600 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7601 if (TREE_CODE (arg
) == NEGATE_EXPR
7602 || TREE_CODE (arg
) == CONJ_EXPR
)
7603 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7605 /* Don't do this when optimizing for size. */
7606 if (flag_unsafe_math_optimizations
7607 && optimize
&& optimize_function_for_speed_p (cfun
))
7609 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7611 if (sqrtfn
!= NULL_TREE
)
7613 tree rpart
, ipart
, result
;
7615 arg
= builtin_save_expr (arg
);
7617 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7618 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7620 rpart
= builtin_save_expr (rpart
);
7621 ipart
= builtin_save_expr (ipart
);
7623 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7624 fold_build2_loc (loc
, MULT_EXPR
, type
,
7626 fold_build2_loc (loc
, MULT_EXPR
, type
,
7629 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7636 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7637 complex tree type of the result. If NEG is true, the imaginary
7638 zero is negative. */
7641 build_complex_cproj (tree type
, bool neg
)
7643 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7647 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7648 build_real (TREE_TYPE (type
), rzero
));
7651 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7652 return type. Return NULL_TREE if no simplification can be made. */
7655 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7657 if (!validate_arg (arg
, COMPLEX_TYPE
)
7658 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7661 /* If there are no infinities, return arg. */
7662 if (! HONOR_INFINITIES (type
))
7663 return non_lvalue_loc (loc
, arg
);
7665 /* Calculate the result when the argument is a constant. */
7666 if (TREE_CODE (arg
) == COMPLEX_CST
)
7668 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7669 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7671 if (real_isinf (real
) || real_isinf (imag
))
7672 return build_complex_cproj (type
, imag
->sign
);
7676 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7678 tree real
= TREE_OPERAND (arg
, 0);
7679 tree imag
= TREE_OPERAND (arg
, 1);
7684 /* If the real part is inf and the imag part is known to be
7685 nonnegative, return (inf + 0i). Remember side-effects are
7686 possible in the imag part. */
7687 if (TREE_CODE (real
) == REAL_CST
7688 && real_isinf (TREE_REAL_CST_PTR (real
))
7689 && tree_expr_nonnegative_p (imag
))
7690 return omit_one_operand_loc (loc
, type
,
7691 build_complex_cproj (type
, false),
7694 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7695 Remember side-effects are possible in the real part. */
7696 if (TREE_CODE (imag
) == REAL_CST
7697 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7699 omit_one_operand_loc (loc
, type
,
7700 build_complex_cproj (type
, TREE_REAL_CST_PTR
7701 (imag
)->sign
), arg
);
7707 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7708 TYPE is the type of the return value. Return NULL_TREE if no
7709 simplification can be made. */
7712 fold_builtin_cos (location_t loc
,
7713 tree arg
, tree type
, tree fndecl
)
7717 if (!validate_arg (arg
, REAL_TYPE
))
7720 /* Calculate the result when the argument is a constant. */
7721 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7724 /* Optimize cos(-x) into cos (x). */
7725 if ((narg
= fold_strip_sign_ops (arg
)))
7726 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7731 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7732 Return NULL_TREE if no simplification can be made. */
7735 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7737 if (validate_arg (arg
, REAL_TYPE
))
7741 /* Calculate the result when the argument is a constant. */
7742 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7745 /* Optimize cosh(-x) into cosh (x). */
7746 if ((narg
= fold_strip_sign_ops (arg
)))
7747 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7753 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7754 argument ARG. TYPE is the type of the return value. Return
7755 NULL_TREE if no simplification can be made. */
7758 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7761 if (validate_arg (arg
, COMPLEX_TYPE
)
7762 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7766 /* Calculate the result when the argument is a constant. */
7767 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7770 /* Optimize fn(-x) into fn(x). */
7771 if ((tmp
= fold_strip_sign_ops (arg
)))
7772 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
          || fcode == BUILT_IN_ATANF
          || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}

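/* Illustrative example of the unsafe-math fold above: with
   -funsafe-math-optimizations, a call such as tan (atan (x)) is
   replaced by x itself, while a constant argument such as tan (0.0)
   is evaluated at compile time via MPFR.  */
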
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         build1 (IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         build1 (REALPART_EXPR, type, call)));
}

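/* Rough source-level sketch of the canonicalization above: when libc
   provides the C99 complex functions, sincos (x, sinp, cosp) is
   rewritten roughly as

     __complex__ double t = __builtin_cexpi (x);
     *sinp = __imag__ t, *cosp = __real__ t;

   so that later passes only have to reason about cexpi.  */
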
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
        return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
        return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc, REALPART_EXPR,
                                                                rtype, icall)),
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc, IMAGPART_EXPR,
                                                                rtype, icall)));
    }

  return NULL_TREE;
}

/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_floor (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
        return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_ceil (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_round (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
        {
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
          tree ftype = TREE_TYPE (arg);
          REAL_VALUE_TYPE r;
          bool fail = false;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_IFLOOR):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
              real_floor (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_ICEIL):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
              real_ceil (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_IROUND):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_LLROUND):
              real_round (&r, TYPE_MODE (ftype), &x);
              break;

            default:
              gcc_unreachable ();
            }

          wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
          if (!fail)
            return wide_int_to_tree (itype, val);
        }
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
        return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                                TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}

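/* Examples of the two folds above (illustrative): lround (2.75) has a
   finite constant argument and becomes the integer constant 3, while
   lfloor (x) for a provably nonnegative x is narrowed to a plain
   FIX_TRUNC_EXPR, i.e. the equivalent of (long) x.  */
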
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_INT_FN (BUILT_IN_FFS):
          result = wi::ffs (arg);
          break;

        CASE_INT_FN (BUILT_IN_CLZ):
          if (wi::ne_p (arg, 0))
            result = wi::clz (arg);
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = TYPE_PRECISION (type);
          break;

        CASE_INT_FN (BUILT_IN_CTZ):
          if (wi::ne_p (arg, 0))
            result = wi::ctz (arg);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = TYPE_PRECISION (type);
          break;

        CASE_INT_FN (BUILT_IN_CLRSB):
          result = wi::clrsb (arg);
          break;

        CASE_INT_FN (BUILT_IN_POPCOUNT):
          result = wi::popcount (arg);
          break;

        CASE_INT_FN (BUILT_IN_PARITY):
          result = wi::parity (arg);
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}

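/* Constant-folding examples for the bit builtins above (illustrative):
   __builtin_popcount (0xff) becomes 8 and __builtin_parity (7) becomes
   1; __builtin_clz (0) folds either to the value the target supplies
   via CLZ_DEFINED_VALUE_AT_ZERO or to the precision of the type.  */
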
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_BSWAP16:
        case BUILT_IN_BSWAP32:
        case BUILT_IN_BSWAP64:
          {
            signop sgn = TYPE_SIGN (type);
            tree result =
              wide_int_to_tree (type,
                                wide_int::from (arg, TYPE_PRECISION (type),
                                                sgn).bswap ());
            return result;
          }
        default:
          gcc_unreachable ();
        }
    }

  return NULL_TREE;
}

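/* Illustrative constant fold for the byte-swap builtins above:
   __builtin_bswap16 (0x1234) becomes 0x3412, and likewise for the
   32- and 64-bit variants; non-constant arguments are left alone
   here.  */
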
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
                    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
                                narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build1_loc (loc, ABS_EXPR, type, arg0),
                            build_real_truncate (type, dconst_sqrt2 ()));

  return NULL_TREE;
}

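/* Illustrative examples of the hypot folds above: hypot (-x, fabs (y))
   is rewritten to hypot (x, y), hypot (x, 0.0) becomes fabs (x), and
   with -funsafe-math-optimizations hypot (x, x) becomes
   fabs (x) * sqrt (2).  */
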
8233 /* Fold a builtin function call to pow, powf, or powl. Return
8234 NULL_TREE if no simplification can be made. */
8236 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8240 if (!validate_arg (arg0
, REAL_TYPE
)
8241 || !validate_arg (arg1
, REAL_TYPE
))
8244 /* Calculate the result when the argument is a constant. */
8245 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8248 /* Optimize pow(1.0,y) = 1.0. */
8249 if (real_onep (arg0
))
8250 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8252 if (TREE_CODE (arg1
) == REAL_CST
8253 && !TREE_OVERFLOW (arg1
))
8255 REAL_VALUE_TYPE cint
;
8259 c
= TREE_REAL_CST (arg1
);
8261 /* Optimize pow(x,0.0) = 1.0. */
8262 if (real_equal (&c
, &dconst0
))
8263 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8266 /* Optimize pow(x,1.0) = x. */
8267 if (real_equal (&c
, &dconst1
))
8270 /* Optimize pow(x,-1.0) = 1.0/x. */
8271 if (real_equal (&c
, &dconstm1
))
8272 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8273 build_real (type
, dconst1
), arg0
);
8275 /* Optimize pow(x,0.5) = sqrt(x). */
8276 if (flag_unsafe_math_optimizations
8277 && real_equal (&c
, &dconsthalf
))
8279 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8281 if (sqrtfn
!= NULL_TREE
)
8282 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8285 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8286 if (flag_unsafe_math_optimizations
)
8288 const REAL_VALUE_TYPE dconstroot
8289 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8291 if (real_equal (&c
, &dconstroot
))
8293 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8294 if (cbrtfn
!= NULL_TREE
)
8295 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8299 /* Check for an integer exponent. */
8300 n
= real_to_integer (&c
);
8301 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
8302 if (real_identical (&c
, &cint
))
8304 /* Attempt to evaluate pow at compile-time, unless this should
8305 raise an exception. */
8306 if (TREE_CODE (arg0
) == REAL_CST
8307 && !TREE_OVERFLOW (arg0
)
8309 || (!flag_trapping_math
&& !flag_errno_math
)
8310 || !real_equal (&TREE_REAL_CST (arg0
), &dconst0
)))
8315 x
= TREE_REAL_CST (arg0
);
8316 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8317 if (flag_unsafe_math_optimizations
|| !inexact
)
8318 return build_real (type
, x
);
8321 /* Strip sign ops from even integer powers. */
8322 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8324 tree narg0
= fold_strip_sign_ops (arg0
);
8326 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8331 if (flag_unsafe_math_optimizations
)
8333 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8335 /* Optimize pow(expN(x),y) = expN(x*y). */
8336 if (BUILTIN_EXPONENT_P (fcode
))
8338 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8339 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8340 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8341 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8344 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8345 if (BUILTIN_SQRT_P (fcode
))
8347 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8348 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8349 build_real (type
, dconsthalf
));
8350 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8353 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8354 if (BUILTIN_CBRT_P (fcode
))
8356 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8357 if (tree_expr_nonnegative_p (arg
))
8359 tree c
= build_real_truncate (type
, dconst_third ());
8360 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
, c
);
8361 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8365 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8366 if (fcode
== BUILT_IN_POW
8367 || fcode
== BUILT_IN_POWF
8368 || fcode
== BUILT_IN_POWL
)
8370 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8371 if (tree_expr_nonnegative_p (arg00
))
8373 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8374 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8375 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8383 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8384 Return NULL_TREE if no simplification can be made. */
8386 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8387 tree arg0
, tree arg1
, tree type
)
8389 if (!validate_arg (arg0
, REAL_TYPE
)
8390 || !validate_arg (arg1
, INTEGER_TYPE
))
8393 /* Optimize pow(1.0,y) = 1.0. */
8394 if (real_onep (arg0
))
8395 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8397 if (tree_fits_shwi_p (arg1
))
8399 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8401 /* Evaluate powi at compile-time. */
8402 if (TREE_CODE (arg0
) == REAL_CST
8403 && !TREE_OVERFLOW (arg0
))
8406 x
= TREE_REAL_CST (arg0
);
8407 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8408 return build_real (type
, x
);
8411 /* Optimize pow(x,0) = 1.0. */
8413 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8416 /* Optimize pow(x,1) = x. */
8420 /* Optimize pow(x,-1) = 1.0/x. */
8422 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8423 build_real (type
, dconst1
), arg0
);
8429 /* A subroutine of fold_builtin to fold the various exponent
8430 functions. Return NULL_TREE if no simplification can be made.
8431 FUNC is the corresponding MPFR exponent function. */
8434 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8435 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8437 if (validate_arg (arg
, REAL_TYPE
))
8439 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8442 /* Calculate the result when the argument is a constant. */
8443 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8446 /* Optimize expN(logN(x)) = x. */
8447 if (flag_unsafe_math_optimizations
)
8449 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8451 if ((func
== mpfr_exp
8452 && (fcode
== BUILT_IN_LOG
8453 || fcode
== BUILT_IN_LOGF
8454 || fcode
== BUILT_IN_LOGL
))
8455 || (func
== mpfr_exp2
8456 && (fcode
== BUILT_IN_LOG2
8457 || fcode
== BUILT_IN_LOG2F
8458 || fcode
== BUILT_IN_LOG2L
))
8459 || (func
== mpfr_exp10
8460 && (fcode
== BUILT_IN_LOG10
8461 || fcode
== BUILT_IN_LOG10F
8462 || fcode
== BUILT_IN_LOG10L
)))
8463 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8470 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8471 arguments to the call, and TYPE is its return type.
8472 Return NULL_TREE if no simplification can be made. */
8475 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8477 if (!validate_arg (arg1
, POINTER_TYPE
)
8478 || !validate_arg (arg2
, INTEGER_TYPE
)
8479 || !validate_arg (len
, INTEGER_TYPE
))
8485 if (TREE_CODE (arg2
) != INTEGER_CST
8486 || !tree_fits_uhwi_p (len
))
8489 p1
= c_getstr (arg1
);
8490 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8496 if (target_char_cast (arg2
, &c
))
8499 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
8502 return build_int_cst (TREE_TYPE (arg1
), 0);
8504 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
8505 return fold_convert_loc (loc
, type
, tem
);
8511 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8512 Return NULL_TREE if no simplification can be made. */
8515 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8517 const char *p1
, *p2
;
8519 if (!validate_arg (arg1
, POINTER_TYPE
)
8520 || !validate_arg (arg2
, POINTER_TYPE
)
8521 || !validate_arg (len
, INTEGER_TYPE
))
8524 /* If the LEN parameter is zero, return zero. */
8525 if (integer_zerop (len
))
8526 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8529 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8530 if (operand_equal_p (arg1
, arg2
, 0))
8531 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8533 p1
= c_getstr (arg1
);
8534 p2
= c_getstr (arg2
);
8536 /* If all arguments are constant, and the value of len is not greater
8537 than the lengths of arg1 and arg2, evaluate at compile-time. */
8538 if (tree_fits_uhwi_p (len
) && p1
&& p2
8539 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8540 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8542 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
8545 return integer_one_node
;
8547 return integer_minus_one_node
;
8549 return integer_zero_node
;
8552 /* If len parameter is one, return an expression corresponding to
8553 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8554 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8556 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8557 tree cst_uchar_ptr_node
8558 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8561 = fold_convert_loc (loc
, integer_type_node
,
8562 build1 (INDIRECT_REF
, cst_uchar_node
,
8563 fold_convert_loc (loc
,
8567 = fold_convert_loc (loc
, integer_type_node
,
8568 build1 (INDIRECT_REF
, cst_uchar_node
,
8569 fold_convert_loc (loc
,
8572 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8578 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8579 Return NULL_TREE if no simplification can be made. */
8582 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8584 const char *p1
, *p2
;
8586 if (!validate_arg (arg1
, POINTER_TYPE
)
8587 || !validate_arg (arg2
, POINTER_TYPE
))
8590 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8591 if (operand_equal_p (arg1
, arg2
, 0))
8592 return integer_zero_node
;
8594 p1
= c_getstr (arg1
);
8595 p2
= c_getstr (arg2
);
8599 const int i
= strcmp (p1
, p2
);
8601 return integer_minus_one_node
;
8603 return integer_one_node
;
8605 return integer_zero_node
;
8608 /* If the second arg is "", return *(const unsigned char*)arg1. */
8609 if (p2
&& *p2
== '\0')
8611 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8612 tree cst_uchar_ptr_node
8613 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8615 return fold_convert_loc (loc
, integer_type_node
,
8616 build1 (INDIRECT_REF
, cst_uchar_node
,
8617 fold_convert_loc (loc
,
8622 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8623 if (p1
&& *p1
== '\0')
8625 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8626 tree cst_uchar_ptr_node
8627 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8630 = fold_convert_loc (loc
, integer_type_node
,
8631 build1 (INDIRECT_REF
, cst_uchar_node
,
8632 fold_convert_loc (loc
,
8635 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8641 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8642 Return NULL_TREE if no simplification can be made. */
8645 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8647 const char *p1
, *p2
;
8649 if (!validate_arg (arg1
, POINTER_TYPE
)
8650 || !validate_arg (arg2
, POINTER_TYPE
)
8651 || !validate_arg (len
, INTEGER_TYPE
))
8654 /* If the LEN parameter is zero, return zero. */
8655 if (integer_zerop (len
))
8656 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8659 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8660 if (operand_equal_p (arg1
, arg2
, 0))
8661 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8663 p1
= c_getstr (arg1
);
8664 p2
= c_getstr (arg2
);
8666 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
8668 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
8670 return integer_one_node
;
8672 return integer_minus_one_node
;
8674 return integer_zero_node
;
8677 /* If the second arg is "", and the length is greater than zero,
8678 return *(const unsigned char*)arg1. */
8679 if (p2
&& *p2
== '\0'
8680 && TREE_CODE (len
) == INTEGER_CST
8681 && tree_int_cst_sgn (len
) == 1)
8683 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8684 tree cst_uchar_ptr_node
8685 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8687 return fold_convert_loc (loc
, integer_type_node
,
8688 build1 (INDIRECT_REF
, cst_uchar_node
,
8689 fold_convert_loc (loc
,
8694 /* If the first arg is "", and the length is greater than zero,
8695 return -*(const unsigned char*)arg2. */
8696 if (p1
&& *p1
== '\0'
8697 && TREE_CODE (len
) == INTEGER_CST
8698 && tree_int_cst_sgn (len
) == 1)
8700 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8701 tree cst_uchar_ptr_node
8702 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8704 tree temp
= fold_convert_loc (loc
, integer_type_node
,
8705 build1 (INDIRECT_REF
, cst_uchar_node
,
8706 fold_convert_loc (loc
,
8709 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8712 /* If len parameter is one, return an expression corresponding to
8713 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8714 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8716 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8717 tree cst_uchar_ptr_node
8718 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8720 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
8721 build1 (INDIRECT_REF
, cst_uchar_node
,
8722 fold_convert_loc (loc
,
8725 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
8726 build1 (INDIRECT_REF
, cst_uchar_node
,
8727 fold_convert_loc (loc
,
8730 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8736 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8737 ARG. Return NULL_TREE if no simplification can be made. */
8740 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
8742 if (!validate_arg (arg
, REAL_TYPE
))
8745 /* If ARG is a compile-time constant, determine the result. */
8746 if (TREE_CODE (arg
) == REAL_CST
8747 && !TREE_OVERFLOW (arg
))
8751 c
= TREE_REAL_CST (arg
);
8752 return (REAL_VALUE_NEGATIVE (c
)
8753 ? build_one_cst (type
)
8754 : build_zero_cst (type
));
8757 /* If ARG is non-negative, the result is always zero. */
8758 if (tree_expr_nonnegative_p (arg
))
8759 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8761 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8762 if (!HONOR_SIGNED_ZEROS (arg
))
8763 return fold_convert (type
,
8764 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
8765 build_real (TREE_TYPE (arg
), dconst0
)));
8770 /* Fold function call to builtin copysign, copysignf or copysignl with
8771 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8775 fold_builtin_copysign (location_t loc
, tree fndecl
,
8776 tree arg1
, tree arg2
, tree type
)
8780 if (!validate_arg (arg1
, REAL_TYPE
)
8781 || !validate_arg (arg2
, REAL_TYPE
))
8784 /* copysign(X,X) is X. */
8785 if (operand_equal_p (arg1
, arg2
, 0))
8786 return fold_convert_loc (loc
, type
, arg1
);
8788 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8789 if (TREE_CODE (arg1
) == REAL_CST
8790 && TREE_CODE (arg2
) == REAL_CST
8791 && !TREE_OVERFLOW (arg1
)
8792 && !TREE_OVERFLOW (arg2
))
8794 REAL_VALUE_TYPE c1
, c2
;
8796 c1
= TREE_REAL_CST (arg1
);
8797 c2
= TREE_REAL_CST (arg2
);
8798 /* c1.sign := c2.sign. */
8799 real_copysign (&c1
, &c2
);
8800 return build_real (type
, c1
);
8803 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8804 Remember to evaluate Y for side-effects. */
8805 if (tree_expr_nonnegative_p (arg2
))
8806 return omit_one_operand_loc (loc
, type
,
8807 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
8810 /* Strip sign changing operations for the first argument. */
8811 tem
= fold_strip_sign_ops (arg1
);
8813 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
                         build_int_cst (integer_type_node,
                                        ~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
                              arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
                          build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
         However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
        = lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
        return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
                         build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
                              build_int_cst (unsigned_type_node, 9));
    }
}

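/* Example of the isdigit rewrite above (assuming an ASCII target
   character set, where '0' is 48): isdigit (c) becomes
   (unsigned) c - 48 <= 9, which needs no libc call and folds further
   when c is a constant.  */
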
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

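/* Both helpers above fold constant arguments directly, e.g.
   __builtin_labs (-5L) becomes 5L and __builtin_fabs (-2.5) becomes
   2.5; otherwise they emit a plain ABS_EXPR on the converted
   argument.  */
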
8903 /* Fold a fma operation with arguments ARG[012]. */
8906 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
8907 tree type
, tree arg0
, tree arg1
, tree arg2
)
8909 if (TREE_CODE (arg0
) == REAL_CST
8910 && TREE_CODE (arg1
) == REAL_CST
8911 && TREE_CODE (arg2
) == REAL_CST
)
8912 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
8917 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8920 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
8922 if (validate_arg (arg0
, REAL_TYPE
)
8923 && validate_arg (arg1
, REAL_TYPE
)
8924 && validate_arg (arg2
, REAL_TYPE
))
8926 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
8930 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8931 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
8932 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
8937 /* Fold a call to builtin fmin or fmax. */
8940 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
8941 tree type
, bool max
)
8943 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
8945 /* Calculate the result when the argument is a constant. */
8946 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
8951 /* If either argument is NaN, return the other one. Avoid the
8952 transformation if we get (and honor) a signalling NaN. Using
8953 omit_one_operand() ensures we create a non-lvalue. */
8954 if (TREE_CODE (arg0
) == REAL_CST
8955 && real_isnan (&TREE_REAL_CST (arg0
))
8956 && (! HONOR_SNANS (arg0
)
8957 || ! TREE_REAL_CST (arg0
).signalling
))
8958 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
8959 if (TREE_CODE (arg1
) == REAL_CST
8960 && real_isnan (&TREE_REAL_CST (arg1
))
8961 && (! HONOR_SNANS (arg1
)
8962 || ! TREE_REAL_CST (arg1
).signalling
))
8963 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8965 /* Transform fmin/fmax(x,x) -> x. */
8966 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8967 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8969 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8970 functions to return the numeric arg if the other one is NaN.
8971 These tree codes don't honor that, so only transform if
8972 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8973 handled, so we don't have to worry about it either. */
8974 if (flag_finite_math_only
)
8975 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
8976 fold_convert_loc (loc
, type
, arg0
),
8977 fold_convert_loc (loc
, type
, arg1
));
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
        {
          tree new_arg = builtin_save_expr (arg);
          tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
          tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
          return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
        }
    }

  return NULL_TREE;
}

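/* Illustrative example for fold_builtin_carg: carg (z) with z = a + b*I
   is lowered to atan2 (b, a), with z wrapped in a SAVE_EXPR so that any
   side effects in the argument are evaluated only once.  */
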
9004 /* Fold a call to builtin logb/ilogb. */
9007 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9009 if (! validate_arg (arg
, REAL_TYPE
))
9014 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9016 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9022 /* If arg is Inf or NaN and we're logb, return it. */
9023 if (TREE_CODE (rettype
) == REAL_TYPE
)
9025 /* For logb(-Inf) we have to return +Inf. */
9026 if (real_isinf (value
) && real_isneg (value
))
9028 REAL_VALUE_TYPE tem
;
9030 return build_real (rettype
, tem
);
9032 return fold_convert_loc (loc
, rettype
, arg
);
9034 /* Fall through... */
9036 /* Zero may set errno and/or raise an exception for logb, also
9037 for ilogb we don't know FP_ILOGB0. */
9040 /* For normal numbers, proceed iff radix == 2. In GCC,
9041 normalized significands are in the range [0.5, 1.0). We
9042 want the exponent as if they were [1.0, 2.0) so get the
9043 exponent and subtract 1. */
9044 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9045 return fold_convert_loc (loc
, rettype
,
9046 build_int_cst (integer_type_node
,
9047 REAL_EXP (value
)-1));
9055 /* Fold a call to builtin significand, if radix == 2. */
9058 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9060 if (! validate_arg (arg
, REAL_TYPE
))
9065 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9067 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9074 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9075 return fold_convert_loc (loc
, rettype
, arg
);
9077 /* For normal numbers, proceed iff radix == 2. */
9078 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9080 REAL_VALUE_TYPE result
= *value
;
9081 /* In GCC, normalized significands are in the range [0.5,
9082 1.0). We want them to be [1.0, 2.0) so set the
9084 SET_REAL_EXP (&result
, 1);
9085 return build_real (rettype
, result
);
9094 /* Fold a call to builtin frexp, we can assume the base is 2. */
9097 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9099 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9104 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9107 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9109 /* Proceed if a valid pointer type was passed in. */
9110 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9112 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9118 /* For +-0, return (*exp = 0, +-0). */
9119 exp
= integer_zero_node
;
9124 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9125 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9128 /* Since the frexp function always expects base 2, and in
9129 GCC normalized significands are already in the range
9130 [0.5, 1.0), we have exactly what frexp wants. */
9131 REAL_VALUE_TYPE frac_rvt
= *value
;
9132 SET_REAL_EXP (&frac_rvt
, 0);
9133 frac
= build_real (rettype
, frac_rvt
);
9134 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9141 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9142 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9143 TREE_SIDE_EFFECTS (arg1
) = 1;
9144 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9150 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9151 then we can assume the base is two. If it's false, then we have to
9152 check the mode of the TYPE parameter in certain cases. */
9155 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9156 tree type
, bool ldexp
)
9158 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9163 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9164 if (real_zerop (arg0
) || integer_zerop (arg1
)
9165 || (TREE_CODE (arg0
) == REAL_CST
9166 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9167 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9169 /* If both arguments are constant, then try to evaluate it. */
9170 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9171 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9172 && tree_fits_shwi_p (arg1
))
9174 /* Bound the maximum adjustment to twice the range of the
9175 mode's valid exponents. Use abs to ensure the range is
9176 positive as a sanity check. */
9177 const long max_exp_adj
= 2 *
9178 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9179 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9181 /* Get the user-requested adjustment. */
9182 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
9184 /* The requested adjustment must be inside this range. This
9185 is a preliminary cap to avoid things like overflow, we
9186 may still fail to compute the result for other reasons. */
9187 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9189 REAL_VALUE_TYPE initial_result
;
9191 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9193 /* Ensure we didn't overflow. */
9194 if (! real_isinf (&initial_result
))
9196 const REAL_VALUE_TYPE trunc_result
9197 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9199 /* Only proceed if the target mode can hold the
9201 if (real_equal (&initial_result
, &trunc_result
))
9202 return build_real (type
, trunc_result
);
9211 /* Fold a call to builtin modf. */
9214 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9216 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9221 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9224 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9226 /* Proceed if a valid pointer type was passed in. */
9227 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9229 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9230 REAL_VALUE_TYPE trunc
, frac
;
9236 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9237 trunc
= frac
= *value
;
9240 /* For +-Inf, return (*arg1 = arg0, +-0). */
9242 frac
.sign
= value
->sign
;
9246 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9247 real_trunc (&trunc
, VOIDmode
, value
);
9248 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9249 /* If the original number was negative and already
9250 integral, then the fractional part is -0.0. */
9251 if (value
->sign
&& frac
.cl
== rvc_zero
)
9252 frac
.sign
= value
->sign
;
9256 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9257 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9258 build_real (rettype
, trunc
));
9259 TREE_SIDE_EFFECTS (arg1
) = 1;
9260 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9261 build_real (rettype
, frac
));
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */
9273 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9277 if (!validate_arg (arg
, REAL_TYPE
))
9280 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9283 mode
= TYPE_MODE (TREE_TYPE (arg
));
9285 /* If there is no optab, try generic code. */
9286 switch (DECL_FUNCTION_CODE (fndecl
))
9290 CASE_FLT_FN (BUILT_IN_ISINF
):
9292 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9293 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9294 tree
const type
= TREE_TYPE (arg
);
9298 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9299 real_from_string (&r
, buf
);
9300 result
= build_call_expr (isgr_fn
, 2,
9301 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9302 build_real (type
, r
));
9305 CASE_FLT_FN (BUILT_IN_FINITE
):
9306 case BUILT_IN_ISFINITE
:
9308 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9309 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9310 tree
const type
= TREE_TYPE (arg
);
9314 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9315 real_from_string (&r
, buf
);
9316 result
= build_call_expr (isle_fn
, 2,
9317 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9318 build_real (type
, r
));
9319 /*result = fold_build2_loc (loc, UNGT_EXPR,
9320 TREE_TYPE (TREE_TYPE (fndecl)),
9321 fold_build1_loc (loc, ABS_EXPR, type, arg),
9322 build_real (type, r));
9323 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9324 TREE_TYPE (TREE_TYPE (fndecl)),
9328 case BUILT_IN_ISNORMAL
:
9330 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9331 islessequal(fabs(x),DBL_MAX). */
9332 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9333 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9334 tree
const type
= TREE_TYPE (arg
);
9335 REAL_VALUE_TYPE rmax
, rmin
;
9338 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9339 real_from_string (&rmax
, buf
);
9340 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9341 real_from_string (&rmin
, buf
);
9342 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9343 result
= build_call_expr (isle_fn
, 2, arg
,
9344 build_real (type
, rmax
));
9345 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9346 build_call_expr (isge_fn
, 2, arg
,
9347 build_real (type
, rmin
)));
9357 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9358 ARG is the argument for the call. */
9361 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9363 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9366 if (!validate_arg (arg
, REAL_TYPE
))
9369 switch (builtin_index
)
9371 case BUILT_IN_ISINF
:
9372 if (!HONOR_INFINITIES (arg
))
9373 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9375 if (TREE_CODE (arg
) == REAL_CST
)
9377 r
= TREE_REAL_CST (arg
);
9378 if (real_isinf (&r
))
9379 return real_compare (GT_EXPR
, &r
, &dconst0
)
9380 ? integer_one_node
: integer_minus_one_node
;
9382 return integer_zero_node
;
9387 case BUILT_IN_ISINF_SIGN
:
9389 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9390 /* In a boolean context, GCC will fold the inner COND_EXPR to
9391 1. So e.g. "if (isinf_sign(x))" would be folded to just
9392 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9393 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9394 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9395 tree tmp
= NULL_TREE
;
9397 arg
= builtin_save_expr (arg
);
9399 if (signbit_fn
&& isinf_fn
)
9401 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9402 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9404 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9405 signbit_call
, integer_zero_node
);
9406 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9407 isinf_call
, integer_zero_node
);
9409 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9410 integer_minus_one_node
, integer_one_node
);
9411 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9419 case BUILT_IN_ISFINITE
:
9420 if (!HONOR_NANS (arg
)
9421 && !HONOR_INFINITIES (arg
))
9422 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9424 if (TREE_CODE (arg
) == REAL_CST
)
9426 r
= TREE_REAL_CST (arg
);
9427 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9432 case BUILT_IN_ISNAN
:
9433 if (!HONOR_NANS (arg
))
9434 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9436 if (TREE_CODE (arg
) == REAL_CST
)
9438 r
= TREE_REAL_CST (arg
);
9439 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9442 arg
= builtin_save_expr (arg
);
9443 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */
9459 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9461 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9462 arg
, type
, res
, tmp
;
9467 /* Verify the required arguments in the original call. */
9469 || !validate_arg (args
[0], INTEGER_TYPE
)
9470 || !validate_arg (args
[1], INTEGER_TYPE
)
9471 || !validate_arg (args
[2], INTEGER_TYPE
)
9472 || !validate_arg (args
[3], INTEGER_TYPE
)
9473 || !validate_arg (args
[4], INTEGER_TYPE
)
9474 || !validate_arg (args
[5], REAL_TYPE
))
9478 fp_infinite
= args
[1];
9479 fp_normal
= args
[2];
9480 fp_subnormal
= args
[3];
9483 type
= TREE_TYPE (arg
);
9484 mode
= TYPE_MODE (type
);
9485 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9489 (fabs(x) == Inf ? FP_INFINITE :
9490 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9491 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9493 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9494 build_real (type
, dconst0
));
9495 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9496 tmp
, fp_zero
, fp_subnormal
);
9498 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9499 real_from_string (&r
, buf
);
9500 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9501 arg
, build_real (type
, r
));
9502 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9504 if (HONOR_INFINITIES (mode
))
9507 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9508 build_real (type
, r
));
9509 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9513 if (HONOR_NANS (mode
))
9515 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9516 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9522 /* Fold a call to an unordered comparison function such as
9523 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9524 being called and ARG0 and ARG1 are the arguments for the call.
9525 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9526 the opposite of the desired result. UNORDERED_CODE is used
9527 for modes that can hold NaNs and ORDERED_CODE is used for
9531 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9532 enum tree_code unordered_code
,
9533 enum tree_code ordered_code
)
9535 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9536 enum tree_code code
;
9538 enum tree_code code0
, code1
;
9539 tree cmp_type
= NULL_TREE
;
9541 type0
= TREE_TYPE (arg0
);
9542 type1
= TREE_TYPE (arg1
);
9544 code0
= TREE_CODE (type0
);
9545 code1
= TREE_CODE (type1
);
9547 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9548 /* Choose the wider of two real types. */
9549 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9551 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9553 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9556 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9557 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9559 if (unordered_code
== UNORDERED_EXPR
)
9561 if (!HONOR_NANS (arg0
))
9562 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9563 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9566 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
9567 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9568 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9571 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9572 arithmetics if it can never overflow, or into internal functions that
9573 return both result of arithmetics and overflowed boolean flag in
9574 a complex integer result, or some other check for overflow. */
9577 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9578 tree arg0
, tree arg1
, tree arg2
)
9580 enum internal_fn ifn
= IFN_LAST
;
9581 tree type
= TREE_TYPE (TREE_TYPE (arg2
));
9582 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
9585 case BUILT_IN_ADD_OVERFLOW
:
9586 case BUILT_IN_SADD_OVERFLOW
:
9587 case BUILT_IN_SADDL_OVERFLOW
:
9588 case BUILT_IN_SADDLL_OVERFLOW
:
9589 case BUILT_IN_UADD_OVERFLOW
:
9590 case BUILT_IN_UADDL_OVERFLOW
:
9591 case BUILT_IN_UADDLL_OVERFLOW
:
9592 ifn
= IFN_ADD_OVERFLOW
;
9594 case BUILT_IN_SUB_OVERFLOW
:
9595 case BUILT_IN_SSUB_OVERFLOW
:
9596 case BUILT_IN_SSUBL_OVERFLOW
:
9597 case BUILT_IN_SSUBLL_OVERFLOW
:
9598 case BUILT_IN_USUB_OVERFLOW
:
9599 case BUILT_IN_USUBL_OVERFLOW
:
9600 case BUILT_IN_USUBLL_OVERFLOW
:
9601 ifn
= IFN_SUB_OVERFLOW
;
9603 case BUILT_IN_MUL_OVERFLOW
:
9604 case BUILT_IN_SMUL_OVERFLOW
:
9605 case BUILT_IN_SMULL_OVERFLOW
:
9606 case BUILT_IN_SMULLL_OVERFLOW
:
9607 case BUILT_IN_UMUL_OVERFLOW
:
9608 case BUILT_IN_UMULL_OVERFLOW
:
9609 case BUILT_IN_UMULLL_OVERFLOW
:
9610 ifn
= IFN_MUL_OVERFLOW
;
9615 tree ctype
= build_complex_type (type
);
9616 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
9618 tree tgt
= save_expr (call
);
9619 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
9620 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
9621 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
9623 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
9624 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
9627 /* Fold a call to built-in function FNDECL with 0 arguments.
9628 This function returns NULL_TREE if no simplification was possible. */
9631 fold_builtin_0 (location_t loc
, tree fndecl
)
9633 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9634 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9637 CASE_FLT_FN (BUILT_IN_INF
):
9638 case BUILT_IN_INFD32
:
9639 case BUILT_IN_INFD64
:
9640 case BUILT_IN_INFD128
:
9641 return fold_builtin_inf (loc
, type
, true);
9643 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9644 return fold_builtin_inf (loc
, type
, false);
9646 case BUILT_IN_CLASSIFY_TYPE
:
9647 return fold_builtin_classify_type (NULL_TREE
);
9655 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9656 This function returns NULL_TREE if no simplification was possible. */
9659 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
9661 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9662 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9665 case BUILT_IN_CONSTANT_P
:
9667 tree val
= fold_builtin_constant_p (arg0
);
9669 /* Gimplification will pull the CALL_EXPR for the builtin out of
9670 an if condition. When not optimizing, we'll not CSE it back.
9671 To avoid link error types of regressions, return false now. */
9672 if (!val
&& !optimize
)
9673 val
= integer_zero_node
;
9678 case BUILT_IN_CLASSIFY_TYPE
:
9679 return fold_builtin_classify_type (arg0
);
9681 case BUILT_IN_STRLEN
:
9682 return fold_builtin_strlen (loc
, type
, arg0
);
9684 CASE_FLT_FN (BUILT_IN_FABS
):
9685 case BUILT_IN_FABSD32
:
9686 case BUILT_IN_FABSD64
:
9687 case BUILT_IN_FABSD128
:
9688 return fold_builtin_fabs (loc
, arg0
, type
);
9692 case BUILT_IN_LLABS
:
9693 case BUILT_IN_IMAXABS
:
9694 return fold_builtin_abs (loc
, arg0
, type
);
9696 CASE_FLT_FN (BUILT_IN_CONJ
):
9697 if (validate_arg (arg0
, COMPLEX_TYPE
)
9698 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9699 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9702 CASE_FLT_FN (BUILT_IN_CREAL
):
9703 if (validate_arg (arg0
, COMPLEX_TYPE
)
9704 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9705 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
9708 CASE_FLT_FN (BUILT_IN_CIMAG
):
9709 if (validate_arg (arg0
, COMPLEX_TYPE
)
9710 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9711 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9714 CASE_FLT_FN (BUILT_IN_CCOS
):
9715 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
9717 CASE_FLT_FN (BUILT_IN_CCOSH
):
9718 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
9720 CASE_FLT_FN (BUILT_IN_CPROJ
):
9721 return fold_builtin_cproj (loc
, arg0
, type
);
9723 CASE_FLT_FN (BUILT_IN_CSIN
):
9724 if (validate_arg (arg0
, COMPLEX_TYPE
)
9725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9726 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
9729 CASE_FLT_FN (BUILT_IN_CSINH
):
9730 if (validate_arg (arg0
, COMPLEX_TYPE
)
9731 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9732 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
9735 CASE_FLT_FN (BUILT_IN_CTAN
):
9736 if (validate_arg (arg0
, COMPLEX_TYPE
)
9737 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9738 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
9741 CASE_FLT_FN (BUILT_IN_CTANH
):
9742 if (validate_arg (arg0
, COMPLEX_TYPE
)
9743 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9744 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
9747 CASE_FLT_FN (BUILT_IN_CLOG
):
9748 if (validate_arg (arg0
, COMPLEX_TYPE
)
9749 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9750 return do_mpc_arg1 (arg0
, type
, mpc_log
);
9753 CASE_FLT_FN (BUILT_IN_CSQRT
):
9754 if (validate_arg (arg0
, COMPLEX_TYPE
)
9755 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9756 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
9759 CASE_FLT_FN (BUILT_IN_CASIN
):
9760 if (validate_arg (arg0
, COMPLEX_TYPE
)
9761 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9762 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
9765 CASE_FLT_FN (BUILT_IN_CACOS
):
9766 if (validate_arg (arg0
, COMPLEX_TYPE
)
9767 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9768 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
9771 CASE_FLT_FN (BUILT_IN_CATAN
):
9772 if (validate_arg (arg0
, COMPLEX_TYPE
)
9773 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9774 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
9777 CASE_FLT_FN (BUILT_IN_CASINH
):
9778 if (validate_arg (arg0
, COMPLEX_TYPE
)
9779 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9780 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
9783 CASE_FLT_FN (BUILT_IN_CACOSH
):
9784 if (validate_arg (arg0
, COMPLEX_TYPE
)
9785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9786 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
9789 CASE_FLT_FN (BUILT_IN_CATANH
):
9790 if (validate_arg (arg0
, COMPLEX_TYPE
)
9791 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9792 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
9795 CASE_FLT_FN (BUILT_IN_CABS
):
9796 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
9798 CASE_FLT_FN (BUILT_IN_CARG
):
9799 return fold_builtin_carg (loc
, arg0
, type
);
9801 CASE_FLT_FN (BUILT_IN_SQRT
):
9802 if (validate_arg (arg0
, REAL_TYPE
))
9803 return do_mpfr_arg1 (arg0
, type
, mpfr_sqrt
, &dconst0
, NULL
, true);
9806 CASE_FLT_FN (BUILT_IN_CBRT
):
9807 if (validate_arg (arg0
, REAL_TYPE
))
9808 return do_mpfr_arg1 (arg0
, type
, mpfr_cbrt
, NULL
, NULL
, 0);
9811 CASE_FLT_FN (BUILT_IN_ASIN
):
9812 if (validate_arg (arg0
, REAL_TYPE
))
9813 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9814 &dconstm1
, &dconst1
, true);
9817 CASE_FLT_FN (BUILT_IN_ACOS
):
9818 if (validate_arg (arg0
, REAL_TYPE
))
9819 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9820 &dconstm1
, &dconst1
, true);
9823 CASE_FLT_FN (BUILT_IN_ATAN
):
9824 if (validate_arg (arg0
, REAL_TYPE
))
9825 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9828 CASE_FLT_FN (BUILT_IN_ASINH
):
9829 if (validate_arg (arg0
, REAL_TYPE
))
9830 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9833 CASE_FLT_FN (BUILT_IN_ACOSH
):
9834 if (validate_arg (arg0
, REAL_TYPE
))
9835 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9836 &dconst1
, NULL
, true);
9839 CASE_FLT_FN (BUILT_IN_ATANH
):
9840 if (validate_arg (arg0
, REAL_TYPE
))
9841 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9842 &dconstm1
, &dconst1
, false);
9845 CASE_FLT_FN (BUILT_IN_SIN
):
9846 if (validate_arg (arg0
, REAL_TYPE
))
9847 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9850 CASE_FLT_FN (BUILT_IN_COS
):
9851 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
9853 CASE_FLT_FN (BUILT_IN_TAN
):
9854 return fold_builtin_tan (arg0
, type
);
    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log1p,
                             &dconstm1, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j0,
                             NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j1,
                             NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y0,
                             &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y1,
                             &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);
    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
        return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
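/* Illustrative summary (not exhaustive) of the unary folds handled above,
   assuming constant operands of the expected type:

     __builtin_sin (0.0)            -> 0.0  via do_mpfr_arg1 / mpfr_sin
     __builtin_clog (c)             -> a complex constant via do_mpc_arg1 / mpc_log
     __builtin_bswap32 (0x11223344) -> 0x44332211
     __builtin_free ((void *) 0)    -> folded to an empty statement

   Non-constant operands are deliberately left alone here and handled by
   later passes.  */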
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }

  return NULL_TREE;
}
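/* A few illustrative instances of the binary folds above, assuming the
   relevant operands are constants:

     __builtin_atan2 (0.0, 1.0) -> 0.0  via do_mpfr_arg2 / mpfr_atan2
     __builtin_hypot (3.0, 4.0) -> 5.0  via fold_builtin_hypot
     __builtin_isgreater (x, y) -> rewritten through
                                   fold_builtin_unordered_cmp using the
                                   UNLE_EXPR / LE_EXPR pair

   Anything that cannot be simplified here is returned as NULL_TREE.  */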
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }

  return NULL_TREE;
}
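/* For example (illustrative only), with constant operands the cases above
   allow:

     __builtin_strncmp ("abc", "abd", 2) -> 0
     __builtin_memcmp ("abc", "abd", 3)  -> a negative constant

   while the __builtin_*_overflow family is funnelled through
   fold_builtin_arith_overflow, which can fold both the stored result and
   the overflow flag when the operands are known.  */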
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10334 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10335 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10336 of arguments in ARGS to be omitted. OLDNARGS is the number of
10337 elements in ARGS. */
10340 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
10341 int skip
, tree fndecl
, int n
, va_list newargs
)
10343 int nargs
= oldnargs
- skip
+ n
;
10350 buffer
= XALLOCAVEC (tree
, nargs
);
10351 for (i
= 0; i
< n
; i
++)
10352 buffer
[i
] = va_arg (newargs
, tree
);
10353 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10354 buffer
[i
] = args
[j
];
10357 buffer
= args
+ skip
;
10359 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
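/* Sketch of how the helper above shuffles arguments: with OLDNARGS == 3,
   SKIP == 1 and N == 2, the rewritten call receives the N new arguments
   first, followed by the old arguments 1 and 2, i.e. (names illustrative)

     f (a0, a1, a2)  ->  g (n0, n1, a1, a2).  */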
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
10378 /* A wrapper function for builtin folding that prevents warnings for
10379 "statement without effect" and the like, caused by removing the
10380 call node earlier than the warning is generated. */
10383 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10385 tree ret
= NULL_TREE
;
10386 tree fndecl
= get_callee_fndecl (exp
);
10388 && TREE_CODE (fndecl
) == FUNCTION_DECL
10389 && DECL_BUILT_IN (fndecl
)
10390 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10391 yet. Defer folding until we see all the arguments
10392 (after inlining). */
10393 && !CALL_EXPR_VA_ARG_PACK (exp
))
10395 int nargs
= call_expr_nargs (exp
);
10397 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10398 instead last argument is __builtin_va_arg_pack (). Defer folding
10399 even in that case, until arguments are finalized. */
10400 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10402 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10404 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10405 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10406 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10410 if (avoid_folding_inline_builtin (fndecl
))
10413 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10414 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10415 CALL_EXPR_ARGP (exp
), ignore
);
10418 tree
*args
= CALL_EXPR_ARGP (exp
);
10419 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
10427 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10428 N arguments are passed in the array ARGARRAY. Return a folded
10429 expression or NULL_TREE if no simplification was possible. */
10432 fold_builtin_call_array (location_t loc
, tree
,
10437 if (TREE_CODE (fn
) != ADDR_EXPR
)
10440 tree fndecl
= TREE_OPERAND (fn
, 0);
10441 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10442 && DECL_BUILT_IN (fndecl
))
10444 /* If last argument is __builtin_va_arg_pack (), arguments to this
10445 function are not finalized yet. Defer folding until they are. */
10446 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10448 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10450 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10451 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10452 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10455 if (avoid_folding_inline_builtin (fndecl
))
10457 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10458 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10460 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
10466 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10467 along with N new arguments specified as the "..." parameters. SKIP
10468 is the number of arguments in EXP to be omitted. This function is used
10469 to do varargs-to-varargs transformations. */
10472 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
10478 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
10479 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
10500 /* This function validates the types of a function call argument list
10501 against a specified list of tree_codes. If the last specifier is a 0,
10502 that represents an ellipses, otherwise the last specifier must be a
10505 This is the GIMPLE version of validate_arglist. Eventually we want to
10506 completely convert builtins.c to work from GIMPLEs and the tree based
10507 validate_arglist will then be removed. */
10510 validate_gimple_arglist (const gcall
*call
, ...)
10512 enum tree_code code
;
10518 va_start (ap
, call
);
10523 code
= (enum tree_code
) va_arg (ap
, int);
10527 /* This signifies an ellipses, any further arguments are all ok. */
10531 /* This signifies an endlink, if no arguments remain, return
10532 true, otherwise return false. */
10533 res
= (i
== gimple_call_num_args (call
));
10536 /* If no parameters remain or the parameter's code does not
10537 match the specified code, return false. Otherwise continue
10538 checking any remaining arguments. */
10539 arg
= gimple_call_arg (call
, i
++);
10540 if (!validate_arg (arg
, code
))
10547 /* We need gotos here since we can only have one VA_CLOSE in a
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
10593 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10594 to the call, and TYPE is its return type.
10596 Return NULL_TREE if no simplification was possible, otherwise return the
10597 simplified form of the call as a tree.
10599 The simplified form may be a constant or other expression which
10600 computes the same value, but in a more efficient manner (including
10601 calls to other builtin functions).
10603 The call may contain arguments which need to be evaluated, but
10604 which are not useful to determine the result of the call. In
10605 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10606 COMPOUND_EXPR will be an argument which must be evaluated.
10607 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10608 COMPOUND_EXPR in the chain will contain the tree for the simplified
10609 form of the builtin function call. */
10612 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
10614 if (!validate_arg (s1
, POINTER_TYPE
)
10615 || !validate_arg (s2
, POINTER_TYPE
))
10620 const char *p1
, *p2
;
10622 p2
= c_getstr (s2
);
10626 p1
= c_getstr (s1
);
10629 const char *r
= strstr (p1
, p2
);
10633 return build_int_cst (TREE_TYPE (s1
), 0);
10635 /* Return an offset into the constant string argument. */
10636 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10637 return fold_convert_loc (loc
, type
, tem
);
10640 /* The argument is const char *, and the result is char *, so we need
10641 a type conversion here to avoid a warning. */
10643 return fold_convert_loc (loc
, type
, s1
);
10648 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10652 /* New argument list transforming strstr(s1, s2) to
10653 strchr(s1, s2[0]). */
10654 return build_call_expr_loc (loc
, fn
, 2, s1
,
10655 build_int_cst (integer_type_node
, p2
[0]));
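/* The net effect of the strstr code above (illustrative):

     strstr (s, "")        -> (char *) s
     strstr ("abcd", "cd") -> "abcd" + 2
     strstr (s, "c")       -> strchr (s, 'c')  when only S2 is constant and
                              the strchr builtin decl is available.  */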
10659 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10660 the call, and TYPE is its return type.
10662 Return NULL_TREE if no simplification was possible, otherwise return the
10663 simplified form of the call as a tree.
10665 The simplified form may be a constant or other expression which
10666 computes the same value, but in a more efficient manner (including
10667 calls to other builtin functions).
10669 The call may contain arguments which need to be evaluated, but
10670 which are not useful to determine the result of the call. In
10671 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10672 COMPOUND_EXPR will be an argument which must be evaluated.
10673 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10674 COMPOUND_EXPR in the chain will contain the tree for the simplified
10675 form of the builtin function call. */
10678 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
10680 if (!validate_arg (s1
, POINTER_TYPE
)
10681 || !validate_arg (s2
, INTEGER_TYPE
))
10687 if (TREE_CODE (s2
) != INTEGER_CST
)
10690 p1
= c_getstr (s1
);
10697 if (target_char_cast (s2
, &c
))
10700 r
= strchr (p1
, c
);
10703 return build_int_cst (TREE_TYPE (s1
), 0);
10705 /* Return an offset into the constant string argument. */
10706 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10707 return fold_convert_loc (loc
, type
, tem
);
10713 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10714 the call, and TYPE is its return type.
10716 Return NULL_TREE if no simplification was possible, otherwise return the
10717 simplified form of the call as a tree.
10719 The simplified form may be a constant or other expression which
10720 computes the same value, but in a more efficient manner (including
10721 calls to other builtin functions).
10723 The call may contain arguments which need to be evaluated, but
10724 which are not useful to determine the result of the call. In
10725 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10726 COMPOUND_EXPR will be an argument which must be evaluated.
10727 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10728 COMPOUND_EXPR in the chain will contain the tree for the simplified
10729 form of the builtin function call. */
10732 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
10734 if (!validate_arg (s1
, POINTER_TYPE
)
10735 || !validate_arg (s2
, INTEGER_TYPE
))
10742 if (TREE_CODE (s2
) != INTEGER_CST
)
10745 p1
= c_getstr (s1
);
10752 if (target_char_cast (s2
, &c
))
10755 r
= strrchr (p1
, c
);
10758 return build_int_cst (TREE_TYPE (s1
), 0);
10760 /* Return an offset into the constant string argument. */
10761 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10762 return fold_convert_loc (loc
, type
, tem
);
10765 if (! integer_zerop (s2
))
10768 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10772 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10773 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
10777 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10778 to the call, and TYPE is its return type.
10780 Return NULL_TREE if no simplification was possible, otherwise return the
10781 simplified form of the call as a tree.
10783 The simplified form may be a constant or other expression which
10784 computes the same value, but in a more efficient manner (including
10785 calls to other builtin functions).
10787 The call may contain arguments which need to be evaluated, but
10788 which are not useful to determine the result of the call. In
10789 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10790 COMPOUND_EXPR will be an argument which must be evaluated.
10791 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10792 COMPOUND_EXPR in the chain will contain the tree for the simplified
10793 form of the builtin function call. */
10796 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
10798 if (!validate_arg (s1
, POINTER_TYPE
)
10799 || !validate_arg (s2
, POINTER_TYPE
))
10804 const char *p1
, *p2
;
10806 p2
= c_getstr (s2
);
10810 p1
= c_getstr (s1
);
10813 const char *r
= strpbrk (p1
, p2
);
10817 return build_int_cst (TREE_TYPE (s1
), 0);
10819 /* Return an offset into the constant string argument. */
10820 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10821 return fold_convert_loc (loc
, type
, tem
);
10825 /* strpbrk(x, "") == NULL.
10826 Evaluate and ignore s1 in case it had side-effects. */
10827 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
10830 return NULL_TREE
; /* Really call strpbrk. */
10832 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10836 /* New argument list transforming strpbrk(s1, s2) to
10837 strchr(s1, s2[0]). */
10838 return build_call_expr_loc (loc
, fn
, 2, s1
,
10839 build_int_cst (integer_type_node
, p2
[0]));
10843 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10846 Return NULL_TREE if no simplification was possible, otherwise return the
10847 simplified form of the call as a tree.
10849 The simplified form may be a constant or other expression which
10850 computes the same value, but in a more efficient manner (including
10851 calls to other builtin functions).
10853 The call may contain arguments which need to be evaluated, but
10854 which are not useful to determine the result of the call. In
10855 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10856 COMPOUND_EXPR will be an argument which must be evaluated.
10857 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10858 COMPOUND_EXPR in the chain will contain the tree for the simplified
10859 form of the builtin function call. */
static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}
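/* E.g. (illustrative) strspn ("abcba", "ab") folds to 2 above, and strspn
   with either argument equal to "" folds to 0 while still evaluating the
   operands for side effects.  */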
10888 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10891 Return NULL_TREE if no simplification was possible, otherwise return the
10892 simplified form of the call as a tree.
10894 The simplified form may be a constant or other expression which
10895 computes the same value, but in a more efficient manner (including
10896 calls to other builtin functions).
10898 The call may contain arguments which need to be evaluated, but
10899 which are not useful to determine the result of the call. In
10900 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10901 COMPOUND_EXPR will be an argument which must be evaluated.
10902 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10903 COMPOUND_EXPR in the chain will contain the tree for the simplified
10904 form of the builtin function call. */
10907 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
10909 if (!validate_arg (s1
, POINTER_TYPE
)
10910 || !validate_arg (s2
, POINTER_TYPE
))
10914 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10916 /* If both arguments are constants, evaluate at compile-time. */
10919 const size_t r
= strcspn (p1
, p2
);
10920 return build_int_cst (size_type_node
, r
);
10923 /* If the first argument is "", return NULL_TREE. */
10924 if (p1
&& *p1
== '\0')
10926 /* Evaluate and ignore argument s2 in case it has
10928 return omit_one_operand_loc (loc
, size_type_node
,
10929 size_zero_node
, s2
);
10932 /* If the second argument is "", return __builtin_strlen(s1). */
10933 if (p2
&& *p2
== '\0')
10935 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
10937 /* If the replacement _DECL isn't initialized, don't do the
10942 return build_call_expr_loc (loc
, fn
, 1, s1
);
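/* E.g. (illustrative) strcspn ("abcba", "c") folds to 2, strcspn ("", s2)
   folds to 0 (evaluating S2 for side effects), and strcspn (s1, "") is
   rewritten as __builtin_strlen (s1) when that decl is available.  */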
10948 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10949 produced. False otherwise. This is done so that we don't output the error
10950 or warning twice or three times. */
10953 fold_builtin_next_arg (tree exp
, bool va_start_p
)
10955 tree fntype
= TREE_TYPE (current_function_decl
);
10956 int nargs
= call_expr_nargs (exp
);
10958 /* There is good chance the current input_location points inside the
10959 definition of the va_start macro (perhaps on the token for
10960 builtin) in a system header, so warnings will not be emitted.
10961 Use the location in real source code. */
10962 source_location current_location
=
10963 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
10966 if (!stdarg_p (fntype
))
10968 error ("%<va_start%> used in function with fixed args");
10974 if (va_start_p
&& (nargs
!= 2))
10976 error ("wrong number of arguments to function %<va_start%>");
10979 arg
= CALL_EXPR_ARG (exp
, 1);
10981 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10982 when we checked the arguments and if needed issued a warning. */
10987 /* Evidently an out of date version of <stdarg.h>; can't validate
10988 va_start's second argument, but can still work as intended. */
10989 warning_at (current_location
,
10991 "%<__builtin_next_arg%> called without an argument");
10994 else if (nargs
> 1)
10996 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10999 arg
= CALL_EXPR_ARG (exp
, 0);
11002 if (TREE_CODE (arg
) == SSA_NAME
)
11003 arg
= SSA_NAME_VAR (arg
);
11005 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11006 or __builtin_next_arg (0) the first time we see it, after checking
11007 the arguments and if needed issuing a warning. */
11008 if (!integer_zerop (arg
))
11010 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11012 /* Strip off all nops for the sake of the comparison. This
11013 is not quite the same as STRIP_NOPS. It does more.
11014 We must also strip off INDIRECT_EXPR for C++ reference
11016 while (CONVERT_EXPR_P (arg
)
11017 || TREE_CODE (arg
) == INDIRECT_REF
)
11018 arg
= TREE_OPERAND (arg
, 0);
11019 if (arg
!= last_parm
)
11021 /* FIXME: Sometimes with the tree optimizers we can get the
11022 not the last argument even though the user used the last
11023 argument. We just warn and set the arg to be the last
11024 argument so that we will get wrong-code because of
11026 warning_at (current_location
,
11028 "second parameter of %<va_start%> not last named argument");
11031 /* Undefined by C99 7.15.1.4p4 (va_start):
11032 "If the parameter parmN is declared with the register storage
11033 class, with a function or array type, or with a type that is
11034 not compatible with the type that results after application of
11035 the default argument promotions, the behavior is undefined."
11037 else if (DECL_REGISTER (arg
))
11039 warning_at (current_location
,
11041 "undefined behaviour when second parameter of "
11042 "%<va_start%> is declared with %<register%> storage");
11045 /* We want to verify the second parameter just once before the tree
11046 optimizers are run and then avoid keeping it in the tree,
11047 as otherwise we could warn even for correct code like:
11048 void foo (int i, ...)
11049 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11051 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11053 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11059 /* Expand a call EXP to __builtin_object_size. */
11062 expand_builtin_object_size (tree exp
)
11065 int object_size_type
;
11066 tree fndecl
= get_callee_fndecl (exp
);
11068 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11070 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11072 expand_builtin_trap ();
11076 ost
= CALL_EXPR_ARG (exp
, 1);
11079 if (TREE_CODE (ost
) != INTEGER_CST
11080 || tree_int_cst_sgn (ost
) < 0
11081 || compare_tree_int (ost
, 3) > 0)
11083 error ("%Klast argument of %D is not integer constant between 0 and 3",
11085 expand_builtin_trap ();
11089 object_size_type
= tree_to_shwi (ost
);
11091 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11094 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11095 FCODE is the BUILT_IN_* to use.
11096 Return NULL_RTX if we failed; the caller should emit a normal call,
11097 otherwise try to get the result in TARGET, if convenient (and in
11098 mode MODE if that's convenient). */
11101 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
11102 enum built_in_function fcode
)
11104 tree dest
, src
, len
, size
;
11106 if (!validate_arglist (exp
,
11108 fcode
== BUILT_IN_MEMSET_CHK
11109 ? INTEGER_TYPE
: POINTER_TYPE
,
11110 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11113 dest
= CALL_EXPR_ARG (exp
, 0);
11114 src
= CALL_EXPR_ARG (exp
, 1);
11115 len
= CALL_EXPR_ARG (exp
, 2);
11116 size
= CALL_EXPR_ARG (exp
, 3);
11118 if (! tree_fits_uhwi_p (size
))
11121 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
11125 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11127 warning_at (tree_nonartificial_location (exp
),
11128 0, "%Kcall to %D will always overflow destination buffer",
11129 exp
, get_callee_fndecl (exp
));
11134 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11135 mem{cpy,pcpy,move,set} is available. */
11138 case BUILT_IN_MEMCPY_CHK
:
11139 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
11141 case BUILT_IN_MEMPCPY_CHK
:
11142 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
11144 case BUILT_IN_MEMMOVE_CHK
:
11145 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
11147 case BUILT_IN_MEMSET_CHK
:
11148 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
11157 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
11158 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11159 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11160 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11162 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11166 unsigned int dest_align
= get_pointer_alignment (dest
);
11168 /* If DEST is not a pointer type, call the normal function. */
11169 if (dest_align
== 0)
11172 /* If SRC and DEST are the same (and not volatile), do nothing. */
11173 if (operand_equal_p (src
, dest
, 0))
11177 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11179 /* Evaluate and ignore LEN in case it has side-effects. */
11180 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11181 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11184 expr
= fold_build_pointer_plus (dest
, len
);
11185 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11188 /* __memmove_chk special case. */
11189 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11191 unsigned int src_align
= get_pointer_alignment (src
);
11193 if (src_align
== 0)
11196 /* If src is categorized for a readonly section we can use
11197 normal __memcpy_chk. */
11198 if (readonly_data_expr (src
))
11200 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
11203 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
11204 dest
, src
, len
, size
);
11205 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11206 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11207 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11214 /* Emit warning if a buffer overflow is detected at compile time. */
11217 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11221 location_t loc
= tree_nonartificial_location (exp
);
11225 case BUILT_IN_STRCPY_CHK
:
11226 case BUILT_IN_STPCPY_CHK
:
11227 /* For __strcat_chk the warning will be emitted only if overflowing
11228 by at least strlen (dest) + 1 bytes. */
11229 case BUILT_IN_STRCAT_CHK
:
11230 len
= CALL_EXPR_ARG (exp
, 1);
11231 size
= CALL_EXPR_ARG (exp
, 2);
11234 case BUILT_IN_STRNCAT_CHK
:
11235 case BUILT_IN_STRNCPY_CHK
:
11236 case BUILT_IN_STPNCPY_CHK
:
11237 len
= CALL_EXPR_ARG (exp
, 2);
11238 size
= CALL_EXPR_ARG (exp
, 3);
11240 case BUILT_IN_SNPRINTF_CHK
:
11241 case BUILT_IN_VSNPRINTF_CHK
:
11242 len
= CALL_EXPR_ARG (exp
, 1);
11243 size
= CALL_EXPR_ARG (exp
, 3);
11246 gcc_unreachable ();
11252 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11257 len
= c_strlen (len
, 1);
11258 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11261 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11263 tree src
= CALL_EXPR_ARG (exp
, 1);
11264 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11266 src
= c_strlen (src
, 1);
11267 if (! src
|| ! tree_fits_uhwi_p (src
))
11269 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
11270 exp
, get_callee_fndecl (exp
));
11273 else if (tree_int_cst_lt (src
, size
))
11276 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
11279 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
11280 exp
, get_callee_fndecl (exp
));
11283 /* Emit warning if a buffer overflow is detected at compile time
11284 in __sprintf_chk/__vsprintf_chk calls. */
11287 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11289 tree size
, len
, fmt
;
11290 const char *fmt_str
;
11291 int nargs
= call_expr_nargs (exp
);
11293 /* Verify the required arguments in the original call. */
11297 size
= CALL_EXPR_ARG (exp
, 2);
11298 fmt
= CALL_EXPR_ARG (exp
, 3);
11300 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11303 /* Check whether the format is a literal string constant. */
11304 fmt_str
= c_getstr (fmt
);
11305 if (fmt_str
== NULL
)
11308 if (!init_target_chars ())
11311 /* If the format doesn't contain % args or %%, we know its size. */
11312 if (strchr (fmt_str
, target_percent
) == 0)
11313 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11314 /* If the format is "%s" and first ... argument is a string literal,
11316 else if (fcode
== BUILT_IN_SPRINTF_CHK
11317 && strcmp (fmt_str
, target_percent_s
) == 0)
11323 arg
= CALL_EXPR_ARG (exp
, 4);
11324 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11327 len
= c_strlen (arg
, 1);
11328 if (!len
|| ! tree_fits_uhwi_p (len
))
11334 if (! tree_int_cst_lt (len
, size
))
11335 warning_at (tree_nonartificial_location (exp
),
11336 0, "%Kcall to %D will always overflow destination buffer",
11337 exp
, get_callee_fndecl (exp
));
11340 /* Emit warning if a free is called with address of a variable. */
11343 maybe_emit_free_warning (tree exp
)
11345 tree arg
= CALL_EXPR_ARG (exp
, 0);
11348 if (TREE_CODE (arg
) != ADDR_EXPR
)
11351 arg
= get_base_address (TREE_OPERAND (arg
, 0));
11352 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
11355 if (SSA_VAR_P (arg
))
11356 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11357 "%Kattempt to free a non-heap object %qD", exp
, arg
);
11359 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11360 "%Kattempt to free a non-heap object", exp
);
11363 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11367 fold_builtin_object_size (tree ptr
, tree ost
)
11369 unsigned HOST_WIDE_INT bytes
;
11370 int object_size_type
;
11372 if (!validate_arg (ptr
, POINTER_TYPE
)
11373 || !validate_arg (ost
, INTEGER_TYPE
))
11378 if (TREE_CODE (ost
) != INTEGER_CST
11379 || tree_int_cst_sgn (ost
) < 0
11380 || compare_tree_int (ost
, 3) > 0)
11383 object_size_type
= tree_to_shwi (ost
);
11385 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11386 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11387 and (size_t) 0 for types 2 and 3. */
11388 if (TREE_SIDE_EFFECTS (ptr
))
11389 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11391 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11393 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11394 if (wi::fits_to_tree_p (bytes
, size_type_node
))
11395 return build_int_cstu (size_type_node
, bytes
);
11397 else if (TREE_CODE (ptr
) == SSA_NAME
)
11399 /* If object size is not known yet, delay folding until
11400 later. Maybe subsequent passes will help determining
11402 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11403 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
11404 && wi::fits_to_tree_p (bytes
, size_type_node
))
11405 return build_int_cstu (size_type_node
, bytes
);
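/* Illustrative only: with `char buf[10];', __builtin_object_size (&buf[2], 0)
   folds to 8 above once compute_builtin_object_size can prove the bound,
   whereas an undetermined pointer is left unfolded here and the expander
   falls back to (size_t) -1 for types 0 and 1 and (size_t) 0 for types
   2 and 3.  */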
11411 /* Builtins with folding operations that operate on "..." arguments
11412 need special handling; we need to store the arguments in a convenient
11413 data structure before attempting any folding. Fortunately there are
11414 only a few builtins that fall into this category. FNDECL is the
11415 function, EXP is the CALL_EXPR for the call. */
11418 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
11420 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11421 tree ret
= NULL_TREE
;
11425 case BUILT_IN_FPCLASSIFY
:
11426 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
11434 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11435 SET_EXPR_LOCATION (ret
, loc
);
11436 TREE_NO_WARNING (ret
) = 1;
11442 /* Initialize format string characters in the target charset. */
11445 init_target_chars (void)
11450 target_newline
= lang_hooks
.to_target_charset ('\n');
11451 target_percent
= lang_hooks
.to_target_charset ('%');
11452 target_c
= lang_hooks
.to_target_charset ('c');
11453 target_s
= lang_hooks
.to_target_charset ('s');
11454 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
11458 target_percent_c
[0] = target_percent
;
11459 target_percent_c
[1] = target_c
;
11460 target_percent_c
[2] = '\0';
11462 target_percent_s
[0] = target_percent
;
11463 target_percent_s
[1] = target_s
;
11464 target_percent_s
[2] = '\0';
11466 target_percent_s_newline
[0] = target_percent
;
11467 target_percent_s_newline
[1] = target_s
;
11468 target_percent_s_newline
[2] = target_newline
;
11469 target_percent_s_newline
[3] = '\0';
11476 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11477 and no overflow/underflow occurred. INEXACT is true if M was not
11478 exactly calculated. TYPE is the tree type for the result. This
11479 function assumes that you cleared the MPFR flags and then
11480 calculated M to see if anything subsequently set a flag prior to
11481 entering this function. Return NULL_TREE if any checks fail. */
11484 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
11486 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11487 overflow/underflow occurred. If -frounding-math, proceed iff the
11488 result of calling FUNC was exact. */
11489 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11490 && (!flag_rounding_math
|| !inexact
))
11492 REAL_VALUE_TYPE rr
;
11494 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
11495 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11496 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11497 but the mpft_t is not, then we underflowed in the
11499 if (real_isfinite (&rr
)
11500 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
11502 REAL_VALUE_TYPE rmode
;
11504 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
11505 /* Proceed iff the specified mode can hold the value. */
11506 if (real_identical (&rmode
, &rr
))
11507 return build_real (type
, rmode
);
11513 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11514 number and no overflow/underflow occurred. INEXACT is true if M
11515 was not exactly calculated. TYPE is the tree type for the result.
11516 This function assumes that you cleared the MPFR flags and then
11517 calculated M to see if anything subsequently set a flag prior to
11518 entering this function. Return NULL_TREE if any checks fail, if
11519 FORCE_CONVERT is true, then bypass the checks. */
11522 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
11524 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11525 overflow/underflow occurred. If -frounding-math, proceed iff the
11526 result of calling FUNC was exact. */
11528 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
11529 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11530 && (!flag_rounding_math
|| !inexact
)))
11532 REAL_VALUE_TYPE re
, im
;
11534 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
11535 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
11536 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11537 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11538 but the mpft_t is not, then we underflowed in the
11541 || (real_isfinite (&re
) && real_isfinite (&im
)
11542 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
11543 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
11545 REAL_VALUE_TYPE re_mode
, im_mode
;
11547 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
11548 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
11549 /* Proceed iff the specified mode can hold the value. */
11551 || (real_identical (&re_mode
, &re
)
11552 && real_identical (&im_mode
, &im
)))
11553 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
11554 build_real (TREE_TYPE (type
), im_mode
));
11560 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11561 FUNC on it and return the resulting value as a tree with type TYPE.
11562 If MIN and/or MAX are not NULL, then the supplied ARG must be
11563 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11564 acceptable values, otherwise they are not. The mpfr precision is
11565 set to the precision of TYPE. We assume that function FUNC returns
11566 zero if the result could be calculated exactly within the requested
11570 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
11571 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
11574 tree result
= NULL_TREE
;
11578 /* To proceed, MPFR must exactly represent the target floating point
11579 format, which only happens when the target base equals two. */
11580 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11581 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
11583 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
11585 if (real_isfinite (ra
)
11586 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
11587 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
11589 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11590 const int prec
= fmt
->p
;
11591 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11595 mpfr_init2 (m
, prec
);
11596 mpfr_from_real (m
, ra
, GMP_RNDN
);
11597 mpfr_clear_flags ();
11598 inexact
= func (m
, m
, rnd
);
11599 result
= do_mpfr_ckconv (m
, type
, inexact
);
11607 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11608 FUNC on it and return the resulting value as a tree with type TYPE.
11609 The mpfr precision is set to the precision of TYPE. We assume that
11610 function FUNC returns zero if the result could be calculated
11611 exactly within the requested precision. */
11614 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
11615 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11617 tree result
= NULL_TREE
;
11622 /* To proceed, MPFR must exactly represent the target floating point
11623 format, which only happens when the target base equals two. */
11624 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11625 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11626 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
11628 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11629 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11631 if (real_isfinite (ra1
) && real_isfinite (ra2
))
11633 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11634 const int prec
= fmt
->p
;
11635 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11639 mpfr_inits2 (prec
, m1
, m2
, NULL
);
11640 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11641 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11642 mpfr_clear_flags ();
11643 inexact
= func (m1
, m1
, m2
, rnd
);
11644 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11645 mpfr_clears (m1
, m2
, NULL
);
11652 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11653 FUNC on it and return the resulting value as a tree with type TYPE.
11654 The mpfr precision is set to the precision of TYPE. We assume that
11655 function FUNC returns zero if the result could be calculated
11656 exactly within the requested precision. */
11659 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
11660 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11662 tree result
= NULL_TREE
;
11668 /* To proceed, MPFR must exactly represent the target floating point
11669 format, which only happens when the target base equals two. */
11670 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11671 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11672 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
11673 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
11675 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11676 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11677 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
11679 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
11681 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11682 const int prec
= fmt
->p
;
11683 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11687 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
11688 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11689 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11690 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
11691 mpfr_clear_flags ();
11692 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
11693 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11694 mpfr_clears (m1
, m2
, m3
, NULL
);
11701 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11702 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11703 If ARG_SINP and ARG_COSP are NULL then the result is returned
11704 as a complex value.
11705 The type is taken from the type of ARG and is used for setting the
11706 precision of the calculation and results. */
11709 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
11711 tree
const type
= TREE_TYPE (arg
);
11712 tree result
= NULL_TREE
;
11716 /* To proceed, MPFR must exactly represent the target floating point
11717 format, which only happens when the target base equals two. */
11718 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11719 && TREE_CODE (arg
) == REAL_CST
11720 && !TREE_OVERFLOW (arg
))
11722 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
11724 if (real_isfinite (ra
))
11726 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11727 const int prec
= fmt
->p
;
11728 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11729 tree result_s
, result_c
;
11733 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
11734 mpfr_from_real (m
, ra
, GMP_RNDN
);
11735 mpfr_clear_flags ();
11736 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
11737 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
11738 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
11739 mpfr_clears (m
, ms
, mc
, NULL
);
11740 if (result_s
&& result_c
)
11742 /* If we are to return in a complex value do so. */
11743 if (!arg_sinp
&& !arg_cosp
)
11744 return build_complex (build_complex_type (type
),
11745 result_c
, result_s
);
11747 /* Dereference the sin/cos pointer arguments. */
11748 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
11749 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
11750 /* Proceed if valid pointer type were passed in. */
11751 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
11752 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
11754 /* Set the values. */
11755 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
11757 TREE_SIDE_EFFECTS (result_s
) = 1;
11758 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
11760 TREE_SIDE_EFFECTS (result_c
) = 1;
11761 /* Combine the assignments into a compound expr. */
11762 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11763 result_s
, result_c
));
11771 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11772 two-argument mpfr order N Bessel function FUNC on them and return
11773 the resulting value as a tree with type TYPE. The mpfr precision
11774 is set to the precision of TYPE. We assume that function FUNC
11775 returns zero if the result could be calculated exactly within the
11776 requested precision. */
11778 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
11779 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
11780 const REAL_VALUE_TYPE
*min
, bool inclusive
)
11782 tree result
= NULL_TREE
;
11787 /* To proceed, MPFR must exactly represent the target floating point
11788 format, which only happens when the target base equals two. */
11789 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11790 && tree_fits_shwi_p (arg1
)
11791 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
11793 const HOST_WIDE_INT n
= tree_to_shwi (arg1
);
11794 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
11797 && real_isfinite (ra
)
11798 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
11800 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11801 const int prec
= fmt
->p
;
11802 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11806 mpfr_init2 (m
, prec
);
11807 mpfr_from_real (m
, ra
, GMP_RNDN
);
11808 mpfr_clear_flags ();
11809 inexact
= func (m
, n
, m
, rnd
);
11810 result
= do_mpfr_ckconv (m
, type
, inexact
);
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
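/* Illustrative note: for constant operands such as remquo (5.0, 3.0, &q)
   the rounded quotient is 2 (5/3 rounded to nearest) and the remainder is
   5.0 - 2*3.0 == -1.0, so the call can fold to the compound expression
   (*&q = 2, -1.0).  */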
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
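/* Illustrative note: a constant call such as lgamma_r (2.0, &sg) can fold
   via the helper above to the compound expression (*&sg = 1, 0.0), since
   Gamma(2) == 1, log(1) == 0 exactly, and Gamma(2) is positive.  */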
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
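/* Illustrative note: this helper backs the one-argument complex folds,
   e.g. ccos via mpc_cos.  A constant call like ccos (0.0 + 0.0i) can fold
   to the complex constant 1.0 + 0.0i, both parts being exact.  */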
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
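/* Illustrative note: this is the two-argument counterpart, used e.g. with
   mpc_pow for cpow.  A constant call such as cpow (2.0 + 0.0i, 2.0 + 0.0i)
   can fold to 4.0 + 0.0i when MPC reports both parts as exact.  */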
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
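/* Illustrative note: given a GIMPLE call such as
     tmp_1 = __builtin_strlen ("abc");
   this wrapper hands the argument vector to fold_builtin_n, which can
   replace the call with the constant 3, while the code above keeps the
   original statement location on the folded expression.  */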
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
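/* Illustrative note: this is what makes a redirection such as
     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("xmemcpy");
   take effect, so that block-move expansion and the memcpy libfunc both
   refer to the user-chosen assembler name instead of "memcpy".  */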
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);