/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "internal-fn.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "tree-chkp.h"
#include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type; make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
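
/* For example, is_builtin_name returns true for "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n", and false for a plain
   "memcpy".  */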
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
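
/* Illustration of the M/N contract above: for an access one byte past a
   pointer known to be 8-byte aligned, this sets *alignp = 64 and
   *bitposp = 8, i.e. the address is congruent to 8 bits modulo 64 bits.  */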

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
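
/* Typical use, as a sketch:

     unsigned int align = get_pointer_alignment (ptr);
     if (align >= 32)
       ...emit a 4-byte aligned access...

   The result is in bits, so an 8-byte aligned pointer yields 64.  */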

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
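
/* Examples: for the constant "foobar" this returns ssize_int (6); with a
   known offset of 3 into the same string it returns ssize_int (3); for
   "foo\0bar" with a non-constant offset the internal zero byte makes the
   result unknown and NULL_TREE is returned.  */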

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
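
/* For example, reading SImode from "abcd": a little-endian target yields the
   constant 0x64636261 while a big-endian target yields 0x61626364, which is
   what the J index shuffling above implements.  */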

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
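
/* E.g. for an INTEGER_CST of 65 this stores 65 ('A') in *P and returns 0.
   It returns 1 only when the target character cannot round-trip through a
   host char, e.g. the value 0x1234 with a 16-bit target char on a host with
   8-bit chars.  */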

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
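
/* Resulting layout of the setjmp buffer built above, in Pmode-sized words:
   word 0 holds the frame pointer value, word 1 the address of
   RECEIVER_LABEL, and the words from offset 2 * GET_MODE_SIZE (Pmode) hold
   the stack save area.  expand_builtin_longjmp below reads the same slots
   back in that order.  */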

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
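
/* Usage examples from the expanders below:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while
     validate_arglist (exp, POINTER_TYPE, 0)
   accepts one pointer followed by arbitrary further arguments.  */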

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
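
/* For example, __builtin_prefetch (p, 1, 3) becomes the target prefetch
   pattern with a write hint and maximum temporal locality; on a target
   without a prefetch pattern only the side effects of evaluating P are
   kept.  */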

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
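
/* Sketch of the block layout this size describes: word 0 is the incoming
   arg pointer, optionally followed by the structure value address, then
   each argument register's raw value placed at the next offset rounded up
   to the register mode's alignment.  */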

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1576 untyped return of whatever value was returned by the given function. */
1579 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1581 int size
, align
, regno
;
1583 rtx incoming_args
, result
, reg
, dest
, src
;
1584 rtx_call_insn
*call_insn
;
1585 rtx old_stack_level
= 0;
1586 rtx call_fusage
= 0;
1587 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1589 arguments
= convert_memory_address (Pmode
, arguments
);
1591 /* Create a block where the return registers can be saved. */
1592 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1594 /* Fetch the arg pointer from the ARGUMENTS block. */
1595 incoming_args
= gen_reg_rtx (Pmode
);
1596 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1597 if (!STACK_GROWS_DOWNWARD
)
1598 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1599 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1601 /* Push a new argument block and copy the arguments. Do not allow
1602 the (potential) memcpy call below to interfere with our stack
1604 do_pending_stack_adjust ();
1607 /* Save the stack with nonlocal if available. */
1608 if (targetm
.have_save_stack_nonlocal ())
1609 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1611 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1613 /* Allocate a block of memory onto the stack and copy the memory
1614 arguments to the outgoing arguments address. We can pass TRUE
1615 as the 4th argument because we just saved the stack pointer
1616 and will restore it right after the call. */
1617 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1619 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1620 may have already set current_function_calls_alloca to true.
1621 current_function_calls_alloca won't be set if argsize is zero,
1622 so we have to guarantee need_drap is true here. */
1623 if (SUPPORTS_STACK_ALIGNMENT
)
1624 crtl
->need_drap
= true;
1626 dest
= virtual_outgoing_args_rtx
;
1627 if (!STACK_GROWS_DOWNWARD
)
1629 if (CONST_INT_P (argsize
))
1630 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1632 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1634 dest
= gen_rtx_MEM (BLKmode
, dest
);
1635 set_mem_align (dest
, PARM_BOUNDARY
);
1636 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1637 set_mem_align (src
, PARM_BOUNDARY
);
1638 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1640 /* Refer to the argument block. */
1642 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1643 set_mem_align (arguments
, PARM_BOUNDARY
);
1645 /* Walk past the arg-pointer and structure value address. */
1646 size
= GET_MODE_SIZE (Pmode
);
1648 size
+= GET_MODE_SIZE (Pmode
);
1650 /* Restore each of the registers previously saved. Make USE insns
1651 for each of these registers for use in making the call. */
1652 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1653 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1655 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1656 if (size
% align
!= 0)
1657 size
= CEIL (size
, align
) * align
;
1658 reg
= gen_rtx_REG (mode
, regno
);
1659 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1660 use_reg (&call_fusage
, reg
);
1661 size
+= GET_MODE_SIZE (mode
);
1664 /* Restore the structure value address unless this is passed as an
1665 "invisible" first argument. */
1666 size
= GET_MODE_SIZE (Pmode
);
1669 rtx value
= gen_reg_rtx (Pmode
);
1670 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1671 emit_move_insn (struct_value
, value
);
1672 if (REG_P (struct_value
))
1673 use_reg (&call_fusage
, struct_value
);
1674 size
+= GET_MODE_SIZE (Pmode
);
1677 /* All arguments and registers used for the call are set up by now! */
1678 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1680 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1681 and we don't want to load it into a register as an optimization,
1682 because prepare_call_address already did it if it should be done. */
1683 if (GET_CODE (function
) != SYMBOL_REF
)
1684 function
= memory_address (FUNCTION_MODE
, function
);
1686 /* Generate the actual call instruction and save the return value. */
1687 if (targetm
.have_untyped_call ())
1689 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1690 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1691 result_vector (1, result
)));
1693 else if (targetm
.have_call_value ())
1697 /* Locate the unique return register. It is not possible to
1698 express a call that sets more than one return register using
1699 call_value; use untyped_call for that. In fact, untyped_call
1700 only needs to save the return registers in the given block. */
1701 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1702 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1704 gcc_assert (!valreg
); /* have_untyped_call required. */
1706 valreg
= gen_rtx_REG (mode
, regno
);
1709 emit_insn (targetm
.gen_call_value (valreg
,
1710 gen_rtx_MEM (FUNCTION_MODE
, function
),
1711 const0_rtx
, NULL_RTX
, const0_rtx
));
1713 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1718 /* Find the CALL insn we just emitted, and attach the register usage
1720 call_insn
= last_call_insn ();
1721 add_function_usage_to (call_insn
, call_fusage
);
1723 /* Restore the stack. */
1724 if (targetm
.have_save_stack_nonlocal ())
1725 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1727 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1728 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1732 /* Return the address of the result block. */
1733 result
= copy_addr_to_reg (XEXP (result
, 0));
1734 return convert_memory_address (ptr_mode
, result
);
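
/* This is the expander behind the extension
     __builtin_apply (fn, __builtin_apply_args (), argsize);
   the block built by expand_builtin_apply_args above is replayed into the
   argument registers and outgoing stack before the call, and the return
   registers are captured into RESULT for use by __builtin_return.  */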

/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
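
/* For example (illustration only), CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so each entry in the switch below covers the double, float and long
   double variants of one math builtin at once.  */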
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
1850 CASE_MATHFN (BUILT_IN_ACOS
)
1851 CASE_MATHFN (BUILT_IN_ACOSH
)
1852 CASE_MATHFN (BUILT_IN_ASIN
)
1853 CASE_MATHFN (BUILT_IN_ASINH
)
1854 CASE_MATHFN (BUILT_IN_ATAN
)
1855 CASE_MATHFN (BUILT_IN_ATAN2
)
1856 CASE_MATHFN (BUILT_IN_ATANH
)
1857 CASE_MATHFN (BUILT_IN_CBRT
)
1858 CASE_MATHFN (BUILT_IN_CEIL
)
1859 CASE_MATHFN (BUILT_IN_CEXPI
)
1860 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1861 CASE_MATHFN (BUILT_IN_COS
)
1862 CASE_MATHFN (BUILT_IN_COSH
)
1863 CASE_MATHFN (BUILT_IN_DREM
)
1864 CASE_MATHFN (BUILT_IN_ERF
)
1865 CASE_MATHFN (BUILT_IN_ERFC
)
1866 CASE_MATHFN (BUILT_IN_EXP
)
1867 CASE_MATHFN (BUILT_IN_EXP10
)
1868 CASE_MATHFN (BUILT_IN_EXP2
)
1869 CASE_MATHFN (BUILT_IN_EXPM1
)
1870 CASE_MATHFN (BUILT_IN_FABS
)
1871 CASE_MATHFN (BUILT_IN_FDIM
)
1872 CASE_MATHFN (BUILT_IN_FLOOR
)
1873 CASE_MATHFN (BUILT_IN_FMA
)
1874 CASE_MATHFN (BUILT_IN_FMAX
)
1875 CASE_MATHFN (BUILT_IN_FMIN
)
1876 CASE_MATHFN (BUILT_IN_FMOD
)
1877 CASE_MATHFN (BUILT_IN_FREXP
)
1878 CASE_MATHFN (BUILT_IN_GAMMA
)
1879 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1880 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1881 CASE_MATHFN (BUILT_IN_HYPOT
)
1882 CASE_MATHFN (BUILT_IN_ILOGB
)
1883 CASE_MATHFN (BUILT_IN_ICEIL
)
1884 CASE_MATHFN (BUILT_IN_IFLOOR
)
1885 CASE_MATHFN (BUILT_IN_INF
)
1886 CASE_MATHFN (BUILT_IN_IRINT
)
1887 CASE_MATHFN (BUILT_IN_IROUND
)
1888 CASE_MATHFN (BUILT_IN_ISINF
)
1889 CASE_MATHFN (BUILT_IN_J0
)
1890 CASE_MATHFN (BUILT_IN_J1
)
1891 CASE_MATHFN (BUILT_IN_JN
)
1892 CASE_MATHFN (BUILT_IN_LCEIL
)
1893 CASE_MATHFN (BUILT_IN_LDEXP
)
1894 CASE_MATHFN (BUILT_IN_LFLOOR
)
1895 CASE_MATHFN (BUILT_IN_LGAMMA
)
1896 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1897 CASE_MATHFN (BUILT_IN_LLCEIL
)
1898 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1899 CASE_MATHFN (BUILT_IN_LLRINT
)
1900 CASE_MATHFN (BUILT_IN_LLROUND
)
1901 CASE_MATHFN (BUILT_IN_LOG
)
1902 CASE_MATHFN (BUILT_IN_LOG10
)
1903 CASE_MATHFN (BUILT_IN_LOG1P
)
1904 CASE_MATHFN (BUILT_IN_LOG2
)
1905 CASE_MATHFN (BUILT_IN_LOGB
)
1906 CASE_MATHFN (BUILT_IN_LRINT
)
1907 CASE_MATHFN (BUILT_IN_LROUND
)
1908 CASE_MATHFN (BUILT_IN_MODF
)
1909 CASE_MATHFN (BUILT_IN_NAN
)
1910 CASE_MATHFN (BUILT_IN_NANS
)
1911 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1912 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1913 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1914 CASE_MATHFN (BUILT_IN_POW
)
1915 CASE_MATHFN (BUILT_IN_POWI
)
1916 CASE_MATHFN (BUILT_IN_POW10
)
1917 CASE_MATHFN (BUILT_IN_REMAINDER
)
1918 CASE_MATHFN (BUILT_IN_REMQUO
)
1919 CASE_MATHFN (BUILT_IN_RINT
)
1920 CASE_MATHFN (BUILT_IN_ROUND
)
1921 CASE_MATHFN (BUILT_IN_SCALB
)
1922 CASE_MATHFN (BUILT_IN_SCALBLN
)
1923 CASE_MATHFN (BUILT_IN_SCALBN
)
1924 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1925 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1926 CASE_MATHFN (BUILT_IN_SIN
)
1927 CASE_MATHFN (BUILT_IN_SINCOS
)
1928 CASE_MATHFN (BUILT_IN_SINH
)
1929 CASE_MATHFN (BUILT_IN_SQRT
)
1930 CASE_MATHFN (BUILT_IN_TAN
)
1931 CASE_MATHFN (BUILT_IN_TANH
)
1932 CASE_MATHFN (BUILT_IN_TGAMMA
)
1933 CASE_MATHFN (BUILT_IN_TRUNC
)
1934 CASE_MATHFN (BUILT_IN_Y0
)
1935 CASE_MATHFN (BUILT_IN_Y1
)
1936 CASE_MATHFN (BUILT_IN_YN
)
      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  expand_call (exp, target, 0);
  emit_label (lab);
}
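
/* The NaN check above relies on the IEEE rule that a NaN compares
   unequal to itself.  As an illustration only, the emitted code behaves
   roughly like the source sequence

     double r = sqrt (x);
     if (r != r)
       errno = EDOM;

   where the comparison is true only when the result is NaN.  */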
2009 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2010 Return NULL_RTX if a normal call should be emitted rather than expanding
2011 the function in-line. EXP is the expression that is a call to the builtin
2012 function; if convenient, the result should be placed in TARGET.
2013 SUBTARGET may be used as the target for computing one of EXP's operands. */
2016 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
2018 optab builtin_optab
;
2021 tree fndecl
= get_callee_fndecl (exp
);
2023 bool errno_set
= false;
2024 bool try_widening
= false;
2027 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2030 arg
= CALL_EXPR_ARG (exp
, 0);
2032 switch (DECL_FUNCTION_CODE (fndecl
))
2034 CASE_FLT_FN (BUILT_IN_SQRT
):
2035 errno_set
= ! tree_expr_nonnegative_p (arg
);
2036 try_widening
= true;
2037 builtin_optab
= sqrt_optab
;
2039 CASE_FLT_FN (BUILT_IN_EXP
):
2040 errno_set
= true; builtin_optab
= exp_optab
; break;
2041 CASE_FLT_FN (BUILT_IN_EXP10
):
2042 CASE_FLT_FN (BUILT_IN_POW10
):
2043 errno_set
= true; builtin_optab
= exp10_optab
; break;
2044 CASE_FLT_FN (BUILT_IN_EXP2
):
2045 errno_set
= true; builtin_optab
= exp2_optab
; break;
2046 CASE_FLT_FN (BUILT_IN_EXPM1
):
2047 errno_set
= true; builtin_optab
= expm1_optab
; break;
2048 CASE_FLT_FN (BUILT_IN_LOGB
):
2049 errno_set
= true; builtin_optab
= logb_optab
; break;
2050 CASE_FLT_FN (BUILT_IN_LOG
):
2051 errno_set
= true; builtin_optab
= log_optab
; break;
2052 CASE_FLT_FN (BUILT_IN_LOG10
):
2053 errno_set
= true; builtin_optab
= log10_optab
; break;
2054 CASE_FLT_FN (BUILT_IN_LOG2
):
2055 errno_set
= true; builtin_optab
= log2_optab
; break;
2056 CASE_FLT_FN (BUILT_IN_LOG1P
):
2057 errno_set
= true; builtin_optab
= log1p_optab
; break;
2058 CASE_FLT_FN (BUILT_IN_ASIN
):
2059 builtin_optab
= asin_optab
; break;
2060 CASE_FLT_FN (BUILT_IN_ACOS
):
2061 builtin_optab
= acos_optab
; break;
2062 CASE_FLT_FN (BUILT_IN_TAN
):
2063 builtin_optab
= tan_optab
; break;
2064 CASE_FLT_FN (BUILT_IN_ATAN
):
2065 builtin_optab
= atan_optab
; break;
2066 CASE_FLT_FN (BUILT_IN_FLOOR
):
2067 builtin_optab
= floor_optab
; break;
2068 CASE_FLT_FN (BUILT_IN_CEIL
):
2069 builtin_optab
= ceil_optab
; break;
2070 CASE_FLT_FN (BUILT_IN_TRUNC
):
2071 builtin_optab
= btrunc_optab
; break;
2072 CASE_FLT_FN (BUILT_IN_ROUND
):
2073 builtin_optab
= round_optab
; break;
2074 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2075 builtin_optab
= nearbyint_optab
;
2076 if (flag_trapping_math
)
2078 /* Else fallthrough and expand as rint. */
2079 CASE_FLT_FN (BUILT_IN_RINT
):
2080 builtin_optab
= rint_optab
; break;
2081 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2082 builtin_optab
= significand_optab
; break;
2087 /* Make a suitable register to place result in. */
2088 mode
= TYPE_MODE (TREE_TYPE (exp
));
2090 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2093 /* Before working hard, check whether the instruction is available, but try
2094 to widen the mode for specific operations. */
2095 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2096 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2097 && (!errno_set
|| !optimize_insn_for_size_p ()))
2099 rtx result
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2104 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2106 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2110 /* Compute into RESULT.
2111 Set RESULT to wherever the result comes back. */
2112 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2117 expand_errno_check (exp
, result
);
2119 /* Output the entire sequence. */
2120 insns
= get_insns ();
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call to the library function
2128 with the stabilized argument list. */
2132 return expand_call (exp
, target
, target
== const0_rtx
);
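
/* Illustration (added; not from the original file): for

     double f (double x) { return __builtin_sqrt (x); }

   a target providing sqrt_optab can expand the call inline via the code
   above; when math errno handling is in effect the expansion also emits
   the expand_errno_check sequence, and if neither path succeeds the
   builtin simply becomes an ordinary library call through expand_call.  */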
2135 /* Expand a call to the builtin binary math functions (pow and atan2).
2136 Return NULL_RTX if a normal call should be emitted rather than expanding the
2137 function in-line. EXP is the expression that is a call to the builtin
2138 function; if convenient, the result should be placed in TARGET.
2139 SUBTARGET may be used as the target for computing one of EXP's
2143 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2145 optab builtin_optab
;
2146 rtx op0
, op1
, result
;
2148 int op1_type
= REAL_TYPE
;
2149 tree fndecl
= get_callee_fndecl (exp
);
2152 bool errno_set
= true;
2154 switch (DECL_FUNCTION_CODE (fndecl
))
2156 CASE_FLT_FN (BUILT_IN_SCALBN
):
2157 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2158 CASE_FLT_FN (BUILT_IN_LDEXP
):
2159 op1_type
= INTEGER_TYPE
;
2164 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2167 arg0
= CALL_EXPR_ARG (exp
, 0);
2168 arg1
= CALL_EXPR_ARG (exp
, 1);
2170 switch (DECL_FUNCTION_CODE (fndecl
))
2172 CASE_FLT_FN (BUILT_IN_POW
):
2173 builtin_optab
= pow_optab
; break;
2174 CASE_FLT_FN (BUILT_IN_ATAN2
):
2175 builtin_optab
= atan2_optab
; break;
2176 CASE_FLT_FN (BUILT_IN_SCALB
):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2179 builtin_optab
= scalb_optab
; break;
2180 CASE_FLT_FN (BUILT_IN_SCALBN
):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2184 /* Fall through... */
2185 CASE_FLT_FN (BUILT_IN_LDEXP
):
2186 builtin_optab
= ldexp_optab
; break;
2187 CASE_FLT_FN (BUILT_IN_FMOD
):
2188 builtin_optab
= fmod_optab
; break;
2189 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2190 CASE_FLT_FN (BUILT_IN_DREM
):
2191 builtin_optab
= remainder_optab
; break;
2196 /* Make a suitable register to place result in. */
2197 mode
= TYPE_MODE (TREE_TYPE (exp
));
2199 /* Before working hard, check whether the instruction is available. */
2200 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2203 result
= gen_reg_rtx (mode
);
2205 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2208 if (errno_set
&& optimize_insn_for_size_p ())
2211 /* Always stabilize the argument list. */
2212 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2213 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2215 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2216 op1
= expand_normal (arg1
);
2220 /* Compute into RESULT.
2221 Set RESULT to wherever the result comes back. */
2222 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2223 result
, 0, OPTAB_DIRECT
);
2225 /* If we were unable to expand via the builtin, stop the sequence
2226 (without outputting the insns) and call to the library function
2227 with the stabilized argument list. */
2231 return expand_call (exp
, target
, target
== const0_rtx
);
2235 expand_errno_check (exp
, result
);
2237 /* Output the entire sequence. */
2238 insns
= get_insns ();
2245 /* Expand a call to the builtin trinary math functions (fma).
2246 Return NULL_RTX if a normal call should be emitted rather than expanding the
2247 function in-line. EXP is the expression that is a call to the builtin
2248 function; if convenient, the result should be placed in TARGET.
2249 SUBTARGET may be used as the target for computing one of EXP's
2253 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2255 optab builtin_optab
;
2256 rtx op0
, op1
, op2
, result
;
2258 tree fndecl
= get_callee_fndecl (exp
);
2259 tree arg0
, arg1
, arg2
;
2262 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2265 arg0
= CALL_EXPR_ARG (exp
, 0);
2266 arg1
= CALL_EXPR_ARG (exp
, 1);
2267 arg2
= CALL_EXPR_ARG (exp
, 2);
2269 switch (DECL_FUNCTION_CODE (fndecl
))
2271 CASE_FLT_FN (BUILT_IN_FMA
):
2272 builtin_optab
= fma_optab
; break;
2277 /* Make a suitable register to place result in. */
2278 mode
= TYPE_MODE (TREE_TYPE (exp
));
2280 /* Before working hard, check whether the instruction is available. */
2281 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2284 result
= gen_reg_rtx (mode
);
2286 /* Always stabilize the argument list. */
2287 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2288 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2289 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2291 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2292 op1
= expand_normal (arg1
);
2293 op2
= expand_normal (arg2
);
2297 /* Compute into RESULT.
2298 Set RESULT to wherever the result comes back. */
2299 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2302 /* If we were unable to expand via the builtin, stop the sequence
2303 (without outputting the insns) and call to the library function
2304 with the stabilized argument list. */
2308 return expand_call (exp
, target
, target
== const0_rtx
);
2311 /* Output the entire sequence. */
2312 insns
= get_insns ();
2319 /* Expand a call to the builtin sin and cos math functions.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
2322 function; if convenient, the result should be placed in TARGET.
2323 SUBTARGET may be used as the target for computing one of EXP's
2327 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2329 optab builtin_optab
;
2332 tree fndecl
= get_callee_fndecl (exp
);
2336 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2339 arg
= CALL_EXPR_ARG (exp
, 0);
2341 switch (DECL_FUNCTION_CODE (fndecl
))
2343 CASE_FLT_FN (BUILT_IN_SIN
):
2344 CASE_FLT_FN (BUILT_IN_COS
):
2345 builtin_optab
= sincos_optab
; break;
2350 /* Make a suitable register to place result in. */
2351 mode
= TYPE_MODE (TREE_TYPE (exp
));
  /* Check if the sincos insn is available, otherwise fall back
     to the sin or cos insn.  */
2355 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2356 switch (DECL_FUNCTION_CODE (fndecl
))
2358 CASE_FLT_FN (BUILT_IN_SIN
):
2359 builtin_optab
= sin_optab
; break;
2360 CASE_FLT_FN (BUILT_IN_COS
):
2361 builtin_optab
= cos_optab
; break;
2366 /* Before working hard, check whether the instruction is available. */
2367 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2369 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2374 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2376 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
2382 if (builtin_optab
== sincos_optab
)
2386 switch (DECL_FUNCTION_CODE (fndecl
))
2388 CASE_FLT_FN (BUILT_IN_SIN
):
2389 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2391 CASE_FLT_FN (BUILT_IN_COS
):
2392 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2400 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2404 /* Output the entire sequence. */
2405 insns
= get_insns ();
2411 /* If we were unable to expand via the builtin, stop the sequence
2412 (without outputting the insns) and call to the library function
2413 with the stabilized argument list. */
2417 return expand_call (exp
, target
, target
== const0_rtx
);
2420 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available return CODE_FOR_nothing. */
2424 static enum insn_code
2425 interclass_mathfn_icode (tree arg
, tree fndecl
)
2427 bool errno_set
= false;
2428 optab builtin_optab
= unknown_optab
;
2431 switch (DECL_FUNCTION_CODE (fndecl
))
2433 CASE_FLT_FN (BUILT_IN_ILOGB
):
2434 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2435 CASE_FLT_FN (BUILT_IN_ISINF
):
2436 builtin_optab
= isinf_optab
; break;
2437 case BUILT_IN_ISNORMAL
:
2438 case BUILT_IN_ISFINITE
:
2439 CASE_FLT_FN (BUILT_IN_FINITE
):
2440 case BUILT_IN_FINITED32
:
2441 case BUILT_IN_FINITED64
:
2442 case BUILT_IN_FINITED128
:
2443 case BUILT_IN_ISINFD32
:
2444 case BUILT_IN_ISINFD64
:
2445 case BUILT_IN_ISINFD128
:
2446 /* These builtins have no optabs (yet). */
2452 /* There's no easy way to detect the case we need to set EDOM. */
2453 if (flag_errno_math
&& errno_set
)
2454 return CODE_FOR_nothing
;
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode
= TYPE_MODE (TREE_TYPE (arg
));
2460 return optab_handler (builtin_optab
, mode
);
2461 return CODE_FOR_nothing
;
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   etc.).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */
2472 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2474 enum insn_code icode
= CODE_FOR_nothing
;
2476 tree fndecl
= get_callee_fndecl (exp
);
2480 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2483 arg
= CALL_EXPR_ARG (exp
, 0);
2484 icode
= interclass_mathfn_icode (arg
, fndecl
);
2485 mode
= TYPE_MODE (TREE_TYPE (arg
));
2487 if (icode
!= CODE_FOR_nothing
)
2489 struct expand_operand ops
[1];
2490 rtx_insn
*last
= get_last_insn ();
2491 tree orig_arg
= arg
;
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2496 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2498 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2500 if (mode
!= GET_MODE (op0
))
2501 op0
= convert_to_mode (mode
, op0
, 0);
2503 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2504 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2505 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2506 return ops
[0].value
;
2508 delete_insns_since (last
);
2509 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2515 /* Expand a call to the builtin sincos math function.
2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
2517 function in-line. EXP is the expression that is a call to the builtin
2521 expand_builtin_sincos (tree exp
)
2523 rtx op0
, op1
, op2
, target1
, target2
;
2525 tree arg
, sinp
, cosp
;
2527 location_t loc
= EXPR_LOCATION (exp
);
2528 tree alias_type
, alias_off
;
2530 if (!validate_arglist (exp
, REAL_TYPE
,
2531 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2534 arg
= CALL_EXPR_ARG (exp
, 0);
2535 sinp
= CALL_EXPR_ARG (exp
, 1);
2536 cosp
= CALL_EXPR_ARG (exp
, 2);
2538 /* Make a suitable register to place result in. */
2539 mode
= TYPE_MODE (TREE_TYPE (arg
));
2541 /* Check if sincos insn is available, otherwise emit the call. */
2542 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2545 target1
= gen_reg_rtx (mode
);
2546 target2
= gen_reg_rtx (mode
);
2548 op0
= expand_normal (arg
);
2549 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2550 alias_off
= build_int_cst (alias_type
, 0);
2551 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2553 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2559 gcc_assert (result
);
2561 /* Move target1 and target2 to the memory locations indicated
2563 emit_move_insn (op1
, target1
);
2564 emit_move_insn (op2
, target2
);
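
/* Usage sketch (illustration only, added): for

     double s, c;
     sincos (x, &s, &c);

   a target with a sincos insn computes both results with the single
   expand_twoval_unop call above and then stores them through the two
   pointer arguments; without such an insn the builtin stays a normal
   library call.  */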
2569 /* Expand a call to the internal cexpi builtin to the sincos math function.
2570 EXP is the expression that is a call to the builtin function; if convenient,
2571 the result should be placed in TARGET. */
2574 expand_builtin_cexpi (tree exp
, rtx target
)
2576 tree fndecl
= get_callee_fndecl (exp
);
2580 location_t loc
= EXPR_LOCATION (exp
);
2582 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2585 arg
= CALL_EXPR_ARG (exp
, 0);
2586 type
= TREE_TYPE (arg
);
2587 mode
= TYPE_MODE (TREE_TYPE (arg
));
2589 /* Try expanding via a sincos optab, fall back to emitting a libcall
2590 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2591 is only generated from sincos, cexp or if we have either of them. */
2592 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2594 op1
= gen_reg_rtx (mode
);
2595 op2
= gen_reg_rtx (mode
);
2597 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2599 /* Compute into op1 and op2. */
2600 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2602 else if (targetm
.libc_has_function (function_sincos
))
2604 tree call
, fn
= NULL_TREE
;
2608 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2609 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2610 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2611 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2612 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2613 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2617 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2618 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2619 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2620 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2621 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2622 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2624 /* Make sure not to fold the sincos call again. */
2625 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2626 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2627 call
, 3, arg
, top1
, top2
));
2631 tree call
, fn
= NULL_TREE
, narg
;
2632 tree ctype
= build_complex_type (type
);
2634 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2635 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2636 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2637 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2638 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2639 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2643 /* If we don't have a decl for cexp create one. This is the
2644 friendliest fallback if the user calls __builtin_cexpi
2645 without full target C99 function support. */
2646 if (fn
== NULL_TREE
)
2649 const char *name
= NULL
;
2651 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2653 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2655 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2658 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2659 fn
= build_fn_decl (name
, fntype
);
2662 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2663 build_real (type
, dconst0
), arg
);
2665 /* Make sure not to fold the cexp call again. */
2666 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2667 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2668 target
, VOIDmode
, EXPAND_NORMAL
);
2671 /* Now build the proper return type. */
2672 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2673 make_tree (TREE_TYPE (arg
), op2
),
2674 make_tree (TREE_TYPE (arg
), op1
)),
2675 target
, VOIDmode
, EXPAND_NORMAL
);
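
/* Note (added as an illustration): __builtin_cexpi (x) computes
   cos (x) + i*sin (x).  The three strategies above are, in order:
   expand through the sincos optab, emit a call to sincos () and load
   the two stored results, or build the complex argument 0.0 + x*i and
   call cexp ().  */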
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2684 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2687 tree fntype
= TREE_TYPE (fndecl
);
2688 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2691 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2693 SET_EXPR_LOCATION (fn
, loc
);
2697 /* Expand a call to one of the builtin rounding functions gcc defines
2698 as an extension (lfloor and lceil). As these are gcc extensions we
2699 do not need to worry about setting errno to EDOM.
2700 If expanding via optab fails, lower expression to (int)(floor(x)).
2701 EXP is the expression that is a call to the builtin function;
2702 if convenient, the result should be placed in TARGET. */
2705 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2707 convert_optab builtin_optab
;
2710 tree fndecl
= get_callee_fndecl (exp
);
2711 enum built_in_function fallback_fn
;
2712 tree fallback_fndecl
;
2716 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2719 arg
= CALL_EXPR_ARG (exp
, 0);
2721 switch (DECL_FUNCTION_CODE (fndecl
))
2723 CASE_FLT_FN (BUILT_IN_ICEIL
):
2724 CASE_FLT_FN (BUILT_IN_LCEIL
):
2725 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2726 builtin_optab
= lceil_optab
;
2727 fallback_fn
= BUILT_IN_CEIL
;
2730 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2731 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2732 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2733 builtin_optab
= lfloor_optab
;
2734 fallback_fn
= BUILT_IN_FLOOR
;
2741 /* Make a suitable register to place result in. */
2742 mode
= TYPE_MODE (TREE_TYPE (exp
));
2744 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2749 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2751 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2755 /* Compute into TARGET. */
2756 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2758 /* Output the entire sequence. */
2759 insns
= get_insns ();
2765 /* If we were unable to expand via the builtin, stop the sequence
2766 (without outputting the insns). */
2769 /* Fall back to floating point rounding optab. */
2770 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2772 /* For non-C99 targets we may end up without a fallback fndecl here
2773 if the user called __builtin_lfloor directly. In this case emit
2774 a call to the floor/ceil variants nevertheless. This should result
2775 in the best user experience for not full C99 targets. */
2776 if (fallback_fndecl
== NULL_TREE
)
2779 const char *name
= NULL
;
2781 switch (DECL_FUNCTION_CODE (fndecl
))
2783 case BUILT_IN_ICEIL
:
2784 case BUILT_IN_LCEIL
:
2785 case BUILT_IN_LLCEIL
:
2788 case BUILT_IN_ICEILF
:
2789 case BUILT_IN_LCEILF
:
2790 case BUILT_IN_LLCEILF
:
2793 case BUILT_IN_ICEILL
:
2794 case BUILT_IN_LCEILL
:
2795 case BUILT_IN_LLCEILL
:
2798 case BUILT_IN_IFLOOR
:
2799 case BUILT_IN_LFLOOR
:
2800 case BUILT_IN_LLFLOOR
:
2803 case BUILT_IN_IFLOORF
:
2804 case BUILT_IN_LFLOORF
:
2805 case BUILT_IN_LLFLOORF
:
2808 case BUILT_IN_IFLOORL
:
2809 case BUILT_IN_LFLOORL
:
2810 case BUILT_IN_LLFLOORL
:
2817 fntype
= build_function_type_list (TREE_TYPE (arg
),
2818 TREE_TYPE (arg
), NULL_TREE
);
2819 fallback_fndecl
= build_fn_decl (name
, fntype
);
2822 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2824 tmp
= expand_normal (exp
);
2825 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2827 /* Truncate the result of floating point optab to integer
2828 via expand_fix (). */
2829 target
= gen_reg_rtx (mode
);
2830 expand_fix (target
, tmp
, 0);
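
/* Illustration (added; not part of the original sources): a call such
   as __builtin_lfloor (x) is first tried through lfloor_optab; if the
   target has no such instruction the code above rewrites it as
   (long) floor (x), i.e. a call to the floor/floorf/floorl fallback
   followed by expand_fix.  */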
2835 /* Expand a call to one of the builtin math functions doing integer
2837 Return 0 if a normal call should be emitted rather than expanding the
2838 function in-line. EXP is the expression that is a call to the builtin
2839 function; if convenient, the result should be placed in TARGET. */
2842 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2844 convert_optab builtin_optab
;
2847 tree fndecl
= get_callee_fndecl (exp
);
2850 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2852 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2855 arg
= CALL_EXPR_ARG (exp
, 0);
2857 switch (DECL_FUNCTION_CODE (fndecl
))
2859 CASE_FLT_FN (BUILT_IN_IRINT
):
2860 fallback_fn
= BUILT_IN_LRINT
;
2862 CASE_FLT_FN (BUILT_IN_LRINT
):
2863 CASE_FLT_FN (BUILT_IN_LLRINT
):
2864 builtin_optab
= lrint_optab
;
2867 CASE_FLT_FN (BUILT_IN_IROUND
):
2868 fallback_fn
= BUILT_IN_LROUND
;
2870 CASE_FLT_FN (BUILT_IN_LROUND
):
2871 CASE_FLT_FN (BUILT_IN_LLROUND
):
2872 builtin_optab
= lround_optab
;
2879 /* There's no easy way to detect the case we need to set EDOM. */
2880 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2883 /* Make a suitable register to place result in. */
2884 mode
= TYPE_MODE (TREE_TYPE (exp
));
2886 /* There's no easy way to detect the case we need to set EDOM. */
2887 if (!flag_errno_math
)
2889 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2894 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2896 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2900 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2902 /* Output the entire sequence. */
2903 insns
= get_insns ();
2909 /* If we were unable to expand via the builtin, stop the sequence
2910 (without outputting the insns) and call to the library function
2911 with the stabilized argument list. */
2915 if (fallback_fn
!= BUILT_IN_NONE
)
2917 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2918 targets, (int) round (x) should never be transformed into
2919 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2920 a call to lround in the hope that the target provides at least some
2921 C99 functions. This should result in the best user experience for
2922 not full C99 targets. */
2923 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2926 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2927 fallback_fndecl
, 1, arg
);
2929 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2930 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2931 return convert_to_mode (mode
, target
, 0);
2934 return expand_call (exp
, target
, target
== const0_rtx
);
2937 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2938 a normal call should be emitted rather than expanding the function
2939 in-line. EXP is the expression that is a call to the builtin
2940 function; if convenient, the result should be placed in TARGET. */
2943 expand_builtin_powi (tree exp
, rtx target
)
2950 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2953 arg0
= CALL_EXPR_ARG (exp
, 0);
2954 arg1
= CALL_EXPR_ARG (exp
, 1);
2955 mode
= TYPE_MODE (TREE_TYPE (exp
));
2957 /* Emit a libcall to libgcc. */
2959 /* Mode of the 2nd argument must match that of an int. */
2960 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2962 if (target
== NULL_RTX
)
2963 target
= gen_reg_rtx (mode
);
2965 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2966 if (GET_MODE (op0
) != mode
)
2967 op0
= convert_to_mode (mode
, op0
, 0);
2968 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2969 if (GET_MODE (op1
) != mode2
)
2970 op1
= convert_to_mode (mode2
, op1
, 0);
2972 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2973 target
, LCT_CONST
, mode
, 2,
2974 op0
, mode
, op1
, mode2
);
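
/* Note (illustrative, added): __builtin_powi is not open-coded here;
   when it reaches this point it simply becomes a call to the libgcc
   helper returned by optab_libfunc (powi_optab, mode), e.g. a
   __powidf2-style routine for double (the exact name is target and
   libgcc dependent).  */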
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */
2984 expand_builtin_strlen (tree exp
, rtx target
,
2985 machine_mode target_mode
)
2987 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2991 struct expand_operand ops
[4];
2994 tree src
= CALL_EXPR_ARG (exp
, 0);
2996 rtx_insn
*before_strlen
;
2997 machine_mode insn_mode
= target_mode
;
2998 enum insn_code icode
= CODE_FOR_nothing
;
3001 /* If the length can be computed at compile-time, return it. */
3002 len
= c_strlen (src
, 0);
3004 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3006 /* If the length can be computed at compile-time and is constant
3007 integer, but there are side-effects in src, evaluate
3008 src for side-effects, then return len.
3009 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3010 can be optimized into: i++; x = 3; */
3011 len
= c_strlen (src
, 1);
3012 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3014 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3015 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3018 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
3020 /* If SRC is not a pointer type, don't do this operation inline. */
3024 /* Bail out if we can't compute strlen in the right mode. */
3025 while (insn_mode
!= VOIDmode
)
3027 icode
= optab_handler (strlen_optab
, insn_mode
);
3028 if (icode
!= CODE_FOR_nothing
)
3031 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3033 if (insn_mode
== VOIDmode
)
3036 /* Make a place to hold the source address. We will not expand
3037 the actual source until we are sure that the expansion will
3038 not fail -- there are trees that cannot be expanded twice. */
3039 src_reg
= gen_reg_rtx (Pmode
);
3041 /* Mark the beginning of the strlen sequence so we can emit the
3042 source operand later. */
3043 before_strlen
= get_last_insn ();
3045 create_output_operand (&ops
[0], target
, insn_mode
);
3046 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3047 create_integer_operand (&ops
[2], 0);
3048 create_integer_operand (&ops
[3], align
);
3049 if (!maybe_expand_insn (icode
, 4, ops
))
3052 /* Now that we are assured of success, expand the source. */
3054 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3057 #ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (pat
) != Pmode
)
3059 pat
= convert_to_mode (Pmode
, pat
,
3060 POINTERS_EXTEND_UNSIGNED
);
3062 emit_move_insn (src_reg
, pat
);
3068 emit_insn_after (pat
, before_strlen
);
3070 emit_insn_before (pat
, get_insns ());
3072 /* Return the value in the proper mode for this function. */
3073 if (GET_MODE (ops
[0].value
) == target_mode
)
3074 target
= ops
[0].value
;
3075 else if (target
!= 0)
3076 convert_move (target
, ops
[0].value
, 0);
3078 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
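
/* Illustration (added; not from the original file): for a constant
   argument such as strlen ("hello"), c_strlen computes the length (5)
   and the call is expanded to that constant without emitting any strlen
   insn; the optab path above is only used for non-constant strings on
   targets that provide a strlen pattern.  */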
3084 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3085 bytes from constant string DATA + OFFSET and return it as target
3089 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3092 const char *str
= (const char *) data
;
3094 gcc_assert (offset
>= 0
3095 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3096 <= strlen (str
) + 1));
3098 return c_readstr (str
+ offset
, mode
);
3101 /* LEN specify length of the block of memcpy/memset operation.
3102 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3103 In some cases we can make very likely guess on max size, then we
3104 set it into PROBABLE_MAX_SIZE. */
3107 determine_block_size (tree len
, rtx len_rtx
,
3108 unsigned HOST_WIDE_INT
*min_size
,
3109 unsigned HOST_WIDE_INT
*max_size
,
3110 unsigned HOST_WIDE_INT
*probable_max_size
)
3112 if (CONST_INT_P (len_rtx
))
3114 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3120 enum value_range_type range_type
= VR_UNDEFINED
;
3122 /* Determine bounds from the type. */
3123 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3124 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3127 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3128 *probable_max_size
= *max_size
3129 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3131 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3133 if (TREE_CODE (len
) == SSA_NAME
)
3134 range_type
= get_range_info (len
, &min
, &max
);
3135 if (range_type
== VR_RANGE
)
3137 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3138 *min_size
= min
.to_uhwi ();
3139 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3140 *probable_max_size
= *max_size
= max
.to_uhwi ();
3142 else if (range_type
== VR_ANTI_RANGE
)
          /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3147 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3148 *min_size
= max
.to_uhwi () + 1;
3156 Produce anti range allowing negative values of N. We still
3157 can use the information and make a guess that N is not negative.
3159 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3160 *probable_max_size
= min
.to_uhwi () - 1;
3163 gcc_checking_assert (*max_size
<=
3164 (unsigned HOST_WIDE_INT
)
3165 GET_MODE_MASK (GET_MODE (len_rtx
)));
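
/* Worked example (illustration only, added): for

     size_t n;
     if (n < 100)
       memcpy (dst, src, n);

   value range information on N gives roughly MIN_SIZE = 0 and
   MAX_SIZE = PROBABLE_MAX_SIZE = 99, which lets the block-move
   expanders choose a cheaper inline strategy than for a completely
   unknown length.  */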
3168 /* Helper function to do the actual work for expand_builtin_memcpy. */
3171 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
3173 const char *src_str
;
3174 unsigned int src_align
= get_pointer_alignment (src
);
3175 unsigned int dest_align
= get_pointer_alignment (dest
);
3176 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3177 HOST_WIDE_INT expected_size
= -1;
3178 unsigned int expected_align
= 0;
3179 unsigned HOST_WIDE_INT min_size
;
3180 unsigned HOST_WIDE_INT max_size
;
3181 unsigned HOST_WIDE_INT probable_max_size
;
3183 /* If DEST is not a pointer type, call the normal function. */
3184 if (dest_align
== 0)
3187 /* If either SRC is not a pointer type, don't do this
3188 operation in-line. */
3192 if (currently_expanding_gimple_stmt
)
3193 stringop_block_profile (currently_expanding_gimple_stmt
,
3194 &expected_align
, &expected_size
);
3196 if (expected_align
< dest_align
)
3197 expected_align
= dest_align
;
3198 dest_mem
= get_memory_rtx (dest
, len
);
3199 set_mem_align (dest_mem
, dest_align
);
3200 len_rtx
= expand_normal (len
);
3201 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3202 &probable_max_size
);
3203 src_str
= c_getstr (src
);
  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
3209 && CONST_INT_P (len_rtx
)
3210 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3211 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3212 CONST_CAST (char *, src_str
),
3215 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3216 builtin_memcpy_read_str
,
3217 CONST_CAST (char *, src_str
),
3218 dest_align
, false, 0);
3219 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3220 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3224 src_mem
= get_memory_rtx (src
, len
);
3225 set_mem_align (src_mem
, src_align
);
3227 /* Copy word part most expediently. */
3228 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3229 CALL_EXPR_TAILCALL (exp
)
3230 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3231 expected_align
, expected_size
,
3232 min_size
, max_size
, probable_max_size
);
3236 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3237 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3243 /* Expand a call EXP to the memcpy builtin.
3244 Return NULL_RTX if we failed, the caller should emit a normal call,
3245 otherwise try to get the result in TARGET, if convenient (and in
3246 mode MODE if that's convenient). */
3249 expand_builtin_memcpy (tree exp
, rtx target
)
3251 if (!validate_arglist (exp
,
3252 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3256 tree dest
= CALL_EXPR_ARG (exp
, 0);
3257 tree src
= CALL_EXPR_ARG (exp
, 1);
3258 tree len
= CALL_EXPR_ARG (exp
, 2);
3259 return expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3263 /* Expand an instrumented call EXP to the memcpy builtin.
3264 Return NULL_RTX if we failed, the caller should emit a normal call,
3265 otherwise try to get the result in TARGET, if convenient (and in
3266 mode MODE if that's convenient). */
3269 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3271 if (!validate_arglist (exp
,
3272 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3273 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3274 INTEGER_TYPE
, VOID_TYPE
))
3278 tree dest
= CALL_EXPR_ARG (exp
, 0);
3279 tree src
= CALL_EXPR_ARG (exp
, 2);
3280 tree len
= CALL_EXPR_ARG (exp
, 4);
3281 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3283 /* Return src bounds with the result. */
3286 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3287 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3288 res
= chkp_join_splitted_slot (res
, bnd
);
3294 /* Expand a call EXP to the mempcpy builtin.
3295 Return NULL_RTX if we failed; the caller should emit a normal call,
3296 otherwise try to get the result in TARGET, if convenient (and in
3297 mode MODE if that's convenient). If ENDP is 0 return the
3298 destination pointer, if ENDP is 1 return the end pointer ala
3299 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3303 expand_builtin_mempcpy (tree exp
, rtx target
, machine_mode mode
)
3305 if (!validate_arglist (exp
,
3306 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3310 tree dest
= CALL_EXPR_ARG (exp
, 0);
3311 tree src
= CALL_EXPR_ARG (exp
, 1);
3312 tree len
= CALL_EXPR_ARG (exp
, 2);
3313 return expand_builtin_mempcpy_args (dest
, src
, len
,
3314 target
, mode
, /*endp=*/ 1,
3319 /* Expand an instrumented call EXP to the mempcpy builtin.
3320 Return NULL_RTX if we failed, the caller should emit a normal call,
3321 otherwise try to get the result in TARGET, if convenient (and in
3322 mode MODE if that's convenient). */
3325 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3327 if (!validate_arglist (exp
,
3328 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3329 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3330 INTEGER_TYPE
, VOID_TYPE
))
3334 tree dest
= CALL_EXPR_ARG (exp
, 0);
3335 tree src
= CALL_EXPR_ARG (exp
, 2);
3336 tree len
= CALL_EXPR_ARG (exp
, 4);
3337 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3340 /* Return src bounds with the result. */
3343 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3344 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3345 res
= chkp_join_splitted_slot (res
, bnd
);
3351 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3352 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3353 so that this can also be called without constructing an actual CALL_EXPR.
3354 The other arguments and return value are the same as for
3355 expand_builtin_mempcpy. */
3358 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3359 rtx target
, machine_mode mode
, int endp
,
3362 tree fndecl
= get_callee_fndecl (orig_exp
);
3364 /* If return value is ignored, transform mempcpy into memcpy. */
3365 if (target
== const0_rtx
3366 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3367 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3369 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3370 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3372 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3374 else if (target
== const0_rtx
3375 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3377 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3378 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3380 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3384 const char *src_str
;
3385 unsigned int src_align
= get_pointer_alignment (src
);
3386 unsigned int dest_align
= get_pointer_alignment (dest
);
3387 rtx dest_mem
, src_mem
, len_rtx
;
3389 /* If either SRC or DEST is not a pointer type, don't do this
3390 operation in-line. */
3391 if (dest_align
== 0 || src_align
== 0)
3394 /* If LEN is not constant, call the normal function. */
3395 if (! tree_fits_uhwi_p (len
))
3398 len_rtx
= expand_normal (len
);
3399 src_str
= c_getstr (src
);
      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
3405 && CONST_INT_P (len_rtx
)
3406 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3407 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3408 CONST_CAST (char *, src_str
),
3411 dest_mem
= get_memory_rtx (dest
, len
);
3412 set_mem_align (dest_mem
, dest_align
);
3413 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3414 builtin_memcpy_read_str
,
3415 CONST_CAST (char *, src_str
),
3416 dest_align
, false, endp
);
3417 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3418 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3422 if (CONST_INT_P (len_rtx
)
3423 && can_move_by_pieces (INTVAL (len_rtx
),
3424 MIN (dest_align
, src_align
)))
3426 dest_mem
= get_memory_rtx (dest
, len
);
3427 set_mem_align (dest_mem
, dest_align
);
3428 src_mem
= get_memory_rtx (src
, len
);
3429 set_mem_align (src_mem
, src_align
);
3430 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3431 MIN (dest_align
, src_align
), endp
);
3432 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3433 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3441 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3442 we failed, the caller should emit a normal call, otherwise try to
3443 get the result in TARGET, if convenient. If ENDP is 0 return the
3444 destination pointer, if ENDP is 1 return the end pointer ala
3445 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3449 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3451 struct expand_operand ops
[3];
3455 if (!targetm
.have_movstr ())
3458 dest_mem
= get_memory_rtx (dest
, NULL
);
3459 src_mem
= get_memory_rtx (src
, NULL
);
3462 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3463 dest_mem
= replace_equiv_address (dest_mem
, target
);
3466 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3467 create_fixed_operand (&ops
[1], dest_mem
);
3468 create_fixed_operand (&ops
[2], src_mem
);
3469 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3472 if (endp
&& target
!= const0_rtx
)
3474 target
= ops
[0].value
;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      rtx tem = plus_constant (GET_MODE (target),
                               gen_lowpart (GET_MODE (target), target), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
3494 expand_builtin_strcpy (tree exp
, rtx target
)
3496 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3498 tree dest
= CALL_EXPR_ARG (exp
, 0);
3499 tree src
= CALL_EXPR_ARG (exp
, 1);
3500 return expand_builtin_strcpy_args (dest
, src
, target
);
3505 /* Helper function to do the actual work for expand_builtin_strcpy. The
3506 arguments to the builtin_strcpy call DEST and SRC are broken out
3507 so that this can also be called without constructing an actual CALL_EXPR.
3508 The other arguments and return value are the same as for
3509 expand_builtin_strcpy. */
3512 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3514 return expand_movstr (dest
, src
, target
, /*endp=*/0);
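
/* Note (illustrative, added): the ENDP convention used by expand_movstr
   and the mempcpy helpers is: 0 means return the destination pointer
   (strcpy), 1 means return the pointer one past the last byte written
   (mempcpy), and 2 means return the pointer to the last byte written,
   i.e. the copied NUL terminator (stpcpy).  */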
3517 /* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
3523 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3526 location_t loc
= EXPR_LOCATION (exp
);
3528 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3531 dst
= CALL_EXPR_ARG (exp
, 0);
3532 src
= CALL_EXPR_ARG (exp
, 1);
3534 /* If return value is ignored, transform stpcpy into strcpy. */
3535 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3537 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3538 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3539 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3546 /* Ensure we get an actual string whose length can be evaluated at
3547 compile-time, not an expression containing a string. This is
3548 because the latter will potentially produce pessimized code
3549 when used to produce the return value. */
3550 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3551 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3553 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3554 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3555 target
, mode
, /*endp=*/2,
3561 if (TREE_CODE (len
) == INTEGER_CST
)
3563 rtx len_rtx
= expand_normal (len
);
3565 if (CONST_INT_P (len_rtx
))
3567 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3573 if (mode
!= VOIDmode
)
3574 target
= gen_reg_rtx (mode
);
3576 target
= gen_reg_rtx (GET_MODE (ret
));
3578 if (GET_MODE (target
) != GET_MODE (ret
))
3579 ret
= gen_lowpart (GET_MODE (target
), ret
);
3581 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3582 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3590 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3594 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3595 bytes from constant string DATA + OFFSET and return it as target
3599 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3602 const char *str
= (const char *) data
;
3604 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3607 return c_readstr (str
+ offset
, mode
);
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3614 expand_builtin_strncpy (tree exp
, rtx target
)
3616 location_t loc
= EXPR_LOCATION (exp
);
3618 if (validate_arglist (exp
,
3619 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3621 tree dest
= CALL_EXPR_ARG (exp
, 0);
3622 tree src
= CALL_EXPR_ARG (exp
, 1);
3623 tree len
= CALL_EXPR_ARG (exp
, 2);
3624 tree slen
= c_strlen (src
, 1);
3626 /* We must be passed a constant len and src parameter. */
3627 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3630 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3632 /* We're required to pad with trailing zeros if the requested
3633 len is greater than strlen(s2)+1. In that case try to
3634 use store_by_pieces, if it fails, punt. */
3635 if (tree_int_cst_lt (slen
, len
))
3637 unsigned int dest_align
= get_pointer_alignment (dest
);
3638 const char *p
= c_getstr (src
);
3641 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3642 || !can_store_by_pieces (tree_to_uhwi (len
),
3643 builtin_strncpy_read_str
,
3644 CONST_CAST (char *, p
),
3648 dest_mem
= get_memory_rtx (dest
, len
);
3649 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3650 builtin_strncpy_read_str
,
3651 CONST_CAST (char *, p
), dest_align
, false, 0);
3652 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3653 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3660 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3661 bytes from constant string DATA + OFFSET and return it as target
3665 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3668 const char *c
= (const char *) data
;
3669 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3671 memset (p
, *c
, GET_MODE_SIZE (mode
));
3673 return c_readstr (p
, mode
);
3676 /* Callback routine for store_by_pieces. Return the RTL of a register
3677 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3678 char value given in the RTL register data. For example, if mode is
3679 4 bytes wide, return the RTL for 0x01010101*data. */
3682 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3689 size
= GET_MODE_SIZE (mode
);
3693 p
= XALLOCAVEC (char, size
);
3694 memset (p
, 1, size
);
3695 coeff
= c_readstr (p
, mode
);
3697 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3698 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3699 return force_reg (mode
, target
);
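
/* Illustration (added; not part of the original sources): with a
   4-byte MODE and a runtime fill value V, the code above computes

     coeff  = 0x01010101
     target = (unsigned char) V * coeff

   so a memset value of 0xab yields the word 0xabababab that
   store_by_pieces then writes out.  */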
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
3708 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
3710 if (!validate_arglist (exp
,
3711 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3715 tree dest
= CALL_EXPR_ARG (exp
, 0);
3716 tree val
= CALL_EXPR_ARG (exp
, 1);
3717 tree len
= CALL_EXPR_ARG (exp
, 2);
3718 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */
3728 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3730 if (!validate_arglist (exp
,
3731 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3732 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3736 tree dest
= CALL_EXPR_ARG (exp
, 0);
3737 tree val
= CALL_EXPR_ARG (exp
, 2);
3738 tree len
= CALL_EXPR_ARG (exp
, 3);
3739 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3741 /* Return src bounds with the result. */
3744 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3745 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3746 res
= chkp_join_splitted_slot (res
, bnd
);
3752 /* Helper function to do the actual work for expand_builtin_memset. The
3753 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3754 so that this can also be called without constructing an actual CALL_EXPR.
3755 The other arguments and return value are the same as for
3756 expand_builtin_memset. */
3759 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3760 rtx target
, machine_mode mode
, tree orig_exp
)
3763 enum built_in_function fcode
;
3764 machine_mode val_mode
;
3766 unsigned int dest_align
;
3767 rtx dest_mem
, dest_addr
, len_rtx
;
3768 HOST_WIDE_INT expected_size
= -1;
3769 unsigned int expected_align
= 0;
3770 unsigned HOST_WIDE_INT min_size
;
3771 unsigned HOST_WIDE_INT max_size
;
3772 unsigned HOST_WIDE_INT probable_max_size
;
3774 dest_align
= get_pointer_alignment (dest
);
3776 /* If DEST is not a pointer type, don't do this operation in-line. */
3777 if (dest_align
== 0)
3780 if (currently_expanding_gimple_stmt
)
3781 stringop_block_profile (currently_expanding_gimple_stmt
,
3782 &expected_align
, &expected_size
);
3784 if (expected_align
< dest_align
)
3785 expected_align
= dest_align
;
3787 /* If the LEN parameter is zero, return DEST. */
3788 if (integer_zerop (len
))
3790 /* Evaluate and ignore VAL in case it has side-effects. */
3791 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3792 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3795 /* Stabilize the arguments in case we fail. */
3796 dest
= builtin_save_expr (dest
);
3797 val
= builtin_save_expr (val
);
3798 len
= builtin_save_expr (len
);
3800 len_rtx
= expand_normal (len
);
3801 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3802 &probable_max_size
);
3803 dest_mem
= get_memory_rtx (dest
, len
);
3804 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3806 if (TREE_CODE (val
) != INTEGER_CST
)
3810 val_rtx
= expand_normal (val
);
3811 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3813 /* Assume that we can memset by pieces if we can store
3814 * the coefficients by pieces (in the required modes).
3815 * We can't pass builtin_memset_gen_str as that emits RTL. */
3817 if (tree_fits_uhwi_p (len
)
3818 && can_store_by_pieces (tree_to_uhwi (len
),
3819 builtin_memset_read_str
, &c
, dest_align
,
3822 val_rtx
= force_reg (val_mode
, val_rtx
);
3823 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3824 builtin_memset_gen_str
, val_rtx
, dest_align
,
3827 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3828 dest_align
, expected_align
,
3829 expected_size
, min_size
, max_size
,
3833 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3834 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3838 if (target_char_cast (val
, &c
))
3843 if (tree_fits_uhwi_p (len
)
3844 && can_store_by_pieces (tree_to_uhwi (len
),
3845 builtin_memset_read_str
, &c
, dest_align
,
3847 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3848 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3849 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3850 gen_int_mode (c
, val_mode
),
3851 dest_align
, expected_align
,
3852 expected_size
, min_size
, max_size
,
3856 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3857 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3861 set_mem_align (dest_mem
, dest_align
);
3862 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3863 CALL_EXPR_TAILCALL (orig_exp
)
3864 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3865 expected_align
, expected_size
,
3871 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3872 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3878 fndecl
= get_callee_fndecl (orig_exp
);
3879 fcode
= DECL_FUNCTION_CODE (fndecl
);
3880 if (fcode
== BUILT_IN_MEMSET
3881 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
3882 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3884 else if (fcode
== BUILT_IN_BZERO
)
3885 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3889 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3890 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3891 return expand_call (fn
, target
, target
== const0_rtx
);
3894 /* Expand expression EXP, which is a call to the bzero builtin. Return
3895 NULL_RTX if we failed the caller should emit a normal call. */
3898 expand_builtin_bzero (tree exp
)
3901 location_t loc
= EXPR_LOCATION (exp
);
3903 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3906 dest
= CALL_EXPR_ARG (exp
, 0);
3907 size
= CALL_EXPR_ARG (exp
, 1);
3909 /* New argument list transforming bzero(ptr x, int y) to
3910 memset(ptr x, int 0, size_t y). This is done this way
3911 so that if it isn't expanded inline, we fallback to
3912 calling bzero instead of memset. */
3914 return expand_builtin_memset_args (dest
, integer_zero_node
,
3915 fold_convert_loc (loc
,
3916 size_type_node
, size
),
3917 const0_rtx
, VOIDmode
, exp
);
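
/* Illustrative sketch (not part of this file): the transformation above
   rewrites a user-level bzero call into the equivalent memset form, so a
   failed inline expansion still falls back to the function the user named.
   The code below is hypothetical user code, not GCC internals.  */
#if 0
#include <string.h>
#include <strings.h>

void
clear_buffer (char *buf, size_t n)
{
  bzero (buf, n);       /* expanded as if it were ...           */
  memset (buf, 0, n);   /* ... this memset (ptr, 0, size) call.  */
}
#endif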
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)

  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;

/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
                          rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
                          HOST_WIDE_INT align)

  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
                               TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

expand_builtin_memcmp (tree exp, rtx target)

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
  if (icode == CODE_FOR_nothing)

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (arg3_rtx))

      set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
      set_mem_size (arg2_rtx, INTVAL (arg3_rtx));

  rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
                                         TREE_TYPE (len), arg3_rtx,
                                         MIN (arg1_align, arg2_align));

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)

          convert_move (target, result, 0);

        return convert_to_mode (mode, result, 0);

        && REG_P (result) && GET_MODE (result) == mode
        && REGNO (result) >= FIRST_PSEUDO_REGISTER))
    result = gen_reg_rtx (mode);

  emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
                           TYPE_MODE (integer_type_node), 3,
                           XEXP (arg1_rtx, 0), Pmode,
                           XEXP (arg2_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
                                            TYPE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)

      rtx arg1_rtx, arg2_rtx;

      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
        result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
                                MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)

          tree len1 = c_strlen (arg1, 1);
          tree len2 = c_strlen (arg2, 1);

            len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
            len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap,
             unless one has side effects.  If both strings have constant
             lengths, use the smaller.  */

          else if (TREE_SIDE_EFFECTS (len1))
          else if (TREE_SIDE_EFFECTS (len2))
          else if (TREE_CODE (len1) != INTEGER_CST)
          else if (TREE_CODE (len2) != INTEGER_CST)
          else if (tree_int_cst_lt (len1, len2))

          /* If both arguments have side effects, we cannot optimize.  */
          if (len && !TREE_SIDE_EFFECTS (len))

              arg3_rtx = expand_normal (len);
              result = expand_cmpstrn_or_cmpmem
                (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
                 arg3_rtx, MIN (arg1_align, arg2_align));

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)

        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  fndecl = get_callee_fndecl (exp);
  fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)

  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)

      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;

      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

        len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
        len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant
         lengths, use the smaller.  */

      else if (TREE_SIDE_EFFECTS (len1))
      else if (TREE_SIDE_EFFECTS (len2))
      else if (TREE_CODE (len1) != INTEGER_CST)
      else if (TREE_CODE (len2) != INTEGER_CST)
      else if (tree_int_cst_lt (len1, len2))

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                             fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
                                         arg2_rtx, TREE_TYPE (len), arg3_rtx,
                                         MIN (arg1_align, arg2_align));

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)

        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  fndecl = get_callee_fndecl (exp);
  fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
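
/* Illustrative sketch (not part of this file): for a call like the one
   below, the expander above can use cmpstrnsi with the length
   MIN (strlen ("abc") + 1, 3) = 3, because the comparison cannot look
   past the first NUL of a string whose length is known.  Hypothetical
   user code.  */
#if 0
#include <string.h>

int
has_prefix_abc (const char *s)
{
  return strncmp (s, "abc", 3) == 0;   /* length used: MIN (3 + 1, 3) = 3 */
}
#endif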
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

expand_builtin_saveregs (void)

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

/* Expand a call to __builtin_next_arg.  */

expand_builtin_next_arg (void)

  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)

  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)

          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);

      tree pt = build_pointer_type (vatype);

          if (! TREE_SIDE_EFFECTS (valist))

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));

/* The "standard" definition of va_list is void*.  */

std_build_builtin_va_list (void)

  return ptr_type_node;

/* The "standard" abi va_list is va_list_type_node.  */

std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)

  return va_list_type_node;

/* The "standard" type of va_list is va_list_type_node.  */

std_canonical_va_list_type (tree type)

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;

  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)

      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))

          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);

  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

std_expand_builtin_va_start (tree valist, rtx nextarg)

  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
                                      make_tree (TREE_TYPE (valist),

/* Expand EXP, a call to __builtin_va_start.  */

expand_builtin_va_start (tree exp)

  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)

      error_at (loc, "too few arguments to function %<va_start%>");

  if (fold_builtin_next_arg (exp, true))

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
    std_expand_builtin_va_start (valist, nextarg);

/* Expand EXP, a call to __builtin_va_end.  */

expand_builtin_va_end (tree exp)

  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

expand_builtin_va_copy (tree exp)

  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)

      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
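
/* Illustrative sketch (not part of this file): user-level va_copy, the
   call the expander above implements either as a simple assignment or as
   a block copy when the ABI's va_list is an array type.  Hypothetical
   user code.  */
#if 0
#include <stdarg.h>
#include <stdio.h>

int
log_twice (const char *fmt, ...)
{
  va_list ap, aq;
  int n;

  va_start (ap, fmt);
  va_copy (aq, ap);         /* handled by expand_builtin_va_copy */
  n = vprintf (fmt, ap);
  n += vprintf (fmt, aq);
  va_end (aq);
  va_end (ap);
  return n;
}
#endif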
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

expand_builtin_frame_address (tree fndecl, tree exp)

  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))

      error ("invalid argument to %qD", fndecl);

      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */

          warning (0, "unsupported argument to %qD", fndecl);

          /* Warn since no effort is made to ensure that any frame
             beyond the current one exists or can be safely reached.  */
          warning (OPT_Wframe_address, "calling %qD with "
                   "a nonzero argument is unsafe", fndecl);

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)

          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
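
/* Illustrative sketch (not part of this file): __builtin_frame_address and
   __builtin_return_address require a constant argument; a nonzero count
   walks up the stack and triggers the -Wframe-address warning above.
   Hypothetical user code.  */
#if 0
void *
current_frame (void)
{
  return __builtin_frame_address (0);   /* the current frame is always safe */
}

void *
caller_return_address (void)
{
  return __builtin_return_address (1);  /* warns: nonzero argument is unsafe */
}
#endif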
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

expand_builtin_alloca (tree exp, bool cannot_accumulate)

  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
                            == BUILT_IN_ALLOCA_WITH_ALIGN);

    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
           ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
           : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);
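
/* Illustrative sketch (not part of this file): __builtin_alloca_with_align
   takes the requested alignment in bits as its second argument, which is
   what the expander above hands to allocate_dynamic_stack_space.
   Hypothetical user code.  */
#if 0
#include <string.h>

void
use_aligned_scratch (size_t n)
{
  /* 256 bits = 32-byte alignment.  */
  void *p = __builtin_alloca_with_align (n, 256);
  memset (p, 0, n);
}
#endif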
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);

/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

expand_builtin_expect (tree exp, rtx target)

  if (call_expr_nargs (exp) < 2)

  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
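
/* Illustrative sketch (not part of this file): by the time the expander
   above runs, the branch-prediction pass has already consumed the hint,
   so the builtin simply yields its first argument.  Hypothetical user
   code.  */
#if 0
int
process (int *p)
{
  if (__builtin_expect (p == 0, 0))   /* hint: p is rarely null */
    return -1;
  return *p;
}
#endif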
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

expand_builtin_assume_aligned (tree exp, rtx target)

  if (call_expr_nargs (exp) < 2)

  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
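
/* Illustrative sketch (not part of this file): __builtin_assume_aligned
   returns its first argument; the alignment promise itself is recorded
   earlier in the pipeline, which is why the expander above only checks
   that the extra arguments have no side effects.  Hypothetical user
   code.  */
#if 0
double
sum4 (const double *p)
{
  const double *q = (const double *) __builtin_assume_aligned (p, 16);
  double s = 0.0;
  for (int i = 0; i < 4; i++)
    s += q[i];
  return s;
}
#endif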
expand_builtin_trap (void)

  if (targetm.have_trap ())

      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

expand_builtin_unreachable (void)

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

expand_builtin_fabs (tree exp, rtx target, rtx subtarget)

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

expand_builtin_copysign (tree exp, rtx target, rtx subtarget)

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);

/* Expand a call to __builtin___clear_cache.  */

expand_builtin___clear_cache (tree exp)

  if (!targetm.code_for_clear_cache)

#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         the library function.  */
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
#endif /* CLEAR_INSN_CACHE */

  /* We have a "clear_cache" insn, and it will handle everything.  */

  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

      error ("both arguments to %<__builtin___clear_cache%> must be pointers");

  if (targetm.have_clear_cache ())

      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))

/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

round_trampoline_addr (rtx tramp)

  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);
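
/* Illustrative sketch (not part of this file): the two binops above
   compute the usual round-up-to-alignment idiom on the trampoline
   address, i.e. (addr + align - 1) & -align.  The helper below is a
   hypothetical stand-alone restatement of that arithmetic.  */
#if 0
#include <stdint.h>

static uintptr_t
round_up_to_alignment (uintptr_t addr, uintptr_t align /* power of two */)
{
  return (addr + align - 1) & -align;   /* e.g. 0x1001 -> 0x1010 for align 16 */
}
#endif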
expand_builtin_init_trampoline (tree exp, bool onstack)

  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);

      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                  "trampoline generated for nested function %qD", t_func);

expand_builtin_adjust_trampoline (tree exp)

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

expand_builtin_signbit (tree exp, rtx target)

  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  enum insn_code icode;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)

      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
      delete_insns_since (last);

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;

      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)

      imode = int_mode_for_mode (fmode);
      gcc_assert (imode != BLKmode);
      temp = gen_lowpart (imode, temp);

      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))

      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
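
/* Illustrative sketch (not part of this file): the fallback above reads
   the float's bit pattern as an integer and masks the sign bit, which
   for IEEE single precision sits at bit 31.  A hypothetical user-level
   version of the same idea:  */
#if 0
#include <stdint.h>
#include <string.h>

static int
my_signbit (float x)
{
  uint32_t bits;
  memcpy (&bits, &x, sizeof bits);   /* view the float as an integer word */
  return (bits >> 31) & 1;           /* shift + mask, as in the expander */
}
#endif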
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))

      id = get_identifier ("__gcov_fork");

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);

/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)

  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
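
/* Illustrative sketch (not part of this file): FCODE_DIFF is log2 of the
   access size in bytes, so e.g. a __sync_..._4 builtin sits two enum
   values after the _1 variant, giving BITS_PER_UNIT << 2 = 32 bits,
   i.e. SImode on typical targets.  The helper below is a hypothetical
   stand-alone restatement of that mapping.  */
#if 0
static unsigned
sync_builtin_bits (int fcode_diff)
{
  return 8u << fcode_diff;   /* 0 -> 8, 1 -> 16, 2 -> 32, 3 -> 64, 4 -> 128 */
}
#endif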
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

get_builtin_sync_mem (tree loc, machine_mode mode)

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

expand_expr_force_mode (tree exp, machine_mode mode)

  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);

/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,

  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)

      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
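
/* Illustrative sketch (not part of this file): user-level __sync
   fetch-and-op calls handled by the expander above.  Since GCC 4.4 the
   NAND variants compute ~(old & val), which is what the warning earlier
   in this function refers to.  Hypothetical user code.  */
#if 0
int
reserve_slot (int *counter)
{
  /* Returns the value *counter held before the addition.  */
  return __sync_fetch_and_add (counter, 1);
}

int
apply_nand (int *p, int mask)
{
  /* Since GCC 4.4: *p = ~(*p & mask); the new value is returned.  */
  return __sync_nand_and_fetch (p, mask);
}
#endif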
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)

  rtx old_val, new_val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)

  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))

/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

expand_builtin_sync_lock_release (machine_mode mode, tree exp)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);

/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)

  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)

      warning (OPT_Winvalid_memory_model,
               "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)

      warning (OPT_Winvalid_memory_model,
               "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;

/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)

  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);

/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,

  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)

      warning (OPT_Winvalid_memory_model,
               "failure memory model cannot be stronger than success memory "
               "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;

  if (is_mm_release (failure) || is_mm_acq_rel (failure))

      warning (OPT_Winvalid_memory_model,
               "invalid failure memory model for "
               "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;

  if (!flag_inline_atomics)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);

  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)

  if (target == const0_rtx)

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
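
/* Illustrative sketch (not part of this file): the user-level form of the
   compare-exchange expanded above.  On failure the current value is
   written back into EXPECTED, which is why the expander conditionally
   stores OLDVAL into the EXPECT memory.  Hypothetical user code.  */
#if 0
int
try_take_lock (int *lock)
{
  int expected = 0;
  /* Weak CAS: may fail spuriously, so callers normally loop.  */
  return __atomic_compare_exchange_n (lock, &expected, 1, /*weak=*/1,
                                      __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
}
#endif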
5445 /* Expand the __atomic_load intrinsic:
5446 TYPE __atomic_load (TYPE *object, enum memmodel)
5447 EXP is the CALL_EXPR.
5448 TARGET is an optional place for us to store the results. */
5451 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
5454 enum memmodel model
;
5456 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5457 if (is_mm_release (model
) || is_mm_acq_rel (model
))
5459 warning (OPT_Winvalid_memory_model
,
5460 "invalid memory model for %<__atomic_load%>");
5461 model
= MEMMODEL_SEQ_CST
;
5464 if (!flag_inline_atomics
)
5467 /* Expand the operand. */
5468 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5470 return expand_atomic_load (target
, mem
, model
);
5474 /* Expand the __atomic_store intrinsic:
5475 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5476 EXP is the CALL_EXPR.
5477 TARGET is an optional place for us to store the results. */
5480 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
5483 enum memmodel model
;
5485 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5486 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
5487 || is_mm_release (model
)))
5489 warning (OPT_Winvalid_memory_model
,
5490 "invalid memory model for %<__atomic_store%>");
5491 model
= MEMMODEL_SEQ_CST
;
5494 if (!flag_inline_atomics
)
5497 /* Expand the operands. */
5498 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5499 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5501 return expand_atomic_store (mem
, val
, model
, false);
5504 /* Expand the __atomic_fetch_XXX intrinsic:
5505 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5506 EXP is the CALL_EXPR.
5507 TARGET is an optional place for us to store the results.
5508 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5509 FETCH_AFTER is true if returning the result of the operation.
5510 FETCH_AFTER is false if returning the value before the operation.
5511 IGNORE is true if the result is not used.
5512 EXT_CALL is the correct builtin for an external call if this cannot be
5513 resolved to an instruction sequence. */
5516 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
5517 enum rtx_code code
, bool fetch_after
,
5518 bool ignore
, enum built_in_function ext_call
)
5521 enum memmodel model
;
5525 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5527 /* Expand the operands. */
5528 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5529 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5531 /* Only try generating instructions if inlining is turned on. */
5532 if (flag_inline_atomics
)
5534 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5539 /* Return if a different routine isn't needed for the library call. */
5540 if (ext_call
== BUILT_IN_NONE
)
5543 /* Change the call to the specified function. */
5544 fndecl
= get_callee_fndecl (exp
);
5545 addr
= CALL_EXPR_FN (exp
);
5548 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5549 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5551 /* Expand the call here so we can emit trailing code. */
5552 ret
= expand_call (exp
, target
, ignore
);
5554 /* Replace the original function just in case it matters. */
5555 TREE_OPERAND (addr
, 0) = fndecl
;
5557 /* Then issue the arithmetic correction to return the right result. */
5562 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5564 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5567 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5573 /* Expand an atomic clear operation.
5574 void _atomic_clear (BOOL *obj, enum memmodel)
5575 EXP is the call expression. */
5578 expand_builtin_atomic_clear (tree exp
)
5582 enum memmodel model
;
5584 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5585 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5586 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5588 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
5590 warning (OPT_Winvalid_memory_model
,
5591 "invalid memory model for %<__atomic_store%>");
5592 model
= MEMMODEL_SEQ_CST
;
5595 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5596 Failing that, a store is issued by __atomic_store. The only way this can
5597 fail is if the bool type is larger than a word size. Unlikely, but
5598 handle it anyway for completeness. Assume a single threaded model since
5599 there is no atomic support in this case, and no barriers are required. */
5600 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5602 emit_move_insn (mem
, const0_rtx
);
5606 /* Expand an atomic test_and_set operation.
5607 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5608 EXP is the call expression. */
5611 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5614 enum memmodel model
;
5617 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5618 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5619 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5621 return expand_atomic_test_and_set (target
, mem
, model
);
5625 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5626 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5629 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5633 unsigned int mode_align
, type_align
;
5635 if (TREE_CODE (arg0
) != INTEGER_CST
)
5638 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5639 mode
= mode_for_size (size
, MODE_INT
, 0);
5640 mode_align
= GET_MODE_ALIGNMENT (mode
);
5642 if (TREE_CODE (arg1
) == INTEGER_CST
)
5644 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
5646 /* Either this argument is null, or it's a fake pointer encoding
5647 the alignment of the object. */
5649 val
*= BITS_PER_UNIT
;
5651 if (val
== 0 || mode_align
< val
)
5652 type_align
= mode_align
;
5658 tree ttype
= TREE_TYPE (arg1
);
5660 /* This function is usually invoked and folded immediately by the front
5661 end before anything else has a chance to look at it. The pointer
5662 parameter at this point is usually cast to a void *, so check for that
5663 and look past the cast. */
5664 if (CONVERT_EXPR_P (arg1
) && POINTER_TYPE_P (ttype
)
5665 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5666 arg1
= TREE_OPERAND (arg1
, 0);
5668 ttype
= TREE_TYPE (arg1
);
5669 gcc_assert (POINTER_TYPE_P (ttype
));
5671 /* Get the underlying type of the object. */
5672 ttype
= TREE_TYPE (ttype
);
5673 type_align
= TYPE_ALIGN (ttype
);
5676 /* If the object has smaller alignment, the lock free routines cannot
5678 if (type_align
< mode_align
)
5679 return boolean_false_node
;
5681 /* Check if a compare_and_swap pattern exists for the mode which represents
5682 the required size. The pattern is not allowed to fail, so the existence
5683 of the pattern indicates support is present. */
5684 if (can_compare_and_swap_p (mode
, true))
5685 return boolean_true_node
;
5687 return boolean_false_node
;
5690 /* Return true if the parameters to call EXP represent an object which will
5691 always generate lock free instructions. The first argument represents the
5692 size of the object, and the second parameter is a pointer to the object
5693 itself. If NULL is passed for the object, then the result is based on
5694 typical alignment for an object of the specified size. Otherwise return
5698 expand_builtin_atomic_always_lock_free (tree exp
)
5701 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5702 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5704 if (TREE_CODE (arg0
) != INTEGER_CST
)
5706 error ("non-constant argument 1 to __atomic_always_lock_free");
5710 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5711 if (size
== boolean_true_node
)
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
     void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
     void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
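/* Editorial note (illustrative, not part of the original source): the
   difference between the two fences is visible at the source level:

     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler barrier only
     __atomic_thread_fence (__ATOMIC_SEQ_CST);   // may emit a real fence insn

   expand_mem_signal_fence only has to order the current thread against a
   signal handler running on that same thread, while expand_mem_thread_fence
   must order the memory accesses of other threads as well.  */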
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);
  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;
  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
        return target;
      break;
5972 /* Just do a normal library call if we were unable to fold
5974 CASE_FLT_FN (BUILT_IN_CABS
):
5977 CASE_FLT_FN (BUILT_IN_EXP
):
5978 CASE_FLT_FN (BUILT_IN_EXP10
):
5979 CASE_FLT_FN (BUILT_IN_POW10
):
5980 CASE_FLT_FN (BUILT_IN_EXP2
):
5981 CASE_FLT_FN (BUILT_IN_EXPM1
):
5982 CASE_FLT_FN (BUILT_IN_LOGB
):
5983 CASE_FLT_FN (BUILT_IN_LOG
):
5984 CASE_FLT_FN (BUILT_IN_LOG10
):
5985 CASE_FLT_FN (BUILT_IN_LOG2
):
5986 CASE_FLT_FN (BUILT_IN_LOG1P
):
5987 CASE_FLT_FN (BUILT_IN_TAN
):
5988 CASE_FLT_FN (BUILT_IN_ASIN
):
5989 CASE_FLT_FN (BUILT_IN_ACOS
):
5990 CASE_FLT_FN (BUILT_IN_ATAN
):
5991 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5992 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5993 because of possible accuracy problems. */
5994 if (! flag_unsafe_math_optimizations
)
5996 CASE_FLT_FN (BUILT_IN_SQRT
):
5997 CASE_FLT_FN (BUILT_IN_FLOOR
):
5998 CASE_FLT_FN (BUILT_IN_CEIL
):
5999 CASE_FLT_FN (BUILT_IN_TRUNC
):
6000 CASE_FLT_FN (BUILT_IN_ROUND
):
6001 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6002 CASE_FLT_FN (BUILT_IN_RINT
):
6003 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6008 CASE_FLT_FN (BUILT_IN_FMA
):
6009 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
6014 CASE_FLT_FN (BUILT_IN_ILOGB
):
6015 if (! flag_unsafe_math_optimizations
)
6017 CASE_FLT_FN (BUILT_IN_ISINF
):
6018 CASE_FLT_FN (BUILT_IN_FINITE
):
6019 case BUILT_IN_ISFINITE
:
6020 case BUILT_IN_ISNORMAL
:
6021 target
= expand_builtin_interclass_mathfn (exp
, target
);
6026 CASE_FLT_FN (BUILT_IN_ICEIL
):
6027 CASE_FLT_FN (BUILT_IN_LCEIL
):
6028 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6029 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6030 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6031 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6032 target
= expand_builtin_int_roundingfn (exp
, target
);
6037 CASE_FLT_FN (BUILT_IN_IRINT
):
6038 CASE_FLT_FN (BUILT_IN_LRINT
):
6039 CASE_FLT_FN (BUILT_IN_LLRINT
):
6040 CASE_FLT_FN (BUILT_IN_IROUND
):
6041 CASE_FLT_FN (BUILT_IN_LROUND
):
6042 CASE_FLT_FN (BUILT_IN_LLROUND
):
6043 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6048 CASE_FLT_FN (BUILT_IN_POWI
):
6049 target
= expand_builtin_powi (exp
, target
);
6054 CASE_FLT_FN (BUILT_IN_ATAN2
):
6055 CASE_FLT_FN (BUILT_IN_LDEXP
):
6056 CASE_FLT_FN (BUILT_IN_SCALB
):
6057 CASE_FLT_FN (BUILT_IN_SCALBN
):
6058 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6059 if (! flag_unsafe_math_optimizations
)
6062 CASE_FLT_FN (BUILT_IN_FMOD
):
6063 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6064 CASE_FLT_FN (BUILT_IN_DREM
):
6065 CASE_FLT_FN (BUILT_IN_POW
):
6066 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6071 CASE_FLT_FN (BUILT_IN_CEXPI
):
6072 target
= expand_builtin_cexpi (exp
, target
);
6073 gcc_assert (target
);
6076 CASE_FLT_FN (BUILT_IN_SIN
):
6077 CASE_FLT_FN (BUILT_IN_COS
):
6078 if (! flag_unsafe_math_optimizations
)
6080 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6085 CASE_FLT_FN (BUILT_IN_SINCOS
):
6086 if (! flag_unsafe_math_optimizations
)
6088 target
= expand_builtin_sincos (exp
);
6093 case BUILT_IN_APPLY_ARGS
:
6094 return expand_builtin_apply_args ();
6096 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6097 FUNCTION with a copy of the parameters described by
6098 ARGUMENTS, and ARGSIZE. It returns a block of memory
6099 allocated on the stack into which is stored all the registers
6100 that might possibly be used for returning the result of a
6101 function. ARGUMENTS is the value returned by
6102 __builtin_apply_args. ARGSIZE is the number of bytes of
6103 arguments that must be copied. ??? How should this value be
6104 computed? We'll also need a safe worst case value for varargs
6106 case BUILT_IN_APPLY
:
6107 if (!validate_arglist (exp
, POINTER_TYPE
,
6108 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6109 && !validate_arglist (exp
, REFERENCE_TYPE
,
6110 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6116 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6117 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6118 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6120 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6123 /* __builtin_return (RESULT) causes the function to return the
6124 value described by RESULT. RESULT is address of the block of
6125 memory returned by __builtin_apply. */
6126 case BUILT_IN_RETURN
:
6127 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6128 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6131 case BUILT_IN_SAVEREGS
:
6132 return expand_builtin_saveregs ();
6134 case BUILT_IN_VA_ARG_PACK
:
6135 /* All valid uses of __builtin_va_arg_pack () are removed during
6137 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6140 case BUILT_IN_VA_ARG_PACK_LEN
:
6141 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6143 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6146 /* Return the address of the first anonymous stack arg. */
6147 case BUILT_IN_NEXT_ARG
:
6148 if (fold_builtin_next_arg (exp
, false))
6150 return expand_builtin_next_arg ();
6152 case BUILT_IN_CLEAR_CACHE
:
6153 target
= expand_builtin___clear_cache (exp
);
6158 case BUILT_IN_CLASSIFY_TYPE
:
6159 return expand_builtin_classify_type (exp
);
6161 case BUILT_IN_CONSTANT_P
:
6164 case BUILT_IN_FRAME_ADDRESS
:
6165 case BUILT_IN_RETURN_ADDRESS
:
6166 return expand_builtin_frame_address (fndecl
, exp
);
6168 /* Returns the address of the area where the structure is returned.
6170 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6171 if (call_expr_nargs (exp
) != 0
6172 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6173 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6176 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6178 case BUILT_IN_ALLOCA
:
6179 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6180 /* If the allocation stems from the declaration of a variable-sized
6181 object, it cannot accumulate. */
6182 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6187 case BUILT_IN_STACK_SAVE
:
6188 return expand_stack_save ();
6190 case BUILT_IN_STACK_RESTORE
:
6191 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6194 case BUILT_IN_BSWAP16
:
6195 case BUILT_IN_BSWAP32
:
6196 case BUILT_IN_BSWAP64
:
6197 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6202 CASE_INT_FN (BUILT_IN_FFS
):
6203 target
= expand_builtin_unop (target_mode
, exp
, target
,
6204 subtarget
, ffs_optab
);
6209 CASE_INT_FN (BUILT_IN_CLZ
):
6210 target
= expand_builtin_unop (target_mode
, exp
, target
,
6211 subtarget
, clz_optab
);
6216 CASE_INT_FN (BUILT_IN_CTZ
):
6217 target
= expand_builtin_unop (target_mode
, exp
, target
,
6218 subtarget
, ctz_optab
);
6223 CASE_INT_FN (BUILT_IN_CLRSB
):
6224 target
= expand_builtin_unop (target_mode
, exp
, target
,
6225 subtarget
, clrsb_optab
);
6230 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6231 target
= expand_builtin_unop (target_mode
, exp
, target
,
6232 subtarget
, popcount_optab
);
6237 CASE_INT_FN (BUILT_IN_PARITY
):
6238 target
= expand_builtin_unop (target_mode
, exp
, target
,
6239 subtarget
, parity_optab
);
6244 case BUILT_IN_STRLEN
:
6245 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6250 case BUILT_IN_STRCPY
:
6251 target
= expand_builtin_strcpy (exp
, target
);
6256 case BUILT_IN_STRNCPY
:
6257 target
= expand_builtin_strncpy (exp
, target
);
6262 case BUILT_IN_STPCPY
:
6263 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6268 case BUILT_IN_MEMCPY
:
6269 target
= expand_builtin_memcpy (exp
, target
);
6274 case BUILT_IN_MEMPCPY
:
6275 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6280 case BUILT_IN_MEMSET
:
6281 target
= expand_builtin_memset (exp
, target
, mode
);
6286 case BUILT_IN_BZERO
:
6287 target
= expand_builtin_bzero (exp
);
6292 case BUILT_IN_STRCMP
:
6293 target
= expand_builtin_strcmp (exp
, target
);
6298 case BUILT_IN_STRNCMP
:
6299 target
= expand_builtin_strncmp (exp
, target
, mode
);
6305 case BUILT_IN_MEMCMP
:
6306 target
= expand_builtin_memcmp (exp
, target
);
6311 case BUILT_IN_SETJMP
:
6312 /* This should have been lowered to the builtins below. */
6315 case BUILT_IN_SETJMP_SETUP
:
6316 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6317 and the receiver label. */
6318 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6320 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6321 VOIDmode
, EXPAND_NORMAL
);
6322 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6323 rtx_insn
*label_r
= label_rtx (label
);
6325 /* This is copied from the handling of non-local gotos. */
6326 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6327 nonlocal_goto_handler_labels
6328 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6329 nonlocal_goto_handler_labels
);
6330 /* ??? Do not let expand_label treat us as such since we would
6331 not want to be both on the list of non-local labels and on
6332 the list of forced labels. */
6333 FORCED_LABEL (label
) = 0;
6338 case BUILT_IN_SETJMP_RECEIVER
:
6339 /* __builtin_setjmp_receiver is passed the receiver label. */
6340 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6342 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6343 rtx_insn
*label_r
= label_rtx (label
);
6345 expand_builtin_setjmp_receiver (label_r
);
6350 /* __builtin_longjmp is passed a pointer to an array of five words.
6351 It's similar to the C library longjmp function but works with
6352 __builtin_setjmp above. */
6353 case BUILT_IN_LONGJMP
:
6354 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6356 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6357 VOIDmode
, EXPAND_NORMAL
);
6358 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6360 if (value
!= const1_rtx
)
6362 error ("%<__builtin_longjmp%> second argument must be 1");
6366 expand_builtin_longjmp (buf_addr
, value
);
6371 case BUILT_IN_NONLOCAL_GOTO
:
6372 target
= expand_builtin_nonlocal_goto (exp
);
6377 /* This updates the setjmp buffer that is its argument with the value
6378 of the current stack pointer. */
6379 case BUILT_IN_UPDATE_SETJMP_BUF
:
6380 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6383 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6385 expand_builtin_update_setjmp_buf (buf_addr
);
6391 expand_builtin_trap ();
6394 case BUILT_IN_UNREACHABLE
:
6395 expand_builtin_unreachable ();
6398 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6399 case BUILT_IN_SIGNBITD32
:
6400 case BUILT_IN_SIGNBITD64
:
6401 case BUILT_IN_SIGNBITD128
:
6402 target
= expand_builtin_signbit (exp
, target
);
6407 /* Various hooks for the DWARF 2 __throw routine. */
6408 case BUILT_IN_UNWIND_INIT
:
6409 expand_builtin_unwind_init ();
6411 case BUILT_IN_DWARF_CFA
:
6412 return virtual_cfa_rtx
;
6413 #ifdef DWARF2_UNWIND_INFO
6414 case BUILT_IN_DWARF_SP_COLUMN
:
6415 return expand_builtin_dwarf_sp_column ();
6416 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6417 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6420 case BUILT_IN_FROB_RETURN_ADDR
:
6421 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6422 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6423 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6424 case BUILT_IN_EH_RETURN
:
6425 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6426 CALL_EXPR_ARG (exp
, 1));
6428 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6429 return expand_builtin_eh_return_data_regno (exp
);
6430 case BUILT_IN_EXTEND_POINTER
:
6431 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6432 case BUILT_IN_EH_POINTER
:
6433 return expand_builtin_eh_pointer (exp
);
6434 case BUILT_IN_EH_FILTER
:
6435 return expand_builtin_eh_filter (exp
);
6436 case BUILT_IN_EH_COPY_VALUES
:
6437 return expand_builtin_eh_copy_values (exp
);
6439 case BUILT_IN_VA_START
:
6440 return expand_builtin_va_start (exp
);
6441 case BUILT_IN_VA_END
:
6442 return expand_builtin_va_end (exp
);
6443 case BUILT_IN_VA_COPY
:
6444 return expand_builtin_va_copy (exp
);
6445 case BUILT_IN_EXPECT
:
6446 return expand_builtin_expect (exp
, target
);
6447 case BUILT_IN_ASSUME_ALIGNED
:
6448 return expand_builtin_assume_aligned (exp
, target
);
6449 case BUILT_IN_PREFETCH
:
6450 expand_builtin_prefetch (exp
);
6453 case BUILT_IN_INIT_TRAMPOLINE
:
6454 return expand_builtin_init_trampoline (exp
, true);
6455 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6456 return expand_builtin_init_trampoline (exp
, false);
6457 case BUILT_IN_ADJUST_TRAMPOLINE
:
6458 return expand_builtin_adjust_trampoline (exp
);
6461 case BUILT_IN_EXECL
:
6462 case BUILT_IN_EXECV
:
6463 case BUILT_IN_EXECLP
:
6464 case BUILT_IN_EXECLE
:
6465 case BUILT_IN_EXECVP
:
6466 case BUILT_IN_EXECVE
:
6467 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6474 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6475 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6476 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6477 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6478 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6485 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6486 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6487 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6488 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6489 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6494 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6496 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6497 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6498 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6499 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6500 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6505 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6507 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6508 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6509 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6510 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6511 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6518 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6519 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6520 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6521 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6522 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6529 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6530 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6531 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6532 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6533 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6540 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6541 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6542 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6543 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6544 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6551 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6552 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6553 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6554 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6555 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6560 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6562 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6563 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6564 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6565 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6566 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6571 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6573 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6574 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6575 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6576 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6577 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6584 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6585 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6586 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6587 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6588 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6595 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6596 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6597 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6598 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6599 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6608 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6609 if (mode
== VOIDmode
)
6610 mode
= TYPE_MODE (boolean_type_node
);
6611 if (!target
|| !register_operand (target
, mode
))
6612 target
= gen_reg_rtx (mode
);
6614 mode
= get_builtin_sync_mode
6615 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6616 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6625 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6626 mode
= get_builtin_sync_mode
6627 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6628 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6635 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6637 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6638 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6639 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6644 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6646 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6647 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6648 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6649 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6650 expand_builtin_sync_lock_release (mode
, exp
);
6653 case BUILT_IN_SYNC_SYNCHRONIZE
:
6654 expand_builtin_sync_synchronize ();
6657 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6658 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6659 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6660 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6661 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6662 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6663 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6670 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6672 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6674 unsigned int nargs
, z
;
6675 vec
<tree
, va_gc
> *vec
;
6678 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6679 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6683 /* If this is turned into an external library call, the weak parameter
6684 must be dropped to match the expected parameter list. */
6685 nargs
= call_expr_nargs (exp
);
6686 vec_alloc (vec
, nargs
- 1);
6687 for (z
= 0; z
< 3; z
++)
6688 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6689 /* Skip the boolean weak parameter. */
6690 for (z
= 4; z
< 6; z
++)
6691 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6692 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6696 case BUILT_IN_ATOMIC_LOAD_1
:
6697 case BUILT_IN_ATOMIC_LOAD_2
:
6698 case BUILT_IN_ATOMIC_LOAD_4
:
6699 case BUILT_IN_ATOMIC_LOAD_8
:
6700 case BUILT_IN_ATOMIC_LOAD_16
:
6701 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6702 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6707 case BUILT_IN_ATOMIC_STORE_1
:
6708 case BUILT_IN_ATOMIC_STORE_2
:
6709 case BUILT_IN_ATOMIC_STORE_4
:
6710 case BUILT_IN_ATOMIC_STORE_8
:
6711 case BUILT_IN_ATOMIC_STORE_16
:
6712 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6713 target
= expand_builtin_atomic_store (mode
, exp
);
6718 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6719 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6720 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6721 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6722 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6724 enum built_in_function lib
;
6725 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6726 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6727 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6728 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6734 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6736 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6737 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6738 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6740 enum built_in_function lib
;
6741 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6742 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6743 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6744 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6750 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6751 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6752 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6753 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6754 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6756 enum built_in_function lib
;
6757 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6758 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6759 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6760 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6766 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6768 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6769 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6770 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6772 enum built_in_function lib
;
6773 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6774 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6775 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6776 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6782 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6784 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6785 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6786 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6788 enum built_in_function lib
;
6789 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6790 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6791 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6792 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6798 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6799 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6800 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6801 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6802 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6804 enum built_in_function lib
;
6805 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6806 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6807 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6808 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6814 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6816 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6817 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6818 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6819 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6820 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6821 ignore
, BUILT_IN_NONE
);
6826 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6828 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6829 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6830 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6831 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6832 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6833 ignore
, BUILT_IN_NONE
);
6838 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6839 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6840 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6841 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6842 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6843 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6844 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6845 ignore
, BUILT_IN_NONE
);
6850 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6852 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6853 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6854 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6855 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6856 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6857 ignore
, BUILT_IN_NONE
);
6862 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6864 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6865 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6866 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6867 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6868 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6869 ignore
, BUILT_IN_NONE
);
6874 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6875 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6876 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6877 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6878 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6879 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6880 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6881 ignore
, BUILT_IN_NONE
);
6886 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6887 return expand_builtin_atomic_test_and_set (exp
, target
);
6889 case BUILT_IN_ATOMIC_CLEAR
:
6890 return expand_builtin_atomic_clear (exp
);
6892 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6893 return expand_builtin_atomic_always_lock_free (exp
);
6895 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6896 target
= expand_builtin_atomic_is_lock_free (exp
);
6901 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6902 expand_builtin_atomic_thread_fence (exp
);
6905 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6906 expand_builtin_atomic_signal_fence (exp
);
6909 case BUILT_IN_OBJECT_SIZE
:
6910 return expand_builtin_object_size (exp
);
6912 case BUILT_IN_MEMCPY_CHK
:
6913 case BUILT_IN_MEMPCPY_CHK
:
6914 case BUILT_IN_MEMMOVE_CHK
:
6915 case BUILT_IN_MEMSET_CHK
:
6916 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6921 case BUILT_IN_STRCPY_CHK
:
6922 case BUILT_IN_STPCPY_CHK
:
6923 case BUILT_IN_STRNCPY_CHK
:
6924 case BUILT_IN_STPNCPY_CHK
:
6925 case BUILT_IN_STRCAT_CHK
:
6926 case BUILT_IN_STRNCAT_CHK
:
6927 case BUILT_IN_SNPRINTF_CHK
:
6928 case BUILT_IN_VSNPRINTF_CHK
:
6929 maybe_emit_chk_warning (exp
, fcode
);
6932 case BUILT_IN_SPRINTF_CHK
:
6933 case BUILT_IN_VSPRINTF_CHK
:
6934 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6938 if (warn_free_nonheap_object
)
6939 maybe_emit_free_warning (exp
);
6942 case BUILT_IN_THREAD_POINTER
:
6943 return expand_builtin_thread_pointer (exp
, target
);
6945 case BUILT_IN_SET_THREAD_POINTER
:
6946 expand_builtin_set_thread_pointer (exp
);
6949 case BUILT_IN_CILK_DETACH
:
6950 expand_builtin_cilk_detach (exp
);
6953 case BUILT_IN_CILK_POP_FRAME
:
6954 expand_builtin_cilk_pop_frame (exp
);
6957 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6958 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6959 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6960 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6961 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6962 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6963 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6964 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6965 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6966 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6967 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6968 /* We allow user CHKP builtins if Pointer Bounds
6970 if (!chkp_function_instrumented_p (current_function_decl
))
6972 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6973 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6974 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
6975 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
6976 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
6977 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6978 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6979 return expand_normal (size_zero_node
);
6980 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6981 return expand_normal (size_int (-1));
6987 case BUILT_IN_CHKP_BNDMK
:
6988 case BUILT_IN_CHKP_BNDSTX
:
6989 case BUILT_IN_CHKP_BNDCL
:
6990 case BUILT_IN_CHKP_BNDCU
:
6991 case BUILT_IN_CHKP_BNDLDX
:
6992 case BUILT_IN_CHKP_BNDRET
:
6993 case BUILT_IN_CHKP_INTERSECT
:
6994 case BUILT_IN_CHKP_NARROW
:
6995 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6996 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6997 /* Software implementation of Pointer Bounds Checker is NYI.
6998 Target support is required. */
6999 error ("Your target platform does not support -fcheck-pointer-bounds");
7002 case BUILT_IN_ACC_ON_DEVICE
:
7003 /* Do library call, if we failed to expand the builtin when
7007 default: /* just do library call, if unknown builtin */
7011 /* The switch statement above can drop through to cause the function
7012 to be called normally. */
7013 return expand_call (exp
, target
, ignore
);
7016 /* Similar to expand_builtin but is used for instrumented calls. */
7019 expand_builtin_with_bounds (tree exp
, rtx target
,
7020 rtx subtarget ATTRIBUTE_UNUSED
,
7021 machine_mode mode
, int ignore
)
7023 tree fndecl
= get_callee_fndecl (exp
);
7024 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7026 gcc_assert (CALL_WITH_BOUNDS_P (exp
));
7028 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7029 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7031 gcc_assert (fcode
> BEGIN_CHKP_BUILTINS
7032 && fcode
< END_CHKP_BUILTINS
);
7036 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
:
7037 target
= expand_builtin_memcpy_with_bounds (exp
, target
);
7042 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
:
7043 target
= expand_builtin_mempcpy_with_bounds (exp
, target
, mode
);
7048 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
:
7049 target
= expand_builtin_memset_with_bounds (exp
, target
, mode
);
7058 /* The switch statement above can drop through to cause the function
7059 to be called normally. */
7060 return expand_call (exp
, target
, ignore
);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (more_const_call_expr_args_p (&iter))
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! more_const_call_expr_args_p (&iter))
        return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
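/* Editorial note (illustrative, not part of the original source): for a
   CALL_EXPR T representing sqrt (x) with a double argument,
   builtin_mathfn_code (T) returns BUILT_IN_SQRT; for sqrtf (f) it returns
   BUILT_IN_SQRTF; if the argument list does not match the builtin's
   prototype (wrong arity or type class), END_BUILTINS is returned.  */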
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
          && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
          || (TREE_CODE (op) == ARRAY_REF
              && integer_zerop (TREE_OPERAND (op, 1))
              && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
        return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
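/* Editorial note (illustrative, not part of the original source):

     __builtin_constant_p (3 * 7)   folds to 1 here (a constant after folding),
     __builtin_constant_p ("ab")    folds to 1 (address of a string literal),

   while for an expression with side effects, or of pointer or aggregate
   type, the fold returns 0; in the remaining cases NULL_TREE defers the
   decision so later optimization can still prove the value constant.  */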
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
                                tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
                                   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}
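/* Editorial note (illustrative, not part of the original source):
   fold_builtin_expect below uses the predicate builder above to distribute
   the hint over short-circuit operators, rewriting roughly

     __builtin_expect (a && b, 1)

   into

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm carries its own prediction.  */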
7203 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7204 NULL_TREE if no simplification is possible. */
7207 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7209 tree inner
, fndecl
, inner_arg0
;
7210 enum tree_code code
;
7212 /* Distribute the expected value over short-circuiting operators.
7213 See through the cast from truthvalue_type_node to long. */
7215 while (CONVERT_EXPR_P (inner_arg0
)
7216 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7217 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7218 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7220 /* If this is a builtin_expect within a builtin_expect keep the
7221 inner one. See through a comparison against a constant. It
7222 might have been added to create a thruthvalue. */
7225 if (COMPARISON_CLASS_P (inner
)
7226 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7227 inner
= TREE_OPERAND (inner
, 0);
7229 if (TREE_CODE (inner
) == CALL_EXPR
7230 && (fndecl
= get_callee_fndecl (inner
))
7231 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7232 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7236 code
= TREE_CODE (inner
);
7237 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7239 tree op0
= TREE_OPERAND (inner
, 0);
7240 tree op1
= TREE_OPERAND (inner
, 1);
7242 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7243 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7244 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7246 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7249 /* If the argument isn't invariant then there's nothing else we can do. */
7250 if (!TREE_CONSTANT (inner_arg0
))
7253 /* If we expect that a comparison against the argument will fold to
7254 a constant return the constant. In practice, this means a true
7255 constant or the address of a non-weak symbol. */
7258 if (TREE_CODE (inner
) == ADDR_EXPR
)
7262 inner
= TREE_OPERAND (inner
, 0);
7264 while (TREE_CODE (inner
) == COMPONENT_REF
7265 || TREE_CODE (inner
) == ARRAY_REF
);
7266 if ((TREE_CODE (inner
) == VAR_DECL
7267 || TREE_CODE (inner
) == FUNCTION_DECL
)
7268 && DECL_WEAK (inner
))
7272 /* Otherwise, ARG0 already has the proper type for the return value. */
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
        return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
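/* Editorial note (illustrative, not part of the original source): with ARG
   the string literal "hello", c_strlen returns the INTEGER_CST 5, so a call
   strlen ("hello") folds here to the constant 5 converted to TYPE (size_t
   for the standard declaration).  */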
7305 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7308 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7310 REAL_VALUE_TYPE real
;
7312 /* __builtin_inff is intended to be usable to define INFINITY on all
7313 targets. If an infinity is not available, INFINITY expands "to a
7314 positive constant of type float that overflows at translation
7315 time", footnote "In this case, using INFINITY will violate the
7316 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7317 Thus we pedwarn to ensure this constraint violation is
7319 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7320 pedwarn (loc
, 0, "target format does not support infinity");
7323 return build_real (type
, real
);
7326 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7329 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7331 REAL_VALUE_TYPE real
;
7334 if (!validate_arg (arg
, POINTER_TYPE
))
7336 str
= c_getstr (arg
);
7340 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7343 return build_real (type
, real
);
7346 /* Return true if the floating point expression T has an integer value.
7347 We also allow +Inf, -Inf and NaN to be considered integer values. */
7350 integer_valued_real_p (tree t
)
7352 switch (TREE_CODE (t
))
7359 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7364 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7371 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7372 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7375 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7376 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7379 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7383 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7384 if (TREE_CODE (type
) == INTEGER_TYPE
)
7386 if (TREE_CODE (type
) == REAL_TYPE
)
7387 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7392 switch (builtin_mathfn_code (t
))
7394 CASE_FLT_FN (BUILT_IN_CEIL
):
7395 CASE_FLT_FN (BUILT_IN_FLOOR
):
7396 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7397 CASE_FLT_FN (BUILT_IN_RINT
):
7398 CASE_FLT_FN (BUILT_IN_ROUND
):
7399 CASE_FLT_FN (BUILT_IN_TRUNC
):
7402 CASE_FLT_FN (BUILT_IN_FMIN
):
7403 CASE_FLT_FN (BUILT_IN_FMAX
):
7404 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7405 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7418 /* FNDECL is assumed to be a builtin where truncation can be propagated
7419 across (for instance floor((double)f) == (double)floorf (f).
7420 Do the transformation for a call with argument ARG. */
7423 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7425 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7427 if (!validate_arg (arg
, REAL_TYPE
))
7430 /* Integer rounding functions are idempotent. */
7431 if (fcode
== builtin_mathfn_code (arg
))
7434 /* If argument is already integer valued, and we don't need to worry
7435 about setting errno, there's no need to perform rounding. */
7436 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7441 tree arg0
= strip_float_extensions (arg
);
7442 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7443 tree newtype
= TREE_TYPE (arg0
);
7446 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7447 && (decl
= mathfn_built_in (newtype
, fcode
)))
7448 return fold_convert_loc (loc
, ftype
,
7449 build_call_expr_loc (loc
, decl
, 1,
7450 fold_convert_loc (loc
,
7457 /* FNDECL is assumed to be builtin which can narrow the FP type of
7458 the argument, for instance lround((double)f) -> lroundf (f).
7459 Do the transformation for a call with argument ARG. */
7462 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7464 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7466 if (!validate_arg (arg
, REAL_TYPE
))
7469 /* If argument is already integer valued, and we don't need to worry
7470 about setting errno, there's no need to perform rounding. */
7471 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7472 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7473 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7477 tree ftype
= TREE_TYPE (arg
);
7478 tree arg0
= strip_float_extensions (arg
);
7479 tree newtype
= TREE_TYPE (arg0
);
7482 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7483 && (decl
= mathfn_built_in (newtype
, fcode
)))
7484 return build_call_expr_loc (loc
, decl
, 1,
7485 fold_convert_loc (loc
, newtype
, arg0
));
7488 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7489 sizeof (int) == sizeof (long). */
7490 if (TYPE_PRECISION (integer_type_node
)
7491 == TYPE_PRECISION (long_integer_type_node
))
7493 tree newfn
= NULL_TREE
;
7496 CASE_FLT_FN (BUILT_IN_ICEIL
):
7497 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7500 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7501 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7504 CASE_FLT_FN (BUILT_IN_IROUND
):
7505 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7508 CASE_FLT_FN (BUILT_IN_IRINT
):
7509 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7518 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7519 return fold_convert_loc (loc
,
7520 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7524 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7525 sizeof (long long) == sizeof (long). */
7526 if (TYPE_PRECISION (long_long_integer_type_node
)
7527 == TYPE_PRECISION (long_integer_type_node
))
7529 tree newfn
= NULL_TREE
;
7532 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7533 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7536 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7537 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7540 CASE_FLT_FN (BUILT_IN_LLROUND
):
7541 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7544 CASE_FLT_FN (BUILT_IN_LLRINT
):
7545 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7554 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7555 return fold_convert_loc (loc
,
7556 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7563 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7564 return type. Return NULL_TREE if no simplification can be made. */
7567 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7571 if (!validate_arg (arg
, COMPLEX_TYPE
)
7572 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7575 /* Calculate the result when the argument is a constant. */
7576 if (TREE_CODE (arg
) == COMPLEX_CST
7577 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7581 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7583 tree real
= TREE_OPERAND (arg
, 0);
7584 tree imag
= TREE_OPERAND (arg
, 1);
7586 /* If either part is zero, cabs is fabs of the other. */
7587 if (real_zerop (real
))
7588 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7589 if (real_zerop (imag
))
7590 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7592 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7593 if (flag_unsafe_math_optimizations
7594 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7596 const REAL_VALUE_TYPE sqrt2_trunc
7597 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7599 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7600 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7601 build_real (type
, sqrt2_trunc
));
7605 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7606 if (TREE_CODE (arg
) == NEGATE_EXPR
7607 || TREE_CODE (arg
) == CONJ_EXPR
)
7608 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7610 /* Don't do this when optimizing for size. */
7611 if (flag_unsafe_math_optimizations
7612 && optimize
&& optimize_function_for_speed_p (cfun
))
7614 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7616 if (sqrtfn
!= NULL_TREE
)
7618 tree rpart
, ipart
, result
;
7620 arg
= builtin_save_expr (arg
);
7622 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7623 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7625 rpart
= builtin_save_expr (rpart
);
7626 ipart
= builtin_save_expr (ipart
);
7628 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7629 fold_build2_loc (loc
, MULT_EXPR
, type
,
7631 fold_build2_loc (loc
, MULT_EXPR
, type
,
7634 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7641 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7642 complex tree type of the result. If NEG is true, the imaginary
7643 zero is negative. */
7646 build_complex_cproj (tree type
, bool neg
)
7648 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7652 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7653 build_real (TREE_TYPE (type
), rzero
));
7656 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7657 return type. Return NULL_TREE if no simplification can be made. */
7660 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7662 if (!validate_arg (arg
, COMPLEX_TYPE
)
7663 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7666 /* If there are no infinities, return arg. */
7667 if (! HONOR_INFINITIES (type
))
7668 return non_lvalue_loc (loc
, arg
);
7670 /* Calculate the result when the argument is a constant. */
7671 if (TREE_CODE (arg
) == COMPLEX_CST
)
7673 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7674 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7676 if (real_isinf (real
) || real_isinf (imag
))
7677 return build_complex_cproj (type
, imag
->sign
);
7681 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7683 tree real
= TREE_OPERAND (arg
, 0);
7684 tree imag
= TREE_OPERAND (arg
, 1);
7689 /* If the real part is inf and the imag part is known to be
7690 nonnegative, return (inf + 0i). Remember side-effects are
7691 possible in the imag part. */
7692 if (TREE_CODE (real
) == REAL_CST
7693 && real_isinf (TREE_REAL_CST_PTR (real
))
7694 && tree_expr_nonnegative_p (imag
))
7695 return omit_one_operand_loc (loc
, type
,
7696 build_complex_cproj (type
, false),
7699 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7700 Remember side-effects are possible in the real part. */
7701 if (TREE_CODE (imag
) == REAL_CST
7702 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7704 omit_one_operand_loc (loc
, type
,
7705 build_complex_cproj (type
, TREE_REAL_CST_PTR
7706 (imag
)->sign
), arg
);
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
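/* Worked example of the sqrt(pow(x,y)) -> pow(|x|,y*0.5) rule above, and why
   the base must be forced nonnegative: with x = -3.0 and y = 2.0 we have
   sqrt (pow (-3.0, 2.0)) == sqrt (9.0) == 3.0, and
   pow (fabs (-3.0), 2.0 * 0.5) == pow (3.0, 1.0) == 3.0, whereas
   pow (-3.0, 1.0) would yield -3.0.  That is why an ABS_EXPR is wrapped
   around the base unless it is already known to be nonnegative.  */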
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
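/* Note on the nonnegativity guards above: cbrt is defined for negative
   arguments while pow with a fractional exponent is not.  For instance
   cbrt (cbrt (-512.0)) == cbrt (-8.0) == -2.0, but pow (-512.0, 1.0/9.0)
   is a domain error, so cbrt(cbrt(x)) -> pow(x,1/9) is only valid when
   x is known to be nonnegative.  */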
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
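/* The sincos -> cexpi canonicalization above relies on Euler's formula,
   cexpi(x) == cos(x) + i*sin(x), so the generated tree is roughly
       t = cexpi (x);  *sinp = __imag t;  *cosp = __real t;
   ("sinp"/"cosp" here are just illustrative names for the two pointer
   arguments).  Later passes can then CSE a single cexpi call instead of
   keeping a separate sin/cos pair.  */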
8015 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8016 NULL_TREE if no simplification can be made. */
8019 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
8022 tree realp
, imagp
, ifn
;
8025 if (!validate_arg (arg0
, COMPLEX_TYPE
)
8026 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
8029 /* Calculate the result when the argument is a constant. */
8030 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
8033 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
8035 /* In case we can figure out the real part of arg0 and it is constant zero
8037 if (!targetm
.libc_has_function (function_c99_math_complex
))
8039 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
8043 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
8044 && real_zerop (realp
))
8046 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
8047 return build_call_expr_loc (loc
, ifn
, 1, narg
);
8050 /* In case we can easily decompose real and imaginary parts split cexp
8051 to exp (r) * cexpi (i). */
8052 if (flag_unsafe_math_optimizations
8055 tree rfn
, rcall
, icall
;
8057 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
8061 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
8065 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
8066 icall
= builtin_save_expr (icall
);
8067 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
8068 rcall
= builtin_save_expr (rcall
);
8069 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
8070 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
8072 fold_build1_loc (loc
, REALPART_EXPR
,
8074 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
8076 fold_build1_loc (loc
, IMAGPART_EXPR
,
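/* The cexp decomposition above follows from e**(a+bi) == e**a * (cos b +
   i*sin b), i.e. cexp (a+bi) == exp (a) * cexpi (b); when the real part is
   known to be constant zero the exp (a) factor is 1.0 and only the cexpi
   call remains.  */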
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
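/* Constant-folding examples for the four functions above:
   trunc (-2.5) == -2.0, floor (-2.5) == -3.0, ceil (-2.5) == -2.0 and
   round (-2.5) == -3.0, since real_round rounds to nearest with halfway
   cases away from zero, matching C99 round.  */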
8198 /* Fold function call to builtin lround, lroundf or lroundl (or the
8199 corresponding long long versions) and other rounding functions. ARG
8200 is the argument to the call. Return NULL_TREE if no simplification
8204 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
8206 if (!validate_arg (arg
, REAL_TYPE
))
8209 /* Optimize lround of constant value. */
8210 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8212 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8214 if (real_isfinite (&x
))
8216 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8217 tree ftype
= TREE_TYPE (arg
);
8221 switch (DECL_FUNCTION_CODE (fndecl
))
8223 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8224 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8225 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8226 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8229 CASE_FLT_FN (BUILT_IN_ICEIL
):
8230 CASE_FLT_FN (BUILT_IN_LCEIL
):
8231 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8232 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8235 CASE_FLT_FN (BUILT_IN_IROUND
):
8236 CASE_FLT_FN (BUILT_IN_LROUND
):
8237 CASE_FLT_FN (BUILT_IN_LLROUND
):
8238 real_round (&r
, TYPE_MODE (ftype
), &x
);
8245 wide_int val
= real_to_integer (&r
, &fail
, TYPE_PRECISION (itype
));
8247 return wide_int_to_tree (itype
, val
);
8251 switch (DECL_FUNCTION_CODE (fndecl
))
8253 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8254 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8255 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8256 if (tree_expr_nonnegative_p (arg
))
8257 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
8258 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8263 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8266 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8267 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8268 the argument to the call. Return NULL_TREE if no simplification can
8272 fold_builtin_bitop (tree fndecl
, tree arg
)
8274 if (!validate_arg (arg
, INTEGER_TYPE
))
8277 /* Optimize for constant argument. */
8278 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8280 tree type
= TREE_TYPE (arg
);
8283 switch (DECL_FUNCTION_CODE (fndecl
))
8285 CASE_INT_FN (BUILT_IN_FFS
):
8286 result
= wi::ffs (arg
);
8289 CASE_INT_FN (BUILT_IN_CLZ
):
8290 if (wi::ne_p (arg
, 0))
8291 result
= wi::clz (arg
);
8292 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8293 result
= TYPE_PRECISION (type
);
8296 CASE_INT_FN (BUILT_IN_CTZ
):
8297 if (wi::ne_p (arg
, 0))
8298 result
= wi::ctz (arg
);
8299 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8300 result
= TYPE_PRECISION (type
);
8303 CASE_INT_FN (BUILT_IN_CLRSB
):
8304 result
= wi::clrsb (arg
);
8307 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8308 result
= wi::popcount (arg
);
8311 CASE_INT_FN (BUILT_IN_PARITY
):
8312 result
= wi::parity (arg
);
8319 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
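/* Examples of the constant folds above for a 32-bit int argument:
   __builtin_popcount (0xf0) == 4, __builtin_parity (7) == 1,
   __builtin_ffs (8) == 4 and __builtin_clz (1) == 31.  The clz/ctz cases
   with a zero argument only fold when the target defines a value through
   CLZ_DEFINED_VALUE_AT_ZERO / CTZ_DEFINED_VALUE_AT_ZERO.  */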
8325 /* Fold function call to builtin_bswap and the short, long and long long
8326 variants. Return NULL_TREE if no simplification can be made. */
8328 fold_builtin_bswap (tree fndecl
, tree arg
)
8330 if (! validate_arg (arg
, INTEGER_TYPE
))
8333 /* Optimize constant value. */
8334 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8336 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8338 switch (DECL_FUNCTION_CODE (fndecl
))
8340 case BUILT_IN_BSWAP16
:
8341 case BUILT_IN_BSWAP32
:
8342 case BUILT_IN_BSWAP64
:
8344 signop sgn
= TYPE_SIGN (type
);
8346 wide_int_to_tree (type
,
8347 wide_int::from (arg
, TYPE_PRECISION (type
),
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
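/* The hypot(x,x) fold above is just sqrt (x*x + x*x) == sqrt (2*x*x)
   == fabs (x) * sqrt (2); sqrt(2) is truncated to the argument's type, so
   e.g. hypot (3.0, 3.0) becomes 3.0 * 1.41421356..., about 4.2426.  */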
8407 /* Fold a builtin function call to pow, powf, or powl. Return
8408 NULL_TREE if no simplification can be made. */
8410 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8414 if (!validate_arg (arg0
, REAL_TYPE
)
8415 || !validate_arg (arg1
, REAL_TYPE
))
8418 /* Calculate the result when the argument is a constant. */
8419 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8422 /* Optimize pow(1.0,y) = 1.0. */
8423 if (real_onep (arg0
))
8424 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8426 if (TREE_CODE (arg1
) == REAL_CST
8427 && !TREE_OVERFLOW (arg1
))
8429 REAL_VALUE_TYPE cint
;
8433 c
= TREE_REAL_CST (arg1
);
8435 /* Optimize pow(x,0.0) = 1.0. */
8436 if (REAL_VALUES_EQUAL (c
, dconst0
))
8437 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8440 /* Optimize pow(x,1.0) = x. */
8441 if (REAL_VALUES_EQUAL (c
, dconst1
))
8444 /* Optimize pow(x,-1.0) = 1.0/x. */
8445 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8446 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8447 build_real (type
, dconst1
), arg0
);
8449 /* Optimize pow(x,0.5) = sqrt(x). */
8450 if (flag_unsafe_math_optimizations
8451 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8453 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8455 if (sqrtfn
!= NULL_TREE
)
8456 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8459 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8460 if (flag_unsafe_math_optimizations
)
8462 const REAL_VALUE_TYPE dconstroot
8463 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8465 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8467 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8468 if (cbrtfn
!= NULL_TREE
)
8469 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8473 /* Check for an integer exponent. */
8474 n
= real_to_integer (&c
);
8475 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
8476 if (real_identical (&c
, &cint
))
8478 /* Attempt to evaluate pow at compile-time, unless this should
8479 raise an exception. */
8480 if (TREE_CODE (arg0
) == REAL_CST
8481 && !TREE_OVERFLOW (arg0
)
8483 || (!flag_trapping_math
&& !flag_errno_math
)
8484 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8489 x
= TREE_REAL_CST (arg0
);
8490 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8491 if (flag_unsafe_math_optimizations
|| !inexact
)
8492 return build_real (type
, x
);
8495 /* Strip sign ops from even integer powers. */
8496 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8498 tree narg0
= fold_strip_sign_ops (arg0
);
8500 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8505 if (flag_unsafe_math_optimizations
)
8507 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8509 /* Optimize pow(expN(x),y) = expN(x*y). */
8510 if (BUILTIN_EXPONENT_P (fcode
))
8512 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8513 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8514 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8515 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8518 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8519 if (BUILTIN_SQRT_P (fcode
))
8521 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8522 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8523 build_real (type
, dconsthalf
));
8524 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8527 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8528 if (BUILTIN_CBRT_P (fcode
))
8530 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8531 if (tree_expr_nonnegative_p (arg
))
8533 const REAL_VALUE_TYPE dconstroot
8534 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8535 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8536 build_real (type
, dconstroot
));
8537 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8541 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8542 if (fcode
== BUILT_IN_POW
8543 || fcode
== BUILT_IN_POWF
8544 || fcode
== BUILT_IN_POWL
)
8546 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8547 if (tree_expr_nonnegative_p (arg00
))
8549 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8550 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8551 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
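/* The nonnegativity requirement in the pow(pow(x,y),z) -> pow(x,y*z) fold
   above matters because the two sides differ for negative x:
   pow (pow (-1.0, 2.0), 0.5) == pow (1.0, 0.5) == 1.0, while
   pow (-1.0, 2.0 * 0.5) == pow (-1.0, 1.0) == -1.0.  */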
8559 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8560 Return NULL_TREE if no simplification can be made. */
8562 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8563 tree arg0
, tree arg1
, tree type
)
8565 if (!validate_arg (arg0
, REAL_TYPE
)
8566 || !validate_arg (arg1
, INTEGER_TYPE
))
8569 /* Optimize pow(1.0,y) = 1.0. */
8570 if (real_onep (arg0
))
8571 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8573 if (tree_fits_shwi_p (arg1
))
8575 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8577 /* Evaluate powi at compile-time. */
8578 if (TREE_CODE (arg0
) == REAL_CST
8579 && !TREE_OVERFLOW (arg0
))
8582 x
= TREE_REAL_CST (arg0
);
8583 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8584 return build_real (type
, x
);
8587 /* Optimize pow(x,0) = 1.0. */
8589 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8592 /* Optimize pow(x,1) = x. */
8596 /* Optimize pow(x,-1) = 1.0/x. */
8598 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8599 build_real (type
, dconst1
), arg0
);
8605 /* A subroutine of fold_builtin to fold the various exponent
8606 functions. Return NULL_TREE if no simplification can be made.
8607 FUNC is the corresponding MPFR exponent function. */
8610 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8611 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8613 if (validate_arg (arg
, REAL_TYPE
))
8615 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8618 /* Calculate the result when the argument is a constant. */
8619 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8622 /* Optimize expN(logN(x)) = x. */
8623 if (flag_unsafe_math_optimizations
)
8625 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8627 if ((func
== mpfr_exp
8628 && (fcode
== BUILT_IN_LOG
8629 || fcode
== BUILT_IN_LOGF
8630 || fcode
== BUILT_IN_LOGL
))
8631 || (func
== mpfr_exp2
8632 && (fcode
== BUILT_IN_LOG2
8633 || fcode
== BUILT_IN_LOG2F
8634 || fcode
== BUILT_IN_LOG2L
))
8635 || (func
== mpfr_exp10
8636 && (fcode
== BUILT_IN_LOG10
8637 || fcode
== BUILT_IN_LOG10F
8638 || fcode
== BUILT_IN_LOG10L
)))
8639 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8646 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8647 arguments to the call, and TYPE is its return type.
8648 Return NULL_TREE if no simplification can be made. */
8651 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8653 if (!validate_arg (arg1
, POINTER_TYPE
)
8654 || !validate_arg (arg2
, INTEGER_TYPE
)
8655 || !validate_arg (len
, INTEGER_TYPE
))
8661 if (TREE_CODE (arg2
) != INTEGER_CST
8662 || !tree_fits_uhwi_p (len
))
8665 p1
= c_getstr (arg1
);
8666 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8672 if (target_char_cast (arg2
, &c
))
8675 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
8678 return build_int_cst (TREE_TYPE (arg1
), 0);
8680 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
8681 return fold_convert_loc (loc
, type
, tem
);
8687 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8688 Return NULL_TREE if no simplification can be made. */
8691 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8693 const char *p1
, *p2
;
8695 if (!validate_arg (arg1
, POINTER_TYPE
)
8696 || !validate_arg (arg2
, POINTER_TYPE
)
8697 || !validate_arg (len
, INTEGER_TYPE
))
8700 /* If the LEN parameter is zero, return zero. */
8701 if (integer_zerop (len
))
8702 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8705 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8706 if (operand_equal_p (arg1
, arg2
, 0))
8707 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8709 p1
= c_getstr (arg1
);
8710 p2
= c_getstr (arg2
);
8712 /* If all arguments are constant, and the value of len is not greater
8713 than the lengths of arg1 and arg2, evaluate at compile-time. */
8714 if (tree_fits_uhwi_p (len
) && p1
&& p2
8715 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8716 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8718 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
8721 return integer_one_node
;
8723 return integer_minus_one_node
;
8725 return integer_zero_node
;
8728 /* If len parameter is one, return an expression corresponding to
8729 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8730 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8732 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8733 tree cst_uchar_ptr_node
8734 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8737 = fold_convert_loc (loc
, integer_type_node
,
8738 build1 (INDIRECT_REF
, cst_uchar_node
,
8739 fold_convert_loc (loc
,
8743 = fold_convert_loc (loc
, integer_type_node
,
8744 build1 (INDIRECT_REF
, cst_uchar_node
,
8745 fold_convert_loc (loc
,
8748 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
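/* Illustration of the len == 1 case above: memcmp (p, q, 1) becomes
   (int) *(const unsigned char *) p - (int) *(const unsigned char *) q,
   so no libc call is emitted; e.g. with *p == 'a' and *q == 'c' the
   result is 97 - 99 == -2, which carries the required negative sign.  */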
8754 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8755 Return NULL_TREE if no simplification can be made. */
8758 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8760 const char *p1
, *p2
;
8762 if (!validate_arg (arg1
, POINTER_TYPE
)
8763 || !validate_arg (arg2
, POINTER_TYPE
))
8766 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8767 if (operand_equal_p (arg1
, arg2
, 0))
8768 return integer_zero_node
;
8770 p1
= c_getstr (arg1
);
8771 p2
= c_getstr (arg2
);
8775 const int i
= strcmp (p1
, p2
);
8777 return integer_minus_one_node
;
8779 return integer_one_node
;
8781 return integer_zero_node
;
8784 /* If the second arg is "", return *(const unsigned char*)arg1. */
8785 if (p2
&& *p2
== '\0')
8787 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8788 tree cst_uchar_ptr_node
8789 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8791 return fold_convert_loc (loc
, integer_type_node
,
8792 build1 (INDIRECT_REF
, cst_uchar_node
,
8793 fold_convert_loc (loc
,
8798 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8799 if (p1
&& *p1
== '\0')
8801 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8802 tree cst_uchar_ptr_node
8803 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8806 = fold_convert_loc (loc
, integer_type_node
,
8807 build1 (INDIRECT_REF
, cst_uchar_node
,
8808 fold_convert_loc (loc
,
8811 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8817 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8818 Return NULL_TREE if no simplification can be made. */
8821 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8823 const char *p1
, *p2
;
8825 if (!validate_arg (arg1
, POINTER_TYPE
)
8826 || !validate_arg (arg2
, POINTER_TYPE
)
8827 || !validate_arg (len
, INTEGER_TYPE
))
8830 /* If the LEN parameter is zero, return zero. */
8831 if (integer_zerop (len
))
8832 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8835 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8836 if (operand_equal_p (arg1
, arg2
, 0))
8837 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8839 p1
= c_getstr (arg1
);
8840 p2
= c_getstr (arg2
);
8842 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
8844 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
8846 return integer_one_node
;
8848 return integer_minus_one_node
;
8850 return integer_zero_node
;
8853 /* If the second arg is "", and the length is greater than zero,
8854 return *(const unsigned char*)arg1. */
8855 if (p2
&& *p2
== '\0'
8856 && TREE_CODE (len
) == INTEGER_CST
8857 && tree_int_cst_sgn (len
) == 1)
8859 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8860 tree cst_uchar_ptr_node
8861 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8863 return fold_convert_loc (loc
, integer_type_node
,
8864 build1 (INDIRECT_REF
, cst_uchar_node
,
8865 fold_convert_loc (loc
,
8870 /* If the first arg is "", and the length is greater than zero,
8871 return -*(const unsigned char*)arg2. */
8872 if (p1
&& *p1
== '\0'
8873 && TREE_CODE (len
) == INTEGER_CST
8874 && tree_int_cst_sgn (len
) == 1)
8876 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8877 tree cst_uchar_ptr_node
8878 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8880 tree temp
= fold_convert_loc (loc
, integer_type_node
,
8881 build1 (INDIRECT_REF
, cst_uchar_node
,
8882 fold_convert_loc (loc
,
8885 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8888 /* If len parameter is one, return an expression corresponding to
8889 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8890 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8892 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8893 tree cst_uchar_ptr_node
8894 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8896 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
8897 build1 (INDIRECT_REF
, cst_uchar_node
,
8898 fold_convert_loc (loc
,
8901 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
8902 build1 (INDIRECT_REF
, cst_uchar_node
,
8903 fold_convert_loc (loc
,
8906 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg),
						      dconst0)));

  return NULL_TREE;
}
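/* The HONOR_SIGNED_ZEROS check above is what blocks the "arg < 0.0" rewrite
   on IEEE targets: signbit (-0.0) must return a nonzero value, but
   -0.0 < 0.0 is false, so the comparison form is only used for formats
   without signed zeros.  */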
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
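/* Constant-folding example for the rule above: copysign (3.0, -0.0) folds
   to -3.0, because real_copysign transfers only the sign bit of the second
   argument, and -0.0 has its sign bit set.  */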
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
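/* The single unsigned compare produced above works because the target's
   digits '0'..'9' are contiguous: for c == '7' the value of
   (unsigned) c - '0' is 7, which passes the <= 9 test; for c == 'a' it is
   49 (in ASCII), and for c < '0' it wraps around to a huge unsigned value,
   so both ends of the range fail the test.  */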
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
9079 /* Fold a fma operation with arguments ARG[012]. */
9082 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9083 tree type
, tree arg0
, tree arg1
, tree arg2
)
9085 if (TREE_CODE (arg0
) == REAL_CST
9086 && TREE_CODE (arg1
) == REAL_CST
9087 && TREE_CODE (arg2
) == REAL_CST
)
9088 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9093 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9096 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9098 if (validate_arg (arg0
, REAL_TYPE
)
9099 && validate_arg (arg1
, REAL_TYPE
)
9100 && validate_arg (arg2
, REAL_TYPE
))
9102 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9106 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9107 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9108 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9113 /* Fold a call to builtin fmin or fmax. */
9116 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9117 tree type
, bool max
)
9119 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9121 /* Calculate the result when the argument is a constant. */
9122 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9127 /* If either argument is NaN, return the other one. Avoid the
9128 transformation if we get (and honor) a signalling NaN. Using
9129 omit_one_operand() ensures we create a non-lvalue. */
9130 if (TREE_CODE (arg0
) == REAL_CST
9131 && real_isnan (&TREE_REAL_CST (arg0
))
9132 && (! HONOR_SNANS (arg0
)
9133 || ! TREE_REAL_CST (arg0
).signalling
))
9134 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9135 if (TREE_CODE (arg1
) == REAL_CST
9136 && real_isnan (&TREE_REAL_CST (arg1
))
9137 && (! HONOR_SNANS (arg1
)
9138 || ! TREE_REAL_CST (arg1
).signalling
))
9139 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9141 /* Transform fmin/fmax(x,x) -> x. */
9142 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9143 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9145 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9146 functions to return the numeric arg if the other one is NaN.
9147 These tree codes don't honor that, so only transform if
9148 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9149 handled, so we don't have to worry about it either. */
9150 if (flag_finite_math_only
)
9151 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9152 fold_convert_loc (loc
, type
, arg0
),
9153 fold_convert_loc (loc
, type
, arg1
));
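/* Example of the NaN handling above: fmax (NAN, 2.0) folds to 2.0 and
   fmin (1.0, NAN) folds to 1.0, matching C99's requirement that the numeric
   operand be returned; the MIN_EXPR/MAX_EXPR rewrite is gated on
   -ffinite-math-only precisely because those tree codes give no such
   guarantee when a NaN operand survives to run time.  */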
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9180 /* Fold a call to builtin logb/ilogb. */
9183 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9185 if (! validate_arg (arg
, REAL_TYPE
))
9190 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9192 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9198 /* If arg is Inf or NaN and we're logb, return it. */
9199 if (TREE_CODE (rettype
) == REAL_TYPE
)
9201 /* For logb(-Inf) we have to return +Inf. */
9202 if (real_isinf (value
) && real_isneg (value
))
9204 REAL_VALUE_TYPE tem
;
9206 return build_real (rettype
, tem
);
9208 return fold_convert_loc (loc
, rettype
, arg
);
9210 /* Fall through... */
9212 /* Zero may set errno and/or raise an exception for logb, also
9213 for ilogb we don't know FP_ILOGB0. */
9216 /* For normal numbers, proceed iff radix == 2. In GCC,
9217 normalized significands are in the range [0.5, 1.0). We
9218 want the exponent as if they were [1.0, 2.0) so get the
9219 exponent and subtract 1. */
9220 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9221 return fold_convert_loc (loc
, rettype
,
9222 build_int_cst (integer_type_node
,
9223 REAL_EXP (value
)-1));
9231 /* Fold a call to builtin significand, if radix == 2. */
9234 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9236 if (! validate_arg (arg
, REAL_TYPE
))
9241 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9243 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9250 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9251 return fold_convert_loc (loc
, rettype
, arg
);
9253 /* For normal numbers, proceed iff radix == 2. */
9254 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9256 REAL_VALUE_TYPE result
= *value
;
9257 /* In GCC, normalized significands are in the range [0.5,
9258 1.0). We want them to be [1.0, 2.0) so set the
9260 SET_REAL_EXP (&result
, 1);
9261 return build_real (rettype
, result
);
9270 /* Fold a call to builtin frexp, we can assume the base is 2. */
9273 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9275 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9280 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9283 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9285 /* Proceed if a valid pointer type was passed in. */
9286 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9288 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9294 /* For +-0, return (*exp = 0, +-0). */
9295 exp
= integer_zero_node
;
9300 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9301 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9304 /* Since the frexp function always expects base 2, and in
9305 GCC normalized significands are already in the range
9306 [0.5, 1.0), we have exactly what frexp wants. */
9307 REAL_VALUE_TYPE frac_rvt
= *value
;
9308 SET_REAL_EXP (&frac_rvt
, 0);
9309 frac
= build_real (rettype
, frac_rvt
);
9310 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9317 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9318 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9319 TREE_SIDE_EFFECTS (arg1
) = 1;
9320 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9326 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9327 then we can assume the base is two. If it's false, then we have to
9328 check the mode of the TYPE parameter in certain cases. */
9331 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9332 tree type
, bool ldexp
)
9334 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9339 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9340 if (real_zerop (arg0
) || integer_zerop (arg1
)
9341 || (TREE_CODE (arg0
) == REAL_CST
9342 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9343 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9345 /* If both arguments are constant, then try to evaluate it. */
9346 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9347 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9348 && tree_fits_shwi_p (arg1
))
9350 /* Bound the maximum adjustment to twice the range of the
9351 mode's valid exponents. Use abs to ensure the range is
9352 positive as a sanity check. */
9353 const long max_exp_adj
= 2 *
9354 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9355 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9357 /* Get the user-requested adjustment. */
9358 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
9360 /* The requested adjustment must be inside this range. This
9361 is a preliminary cap to avoid things like overflow, we
9362 may still fail to compute the result for other reasons. */
9363 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9365 REAL_VALUE_TYPE initial_result
;
9367 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9369 /* Ensure we didn't overflow. */
9370 if (! real_isinf (&initial_result
))
9372 const REAL_VALUE_TYPE trunc_result
9373 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9375 /* Only proceed if the target mode can hold the
9377 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9378 return build_real (type
, trunc_result
);
9387 /* Fold a call to builtin modf. */
9390 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9392 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9397 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9400 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9402 /* Proceed if a valid pointer type was passed in. */
9403 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9405 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9406 REAL_VALUE_TYPE trunc
, frac
;
9412 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9413 trunc
= frac
= *value
;
9416 /* For +-Inf, return (*arg1 = arg0, +-0). */
9418 frac
.sign
= value
->sign
;
9422 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9423 real_trunc (&trunc
, VOIDmode
, value
);
9424 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9425 /* If the original number was negative and already
9426 integral, then the fractional part is -0.0. */
9427 if (value
->sign
&& frac
.cl
== rvc_zero
)
9428 frac
.sign
= value
->sign
;
9432 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9433 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9434 build_real (rettype
, trunc
));
9435 TREE_SIDE_EFFECTS (arg1
) = 1;
9436 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9437 build_real (rettype
, frac
));
9443 /* Given a location LOC, an interclass builtin function decl FNDECL
9444 and its single argument ARG, return an folded expression computing
9445 the same, or NULL_TREE if we either couldn't or didn't want to fold
9446 (the latter happen if there's an RTL instruction available). */
9449 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9453 if (!validate_arg (arg
, REAL_TYPE
))
9456 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9459 mode
= TYPE_MODE (TREE_TYPE (arg
));
9461 /* If there is no optab, try generic code. */
9462 switch (DECL_FUNCTION_CODE (fndecl
))
9466 CASE_FLT_FN (BUILT_IN_ISINF
):
9468 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9469 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9470 tree
const type
= TREE_TYPE (arg
);
9474 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9475 real_from_string (&r
, buf
);
9476 result
= build_call_expr (isgr_fn
, 2,
9477 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9478 build_real (type
, r
));
9481 CASE_FLT_FN (BUILT_IN_FINITE
):
9482 case BUILT_IN_ISFINITE
:
9484 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9485 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9486 tree
const type
= TREE_TYPE (arg
);
9490 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9491 real_from_string (&r
, buf
);
9492 result
= build_call_expr (isle_fn
, 2,
9493 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9494 build_real (type
, r
));
9495 /*result = fold_build2_loc (loc, UNGT_EXPR,
9496 TREE_TYPE (TREE_TYPE (fndecl)),
9497 fold_build1_loc (loc, ABS_EXPR, type, arg),
9498 build_real (type, r));
9499 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9500 TREE_TYPE (TREE_TYPE (fndecl)),
9504 case BUILT_IN_ISNORMAL
:
9506 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9507 islessequal(fabs(x),DBL_MAX). */
9508 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9509 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9510 tree
const type
= TREE_TYPE (arg
);
9511 REAL_VALUE_TYPE rmax
, rmin
;
9514 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9515 real_from_string (&rmax
, buf
);
9516 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9517 real_from_string (&rmin
, buf
);
9518 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9519 result
= build_call_expr (isle_fn
, 2, arg
,
9520 build_real (type
, rmax
));
9521 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9522 build_call_expr (isge_fn
, 2, arg
,
9523 build_real (type
, rmin
)));
9533 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9534 ARG is the argument for the call. */
9537 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9539 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9542 if (!validate_arg (arg
, REAL_TYPE
))
9545 switch (builtin_index
)
9547 case BUILT_IN_ISINF
:
9548 if (!HONOR_INFINITIES (arg
))
9549 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9551 if (TREE_CODE (arg
) == REAL_CST
)
9553 r
= TREE_REAL_CST (arg
);
9554 if (real_isinf (&r
))
9555 return real_compare (GT_EXPR
, &r
, &dconst0
)
9556 ? integer_one_node
: integer_minus_one_node
;
9558 return integer_zero_node
;
9563 case BUILT_IN_ISINF_SIGN
:
9565 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9566 /* In a boolean context, GCC will fold the inner COND_EXPR to
9567 1. So e.g. "if (isinf_sign(x))" would be folded to just
9568 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9569 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9570 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9571 tree tmp
= NULL_TREE
;
9573 arg
= builtin_save_expr (arg
);
9575 if (signbit_fn
&& isinf_fn
)
9577 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9578 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9580 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9581 signbit_call
, integer_zero_node
);
9582 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9583 isinf_call
, integer_zero_node
);
9585 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9586 integer_minus_one_node
, integer_one_node
);
9587 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9595 case BUILT_IN_ISFINITE
:
9596 if (!HONOR_NANS (arg
)
9597 && !HONOR_INFINITIES (arg
))
9598 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9600 if (TREE_CODE (arg
) == REAL_CST
)
9602 r
= TREE_REAL_CST (arg
);
9603 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9608 case BUILT_IN_ISNAN
:
9609 if (!HONOR_NANS (arg
))
9610 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9612 if (TREE_CODE (arg
) == REAL_CST
)
9614 r
= TREE_REAL_CST (arg
);
9615 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9618 arg
= builtin_save_expr (arg
);
9619 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9626 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9627 This builtin will generate code to return the appropriate floating
9628 point classification depending on the value of the floating point
9629 number passed in. The possible return values must be supplied as
9630 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9631 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9632 one floating point argument which is "type generic". */
9635 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9637 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9638 arg
, type
, res
, tmp
;
9643 /* Verify the required arguments in the original call. */
9645 || !validate_arg (args
[0], INTEGER_TYPE
)
9646 || !validate_arg (args
[1], INTEGER_TYPE
)
9647 || !validate_arg (args
[2], INTEGER_TYPE
)
9648 || !validate_arg (args
[3], INTEGER_TYPE
)
9649 || !validate_arg (args
[4], INTEGER_TYPE
)
9650 || !validate_arg (args
[5], REAL_TYPE
))
9654 fp_infinite
= args
[1];
9655 fp_normal
= args
[2];
9656 fp_subnormal
= args
[3];
9659 type
= TREE_TYPE (arg
);
9660 mode
= TYPE_MODE (type
);
9661 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9665 (fabs(x) == Inf ? FP_INFINITE :
9666 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9667 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9669 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9670 build_real (type
, dconst0
));
9671 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9672 tmp
, fp_zero
, fp_subnormal
);
9674 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9675 real_from_string (&r
, buf
);
9676 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9677 arg
, build_real (type
, r
));
9678 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9680 if (HONOR_INFINITIES (mode
))
9683 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9684 build_real (type
, r
));
9685 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9689 if (HONOR_NANS (mode
))
9691 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9692 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9698 /* Fold a call to an unordered comparison function such as
9699 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9700 being called and ARG0 and ARG1 are the arguments for the call.
9701 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9702 the opposite of the desired result. UNORDERED_CODE is used
9703 for modes that can hold NaNs and ORDERED_CODE is used for
9707 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9708 enum tree_code unordered_code
,
9709 enum tree_code ordered_code
)
9711 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9712 enum tree_code code
;
9714 enum tree_code code0
, code1
;
9715 tree cmp_type
= NULL_TREE
;
9717 type0
= TREE_TYPE (arg0
);
9718 type1
= TREE_TYPE (arg1
);
9720 code0
= TREE_CODE (type0
);
9721 code1
= TREE_CODE (type1
);
9723 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9724 /* Choose the wider of two real types. */
9725 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9727 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9729 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9732 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9733 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9735 if (unordered_code
== UNORDERED_EXPR
)
9737 if (!HONOR_NANS (arg0
))
9738 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9739 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9742 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
9743 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9744 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9747 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9748 arithmetics if it can never overflow, or into internal functions that
9749 return both result of arithmetics and overflowed boolean flag in
9750 a complex integer result, or some other check for overflow. */
9753 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9754 tree arg0
, tree arg1
, tree arg2
)
9756 enum internal_fn ifn
= IFN_LAST
;
9757 tree type
= TREE_TYPE (TREE_TYPE (arg2
));
9758 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
9761 case BUILT_IN_ADD_OVERFLOW
:
9762 case BUILT_IN_SADD_OVERFLOW
:
9763 case BUILT_IN_SADDL_OVERFLOW
:
9764 case BUILT_IN_SADDLL_OVERFLOW
:
9765 case BUILT_IN_UADD_OVERFLOW
:
9766 case BUILT_IN_UADDL_OVERFLOW
:
9767 case BUILT_IN_UADDLL_OVERFLOW
:
9768 ifn
= IFN_ADD_OVERFLOW
;
9770 case BUILT_IN_SUB_OVERFLOW
:
9771 case BUILT_IN_SSUB_OVERFLOW
:
9772 case BUILT_IN_SSUBL_OVERFLOW
:
9773 case BUILT_IN_SSUBLL_OVERFLOW
:
9774 case BUILT_IN_USUB_OVERFLOW
:
9775 case BUILT_IN_USUBL_OVERFLOW
:
9776 case BUILT_IN_USUBLL_OVERFLOW
:
9777 ifn
= IFN_SUB_OVERFLOW
;
9779 case BUILT_IN_MUL_OVERFLOW
:
9780 case BUILT_IN_SMUL_OVERFLOW
:
9781 case BUILT_IN_SMULL_OVERFLOW
:
9782 case BUILT_IN_SMULLL_OVERFLOW
:
9783 case BUILT_IN_UMUL_OVERFLOW
:
9784 case BUILT_IN_UMULL_OVERFLOW
:
9785 case BUILT_IN_UMULLL_OVERFLOW
:
9786 ifn
= IFN_MUL_OVERFLOW
;
9791 tree ctype
= build_complex_type (type
);
9792 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
9794 tree tgt
= save_expr (call
);
9795 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
9796 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
9797 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
9799 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
9800 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
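/* Sketch of the expansion above for __builtin_add_overflow (a, b, &r); the
   call becomes roughly
       ctmp = .ADD_OVERFLOW (a, b);        complex-typed internal fn
       *(&r) = REALPART_EXPR <ctmp>;       the wrapped arithmetic result
       flag  = (bool) IMAGPART_EXPR <ctmp>;  the overflow indicator
   glued together by a COMPOUND_EXPR whose value is the boolean flag
   ("ctmp" and "flag" are just illustrative names for the temporaries built
   here).  */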
9803 /* Fold a call to built-in function FNDECL with 0 arguments.
9804 This function returns NULL_TREE if no simplification was possible. */
9807 fold_builtin_0 (location_t loc
, tree fndecl
)
9809 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9810 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9813 CASE_FLT_FN (BUILT_IN_INF
):
9814 case BUILT_IN_INFD32
:
9815 case BUILT_IN_INFD64
:
9816 case BUILT_IN_INFD128
:
9817 return fold_builtin_inf (loc
, type
, true);
9819 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9820 return fold_builtin_inf (loc
, type
, false);
9822 case BUILT_IN_CLASSIFY_TYPE
:
9823 return fold_builtin_classify_type (NULL_TREE
);
9831 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9832 This function returns NULL_TREE if no simplification was possible. */
9835 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
9837 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9838 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9841 case BUILT_IN_CONSTANT_P
:
9843 tree val
= fold_builtin_constant_p (arg0
);
9845 /* Gimplification will pull the CALL_EXPR for the builtin out of
9846 an if condition. When not optimizing, we'll not CSE it back.
9847 To avoid link error types of regressions, return false now. */
9848 if (!val
&& !optimize
)
9849 val
= integer_zero_node
;
9854 case BUILT_IN_CLASSIFY_TYPE
:
9855 return fold_builtin_classify_type (arg0
);
9857 case BUILT_IN_STRLEN
:
9858 return fold_builtin_strlen (loc
, type
, arg0
);
9860 CASE_FLT_FN (BUILT_IN_FABS
):
9861 case BUILT_IN_FABSD32
:
9862 case BUILT_IN_FABSD64
:
9863 case BUILT_IN_FABSD128
:
9864 return fold_builtin_fabs (loc
, arg0
, type
);
9868 case BUILT_IN_LLABS
:
9869 case BUILT_IN_IMAXABS
:
9870 return fold_builtin_abs (loc
, arg0
, type
);
9872 CASE_FLT_FN (BUILT_IN_CONJ
):
9873 if (validate_arg (arg0
, COMPLEX_TYPE
)
9874 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9875 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9878 CASE_FLT_FN (BUILT_IN_CREAL
):
9879 if (validate_arg (arg0
, COMPLEX_TYPE
)
9880 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9881 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
9884 CASE_FLT_FN (BUILT_IN_CIMAG
):
9885 if (validate_arg (arg0
, COMPLEX_TYPE
)
9886 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9887 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9890 CASE_FLT_FN (BUILT_IN_CCOS
):
9891 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
9893 CASE_FLT_FN (BUILT_IN_CCOSH
):
9894 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
9896 CASE_FLT_FN (BUILT_IN_CPROJ
):
9897 return fold_builtin_cproj (loc
, arg0
, type
);
9899 CASE_FLT_FN (BUILT_IN_CSIN
):
9900 if (validate_arg (arg0
, COMPLEX_TYPE
)
9901 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9902 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
9905 CASE_FLT_FN (BUILT_IN_CSINH
):
9906 if (validate_arg (arg0
, COMPLEX_TYPE
)
9907 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9908 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
9911 CASE_FLT_FN (BUILT_IN_CTAN
):
9912 if (validate_arg (arg0
, COMPLEX_TYPE
)
9913 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9914 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
9917 CASE_FLT_FN (BUILT_IN_CTANH
):
9918 if (validate_arg (arg0
, COMPLEX_TYPE
)
9919 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9920 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
9923 CASE_FLT_FN (BUILT_IN_CLOG
):
9924 if (validate_arg (arg0
, COMPLEX_TYPE
)
9925 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9926 return do_mpc_arg1 (arg0
, type
, mpc_log
);
9929 CASE_FLT_FN (BUILT_IN_CSQRT
):
9930 if (validate_arg (arg0
, COMPLEX_TYPE
)
9931 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9932 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
9935 CASE_FLT_FN (BUILT_IN_CASIN
):
9936 if (validate_arg (arg0
, COMPLEX_TYPE
)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9938 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
9941 CASE_FLT_FN (BUILT_IN_CACOS
):
9942 if (validate_arg (arg0
, COMPLEX_TYPE
)
9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9944 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
9947 CASE_FLT_FN (BUILT_IN_CATAN
):
9948 if (validate_arg (arg0
, COMPLEX_TYPE
)
9949 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9950 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
9953 CASE_FLT_FN (BUILT_IN_CASINH
):
9954 if (validate_arg (arg0
, COMPLEX_TYPE
)
9955 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9956 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
9959 CASE_FLT_FN (BUILT_IN_CACOSH
):
9960 if (validate_arg (arg0
, COMPLEX_TYPE
)
9961 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9962 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
9965 CASE_FLT_FN (BUILT_IN_CATANH
):
9966 if (validate_arg (arg0
, COMPLEX_TYPE
)
9967 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9968 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
9971 CASE_FLT_FN (BUILT_IN_CABS
):
9972 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
9974 CASE_FLT_FN (BUILT_IN_CARG
):
9975 return fold_builtin_carg (loc
, arg0
, type
);
9977 CASE_FLT_FN (BUILT_IN_SQRT
):
9978 return fold_builtin_sqrt (loc
, arg0
, type
);
9980 CASE_FLT_FN (BUILT_IN_CBRT
):
9981 return fold_builtin_cbrt (loc
, arg0
, type
);
9983 CASE_FLT_FN (BUILT_IN_ASIN
):
9984 if (validate_arg (arg0
, REAL_TYPE
))
9985 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9986 &dconstm1
, &dconst1
, true);
9989 CASE_FLT_FN (BUILT_IN_ACOS
):
9990 if (validate_arg (arg0
, REAL_TYPE
))
9991 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9992 &dconstm1
, &dconst1
, true);
9995 CASE_FLT_FN (BUILT_IN_ATAN
):
9996 if (validate_arg (arg0
, REAL_TYPE
))
9997 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10000 CASE_FLT_FN (BUILT_IN_ASINH
):
10001 if (validate_arg (arg0
, REAL_TYPE
))
10002 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10005 CASE_FLT_FN (BUILT_IN_ACOSH
):
10006 if (validate_arg (arg0
, REAL_TYPE
))
10007 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10008 &dconst1
, NULL
, true);
10011 CASE_FLT_FN (BUILT_IN_ATANH
):
10012 if (validate_arg (arg0
, REAL_TYPE
))
10013 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10014 &dconstm1
, &dconst1
, false);
10017 CASE_FLT_FN (BUILT_IN_SIN
):
10018 if (validate_arg (arg0
, REAL_TYPE
))
10019 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10022 CASE_FLT_FN (BUILT_IN_COS
):
10023 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10025 CASE_FLT_FN (BUILT_IN_TAN
):
10026 return fold_builtin_tan (arg0
, type
);
10028 CASE_FLT_FN (BUILT_IN_CEXP
):
10029 return fold_builtin_cexp (loc
, arg0
, type
);
10031 CASE_FLT_FN (BUILT_IN_CEXPI
):
10032 if (validate_arg (arg0
, REAL_TYPE
))
10033 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10036 CASE_FLT_FN (BUILT_IN_SINH
):
10037 if (validate_arg (arg0
, REAL_TYPE
))
10038 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10041 CASE_FLT_FN (BUILT_IN_COSH
):
10042 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10044 CASE_FLT_FN (BUILT_IN_TANH
):
10045 if (validate_arg (arg0
, REAL_TYPE
))
10046 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10049 CASE_FLT_FN (BUILT_IN_ERF
):
10050 if (validate_arg (arg0
, REAL_TYPE
))
10051 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10054 CASE_FLT_FN (BUILT_IN_ERFC
):
10055 if (validate_arg (arg0
, REAL_TYPE
))
10056 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10059 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10060 if (validate_arg (arg0
, REAL_TYPE
))
10061 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10064 CASE_FLT_FN (BUILT_IN_EXP
):
10065 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10067 CASE_FLT_FN (BUILT_IN_EXP2
):
10068 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10070 CASE_FLT_FN (BUILT_IN_EXP10
):
10071 CASE_FLT_FN (BUILT_IN_POW10
):
10072 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10074 CASE_FLT_FN (BUILT_IN_EXPM1
):
10075 if (validate_arg (arg0
, REAL_TYPE
))
10076 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10079 CASE_FLT_FN (BUILT_IN_LOG
):
10080 if (validate_arg (arg0
, REAL_TYPE
))
10081 return do_mpfr_arg1 (arg0
, type
, mpfr_log
, &dconst0
, NULL
, false);
10084 CASE_FLT_FN (BUILT_IN_LOG2
):
10085 if (validate_arg (arg0
, REAL_TYPE
))
10086 return do_mpfr_arg1 (arg0
, type
, mpfr_log2
, &dconst0
, NULL
, false);
10089 CASE_FLT_FN (BUILT_IN_LOG10
):
10090 if (validate_arg (arg0
, REAL_TYPE
))
10091 return do_mpfr_arg1 (arg0
, type
, mpfr_log10
, &dconst0
, NULL
, false);
10094 CASE_FLT_FN (BUILT_IN_LOG1P
):
10095 if (validate_arg (arg0
, REAL_TYPE
))
10096 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10097 &dconstm1
, NULL
, false);
10100 CASE_FLT_FN (BUILT_IN_J0
):
10101 if (validate_arg (arg0
, REAL_TYPE
))
10102 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10106 CASE_FLT_FN (BUILT_IN_J1
):
10107 if (validate_arg (arg0
, REAL_TYPE
))
10108 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10112 CASE_FLT_FN (BUILT_IN_Y0
):
10113 if (validate_arg (arg0
, REAL_TYPE
))
10114 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10115 &dconst0
, NULL
, false);
10118 CASE_FLT_FN (BUILT_IN_Y1
):
10119 if (validate_arg (arg0
, REAL_TYPE
))
10120 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10121 &dconst0
, NULL
, false);
10124 CASE_FLT_FN (BUILT_IN_NAN
):
10125 case BUILT_IN_NAND32
:
10126 case BUILT_IN_NAND64
:
10127 case BUILT_IN_NAND128
:
10128 return fold_builtin_nan (arg0
, type
, true);
10130 CASE_FLT_FN (BUILT_IN_NANS
):
10131 return fold_builtin_nan (arg0
, type
, false);
10133 CASE_FLT_FN (BUILT_IN_FLOOR
):
10134 return fold_builtin_floor (loc
, fndecl
, arg0
);
10136 CASE_FLT_FN (BUILT_IN_CEIL
):
10137 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10139 CASE_FLT_FN (BUILT_IN_TRUNC
):
10140 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10142 CASE_FLT_FN (BUILT_IN_ROUND
):
10143 return fold_builtin_round (loc
, fndecl
, arg0
);
10145 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10146 CASE_FLT_FN (BUILT_IN_RINT
):
10147 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10149 CASE_FLT_FN (BUILT_IN_ICEIL
):
10150 CASE_FLT_FN (BUILT_IN_LCEIL
):
10151 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10152 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10153 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10154 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10155 CASE_FLT_FN (BUILT_IN_IROUND
):
10156 CASE_FLT_FN (BUILT_IN_LROUND
):
10157 CASE_FLT_FN (BUILT_IN_LLROUND
):
10158 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10160 CASE_FLT_FN (BUILT_IN_IRINT
):
10161 CASE_FLT_FN (BUILT_IN_LRINT
):
10162 CASE_FLT_FN (BUILT_IN_LLRINT
):
10163 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10165 case BUILT_IN_BSWAP16
:
10166 case BUILT_IN_BSWAP32
:
10167 case BUILT_IN_BSWAP64
:
10168 return fold_builtin_bswap (fndecl
, arg0
);
10170 CASE_INT_FN (BUILT_IN_FFS
):
10171 CASE_INT_FN (BUILT_IN_CLZ
):
10172 CASE_INT_FN (BUILT_IN_CTZ
):
10173 CASE_INT_FN (BUILT_IN_CLRSB
):
10174 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10175 CASE_INT_FN (BUILT_IN_PARITY
):
10176 return fold_builtin_bitop (fndecl
, arg0
);
10178 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10179 return fold_builtin_signbit (loc
, arg0
, type
);
10181 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10182 return fold_builtin_significand (loc
, arg0
, type
);
10184 CASE_FLT_FN (BUILT_IN_ILOGB
):
10185 CASE_FLT_FN (BUILT_IN_LOGB
):
10186 return fold_builtin_logb (loc
, arg0
, type
);
10188 case BUILT_IN_ISASCII
:
10189 return fold_builtin_isascii (loc
, arg0
);
10191 case BUILT_IN_TOASCII
:
10192 return fold_builtin_toascii (loc
, arg0
);
10194 case BUILT_IN_ISDIGIT
:
10195 return fold_builtin_isdigit (loc
, arg0
);
10197 CASE_FLT_FN (BUILT_IN_FINITE
):
10198 case BUILT_IN_FINITED32
:
10199 case BUILT_IN_FINITED64
:
10200 case BUILT_IN_FINITED128
:
10201 case BUILT_IN_ISFINITE
:
10203 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10206 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10209 CASE_FLT_FN (BUILT_IN_ISINF
):
10210 case BUILT_IN_ISINFD32
:
10211 case BUILT_IN_ISINFD64
:
10212 case BUILT_IN_ISINFD128
:
10214 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10217 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10220 case BUILT_IN_ISNORMAL
:
10221 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10223 case BUILT_IN_ISINF_SIGN
:
10224 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10226 CASE_FLT_FN (BUILT_IN_ISNAN
):
10227 case BUILT_IN_ISNAND32
:
10228 case BUILT_IN_ISNAND64
:
10229 case BUILT_IN_ISNAND128
:
10230 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10232 case BUILT_IN_FREE
:
10233 if (integer_zerop (arg0
))
10234 return build_empty_stmt (loc
);
10237 case BUILT_IN_ACC_ON_DEVICE
:
10238 /* Don't fold on_device until we know which compiler is active. */
10239 if (symtab
->state
== EXPANSION
)
10241 unsigned val_host
= GOMP_DEVICE_HOST
;
10242 unsigned val_dev
= GOMP_DEVICE_NONE
;
10244 #ifdef ACCEL_COMPILER
10245 val_host
= GOMP_DEVICE_NOT_HOST
;
10246 val_dev
= ACCEL_COMPILER_acc_device
;
10248 tree host
= build2 (EQ_EXPR
, boolean_type_node
, arg0
,
10249 build_int_cst (integer_type_node
, val_host
));
10250 tree dev
= build2 (EQ_EXPR
, boolean_type_node
, arg0
,
10251 build_int_cst (integer_type_node
, val_dev
));
10253 tree result
= build2 (TRUTH_OR_EXPR
, boolean_type_node
, host
, dev
);
10254 return fold_convert (integer_type_node
, result
);
10266 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10267 This function returns NULL_TREE if no simplification was possible. */
10270 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
10272 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10273 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10277 CASE_FLT_FN (BUILT_IN_JN
):
10278 if (validate_arg (arg0
, INTEGER_TYPE
)
10279 && validate_arg (arg1
, REAL_TYPE
))
10280 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10283 CASE_FLT_FN (BUILT_IN_YN
):
10284 if (validate_arg (arg0
, INTEGER_TYPE
)
10285 && validate_arg (arg1
, REAL_TYPE
))
10286 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10290 CASE_FLT_FN (BUILT_IN_DREM
):
10291 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10292 if (validate_arg (arg0
, REAL_TYPE
)
10293 && validate_arg (arg1
, REAL_TYPE
))
10294 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10297 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10298 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10299 if (validate_arg (arg0
, REAL_TYPE
)
10300 && validate_arg (arg1
, POINTER_TYPE
))
10301 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10304 CASE_FLT_FN (BUILT_IN_ATAN2
):
10305 if (validate_arg (arg0
, REAL_TYPE
)
10306 && validate_arg (arg1
, REAL_TYPE
))
10307 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10310 CASE_FLT_FN (BUILT_IN_FDIM
):
10311 if (validate_arg (arg0
, REAL_TYPE
)
10312 && validate_arg (arg1
, REAL_TYPE
))
10313 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10316 CASE_FLT_FN (BUILT_IN_HYPOT
):
10317 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10319 CASE_FLT_FN (BUILT_IN_CPOW
):
10320 if (validate_arg (arg0
, COMPLEX_TYPE
)
10321 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10322 && validate_arg (arg1
, COMPLEX_TYPE
)
10323 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10324 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10327 CASE_FLT_FN (BUILT_IN_LDEXP
):
10328 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10329 CASE_FLT_FN (BUILT_IN_SCALBN
):
10330 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10331 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10332 type
, /*ldexp=*/false);
10334 CASE_FLT_FN (BUILT_IN_FREXP
):
10335 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10337 CASE_FLT_FN (BUILT_IN_MODF
):
10338 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10340 case BUILT_IN_STRSTR
:
10341 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10343 case BUILT_IN_STRSPN
:
10344 return fold_builtin_strspn (loc
, arg0
, arg1
);
10346 case BUILT_IN_STRCSPN
:
10347 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10349 case BUILT_IN_STRCHR
:
10350 case BUILT_IN_INDEX
:
10351 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10353 case BUILT_IN_STRRCHR
:
10354 case BUILT_IN_RINDEX
:
10355 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10357 case BUILT_IN_STRCMP
:
10358 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10360 case BUILT_IN_STRPBRK
:
10361 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10363 case BUILT_IN_EXPECT
:
10364 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
10366 CASE_FLT_FN (BUILT_IN_POW
):
10367 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10369 CASE_FLT_FN (BUILT_IN_POWI
):
10370 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10372 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10373 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10375 CASE_FLT_FN (BUILT_IN_FMIN
):
10376 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10378 CASE_FLT_FN (BUILT_IN_FMAX
):
10379 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10381 case BUILT_IN_ISGREATER
:
10382 return fold_builtin_unordered_cmp (loc
, fndecl
,
10383 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10384 case BUILT_IN_ISGREATEREQUAL
:
10385 return fold_builtin_unordered_cmp (loc
, fndecl
,
10386 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10387 case BUILT_IN_ISLESS
:
10388 return fold_builtin_unordered_cmp (loc
, fndecl
,
10389 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10390 case BUILT_IN_ISLESSEQUAL
:
10391 return fold_builtin_unordered_cmp (loc
, fndecl
,
10392 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10393 case BUILT_IN_ISLESSGREATER
:
10394 return fold_builtin_unordered_cmp (loc
, fndecl
,
10395 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10396 case BUILT_IN_ISUNORDERED
:
10397 return fold_builtin_unordered_cmp (loc
, fndecl
,
10398 arg0
, arg1
, UNORDERED_EXPR
,
10401 /* We do the folding for va_start in the expander. */
10402 case BUILT_IN_VA_START
:
10405 case BUILT_IN_OBJECT_SIZE
:
10406 return fold_builtin_object_size (arg0
, arg1
);
10408 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10409 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10411 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10412 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);
      break;

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
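
/* Illustrative example of why folding is deferred when the last argument is
   __builtin_va_arg_pack (): an always_inline wrapper in the style of the GCC
   manual's __builtin_va_arg_pack example (user code; identifiers are only for
   demonstration).

     #include <stdio.h>

     extern int my_printf (FILE *f, const char *fmt, ...);
     extern inline __attribute__ ((always_inline, gnu_inline)) int
     my_printf (FILE *f, const char *fmt, ...)
     {
       int r = fprintf (f, "my_printf: ");
       if (r < 0)
         return r;
       int s = fprintf (f, fmt, __builtin_va_arg_pack ());
       return s < 0 ? s : r + s;
     }

   Until the wrapper has been inlined into its callers the argument list is
   not final, so the code above returns NULL_TREE and folding is retried after
   inlining.  */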
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn, int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end:
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     handle).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
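
/* Illustrative example (user code): with constant arguments the call is
   evaluated at compile time, and a single-character search string becomes
   strchr; "s" below is an arbitrary non-constant string.

     #include <string.h>

     const char *
     first_w (const char *s)
     {
       const char *a = strstr ("hello world", "wor");  // folds to "hello world" + 6
       const char *b = strstr (s, "w");                // becomes strchr (s, 'w')
       const char *c = strstr (s, "");                 // folds to s
       return a && b ? c : a;
     }
*/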
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
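
/* Illustrative example (user code): searching for the terminating NUL is the
   only non-constant case simplified above; it is rewritten to strchr, which
   later passes can often reduce further to s + strlen (s).

     #include <string.h>

     char *
     string_end (char *s)
     {
       return strrchr (s, '\0');   // becomes strchr (s, '\0')
     }
*/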
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
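
/* Illustrative example (user code): both spans fold to constants when the
   arguments are string literals, and the degenerate cases avoid the library
   call; "p" below is an arbitrary non-constant string.

     #include <string.h>

     size_t
     spans (const char *p)
     {
       size_t a = strspn ("2015ab", "0123456789");  // folds to 4
       size_t b = strcspn ("2015ab", "ab");         // folds to 4
       size_t c = strcspn (p, "");                  // becomes strlen (p)
       size_t d = strspn (p, "");                   // folds to 0; p still evaluated
       return a + b + c + d;
     }
*/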
11141 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11142 produced. False otherwise. This is done so that we don't output the error
11143 or warning twice or three times. */
11146 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11148 tree fntype
= TREE_TYPE (current_function_decl
);
11149 int nargs
= call_expr_nargs (exp
);
11151 /* There is good chance the current input_location points inside the
11152 definition of the va_start macro (perhaps on the token for
11153 builtin) in a system header, so warnings will not be emitted.
11154 Use the location in real source code. */
11155 source_location current_location
=
11156 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
11159 if (!stdarg_p (fntype
))
11161 error ("%<va_start%> used in function with fixed args");
11167 if (va_start_p
&& (nargs
!= 2))
11169 error ("wrong number of arguments to function %<va_start%>");
11172 arg
= CALL_EXPR_ARG (exp
, 1);
11174 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11175 when we checked the arguments and if needed issued a warning. */
11180 /* Evidently an out of date version of <stdarg.h>; can't validate
11181 va_start's second argument, but can still work as intended. */
11182 warning_at (current_location
,
11184 "%<__builtin_next_arg%> called without an argument");
11187 else if (nargs
> 1)
11189 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11192 arg
= CALL_EXPR_ARG (exp
, 0);
11195 if (TREE_CODE (arg
) == SSA_NAME
)
11196 arg
= SSA_NAME_VAR (arg
);
11198 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11199 or __builtin_next_arg (0) the first time we see it, after checking
11200 the arguments and if needed issuing a warning. */
11201 if (!integer_zerop (arg
))
11203 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11205 /* Strip off all nops for the sake of the comparison. This
11206 is not quite the same as STRIP_NOPS. It does more.
11207 We must also strip off INDIRECT_EXPR for C++ reference
11209 while (CONVERT_EXPR_P (arg
)
11210 || TREE_CODE (arg
) == INDIRECT_REF
)
11211 arg
= TREE_OPERAND (arg
, 0);
11212 if (arg
!= last_parm
)
11214 /* FIXME: Sometimes with the tree optimizers we can get the
11215 not the last argument even though the user used the last
11216 argument. We just warn and set the arg to be the last
11217 argument so that we will get wrong-code because of
11219 warning_at (current_location
,
11221 "second parameter of %<va_start%> not last named argument");
11224 /* Undefined by C99 7.15.1.4p4 (va_start):
11225 "If the parameter parmN is declared with the register storage
11226 class, with a function or array type, or with a type that is
11227 not compatible with the type that results after application of
11228 the default argument promotions, the behavior is undefined."
11230 else if (DECL_REGISTER (arg
))
11232 warning_at (current_location
,
11234 "undefined behaviour when second parameter of "
11235 "%<va_start%> is declared with %<register%> storage");
11238 /* We want to verify the second parameter just once before the tree
11239 optimizers are run and then avoid keeping it in the tree,
11240 as otherwise we could warn even for correct code like:
11241 void foo (int i, ...)
11242 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11244 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11246 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
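
/* Illustrative example (user code): what the four __builtin_object_size
   types return when the object is known and when it is not; the -1/0 values
   for unknown pointers are exactly the constm1_rtx/const0_rtx defaults above.

     #include <stddef.h>

     size_t
     sizes (void *unknown_ptr)
     {
       char buf[64];
       size_t a = __builtin_object_size (buf, 0);          // 64
       size_t b = __builtin_object_size (&buf[16], 0);     // 48
       size_t c = __builtin_object_size (unknown_ptr, 0);  // (size_t) -1
       size_t d = __builtin_object_size (unknown_ptr, 2);  // 0
       return a + b + c + d;
     }
*/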
11287 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11288 FCODE is the BUILT_IN_* to use.
11289 Return NULL_RTX if we failed; the caller should emit a normal call,
11290 otherwise try to get the result in TARGET, if convenient (and in
11291 mode MODE if that's convenient). */
11294 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
11295 enum built_in_function fcode
)
11297 tree dest
, src
, len
, size
;
11299 if (!validate_arglist (exp
,
11301 fcode
== BUILT_IN_MEMSET_CHK
11302 ? INTEGER_TYPE
: POINTER_TYPE
,
11303 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11306 dest
= CALL_EXPR_ARG (exp
, 0);
11307 src
= CALL_EXPR_ARG (exp
, 1);
11308 len
= CALL_EXPR_ARG (exp
, 2);
11309 size
= CALL_EXPR_ARG (exp
, 3);
11311 if (! tree_fits_uhwi_p (size
))
11314 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
11318 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11320 warning_at (tree_nonartificial_location (exp
),
11321 0, "%Kcall to %D will always overflow destination buffer",
11322 exp
, get_callee_fndecl (exp
));
11327 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11328 mem{cpy,pcpy,move,set} is available. */
11331 case BUILT_IN_MEMCPY_CHK
:
11332 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
11334 case BUILT_IN_MEMPCPY_CHK
:
11335 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
11337 case BUILT_IN_MEMMOVE_CHK
:
11338 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
11340 case BUILT_IN_MEMSET_CHK
:
11341 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
11350 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
11351 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11352 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11353 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11355 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11359 unsigned int dest_align
= get_pointer_alignment (dest
);
11361 /* If DEST is not a pointer type, call the normal function. */
11362 if (dest_align
== 0)
11365 /* If SRC and DEST are the same (and not volatile), do nothing. */
11366 if (operand_equal_p (src
, dest
, 0))
11370 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11372 /* Evaluate and ignore LEN in case it has side-effects. */
11373 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11374 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11377 expr
= fold_build_pointer_plus (dest
, len
);
11378 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11381 /* __memmove_chk special case. */
11382 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11384 unsigned int src_align
= get_pointer_alignment (src
);
11386 if (src_align
== 0)
11389 /* If src is categorized for a readonly section we can use
11390 normal __memcpy_chk. */
11391 if (readonly_data_expr (src
))
11393 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
11396 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
11397 dest
, src
, len
, size
);
11398 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11399 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11400 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11407 /* Emit warning if a buffer overflow is detected at compile time. */
11410 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11414 location_t loc
= tree_nonartificial_location (exp
);
11418 case BUILT_IN_STRCPY_CHK
:
11419 case BUILT_IN_STPCPY_CHK
:
11420 /* For __strcat_chk the warning will be emitted only if overflowing
11421 by at least strlen (dest) + 1 bytes. */
11422 case BUILT_IN_STRCAT_CHK
:
11423 len
= CALL_EXPR_ARG (exp
, 1);
11424 size
= CALL_EXPR_ARG (exp
, 2);
11427 case BUILT_IN_STRNCAT_CHK
:
11428 case BUILT_IN_STRNCPY_CHK
:
11429 case BUILT_IN_STPNCPY_CHK
:
11430 len
= CALL_EXPR_ARG (exp
, 2);
11431 size
= CALL_EXPR_ARG (exp
, 3);
11433 case BUILT_IN_SNPRINTF_CHK
:
11434 case BUILT_IN_VSNPRINTF_CHK
:
11435 len
= CALL_EXPR_ARG (exp
, 1);
11436 size
= CALL_EXPR_ARG (exp
, 3);
11439 gcc_unreachable ();
11445 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11450 len
= c_strlen (len
, 1);
11451 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11454 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11456 tree src
= CALL_EXPR_ARG (exp
, 1);
11457 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11459 src
= c_strlen (src
, 1);
11460 if (! src
|| ! tree_fits_uhwi_p (src
))
11462 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
11463 exp
, get_callee_fndecl (exp
));
11466 else if (tree_int_cst_lt (src
, size
))
11469 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
11472 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
11473 exp
, get_callee_fndecl (exp
));
11476 /* Emit warning if a buffer overflow is detected at compile time
11477 in __sprintf_chk/__vsprintf_chk calls. */
11480 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11482 tree size
, len
, fmt
;
11483 const char *fmt_str
;
11484 int nargs
= call_expr_nargs (exp
);
11486 /* Verify the required arguments in the original call. */
11490 size
= CALL_EXPR_ARG (exp
, 2);
11491 fmt
= CALL_EXPR_ARG (exp
, 3);
11493 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11496 /* Check whether the format is a literal string constant. */
11497 fmt_str
= c_getstr (fmt
);
11498 if (fmt_str
== NULL
)
11501 if (!init_target_chars ())
11504 /* If the format doesn't contain % args or %%, we know its size. */
11505 if (strchr (fmt_str
, target_percent
) == 0)
11506 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11507 /* If the format is "%s" and first ... argument is a string literal,
11509 else if (fcode
== BUILT_IN_SPRINTF_CHK
11510 && strcmp (fmt_str
, target_percent_s
) == 0)
11516 arg
= CALL_EXPR_ARG (exp
, 4);
11517 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11520 len
= c_strlen (arg
, 1);
11521 if (!len
|| ! tree_fits_uhwi_p (len
))
11527 if (! tree_int_cst_lt (len
, size
))
11528 warning_at (tree_nonartificial_location (exp
),
11529 0, "%Kcall to %D will always overflow destination buffer",
11530 exp
, get_callee_fndecl (exp
));
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
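
/* Illustrative example (user code) of what triggers the warning above:

     #include <stdlib.h>

     void
     broken (void)
     {
       char buf[16];
       free (buf);   // warning: attempt to free a non-heap object 'buf'
     }
*/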
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, and ARGS is the array of NARGS arguments to the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
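
/* Illustrative example (user code): __builtin_fpclassify is the "..." builtin
   handled above; this is essentially how <math.h> implementations define
   fpclassify, and with a constant argument the whole call folds to one of the
   class constants.

     #include <math.h>

     #define my_fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, (x))

     int cls = my_fpclassify (0.0);   // folds to FP_ZERO
*/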
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
11706 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11707 number and no overflow/underflow occurred. INEXACT is true if M
11708 was not exactly calculated. TYPE is the tree type for the result.
11709 This function assumes that you cleared the MPFR flags and then
11710 calculated M to see if anything subsequently set a flag prior to
11711 entering this function. Return NULL_TREE if any checks fail, if
11712 FORCE_CONVERT is true, then bypass the checks. */
11715 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
11717 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11718 overflow/underflow occurred. If -frounding-math, proceed iff the
11719 result of calling FUNC was exact. */
11721 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
11722 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11723 && (!flag_rounding_math
|| !inexact
)))
11725 REAL_VALUE_TYPE re
, im
;
11727 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
11728 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
11729 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11730 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11731 but the mpft_t is not, then we underflowed in the
11734 || (real_isfinite (&re
) && real_isfinite (&im
)
11735 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
11736 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
11738 REAL_VALUE_TYPE re_mode
, im_mode
;
11740 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
11741 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
11742 /* Proceed iff the specified mode can hold the value. */
11744 || (real_identical (&re_mode
, &re
)
11745 && real_identical (&im_mode
, &im
)))
11746 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
11747 build_real (TREE_TYPE (type
), im_mode
));
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
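
/* Illustrative, standalone sketch (host code, not GCC internals) of the same
   round trip the folder performs: evaluate the math function in MPFR at the
   precision of the result type and convert back.  Build with -lmpfr -lgmp;
   the precision 53 below stands in for IEEE double.

     #include <mpfr.h>
     #include <stdio.h>

     int
     main (void)
     {
       mpfr_t m;
       mpfr_init2 (m, 53);                 // precision of the result type
       mpfr_set_d (m, 0.5, MPFR_RNDN);     // the constant argument
       mpfr_clear_flags ();
       int inexact = mpfr_asin (m, m, MPFR_RNDN);
       printf ("asin(0.5) = %.17g (inexact = %d)\n",
               mpfr_get_d (m, MPFR_RNDN), inexact);
       mpfr_clear (m);
       return 0;
     }
*/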
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
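
/* For illustration, the two-argument math builtins are folded through
   this helper roughly as

     return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   with FUNC selecting the MPFR routine that matches the builtin being
   folded (mpfr_atan2, mpfr_hypot, mpfr_pow, ...); see fold_builtin_2
   and its subroutines for the exact call sites.  */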
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
   mpfr function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of TYPE.
   We assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
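
/* For illustration, this helper serves the fma family: a fully constant
   fma() call is folded roughly as

     return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   (see the fma folding code elsewhere in this file).  */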
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed iff valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }

  return result;
}
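
/* For illustration, both modes described above are used by the callers,
   roughly as

     do_mpfr_sincos (arg, arg_sin, arg_cos);       /+ sincos(): store
						       through pointers. +/
     do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);   /+ cexpi(): return a
						       COMPLEX_CST. +/

   (comment delimiters shown as /+ +/ to keep this block readable);
   see the sincos and cexpi folding code for the exact call sites.  */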
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
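
/* For illustration, the Bessel builtins jn() and yn() reach this helper
   roughly as

     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   yn() passes &dconst0 with INCLUSIVE false because it is only defined
   for strictly positive arguments.  */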
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }

  return result;
}
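
/* For illustration, a fully constant remquo() call is folded roughly as

     return do_mpfr_remquo (arg0, arg1, arg2);

   where ARG2 is the int* operand of the builtin; the value produced is
   the remainder, with the store of the quotient folded in as the first
   half of a COMPOUND_EXPR.  */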
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
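
/* For illustration, a constant lgamma_r() call is folded roughly as

     return do_mpfr_lgamma_r (arg0, arg1, type);

   where ARG1 is the int* signgam operand; as above, the signgam store
   and the lgamma value come back combined in one COMPOUND_EXPR.  */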
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
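
/* For illustration, the unary complex math builtins map onto this
   helper roughly as

     return do_mpc_arg1 (arg0, type, mpc_cos);

   with the mpc routine (mpc_cos, mpc_sin, mpc_tan, mpc_exp, ...)
   chosen to match the builtin being folded.  */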
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
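
/* For illustration, cpow() is folded through this helper roughly as

     return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   and fold-const.c uses it with mpc_mul and mpc_div when folding
   constant complex multiplications and divisions, which is why this
   function is not static.  */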
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
							MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
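
/* For illustration, this is the hook behind asm-renamed builtins such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   (my_memcpy being an arbitrary user-chosen symbol): the builtin decl,
   the block-move/clear expanders and the corresponding libfuncs are all
   redirected to the user's assembler name.  */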
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);