1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
26 #include "coretypes.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
40 #include "diagnostic-core.h"
42 #include "fold-const.h"
43 #include "stor-layout.h"
46 #include "tree-object-size.h"
56 #include "typeclass.h"
57 #include "langhooks.h"
58 #include "value-prof.h"
62 #include "tree-chkp.h"
66 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
68 struct target_builtins default_target_builtins;
70 struct target_builtins *this_target_builtins = &default_target_builtins;
73 /* Define the names of the builtin function types and codes.  */
74 const char *const built_in_class_names[BUILT_IN_LAST]
75   = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
77 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
78 const char * built_in_names[(int) END_BUILTINS] =
80 #include "builtins.def"
84 /* Set up an array of builtin_info_type, make sure each element decl is
85 initialized to NULL_TREE. */
86 builtin_info_type builtin_info[(int)END_BUILTINS];
88 /* Non-zero if __builtin_constant_p should be folded right away.  */
89 bool force_folding_builtin_constant_p;
91 static rtx c_readstr (const char *, machine_mode);
92 static int target_char_cast (tree, char *);
93 static rtx get_memory_rtx (tree, tree);
94 static int apply_args_size (void);
95 static int apply_result_size (void);
96 static rtx result_vector (int, rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static void expand_errno_check (tree, rtx);
105 static rtx expand_builtin_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 					machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree fold_builtin_nan (tree, tree, int);
148 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
149 static bool validate_arg (const_tree, enum tree_code code);
150 static rtx expand_builtin_fabs (tree, rtx, rtx);
151 static rtx expand_builtin_signbit (tree, rtx);
152 static tree fold_builtin_bitop (tree, tree);
153 static tree fold_builtin_strchr (location_t, tree, tree, tree);
154 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
155 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
156 static tree fold_builtin_strcmp (location_t, tree, tree);
157 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
158 static tree fold_builtin_isascii (location_t, tree);
159 static tree fold_builtin_toascii (location_t, tree);
160 static tree fold_builtin_isdigit (location_t, tree);
161 static tree fold_builtin_fabs (location_t, tree, tree);
162 static tree fold_builtin_abs (location_t, tree, tree);
163 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
164 					enum tree_code);
165 static tree fold_builtin_0 (location_t, tree);
166 static tree fold_builtin_1 (location_t, tree, tree);
167 static tree fold_builtin_2 (location_t, tree, tree, tree);
168 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_varargs (location_t, tree, tree *, int);
171 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
172 static tree fold_builtin_strstr (location_t, tree, tree, tree);
173 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
174 static tree fold_builtin_strspn (location_t, tree, tree);
175 static tree fold_builtin_strcspn (location_t, tree, tree);
177 static rtx expand_builtin_object_size (tree);
178 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
179 				      enum built_in_function);
180 static void maybe_emit_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
182 static void maybe_emit_free_warning (tree);
183 static tree fold_builtin_object_size (tree, tree);
185 unsigned HOST_WIDE_INT target_newline;
186 unsigned HOST_WIDE_INT target_percent;
187 static unsigned HOST_WIDE_INT target_c;
188 static unsigned HOST_WIDE_INT target_s;
189 char target_percent_c[3];
190 char target_percent_s[3];
191 char target_percent_s_newline[4];
192 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
193 			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
194 static tree do_mpfr_arg2 (tree, tree, tree,
195 			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
196 static tree do_mpfr_arg3 (tree, tree, tree, tree,
197 			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
198 static tree do_mpfr_sincos (tree, tree, tree);
199 static tree do_mpfr_bessel_n (tree, tree, tree,
200 			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
201 			      const REAL_VALUE_TYPE *, bool);
202 static tree do_mpfr_remquo (tree, tree, tree);
203 static tree do_mpfr_lgamma_r (tree, tree, tree);
204 static void expand_builtin_sync_synchronize (void);
206 /* Return true if NAME starts with __builtin_ or __sync_. */
209 is_builtin_name (const char *name)
211   if (strncmp (name, "__builtin_", 10) == 0)
213   if (strncmp (name, "__sync_", 7) == 0)
215   if (strncmp (name, "__atomic_", 9) == 0)
218       && (!strcmp (name, "__cilkrts_detach")
219 	  || !strcmp (name, "__cilkrts_pop_frame")))
225 /* Return true if DECL is a function symbol representing a built-in.  */
228 is_builtin_fn (tree decl)
230   return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
233 /* Return true if NODE should be considered for inline expansion regardless
234    of the optimization level.  This means whenever a function is invoked with
235    its "internal" name, which normally contains the prefix "__builtin".  */
238 called_as_built_in (tree node)
240   /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
241      we want the name used to call the function, not the name it
243   const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
244   return is_builtin_name (name);
247 /* Compute values M and N such that M divides (address of EXP - N) and such
248 that N < M. If these numbers can be determined, store M in alignp and N in
249 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
250 *alignp and any bit-offset to *bitposp.
252 Note that the address (and thus the alignment) computed here is based
253 on the address to which a symbol resolves, whereas DECL_ALIGN is based
254 on the address at which an object is actually located. These two
255 addresses are not always the same. For example, on ARM targets,
256 the address &foo of a Thumb function foo() has the lowest bit set,
257 whereas foo() itself starts on an even address.
259 If ADDR_P is true we are taking the address of the memory reference EXP
260 and thus cannot rely on the access taking place. */
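/* A worked example, for illustration only (the concrete numbers are not
   taken from the surrounding code): if EXP resolves to an address of the
   form 16*k + 4 bytes, then M is 16 bytes and N is 4 bytes, so on return
   *ALIGNP would be 128 (bits) and *BITPOSP would be 32, since
   (address - 4 bytes) is divisible by 16 bytes and 4 < 16.  */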
263 get_object_alignment_2 (tree exp, unsigned int *alignp,
264 			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
266   HOST_WIDE_INT bitsize, bitpos;
269   int unsignedp, volatilep;
270   unsigned int align = BITS_PER_UNIT;
271   bool known_alignment = false;
273 /* Get the innermost object and the constant (bitpos) and possibly
274 variable (offset) offset of the access. */
275 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
276 &mode
, &unsignedp
, &volatilep
, true);
278 /* Extract alignment information from the innermost object and
279 possibly adjust bitpos and offset. */
280 if (TREE_CODE (exp
) == FUNCTION_DECL
)
282 /* Function addresses can encode extra information besides their
283 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
284 allows the low bit to be used as a virtual bit, we know
285 that the address itself must be at least 2-byte aligned. */
286 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
287 align
= 2 * BITS_PER_UNIT
;
289 else if (TREE_CODE (exp
) == LABEL_DECL
)
291 else if (TREE_CODE (exp
) == CONST_DECL
)
293 /* The alignment of a CONST_DECL is determined by its initializer. */
294 exp
= DECL_INITIAL (exp
);
295 align
= TYPE_ALIGN (TREE_TYPE (exp
));
296 if (CONSTANT_CLASS_P (exp
))
297 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
299 known_alignment
= true;
301 else if (DECL_P (exp
))
303 align
= DECL_ALIGN (exp
);
304 known_alignment
= true;
306 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
308 align
= TYPE_ALIGN (TREE_TYPE (exp
));
310 else if (TREE_CODE (exp
) == INDIRECT_REF
311 || TREE_CODE (exp
) == MEM_REF
312 || TREE_CODE (exp
) == TARGET_MEM_REF
)
314 tree addr
= TREE_OPERAND (exp
, 0);
316 unsigned HOST_WIDE_INT ptr_bitpos
;
317 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
319 /* If the address is explicitely aligned, handle that. */
320 if (TREE_CODE (addr
) == BIT_AND_EXPR
321 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
323 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
324 ptr_bitmask
*= BITS_PER_UNIT
;
325 align
= ptr_bitmask
& -ptr_bitmask
;
326 addr
= TREE_OPERAND (addr
, 0);
330 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
331 align
= MAX (ptr_align
, align
);
333 /* Re-apply explicit alignment to the bitpos. */
334 ptr_bitpos
&= ptr_bitmask
;
336 /* The alignment of the pointer operand in a TARGET_MEM_REF
337 has to take the variable offset parts into account. */
338 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
342 unsigned HOST_WIDE_INT step
= 1;
344 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
345 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
347 if (TMR_INDEX2 (exp
))
348 align
= BITS_PER_UNIT
;
349 known_alignment
= false;
352 /* When EXP is an actual memory reference then we can use
353 TYPE_ALIGN of a pointer indirection to derive alignment.
354 Do so only if get_pointer_alignment_1 did not reveal absolute
355 alignment knowledge and if using that alignment would
356 improve the situation. */
357 if (!addr_p
&& !known_alignment
358 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
359 align
= TYPE_ALIGN (TREE_TYPE (exp
));
362 /* Else adjust bitpos accordingly. */
363 bitpos
+= ptr_bitpos
;
364 if (TREE_CODE (exp
) == MEM_REF
365 || TREE_CODE (exp
) == TARGET_MEM_REF
)
366 bitpos
+= mem_ref_offset (exp
).to_short_addr () * BITS_PER_UNIT
;
369 else if (TREE_CODE (exp
) == STRING_CST
)
371 /* STRING_CST are the only constant objects we allow to be not
372 wrapped inside a CONST_DECL. */
373 align
= TYPE_ALIGN (TREE_TYPE (exp
));
374 if (CONSTANT_CLASS_P (exp
))
375 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
377 known_alignment
= true;
380 /* If there is a non-constant offset part extract the maximum
381 alignment that can prevail. */
384 unsigned int trailing_zeros
= tree_ctz (offset
);
385 if (trailing_zeros
< HOST_BITS_PER_INT
)
387 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
389 align
= MIN (align
, inner
);
394 *bitposp
= bitpos
& (*alignp
- 1);
395 return known_alignment
;
398 /* For a memory reference expression EXP compute values M and N such that M
399 divides (&EXP - N) and such that N < M. If these numbers can be determined,
400 store M in alignp and N in *BITPOSP and return true. Otherwise return false
401 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
404 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
405 unsigned HOST_WIDE_INT
*bitposp
)
407 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
410 /* Return the alignment in bits of EXP, an object. */
413 get_object_alignment (tree exp)
415   unsigned HOST_WIDE_INT bitpos = 0;
418   get_object_alignment_1 (exp, &align, &bitpos);
420 /* align and bitpos now specify known low bits of the pointer.
421 ptr & (align - 1) == bitpos. */
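  /* A small numeric illustration (the values are assumed for the example):
     if the known low bits satisfy ptr & 127 == 48, then bitpos is 48 and
     the guaranteed alignment is only 48 & -48 == 16 bits, which is exactly
     what the adjustment below computes.  */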
424     align = (bitpos & -bitpos);
428 /* For a pointer valued expression EXP compute values M and N such that M
429 divides (EXP - N) and such that N < M. If these numbers can be determined,
430 store M in alignp and N in *BITPOSP and return true. Return false if
431 the results are just a conservative approximation.
433 If EXP is not a pointer, false is returned too. */
436 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
437 unsigned HOST_WIDE_INT
*bitposp
)
441 if (TREE_CODE (exp
) == ADDR_EXPR
)
442 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
443 alignp
, bitposp
, true);
444 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
447 unsigned HOST_WIDE_INT bitpos
;
448 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
450 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
451 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
454 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
455 if (trailing_zeros
< HOST_BITS_PER_INT
)
457 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
459 align
= MIN (align
, inner
);
463 *bitposp
= bitpos
& (align
- 1);
466 else if (TREE_CODE (exp
) == SSA_NAME
467 && POINTER_TYPE_P (TREE_TYPE (exp
)))
469 unsigned int ptr_align
, ptr_misalign
;
470 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
472 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
474 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
475 *alignp
= ptr_align
* BITS_PER_UNIT
;
476 /* We cannot really tell whether this result is an approximation. */
482 *alignp
= BITS_PER_UNIT
;
486 else if (TREE_CODE (exp
) == INTEGER_CST
)
488 *alignp
= BIGGEST_ALIGNMENT
;
489 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
490 & (BIGGEST_ALIGNMENT
- 1));
495 *alignp
= BITS_PER_UNIT
;
499 /* Return the alignment in bits of EXP, a pointer valued expression.
500 The alignment returned is, by default, the alignment of the thing that
501 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
503 Otherwise, look at the expression to see if we can do better, i.e., if the
504 expression is actually pointing at an object whose alignment is tighter. */
507 get_pointer_alignment (tree exp
)
509 unsigned HOST_WIDE_INT bitpos
= 0;
512 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
514 /* align and bitpos now specify known low bits of the pointer.
515 ptr & (align - 1) == bitpos. */
518 align
= (bitpos
& -bitpos
);
523 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
524 way, because it could contain a zero byte in the middle.
525 TREE_STRING_LENGTH is the size of the character array, not the string.
527 ONLY_VALUE should be nonzero if the result is not going to be emitted
528 into the instruction stream and zero if it is going to be expanded.
529 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
530 is returned, otherwise NULL, since
531 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
532 evaluate the side-effects.
534 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
535 accesses. Note that this implies the result is not going to be emitted
536 into the instruction stream.
538 The value returned is of type `ssizetype'.
540 Unfortunately, string_constant can't access the values of const char
541 arrays with initializers, so neither can we do so here. */
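/* An illustrative example (the literals are hypothetical): for
   SRC == "foo\0bar" with a known constant offset of 0, the value returned
   is ssize_int (3); and for i++ ? "foo" : "bar" with ONLY_VALUE nonzero,
   the common length 3 is returned without expanding the side effect.  */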
544 c_strlen (tree src, int only_value)
547   HOST_WIDE_INT offset;
553 if (TREE_CODE (src
) == COND_EXPR
554 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
558 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
559 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
560 if (tree_int_cst_equal (len1
, len2
))
564 if (TREE_CODE (src
) == COMPOUND_EXPR
565 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
566 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
568 loc
= EXPR_LOC_OR_LOC (src
, input_location
);
570 src
= string_constant (src
, &offset_node
);
574 max
= TREE_STRING_LENGTH (src
) - 1;
575 ptr
= TREE_STRING_POINTER (src
);
577 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
579 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
580 compute the offset to the following null if we don't know where to
581 start searching for it. */
584 for (i
= 0; i
< max
; i
++)
588 /* We don't know the starting offset, but we do know that the string
589 has no internal zero bytes. We can assume that the offset falls
590 within the bounds of the string; otherwise, the programmer deserves
591 what he gets. Subtract the offset from the length of the string,
592 and return that. This would perhaps not be valid if we were dealing
593 with named arrays in addition to literal string constants. */
595 return size_diffop_loc (loc
, size_int (max
), offset_node
);
598 /* We have a known offset into the string. Start searching there for
599 a null character if we can represent it as a single HOST_WIDE_INT. */
600 if (offset_node
== 0)
602 else if (! tree_fits_shwi_p (offset_node
))
605 offset
= tree_to_shwi (offset_node
);
607 /* If the offset is known to be out of bounds, warn, and call strlen at
609 if (offset
< 0 || offset
> max
)
611 /* Suppress multiple warnings for propagated constant strings. */
613 && !TREE_NO_WARNING (src
))
615 warning_at (loc
, 0, "offset outside bounds of constant string");
616 TREE_NO_WARNING (src
) = 1;
621 /* Use strlen to search for the first zero byte. Since any strings
622 constructed with build_string will have nulls appended, we win even
623 if we get handed something like (char[4])"abcd".
625 Since OFFSET is our starting index into the string, no further
626 calculation is needed. */
627 return ssize_int (strlen (ptr
+ offset
));
630 /* Return a char pointer for a C string if it is a string constant
631 or sum of string constant and integer constant. */
638 src
= string_constant (src
, &offset_node
);
642 if (offset_node
== 0)
643 return TREE_STRING_POINTER (src
);
644 else if (!tree_fits_uhwi_p (offset_node
)
645 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
648 return TREE_STRING_POINTER (src
) + tree_to_uhwi (offset_node
);
651 /* Return a constant integer corresponding to target reading
652 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
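/* For illustration (assuming a target where SImode is four bytes):
   c_readstr ("abcd", SImode) yields 0x61626364 when the target is
   big-endian and 0x64636261 when it is little-endian, i.e. the same value
   a load of those four bytes from memory would produce.  */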
655 c_readstr (const char *str, machine_mode mode)
659   HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
661   gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
662   unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
663     / HOST_BITS_PER_WIDE_INT;
665   gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
666   for (i = 0; i < len; i++)
670   for (i = 0; i < GET_MODE_SIZE (mode); i++)
673       if (WORDS_BIG_ENDIAN)
674 	j = GET_MODE_SIZE (mode) - i - 1;
675       if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
676 	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
677 	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
681 	ch = (unsigned char) str[i];
682       tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
685   wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
686   return immed_wide_int_const (c, mode);
689 /* Cast a target constant CST to target CHAR and if that value fits into
690 host char type, return zero and put that value into variable pointed to by
694 target_char_cast (tree cst
, char *p
)
696 unsigned HOST_WIDE_INT val
, hostval
;
698 if (TREE_CODE (cst
) != INTEGER_CST
699 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
702 /* Do not care if it fits or not right here. */
703 val
= TREE_INT_CST_LOW (cst
);
705 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
706 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
709 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
710 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
719 /* Similar to save_expr, but assumes that arbitrary code is not executed
720 in between the multiple evaluations. In particular, we assume that a
721 non-addressable local variable will not be modified. */
724 builtin_save_expr (tree exp
)
726 if (TREE_CODE (exp
) == SSA_NAME
727 || (TREE_ADDRESSABLE (exp
) == 0
728 && (TREE_CODE (exp
) == PARM_DECL
729 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
732 return save_expr (exp
);
735 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
736 times to get the address of either a higher stack frame, or a return
737 address located within it (depending on FNDECL_CODE). */
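/* For illustration: a source-level call such as
     void *caller = __builtin_return_address (1);
   reaches this function with FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and
   COUNT == 1, so one level of the dynamic chain is followed before the
   return address is read.  */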
740 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
743   rtx tem = INITIAL_FRAME_ADDRESS_RTX;
746 /* For a zero count with __builtin_return_address, we don't care what
747 frame address we return, because target-specific definitions will
748 override us. Therefore frame pointer elimination is OK, and using
749 the soft frame pointer is OK.
751 For a nonzero count, or a zero count with __builtin_frame_address,
752 we require a stable offset from the current frame pointer to the
753 previous one, so we must use the hard frame pointer, and
754 we must disable frame pointer elimination. */
755 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
756 tem
= frame_pointer_rtx
;
759 tem
= hard_frame_pointer_rtx
;
761 /* Tell reload not to eliminate the frame pointer. */
762 crtl
->accesses_prior_frames
= 1;
767 SETUP_FRAME_ADDRESSES ();
769 /* On the SPARC, the return address is not in the frame, it is in a
770 register. There is no way to access it off of the current frame
771 pointer, but it can be accessed off the previous frame pointer by
772 reading the value from the register window save area. */
773 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
776 /* Scan back COUNT frames to the specified frame. */
777 for (i
= 0; i
< count
; i
++)
779 /* Assume the dynamic chain pointer is in the word that the
780 frame address points to, unless otherwise specified. */
781 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
782 tem
= memory_address (Pmode
, tem
);
783 tem
= gen_frame_mem (Pmode
, tem
);
784 tem
= copy_to_reg (tem
);
787 /* For __builtin_frame_address, return what we've got. But, on
788 the SPARC for example, we may have to add a bias. */
789 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
790 return FRAME_ADDR_RTX (tem
);
792 /* For __builtin_return_address, get the return address from that frame. */
793 #ifdef RETURN_ADDR_RTX
794 tem
= RETURN_ADDR_RTX (count
, tem
);
796 tem
= memory_address (Pmode
,
797 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
798 tem
= gen_frame_mem (Pmode
, tem
);
803 /* Alias set used for setjmp buffer. */
804 static alias_set_type setjmp_alias_set
= -1;
806 /* Construct the leading half of a __builtin_setjmp call. Control will
807 return to RECEIVER_LABEL. This is also called directly by the SJLJ
808 exception handling code. */
811 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
813 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
817 if (setjmp_alias_set
== -1)
818 setjmp_alias_set
= new_alias_set ();
820 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
822 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
824 /* We store the frame pointer and the address of receiver_label in
825 the buffer and use the rest of it for the stack save area, which
826 is machine-dependent. */
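  /* For illustration, the resulting buffer layout is roughly:
       word 0:	   frame pointer (targetm.builtin_setjmp_frame_value ())
       word 1:	   address of RECEIVER_LABEL
       word 2 on:  stack save area (machine-dependent)  */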
828   mem = gen_rtx_MEM (Pmode, buf_addr);
829   set_mem_alias_set (mem, setjmp_alias_set);
830   emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
832   mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
833 					   GET_MODE_SIZE (Pmode))),
834   set_mem_alias_set (mem, setjmp_alias_set);
836   emit_move_insn (validize_mem (mem),
837 		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
839   stack_save = gen_rtx_MEM (sa_mode,
840 			    plus_constant (Pmode, buf_addr,
841 					   2 * GET_MODE_SIZE (Pmode)));
842   set_mem_alias_set (stack_save, setjmp_alias_set);
843   emit_stack_save (SAVE_NONLOCAL, &stack_save);
845 /* If there is further processing to do, do it. */
846 if (targetm
.have_builtin_setjmp_setup ())
847 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
849 /* We have a nonlocal label. */
850 cfun
->has_nonlocal_label
= 1;
853 /* Construct the trailing part of a __builtin_setjmp call. This is
854 also called directly by the SJLJ exception handling code.
855    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
858 expand_builtin_setjmp_receiver (rtx receiver_label
)
862 /* Mark the FP as used when we get here, so we have to make sure it's
863 marked as used by this function. */
864 emit_use (hard_frame_pointer_rtx
);
866 /* Mark the static chain as clobbered here so life information
867 doesn't get messed up for it. */
868 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
869 if (chain
&& REG_P (chain
))
870 emit_clobber (chain
);
872 /* Now put in the code to restore the frame pointer, and argument
873 pointer, if needed. */
874 if (! targetm
.have_nonlocal_goto ())
876 /* First adjust our frame pointer to its actual value. It was
877 previously set to the start of the virtual area corresponding to
878 the stacked variables when we branched here and now needs to be
879 adjusted to the actual hardware fp value.
881 Assignments to virtual registers are converted by
882 instantiate_virtual_regs into the corresponding assignment
883 to the underlying register (fp in this case) that makes
884 the original assignment true.
885 So the following insn will actually be decrementing fp by
886 STARTING_FRAME_OFFSET. */
887 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
889 /* Restoring the frame pointer also modifies the hard frame pointer.
890 Mark it used (so that the previous assignment remains live once
891 the frame pointer is eliminated) and clobbered (to represent the
892 implicit update from the assignment). */
893 emit_use (hard_frame_pointer_rtx
);
894 emit_clobber (hard_frame_pointer_rtx
);
897 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
899 #ifdef ELIMINABLE_REGS
900 /* If the argument pointer can be eliminated in favor of the
901 frame pointer, we don't need to restore it. We assume here
902 that if such an elimination is present, it can always be used.
903 This is the case on all known machines; if we don't make this
904 assumption, we do unnecessary saving on many machines. */
906 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
908 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
909 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
910 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
913 if (i
== ARRAY_SIZE (elim_regs
))
916 /* Now restore our arg pointer from the address at which it
917 was saved in our stack frame. */
918 emit_move_insn (crtl
->args
.internal_arg_pointer
,
919 copy_to_reg (get_arg_pointer_save_area ()));
923 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
924 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
925 else if (targetm
.have_nonlocal_goto_receiver ())
926 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
930 /* We must not allow the code we just generated to be reordered by
931 scheduling. Specifically, the update of the frame pointer must
932 happen immediately, not later. */
933 emit_insn (gen_blockage ());
936 /* __builtin_longjmp is passed a pointer to an array of five words (not
937 all will be used on all machines). It operates similarly to the C
938 library function of the same name, but is more efficient. Much of
939 the code below is copied from the handling of non-local gotos. */
942 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
945 rtx_insn
*insn
, *last
;
946 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
948 /* DRAP is needed for stack realign if longjmp is expanded to current
950 if (SUPPORTS_STACK_ALIGNMENT
)
951 crtl
->need_drap
= true;
953 if (setjmp_alias_set
== -1)
954 setjmp_alias_set
= new_alias_set ();
956 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
958 buf_addr
= force_reg (Pmode
, buf_addr
);
960 /* We require that the user must pass a second argument of 1, because
961 that is what builtin_setjmp will return. */
962 gcc_assert (value
== const1_rtx
);
964 last
= get_last_insn ();
965 if (targetm
.have_builtin_longjmp ())
966 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
969 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
970 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
971 GET_MODE_SIZE (Pmode
)));
973 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
974 2 * GET_MODE_SIZE (Pmode
)));
975 set_mem_alias_set (fp
, setjmp_alias_set
);
976 set_mem_alias_set (lab
, setjmp_alias_set
);
977 set_mem_alias_set (stack
, setjmp_alias_set
);
979 /* Pick up FP, label, and SP from the block and jump. This code is
980 from expand_goto in stmt.c; see there for detailed comments. */
981 if (targetm
.have_nonlocal_goto ())
982 /* We have to pass a value to the nonlocal_goto pattern that will
983 get copied into the static_chain pointer, but it does not matter
984 what that value is, because builtin_setjmp does not use it. */
985 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
988 lab
= copy_to_reg (lab
);
990 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
991 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
993 emit_move_insn (hard_frame_pointer_rtx
, fp
);
994 emit_stack_restore (SAVE_NONLOCAL
, stack
);
996 emit_use (hard_frame_pointer_rtx
);
997 emit_use (stack_pointer_rtx
);
998 emit_indirect_jump (lab
);
1002 /* Search backwards and mark the jump insn as a non-local goto.
1003 Note that this precludes the use of __builtin_longjmp to a
1004 __builtin_setjmp target in the same function. However, we've
1005 already cautioned the user that these functions are for
1006 internal exception handling use only. */
1007 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1009 gcc_assert (insn
!= last
);
1013 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1016 else if (CALL_P (insn
))
1022 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1024 return (iter
->i
< iter
->n
);
1027 /* This function validates the types of a function call argument list
1028 against a specified list of tree_codes. If the last specifier is a 0,
1029    that represents an ellipsis, otherwise the last specifier must be a
1033 validate_arglist (const_tree callexpr, ...)
1035   enum tree_code code;
1038   const_call_expr_arg_iterator iter;
1041   va_start (ap, callexpr);
1042   init_const_call_expr_arg_iterator (callexpr, &iter);
1046       code = (enum tree_code) va_arg (ap, int);
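      /* For illustration, a hypothetical caller might write
	   validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 which accepts exactly a pointer followed by an integer, whereas a
	 trailing 0 instead of VOID_TYPE would accept any further
	 arguments.  */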
1050 	  /* This signifies an ellipsis; any further arguments are all ok.  */
1054 /* This signifies an endlink, if no arguments remain, return
1055 true, otherwise return false. */
1056 res
= !more_const_call_expr_args_p (&iter
);
1059 /* If no parameters remain or the parameter's code does not
1060 match the specified code, return false. Otherwise continue
1061 checking any remaining arguments. */
1062 arg
= next_const_call_expr_arg (&iter
);
1063 if (!validate_arg (arg
, code
))
1070 /* We need gotos here since we can only have one VA_CLOSE in a
1078 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1079 and the address of the save area. */
1082 expand_builtin_nonlocal_goto (tree exp
)
1084 tree t_label
, t_save_area
;
1085 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1088 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1091 t_label
= CALL_EXPR_ARG (exp
, 0);
1092 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1094 r_label
= expand_normal (t_label
);
1095 r_label
= convert_memory_address (Pmode
, r_label
);
1096 r_save_area
= expand_normal (t_save_area
);
1097 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1098 /* Copy the address of the save location to a register just in case it was
1099 based on the frame pointer. */
1100 r_save_area
= copy_to_reg (r_save_area
);
1101 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1102 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1103 plus_constant (Pmode
, r_save_area
,
1104 GET_MODE_SIZE (Pmode
)));
1106 crtl
->has_nonlocal_goto
= 1;
1108 /* ??? We no longer need to pass the static chain value, afaik. */
1109 if (targetm
.have_nonlocal_goto ())
1110 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1113 r_label
= copy_to_reg (r_label
);
1115 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1116 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1118 /* Restore frame pointer for containing function. */
1119 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1120 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1122 /* USE of hard_frame_pointer_rtx added for consistency;
1123 not clear if really needed. */
1124 emit_use (hard_frame_pointer_rtx
);
1125 emit_use (stack_pointer_rtx
);
1127 /* If the architecture is using a GP register, we must
1128 conservatively assume that the target function makes use of it.
1129 The prologue of functions with nonlocal gotos must therefore
1130 initialize the GP register to the appropriate value, and we
1131 must then make sure that this value is live at the point
1132 of the jump. (Note that this doesn't necessarily apply
1133 to targets with a nonlocal_goto pattern; they are free
1134 to implement it in their own way. Note also that this is
1135 a no-op if the GP register is a global invariant.) */
1136 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1137 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1138 emit_use (pic_offset_table_rtx
);
1140 emit_indirect_jump (r_label
);
1143 /* Search backwards to the jump insn and mark it as a
1145 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1149 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1152 else if (CALL_P (insn
))
1159 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1160 (not all will be used on all machines) that was passed to __builtin_setjmp.
1161 It updates the stack pointer in that block to the current value. This is
1162 also called directly by the SJLJ exception handling code. */
1165 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1167 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1169 = gen_rtx_MEM (sa_mode
,
1172 plus_constant (Pmode
, buf_addr
,
1173 2 * GET_MODE_SIZE (Pmode
))));
1175 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1178 /* Expand a call to __builtin_prefetch. For a target that does not support
1179 data prefetch, evaluate the memory address argument in case it has side
1183 expand_builtin_prefetch (tree exp)
1185   tree arg0, arg1, arg2;
1189   if (!validate_arglist (exp, POINTER_TYPE, 0))
1192   arg0 = CALL_EXPR_ARG (exp, 0);
1194   /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1195      zero (read) and argument 2 (locality) defaults to 3 (high degree of
1197   nargs = call_expr_nargs (exp);
1199     arg1 = CALL_EXPR_ARG (exp, 1);
1201     arg1 = integer_zero_node;
1203     arg2 = CALL_EXPR_ARG (exp, 2);
1205     arg2 = integer_three_node;
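  /* For illustration: a plain __builtin_prefetch (p) is treated here as
     __builtin_prefetch (p, 0, 3), i.e. a read prefetch with maximal
     temporal locality.  */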
1207 /* Argument 0 is an address. */
1208 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1210 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1211 if (TREE_CODE (arg1
) != INTEGER_CST
)
1213 error ("second argument to %<__builtin_prefetch%> must be a constant");
1214 arg1
= integer_zero_node
;
1216 op1
= expand_normal (arg1
);
1217 /* Argument 1 must be either zero or one. */
1218 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1220 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1225 /* Argument 2 (locality) must be a compile-time constant int. */
1226 if (TREE_CODE (arg2
) != INTEGER_CST
)
1228 error ("third argument to %<__builtin_prefetch%> must be a constant");
1229 arg2
= integer_zero_node
;
1231 op2
= expand_normal (arg2
);
1232 /* Argument 2 must be 0, 1, 2, or 3. */
1233 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1235 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1239 if (targetm
.have_prefetch ())
1241 struct expand_operand ops
[3];
1243 create_address_operand (&ops
[0], op0
);
1244 create_integer_operand (&ops
[1], INTVAL (op1
));
1245 create_integer_operand (&ops
[2], INTVAL (op2
));
1246 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1250 /* Don't do anything with direct references to volatile memory, but
1251 generate code to handle other side effects. */
1252 if (!MEM_P (op0
) && side_effects_p (op0
))
1256 /* Get a MEM rtx for expression EXP which is the address of an operand
1257 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1258 the maximum length of the block of memory that might be accessed or
1262 get_memory_rtx (tree exp
, tree len
)
1264 tree orig_exp
= exp
;
1267 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1268 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1269 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1270 exp
= TREE_OPERAND (exp
, 0);
1272 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1273 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1275 /* Get an expression we can use to find the attributes to assign to MEM.
1276 First remove any nops. */
1277 while (CONVERT_EXPR_P (exp
)
1278 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1279 exp
= TREE_OPERAND (exp
, 0);
1281 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1282 (as builtin stringops may alias with anything). */
1283 exp
= fold_build2 (MEM_REF
,
1284 build_array_type (char_type_node
,
1285 build_range_type (sizetype
,
1286 size_one_node
, len
)),
1287 exp
, build_int_cst (ptr_type_node
, 0));
1289 /* If the MEM_REF has no acceptable address, try to get the base object
1290 from the original address we got, and build an all-aliasing
1291 unknown-sized access to that one. */
1292 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1293 set_mem_attributes (mem
, exp
, 0);
1294 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1295 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1298 exp
= build_fold_addr_expr (exp
);
1299 exp
= fold_build2 (MEM_REF
,
1300 build_array_type (char_type_node
,
1301 build_range_type (sizetype
,
1304 exp
, build_int_cst (ptr_type_node
, 0));
1305 set_mem_attributes (mem
, exp
, 0);
1307 set_mem_alias_set (mem
, 0);
1311 /* Built-in functions to perform an untyped call and return. */
1313 #define apply_args_mode \
1314 (this_target_builtins->x_apply_args_mode)
1315 #define apply_result_mode \
1316 (this_target_builtins->x_apply_result_mode)
1318 /* Return the size required for the block returned by __builtin_apply_args,
1319 and initialize apply_args_mode. */
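/* For illustration, the block whose size is computed below is laid out
   roughly as: the incoming arg pointer, then (if not passed as a hidden
   first argument) the structure value address, then one slot per
   argument-passing hard register, each padded to the alignment of its
   mode.  */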
1322 apply_args_size (void)
1324   static int size = -1;
1329 /* The values computed by this function never change. */
1332 /* The first value is the incoming arg-pointer. */
1333 size
= GET_MODE_SIZE (Pmode
);
1335 /* The second value is the structure value address unless this is
1336 passed as an "invisible" first argument. */
1337 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1338 size
+= GET_MODE_SIZE (Pmode
);
1340 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1341 if (FUNCTION_ARG_REGNO_P (regno
))
1343 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1345 gcc_assert (mode
!= VOIDmode
);
1347 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1348 if (size
% align
!= 0)
1349 size
= CEIL (size
, align
) * align
;
1350 size
+= GET_MODE_SIZE (mode
);
1351 apply_args_mode
[regno
] = mode
;
1355 apply_args_mode
[regno
] = VOIDmode
;
1361 /* Return the size required for the block returned by __builtin_apply,
1362 and initialize apply_result_mode. */
1365 apply_result_size (void)
1367 static int size
= -1;
1371 /* The values computed by this function never change. */
1376 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1377 if (targetm
.calls
.function_value_regno_p (regno
))
1379 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1381 gcc_assert (mode
!= VOIDmode
);
1383 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1384 if (size
% align
!= 0)
1385 size
= CEIL (size
, align
) * align
;
1386 size
+= GET_MODE_SIZE (mode
);
1387 apply_result_mode
[regno
] = mode
;
1390 apply_result_mode
[regno
] = VOIDmode
;
1392 /* Allow targets that use untyped_call and untyped_return to override
1393 the size so that machine-specific information can be stored here. */
1394 #ifdef APPLY_RESULT_SIZE
1395 size
= APPLY_RESULT_SIZE
;
1401 /* Create a vector describing the result block RESULT. If SAVEP is true,
1402 the result block is used to save the values; otherwise it is used to
1403 restore the values. */
1406 result_vector (int savep
, rtx result
)
1408 int regno
, size
, align
, nelts
;
1411 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1414 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1415 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1417 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1418 if (size
% align
!= 0)
1419 size
= CEIL (size
, align
) * align
;
1420 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1421 mem
= adjust_address (result
, mode
, size
);
1422 savevec
[nelts
++] = (savep
1423 ? gen_rtx_SET (mem
, reg
)
1424 : gen_rtx_SET (reg
, mem
));
1425 size
+= GET_MODE_SIZE (mode
);
1427 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1430 /* Save the state required to perform an untyped call with the same
1431 arguments as were passed to the current function. */
1434 expand_builtin_apply_args_1 (void)
1437 int size
, align
, regno
;
1439 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1441 /* Create a block where the arg-pointer, structure value address,
1442 and argument registers can be saved. */
1443 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1445 /* Walk past the arg-pointer and structure value address. */
1446 size
= GET_MODE_SIZE (Pmode
);
1447 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1448 size
+= GET_MODE_SIZE (Pmode
);
1450 /* Save each register used in calling a function to the block. */
1451 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1452 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1454 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1455 if (size
% align
!= 0)
1456 size
= CEIL (size
, align
) * align
;
1458 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1460 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1461 size
+= GET_MODE_SIZE (mode
);
1464 /* Save the arg pointer to the block. */
1465 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1466 /* We need the pointer as the caller actually passed them to us, not
1467 as we might have pretended they were passed. Make sure it's a valid
1468 operand, as emit_move_insn isn't expected to handle a PLUS. */
1469 if (STACK_GROWS_DOWNWARD
)
1471 = force_operand (plus_constant (Pmode
, tem
,
1472 crtl
->args
.pretend_args_size
),
1474 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1476 size
= GET_MODE_SIZE (Pmode
);
1478 /* Save the structure value address unless this is passed as an
1479 "invisible" first argument. */
1480 if (struct_incoming_value
)
1482 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1483 copy_to_reg (struct_incoming_value
));
1484 size
+= GET_MODE_SIZE (Pmode
);
1487 /* Return the address of the block. */
1488 return copy_addr_to_reg (XEXP (registers
, 0));
1491 /* __builtin_apply_args returns block of memory allocated on
1492 the stack into which is stored the arg pointer, structure
1493 value address, static chain, and all the registers that might
1494 possibly be used in performing a function call. The code is
1495 moved to the start of the function so the incoming values are
1499 expand_builtin_apply_args (void)
1501 /* Don't do __builtin_apply_args more than once in a function.
1502 Save the result of the first call and reuse it. */
1503 if (apply_args_value
!= 0)
1504 return apply_args_value
;
1506 /* When this function is called, it means that registers must be
1507 saved on entry to this function. So we migrate the
1508 call to the first insn of this function. */
1512 temp
= expand_builtin_apply_args_1 ();
1513 rtx_insn
*seq
= get_insns ();
1516 apply_args_value
= temp
;
1518 /* Put the insns after the NOTE that starts the function.
1519 If this is inside a start_sequence, make the outer-level insn
1520 chain current, so the code is placed at the start of the
1521 function. If internal_arg_pointer is a non-virtual pseudo,
1522 it needs to be placed after the function that initializes
1524 push_topmost_sequence ();
1525 if (REG_P (crtl
->args
.internal_arg_pointer
)
1526 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1527 emit_insn_before (seq
, parm_birth_insn
);
1529 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1530 pop_topmost_sequence ();
1535 /* Perform an untyped call and save the state required to perform an
1536 untyped return of whatever value was returned by the given function. */
1539 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1541 int size
, align
, regno
;
1543 rtx incoming_args
, result
, reg
, dest
, src
;
1544 rtx_call_insn
*call_insn
;
1545 rtx old_stack_level
= 0;
1546 rtx call_fusage
= 0;
1547 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1549 arguments
= convert_memory_address (Pmode
, arguments
);
1551 /* Create a block where the return registers can be saved. */
1552 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1554 /* Fetch the arg pointer from the ARGUMENTS block. */
1555 incoming_args
= gen_reg_rtx (Pmode
);
1556 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1557 if (!STACK_GROWS_DOWNWARD
)
1558 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1559 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1561 /* Push a new argument block and copy the arguments. Do not allow
1562 the (potential) memcpy call below to interfere with our stack
1564 do_pending_stack_adjust ();
1567 /* Save the stack with nonlocal if available. */
1568 if (targetm
.have_save_stack_nonlocal ())
1569 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1571 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1573 /* Allocate a block of memory onto the stack and copy the memory
1574 arguments to the outgoing arguments address. We can pass TRUE
1575 as the 4th argument because we just saved the stack pointer
1576 and will restore it right after the call. */
1577 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1579 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1580 may have already set current_function_calls_alloca to true.
1581 current_function_calls_alloca won't be set if argsize is zero,
1582 so we have to guarantee need_drap is true here. */
1583 if (SUPPORTS_STACK_ALIGNMENT
)
1584 crtl
->need_drap
= true;
1586 dest
= virtual_outgoing_args_rtx
;
1587 if (!STACK_GROWS_DOWNWARD
)
1589 if (CONST_INT_P (argsize
))
1590 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1592 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1594 dest
= gen_rtx_MEM (BLKmode
, dest
);
1595 set_mem_align (dest
, PARM_BOUNDARY
);
1596 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1597 set_mem_align (src
, PARM_BOUNDARY
);
1598 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1600 /* Refer to the argument block. */
1602 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1603 set_mem_align (arguments
, PARM_BOUNDARY
);
1605 /* Walk past the arg-pointer and structure value address. */
1606 size
= GET_MODE_SIZE (Pmode
);
1608 size
+= GET_MODE_SIZE (Pmode
);
1610 /* Restore each of the registers previously saved. Make USE insns
1611 for each of these registers for use in making the call. */
1612 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1613 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1615 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1616 if (size
% align
!= 0)
1617 size
= CEIL (size
, align
) * align
;
1618 reg
= gen_rtx_REG (mode
, regno
);
1619 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1620 use_reg (&call_fusage
, reg
);
1621 size
+= GET_MODE_SIZE (mode
);
1624 /* Restore the structure value address unless this is passed as an
1625 "invisible" first argument. */
1626 size
= GET_MODE_SIZE (Pmode
);
1629 rtx value
= gen_reg_rtx (Pmode
);
1630 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1631 emit_move_insn (struct_value
, value
);
1632 if (REG_P (struct_value
))
1633 use_reg (&call_fusage
, struct_value
);
1634 size
+= GET_MODE_SIZE (Pmode
);
1637 /* All arguments and registers used for the call are set up by now! */
1638 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1640 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1641 and we don't want to load it into a register as an optimization,
1642 because prepare_call_address already did it if it should be done. */
1643 if (GET_CODE (function
) != SYMBOL_REF
)
1644 function
= memory_address (FUNCTION_MODE
, function
);
1646 /* Generate the actual call instruction and save the return value. */
1647 if (targetm
.have_untyped_call ())
1649 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1650 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1651 result_vector (1, result
)));
1653 else if (targetm
.have_call_value ())
1657 /* Locate the unique return register. It is not possible to
1658 express a call that sets more than one return register using
1659 call_value; use untyped_call for that. In fact, untyped_call
1660 only needs to save the return registers in the given block. */
1661 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1662 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1664 gcc_assert (!valreg
); /* have_untyped_call required. */
1666 valreg
= gen_rtx_REG (mode
, regno
);
1669 emit_insn (targetm
.gen_call_value (valreg
,
1670 gen_rtx_MEM (FUNCTION_MODE
, function
),
1671 const0_rtx
, NULL_RTX
, const0_rtx
));
1673 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1678 /* Find the CALL insn we just emitted, and attach the register usage
1680 call_insn
= last_call_insn ();
1681 add_function_usage_to (call_insn
, call_fusage
);
1683 /* Restore the stack. */
1684 if (targetm
.have_save_stack_nonlocal ())
1685 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1687 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1688 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1692 /* Return the address of the result block. */
1693 result
= copy_addr_to_reg (XEXP (result
, 0));
1694 return convert_memory_address (ptr_mode
, result
);
1697 /* Perform an untyped return. */
1700 expand_builtin_return (rtx result
)
1702 int size
, align
, regno
;
1705 rtx_insn
*call_fusage
= 0;
1707 result
= convert_memory_address (Pmode
, result
);
1709 apply_result_size ();
1710 result
= gen_rtx_MEM (BLKmode
, result
);
1712 if (targetm
.have_untyped_return ())
1714 rtx vector
= result_vector (0, result
);
1715 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1720 /* Restore the return value and note that each value is used. */
1722 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1723 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1725 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1726 if (size
% align
!= 0)
1727 size
= CEIL (size
, align
) * align
;
1728 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1729 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1731 push_to_sequence (call_fusage
);
1733 call_fusage
= get_insns ();
1735 size
+= GET_MODE_SIZE (mode
);
1738 /* Put the USE insns before the return. */
1739 emit_insn (call_fusage
);
1741 /* Return whatever values was restored by jumping directly to the end
1743 expand_naked_return ();
1746 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1748 static enum type_class
1749 type_to_class (tree type
)
1751 switch (TREE_CODE (type
))
1753 case VOID_TYPE
: return void_type_class
;
1754 case INTEGER_TYPE
: return integer_type_class
;
1755 case ENUMERAL_TYPE
: return enumeral_type_class
;
1756 case BOOLEAN_TYPE
: return boolean_type_class
;
1757 case POINTER_TYPE
: return pointer_type_class
;
1758 case REFERENCE_TYPE
: return reference_type_class
;
1759 case OFFSET_TYPE
: return offset_type_class
;
1760 case REAL_TYPE
: return real_type_class
;
1761 case COMPLEX_TYPE
: return complex_type_class
;
1762 case FUNCTION_TYPE
: return function_type_class
;
1763 case METHOD_TYPE
: return method_type_class
;
1764 case RECORD_TYPE
: return record_type_class
;
1766 case QUAL_UNION_TYPE
: return union_type_class
;
1767 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1768 ? string_type_class
: array_type_class
);
1769 case LANG_TYPE
: return lang_type_class
;
1770 default: return no_type_class
;
1774 /* Expand a call EXP to __builtin_classify_type. */
1777 expand_builtin_classify_type (tree exp
)
1779 if (call_expr_nargs (exp
))
1780 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1781 return GEN_INT (no_type_class
);

/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F; \
  fcodel = BUILT_IN_MATHFN##L; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R; \
  fcodel = BUILT_IN_MATHFN##L_R; break;
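
/* As an illustration, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
     fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
     fcodel = BUILT_IN_SINL; break;

   so each entry in the switch below handles the double, float and
   long double variants of one math builtin at once.  */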

/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1 (), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
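
/* For example, mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
   yields the declaration of the sinl builtin when that builtin may be
   used implicitly, and NULL_TREE otherwise.  */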

/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
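
/* For instance, after expanding sqrt with a possibly negative argument,
   the comparison above tests the result against itself: only a NaN
   compares unequal to itself, so the EDOM handling is skipped for every
   in-domain argument.  */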

/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
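
/* Roughly, on a target whose machine description provides a sqrt<mode>2
   pattern, a call to sqrt (x) compiled with -fno-math-errno goes through
   the expand_unop path above and becomes a single square-root insn;
   without such a pattern (or when errno must be honored and we are
   optimizing for size) the call falls through to expand_call and stays a
   library call.  */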

/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return NULL_RTX;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return NULL_RTX;
    /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return NULL_RTX;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}

/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
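
/* For example, __builtin_isinf (x) is expanded through
   maybe_emit_unop_insn above on a target that provides an isinf pattern
   for the mode of X; otherwise interclass_mathfn_icode returns
   CODE_FOR_nothing and the caller falls back to a normal library call.  */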
2475 /* Expand a call to the builtin sincos math function.
2476 Return NULL_RTX if a normal call should be emitted rather than expanding the
2477 function in-line. EXP is the expression that is a call to the builtin
2481 expand_builtin_sincos (tree exp
)
2483 rtx op0
, op1
, op2
, target1
, target2
;
2485 tree arg
, sinp
, cosp
;
2487 location_t loc
= EXPR_LOCATION (exp
);
2488 tree alias_type
, alias_off
;
2490 if (!validate_arglist (exp
, REAL_TYPE
,
2491 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2494 arg
= CALL_EXPR_ARG (exp
, 0);
2495 sinp
= CALL_EXPR_ARG (exp
, 1);
2496 cosp
= CALL_EXPR_ARG (exp
, 2);
2498 /* Make a suitable register to place result in. */
2499 mode
= TYPE_MODE (TREE_TYPE (arg
));
2501 /* Check if sincos insn is available, otherwise emit the call. */
2502 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2505 target1
= gen_reg_rtx (mode
);
2506 target2
= gen_reg_rtx (mode
);
2508 op0
= expand_normal (arg
);
2509 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2510 alias_off
= build_int_cst (alias_type
, 0);
2511 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2513 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2516 /* Compute into target1 and target2.
2517 Set TARGET to wherever the result comes back. */
2518 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2519 gcc_assert (result
);
2521 /* Move target1 and target2 to the memory locations indicated
2523 emit_move_insn (op1
, target1
);
2524 emit_move_insn (op2
, target2
);
2529 /* Expand a call to the internal cexpi builtin to the sincos math function.
2530 EXP is the expression that is a call to the builtin function; if convenient,
2531 the result should be placed in TARGET. */
2534 expand_builtin_cexpi (tree exp
, rtx target
)
2536 tree fndecl
= get_callee_fndecl (exp
);
2540 location_t loc
= EXPR_LOCATION (exp
);
2542 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2545 arg
= CALL_EXPR_ARG (exp
, 0);
2546 type
= TREE_TYPE (arg
);
2547 mode
= TYPE_MODE (TREE_TYPE (arg
));
2549 /* Try expanding via a sincos optab, fall back to emitting a libcall
2550 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2551 is only generated from sincos, cexp or if we have either of them. */
2552 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2554 op1
= gen_reg_rtx (mode
);
2555 op2
= gen_reg_rtx (mode
);
2557 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2559 /* Compute into op1 and op2. */
2560 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2562 else if (targetm
.libc_has_function (function_sincos
))
2564 tree call
, fn
= NULL_TREE
;
2568 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2569 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2570 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2571 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2572 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2573 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2577 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2578 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2579 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2580 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2581 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2582 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2584 /* Make sure not to fold the sincos call again. */
2585 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2586 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2587 call
, 3, arg
, top1
, top2
));
2591 tree call
, fn
= NULL_TREE
, narg
;
2592 tree ctype
= build_complex_type (type
);
2594 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2595 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2596 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2597 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2598 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2599 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2603 /* If we don't have a decl for cexp create one. This is the
2604 friendliest fallback if the user calls __builtin_cexpi
2605 without full target C99 function support. */
2606 if (fn
== NULL_TREE
)
2609 const char *name
= NULL
;
2611 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2613 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2615 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2618 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2619 fn
= build_fn_decl (name
, fntype
);
2622 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2623 build_real (type
, dconst0
), arg
);
2625 /* Make sure not to fold the cexp call again. */
2626 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2627 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2628 target
, VOIDmode
, EXPAND_NORMAL
);
2631 /* Now build the proper return type. */
2632 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2633 make_tree (TREE_TYPE (arg
), op2
),
2634 make_tree (TREE_TYPE (arg
), op1
)),
2635 target
, VOIDmode
, EXPAND_NORMAL
);
2638 /* Conveniently construct a function call expression. FNDECL names the
2639 function to be called, N is the number of arguments, and the "..."
2640 parameters are the argument expressions. Unlike build_call_exr
2641 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2644 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2647 tree fntype
= TREE_TYPE (fndecl
);
2648 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2651 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2653 SET_EXPR_LOCATION (fn
, loc
);
2657 /* Expand a call to one of the builtin rounding functions gcc defines
2658 as an extension (lfloor and lceil). As these are gcc extensions we
2659 do not need to worry about setting errno to EDOM.
2660 If expanding via optab fails, lower expression to (int)(floor(x)).
2661 EXP is the expression that is a call to the builtin function;
2662 if convenient, the result should be placed in TARGET. */
2665 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2667 convert_optab builtin_optab
;
2670 tree fndecl
= get_callee_fndecl (exp
);
2671 enum built_in_function fallback_fn
;
2672 tree fallback_fndecl
;
2676 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2679 arg
= CALL_EXPR_ARG (exp
, 0);
2681 switch (DECL_FUNCTION_CODE (fndecl
))
2683 CASE_FLT_FN (BUILT_IN_ICEIL
):
2684 CASE_FLT_FN (BUILT_IN_LCEIL
):
2685 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2686 builtin_optab
= lceil_optab
;
2687 fallback_fn
= BUILT_IN_CEIL
;
2690 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2691 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2692 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2693 builtin_optab
= lfloor_optab
;
2694 fallback_fn
= BUILT_IN_FLOOR
;
2701 /* Make a suitable register to place result in. */
2702 mode
= TYPE_MODE (TREE_TYPE (exp
));
2704 target
= gen_reg_rtx (mode
);
2706 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2707 need to expand the argument again. This way, we will not perform
2708 side-effects more the once. */
2709 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2711 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2715 /* Compute into TARGET. */
2716 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2718 /* Output the entire sequence. */
2719 insns
= get_insns ();
2725 /* If we were unable to expand via the builtin, stop the sequence
2726 (without outputting the insns). */
2729 /* Fall back to floating point rounding optab. */
2730 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2732 /* For non-C99 targets we may end up without a fallback fndecl here
2733 if the user called __builtin_lfloor directly. In this case emit
2734 a call to the floor/ceil variants nevertheless. This should result
2735 in the best user experience for not full C99 targets. */
2736 if (fallback_fndecl
== NULL_TREE
)
2739 const char *name
= NULL
;
2741 switch (DECL_FUNCTION_CODE (fndecl
))
2743 case BUILT_IN_ICEIL
:
2744 case BUILT_IN_LCEIL
:
2745 case BUILT_IN_LLCEIL
:
2748 case BUILT_IN_ICEILF
:
2749 case BUILT_IN_LCEILF
:
2750 case BUILT_IN_LLCEILF
:
2753 case BUILT_IN_ICEILL
:
2754 case BUILT_IN_LCEILL
:
2755 case BUILT_IN_LLCEILL
:
2758 case BUILT_IN_IFLOOR
:
2759 case BUILT_IN_LFLOOR
:
2760 case BUILT_IN_LLFLOOR
:
2763 case BUILT_IN_IFLOORF
:
2764 case BUILT_IN_LFLOORF
:
2765 case BUILT_IN_LLFLOORF
:
2768 case BUILT_IN_IFLOORL
:
2769 case BUILT_IN_LFLOORL
:
2770 case BUILT_IN_LLFLOORL
:
2777 fntype
= build_function_type_list (TREE_TYPE (arg
),
2778 TREE_TYPE (arg
), NULL_TREE
);
2779 fallback_fndecl
= build_fn_decl (name
, fntype
);
2782 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2784 tmp
= expand_normal (exp
);
2785 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2787 /* Truncate the result of floating point optab to integer
2788 via expand_fix (). */
2789 target
= gen_reg_rtx (mode
);
2790 expand_fix (target
, tmp
, 0);
2795 /* Expand a call to one of the builtin math functions doing integer
2797 Return 0 if a normal call should be emitted rather than expanding the
2798 function in-line. EXP is the expression that is a call to the builtin
2799 function; if convenient, the result should be placed in TARGET. */
2802 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2804 convert_optab builtin_optab
;
2807 tree fndecl
= get_callee_fndecl (exp
);
2810 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2812 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2815 arg
= CALL_EXPR_ARG (exp
, 0);
2817 switch (DECL_FUNCTION_CODE (fndecl
))
2819 CASE_FLT_FN (BUILT_IN_IRINT
):
2820 fallback_fn
= BUILT_IN_LRINT
;
2822 CASE_FLT_FN (BUILT_IN_LRINT
):
2823 CASE_FLT_FN (BUILT_IN_LLRINT
):
2824 builtin_optab
= lrint_optab
;
2827 CASE_FLT_FN (BUILT_IN_IROUND
):
2828 fallback_fn
= BUILT_IN_LROUND
;
2830 CASE_FLT_FN (BUILT_IN_LROUND
):
2831 CASE_FLT_FN (BUILT_IN_LLROUND
):
2832 builtin_optab
= lround_optab
;
2839 /* There's no easy way to detect the case we need to set EDOM. */
2840 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2843 /* Make a suitable register to place result in. */
2844 mode
= TYPE_MODE (TREE_TYPE (exp
));
2846 /* There's no easy way to detect the case we need to set EDOM. */
2847 if (!flag_errno_math
)
2849 rtx result
= gen_reg_rtx (mode
);
2851 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2852 need to expand the argument again. This way, we will not perform
2853 side-effects more the once. */
2854 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2856 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2860 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2862 /* Output the entire sequence. */
2863 insns
= get_insns ();
2869 /* If we were unable to expand via the builtin, stop the sequence
2870 (without outputting the insns) and call to the library function
2871 with the stabilized argument list. */
2875 if (fallback_fn
!= BUILT_IN_NONE
)
2877 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2878 targets, (int) round (x) should never be transformed into
2879 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2880 a call to lround in the hope that the target provides at least some
2881 C99 functions. This should result in the best user experience for
2882 not full C99 targets. */
2883 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2886 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2887 fallback_fndecl
, 1, arg
);
2889 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2890 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2891 return convert_to_mode (mode
, target
, 0);
2894 return expand_call (exp
, target
, target
== const0_rtx
);
2897 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2898 a normal call should be emitted rather than expanding the function
2899 in-line. EXP is the expression that is a call to the builtin
2900 function; if convenient, the result should be placed in TARGET. */
2903 expand_builtin_powi (tree exp
, rtx target
)
2910 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2913 arg0
= CALL_EXPR_ARG (exp
, 0);
2914 arg1
= CALL_EXPR_ARG (exp
, 1);
2915 mode
= TYPE_MODE (TREE_TYPE (exp
));
2917 /* Emit a libcall to libgcc. */
2919 /* Mode of the 2nd argument must match that of an int. */
2920 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2922 if (target
== NULL_RTX
)
2923 target
= gen_reg_rtx (mode
);
2925 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2926 if (GET_MODE (op0
) != mode
)
2927 op0
= convert_to_mode (mode
, op0
, 0);
2928 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2929 if (GET_MODE (op1
) != mode2
)
2930 op1
= convert_to_mode (mode2
, op1
, 0);
2932 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2933 target
, LCT_CONST
, mode
, 2,
2934 op0
, mode
, op1
, mode2
);
2939 /* Expand expression EXP which is a call to the strlen builtin. Return
2940 NULL_RTX if we failed the caller should emit a normal call, otherwise
2941 try to get the result in TARGET, if convenient. */
2944 expand_builtin_strlen (tree exp
, rtx target
,
2945 machine_mode target_mode
)
2947 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2951 struct expand_operand ops
[4];
2954 tree src
= CALL_EXPR_ARG (exp
, 0);
2956 rtx_insn
*before_strlen
;
2957 machine_mode insn_mode
= target_mode
;
2958 enum insn_code icode
= CODE_FOR_nothing
;
2961 /* If the length can be computed at compile-time, return it. */
2962 len
= c_strlen (src
, 0);
2964 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2966 /* If the length can be computed at compile-time and is constant
2967 integer, but there are side-effects in src, evaluate
2968 src for side-effects, then return len.
2969 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2970 can be optimized into: i++; x = 3; */
2971 len
= c_strlen (src
, 1);
2972 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2974 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2975 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2978 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2980 /* If SRC is not a pointer type, don't do this operation inline. */
2984 /* Bail out if we can't compute strlen in the right mode. */
2985 while (insn_mode
!= VOIDmode
)
2987 icode
= optab_handler (strlen_optab
, insn_mode
);
2988 if (icode
!= CODE_FOR_nothing
)
2991 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
2993 if (insn_mode
== VOIDmode
)
2996 /* Make a place to hold the source address. We will not expand
2997 the actual source until we are sure that the expansion will
2998 not fail -- there are trees that cannot be expanded twice. */
2999 src_reg
= gen_reg_rtx (Pmode
);
3001 /* Mark the beginning of the strlen sequence so we can emit the
3002 source operand later. */
3003 before_strlen
= get_last_insn ();
3005 create_output_operand (&ops
[0], target
, insn_mode
);
3006 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3007 create_integer_operand (&ops
[2], 0);
3008 create_integer_operand (&ops
[3], align
);
3009 if (!maybe_expand_insn (icode
, 4, ops
))
3012 /* Now that we are assured of success, expand the source. */
3014 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3017 #ifdef POINTERS_EXTEND_UNSIGNED
3018 if (GET_MODE (pat
) != Pmode
)
3019 pat
= convert_to_mode (Pmode
, pat
,
3020 POINTERS_EXTEND_UNSIGNED
);
3022 emit_move_insn (src_reg
, pat
);
3028 emit_insn_after (pat
, before_strlen
);
3030 emit_insn_before (pat
, get_insns ());
3032 /* Return the value in the proper mode for this function. */
3033 if (GET_MODE (ops
[0].value
) == target_mode
)
3034 target
= ops
[0].value
;
3035 else if (target
!= 0)
3036 convert_move (target
, ops
[0].value
, 0);
3038 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
3061 /* LEN specify length of the block of memcpy/memset operation.
3062 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3063 In some cases we can make very likely guess on max size, then we
3064 set it into PROBABLE_MAX_SIZE. */
3067 determine_block_size (tree len
, rtx len_rtx
,
3068 unsigned HOST_WIDE_INT
*min_size
,
3069 unsigned HOST_WIDE_INT
*max_size
,
3070 unsigned HOST_WIDE_INT
*probable_max_size
)
3072 if (CONST_INT_P (len_rtx
))
3074 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3080 enum value_range_type range_type
= VR_UNDEFINED
;
3082 /* Determine bounds from the type. */
3083 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3084 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3087 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3088 *probable_max_size
= *max_size
3089 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3091 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3093 if (TREE_CODE (len
) == SSA_NAME
)
3094 range_type
= get_range_info (len
, &min
, &max
);
3095 if (range_type
== VR_RANGE
)
3097 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3098 *min_size
= min
.to_uhwi ();
3099 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3100 *probable_max_size
= *max_size
= max
.to_uhwi ();
3102 else if (range_type
== VR_ANTI_RANGE
)
3104 /* Anti range 0...N lets us to determine minimal size to N+1. */
3107 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3108 *min_size
= max
.to_uhwi () + 1;
3116 Produce anti range allowing negative values of N. We still
3117 can use the information and make a guess that N is not negative.
3119 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3120 *probable_max_size
= min
.to_uhwi () - 1;
3123 gcc_checking_assert (*max_size
<=
3124 (unsigned HOST_WIDE_INT
)
3125 GET_MODE_MASK (GET_MODE (len_rtx
)));
3128 /* Helper function to do the actual work for expand_builtin_memcpy. */
3131 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
3133 const char *src_str
;
3134 unsigned int src_align
= get_pointer_alignment (src
);
3135 unsigned int dest_align
= get_pointer_alignment (dest
);
3136 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3137 HOST_WIDE_INT expected_size
= -1;
3138 unsigned int expected_align
= 0;
3139 unsigned HOST_WIDE_INT min_size
;
3140 unsigned HOST_WIDE_INT max_size
;
3141 unsigned HOST_WIDE_INT probable_max_size
;
3143 /* If DEST is not a pointer type, call the normal function. */
3144 if (dest_align
== 0)
3147 /* If either SRC is not a pointer type, don't do this
3148 operation in-line. */
3152 if (currently_expanding_gimple_stmt
)
3153 stringop_block_profile (currently_expanding_gimple_stmt
,
3154 &expected_align
, &expected_size
);
3156 if (expected_align
< dest_align
)
3157 expected_align
= dest_align
;
3158 dest_mem
= get_memory_rtx (dest
, len
);
3159 set_mem_align (dest_mem
, dest_align
);
3160 len_rtx
= expand_normal (len
);
3161 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3162 &probable_max_size
);
3163 src_str
= c_getstr (src
);
3165 /* If SRC is a string constant and block move would be done
3166 by pieces, we can avoid loading the string from memory
3167 and only stored the computed constants. */
3169 && CONST_INT_P (len_rtx
)
3170 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3171 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3172 CONST_CAST (char *, src_str
),
3175 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3176 builtin_memcpy_read_str
,
3177 CONST_CAST (char *, src_str
),
3178 dest_align
, false, 0);
3179 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3180 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3184 src_mem
= get_memory_rtx (src
, len
);
3185 set_mem_align (src_mem
, src_align
);
3187 /* Copy word part most expediently. */
3188 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3189 CALL_EXPR_TAILCALL (exp
)
3190 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3191 expected_align
, expected_size
,
3192 min_size
, max_size
, probable_max_size
);
3196 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3197 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3203 /* Expand a call EXP to the memcpy builtin.
3204 Return NULL_RTX if we failed, the caller should emit a normal call,
3205 otherwise try to get the result in TARGET, if convenient (and in
3206 mode MODE if that's convenient). */
3209 expand_builtin_memcpy (tree exp
, rtx target
)
3211 if (!validate_arglist (exp
,
3212 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3216 tree dest
= CALL_EXPR_ARG (exp
, 0);
3217 tree src
= CALL_EXPR_ARG (exp
, 1);
3218 tree len
= CALL_EXPR_ARG (exp
, 2);
3219 return expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3223 /* Expand an instrumented call EXP to the memcpy builtin.
3224 Return NULL_RTX if we failed, the caller should emit a normal call,
3225 otherwise try to get the result in TARGET, if convenient (and in
3226 mode MODE if that's convenient). */
3229 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3231 if (!validate_arglist (exp
,
3232 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3233 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3234 INTEGER_TYPE
, VOID_TYPE
))
3238 tree dest
= CALL_EXPR_ARG (exp
, 0);
3239 tree src
= CALL_EXPR_ARG (exp
, 2);
3240 tree len
= CALL_EXPR_ARG (exp
, 4);
3241 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3243 /* Return src bounds with the result. */
3246 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3247 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3248 res
= chkp_join_splitted_slot (res
, bnd
);
3254 /* Expand a call EXP to the mempcpy builtin.
3255 Return NULL_RTX if we failed; the caller should emit a normal call,
3256 otherwise try to get the result in TARGET, if convenient (and in
3257 mode MODE if that's convenient). If ENDP is 0 return the
3258 destination pointer, if ENDP is 1 return the end pointer ala
3259 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3263 expand_builtin_mempcpy (tree exp
, rtx target
, machine_mode mode
)
3265 if (!validate_arglist (exp
,
3266 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3270 tree dest
= CALL_EXPR_ARG (exp
, 0);
3271 tree src
= CALL_EXPR_ARG (exp
, 1);
3272 tree len
= CALL_EXPR_ARG (exp
, 2);
3273 return expand_builtin_mempcpy_args (dest
, src
, len
,
3274 target
, mode
, /*endp=*/ 1,
3279 /* Expand an instrumented call EXP to the mempcpy builtin.
3280 Return NULL_RTX if we failed, the caller should emit a normal call,
3281 otherwise try to get the result in TARGET, if convenient (and in
3282 mode MODE if that's convenient). */
3285 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3287 if (!validate_arglist (exp
,
3288 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3289 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3290 INTEGER_TYPE
, VOID_TYPE
))
3294 tree dest
= CALL_EXPR_ARG (exp
, 0);
3295 tree src
= CALL_EXPR_ARG (exp
, 2);
3296 tree len
= CALL_EXPR_ARG (exp
, 4);
3297 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3300 /* Return src bounds with the result. */
3303 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3304 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3305 res
= chkp_join_splitted_slot (res
, bnd
);
3311 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3312 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3313 so that this can also be called without constructing an actual CALL_EXPR.
3314 The other arguments and return value are the same as for
3315 expand_builtin_mempcpy. */
3318 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3319 rtx target
, machine_mode mode
, int endp
,
3322 tree fndecl
= get_callee_fndecl (orig_exp
);
3324 /* If return value is ignored, transform mempcpy into memcpy. */
3325 if (target
== const0_rtx
3326 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3327 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3329 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3330 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3332 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3334 else if (target
== const0_rtx
3335 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3337 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3338 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3340 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3344 const char *src_str
;
3345 unsigned int src_align
= get_pointer_alignment (src
);
3346 unsigned int dest_align
= get_pointer_alignment (dest
);
3347 rtx dest_mem
, src_mem
, len_rtx
;
3349 /* If either SRC or DEST is not a pointer type, don't do this
3350 operation in-line. */
3351 if (dest_align
== 0 || src_align
== 0)
3354 /* If LEN is not constant, call the normal function. */
3355 if (! tree_fits_uhwi_p (len
))
3358 len_rtx
= expand_normal (len
);
3359 src_str
= c_getstr (src
);
3361 /* If SRC is a string constant and block move would be done
3362 by pieces, we can avoid loading the string from memory
3363 and only stored the computed constants. */
3365 && CONST_INT_P (len_rtx
)
3366 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3367 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3368 CONST_CAST (char *, src_str
),
3371 dest_mem
= get_memory_rtx (dest
, len
);
3372 set_mem_align (dest_mem
, dest_align
);
3373 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3374 builtin_memcpy_read_str
,
3375 CONST_CAST (char *, src_str
),
3376 dest_align
, false, endp
);
3377 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3378 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3382 if (CONST_INT_P (len_rtx
)
3383 && can_move_by_pieces (INTVAL (len_rtx
),
3384 MIN (dest_align
, src_align
)))
3386 dest_mem
= get_memory_rtx (dest
, len
);
3387 set_mem_align (dest_mem
, dest_align
);
3388 src_mem
= get_memory_rtx (src
, len
);
3389 set_mem_align (src_mem
, src_align
);
3390 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3391 MIN (dest_align
, src_align
), endp
);
3392 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3393 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3401 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3402 we failed, the caller should emit a normal call, otherwise try to
3403 get the result in TARGET, if convenient. If ENDP is 0 return the
3404 destination pointer, if ENDP is 1 return the end pointer ala
3405 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3409 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3411 struct expand_operand ops
[3];
3415 if (!targetm
.have_movstr ())
3418 dest_mem
= get_memory_rtx (dest
, NULL
);
3419 src_mem
= get_memory_rtx (src
, NULL
);
3422 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3423 dest_mem
= replace_equiv_address (dest_mem
, target
);
3426 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3427 create_fixed_operand (&ops
[1], dest_mem
);
3428 create_fixed_operand (&ops
[2], src_mem
);
3429 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3432 if (endp
&& target
!= const0_rtx
)
3434 target
= ops
[0].value
;
3435 /* movstr is supposed to set end to the address of the NUL
3436 terminator. If the caller requested a mempcpy-like return value,
3440 rtx tem
= plus_constant (GET_MODE (target
),
3441 gen_lowpart (GET_MODE (target
), target
), 1);
3442 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3448 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3449 NULL_RTX if we failed the caller should emit a normal call, otherwise
3450 try to get the result in TARGET, if convenient (and in mode MODE if that's
3454 expand_builtin_strcpy (tree exp
, rtx target
)
3456 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3458 tree dest
= CALL_EXPR_ARG (exp
, 0);
3459 tree src
= CALL_EXPR_ARG (exp
, 1);
3460 return expand_builtin_strcpy_args (dest
, src
, target
);
3465 /* Helper function to do the actual work for expand_builtin_strcpy. The
3466 arguments to the builtin_strcpy call DEST and SRC are broken out
3467 so that this can also be called without constructing an actual CALL_EXPR.
3468 The other arguments and return value are the same as for
3469 expand_builtin_strcpy. */
3472 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3474 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3477 /* Expand a call EXP to the stpcpy builtin.
3478 Return NULL_RTX if we failed the caller should emit a normal call,
3479 otherwise try to get the result in TARGET, if convenient (and in
3480 mode MODE if that's convenient). */
3483 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3486 location_t loc
= EXPR_LOCATION (exp
);
3488 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3491 dst
= CALL_EXPR_ARG (exp
, 0);
3492 src
= CALL_EXPR_ARG (exp
, 1);
3494 /* If return value is ignored, transform stpcpy into strcpy. */
3495 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3497 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3498 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3499 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3506 /* Ensure we get an actual string whose length can be evaluated at
3507 compile-time, not an expression containing a string. This is
3508 because the latter will potentially produce pessimized code
3509 when used to produce the return value. */
3510 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3511 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3513 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3514 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3515 target
, mode
, /*endp=*/2,
3521 if (TREE_CODE (len
) == INTEGER_CST
)
3523 rtx len_rtx
= expand_normal (len
);
3525 if (CONST_INT_P (len_rtx
))
3527 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3533 if (mode
!= VOIDmode
)
3534 target
= gen_reg_rtx (mode
);
3536 target
= gen_reg_rtx (GET_MODE (ret
));
3538 if (GET_MODE (target
) != GET_MODE (ret
))
3539 ret
= gen_lowpart (GET_MODE (target
), ret
);
3541 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3542 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3550 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3554 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3555 bytes from constant string DATA + OFFSET and return it as target
3559 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3562 const char *str
= (const char *) data
;
3564 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3567 return c_readstr (str
+ offset
, mode
);
3570 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3571 NULL_RTX if we failed the caller should emit a normal call. */
3574 expand_builtin_strncpy (tree exp
, rtx target
)
3576 location_t loc
= EXPR_LOCATION (exp
);
3578 if (validate_arglist (exp
,
3579 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3581 tree dest
= CALL_EXPR_ARG (exp
, 0);
3582 tree src
= CALL_EXPR_ARG (exp
, 1);
3583 tree len
= CALL_EXPR_ARG (exp
, 2);
3584 tree slen
= c_strlen (src
, 1);
3586 /* We must be passed a constant len and src parameter. */
3587 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3590 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3592 /* We're required to pad with trailing zeros if the requested
3593 len is greater than strlen(s2)+1. In that case try to
3594 use store_by_pieces, if it fails, punt. */
3595 if (tree_int_cst_lt (slen
, len
))
3597 unsigned int dest_align
= get_pointer_alignment (dest
);
3598 const char *p
= c_getstr (src
);
3601 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3602 || !can_store_by_pieces (tree_to_uhwi (len
),
3603 builtin_strncpy_read_str
,
3604 CONST_CAST (char *, p
),
3608 dest_mem
= get_memory_rtx (dest
, len
);
3609 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3610 builtin_strncpy_read_str
,
3611 CONST_CAST (char *, p
), dest_align
, false, 0);
3612 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3613 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3620 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3621 bytes from constant string DATA + OFFSET and return it as target
3625 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3628 const char *c
= (const char *) data
;
3629 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3631 memset (p
, *c
, GET_MODE_SIZE (mode
));
3633 return c_readstr (p
, mode
);
3636 /* Callback routine for store_by_pieces. Return the RTL of a register
3637 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3638 char value given in the RTL register data. For example, if mode is
3639 4 bytes wide, return the RTL for 0x01010101*data. */
3642 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3649 size
= GET_MODE_SIZE (mode
);
3653 p
= XALLOCAVEC (char, size
);
3654 memset (p
, 1, size
);
3655 coeff
= c_readstr (p
, mode
);
3657 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3658 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3659 return force_reg (mode
, target
);
3662 /* Expand expression EXP, which is a call to the memset builtin. Return
3663 NULL_RTX if we failed the caller should emit a normal call, otherwise
3664 try to get the result in TARGET, if convenient (and in mode MODE if that's
3668 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
3670 if (!validate_arglist (exp
,
3671 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3675 tree dest
= CALL_EXPR_ARG (exp
, 0);
3676 tree val
= CALL_EXPR_ARG (exp
, 1);
3677 tree len
= CALL_EXPR_ARG (exp
, 2);
3678 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3682 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3683 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3684 try to get the result in TARGET, if convenient (and in mode MODE if that's
3688 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3690 if (!validate_arglist (exp
,
3691 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3692 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3696 tree dest
= CALL_EXPR_ARG (exp
, 0);
3697 tree val
= CALL_EXPR_ARG (exp
, 2);
3698 tree len
= CALL_EXPR_ARG (exp
, 3);
3699 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3701 /* Return src bounds with the result. */
3704 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3705 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3706 res
= chkp_join_splitted_slot (res
, bnd
);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        {
          val_rtx = force_reg (val_mode, val_rtx);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_memset_gen_str, val_rtx, dest_align,
                           true, 0);
        }
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        store_by_pieces (dest_mem, tree_to_uhwi (len),
                         builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
                                        gen_int_mode (c, val_mode),
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size,
                                   min_size, max_size,
                                   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
                                dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
                                dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
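/* Illustrative sketch (not part of the original source): a call such as
   bzero (buf, n) is expanded here exactly as memset (buf, 0, (size_t) n)
   would be, but ORIG_EXP is still the bzero call, so a library fallback
   re-emits bzero rather than memset.  */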
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
                          rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
                          HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
                               TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (arg3_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
      set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
    }

  rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
                                         TREE_TYPE (len), arg3_rtx,
                                         MIN (arg1_align, arg2_align));
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
        return result;

      if (target != 0)
        {
          convert_move (target, result, 0);
          return target;
        }

      return convert_to_mode (mode, result, 0);
    }

  result = target;
  if (! (result != 0
         && REG_P (result) && GET_MODE (result) == mode
         && REGNO (result) >= FIRST_PSEUDO_REGISTER))
    result = gen_reg_rtx (mode);

  emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
                           TYPE_MODE (integer_type_node), 3,
                           XEXP (arg1_rtx, 0), Pmode,
                           XEXP (arg2_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
                                            TYPE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));
  return result;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try
   to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
        result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
                                MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)
        {
          tree len;
          rtx arg3_rtx;

          tree len1 = c_strlen (arg1, 1);
          tree len2 = c_strlen (arg2, 1);

          if (len1)
            len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
          if (len2)
            len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap,
             unless one has side effects.  If both strings have constant
             lengths, use the smaller.  */

          if (!len1)
            len = len2;
          else if (!len2)
            len = len1;
          else if (TREE_SIDE_EFFECTS (len1))
            len = len2;
          else if (TREE_SIDE_EFFECTS (len2))
            len = len1;
          else if (TREE_CODE (len1) != INTEGER_CST)
            len = len2;
          else if (TREE_CODE (len2) != INTEGER_CST)
            len = len1;
          else if (tree_int_cst_lt (len1, len2))
            len = len1;
          else
            len = len2;

          /* If both arguments have side effects, we cannot optimize.  */
          if (len && !TREE_SIDE_EFFECTS (len))
            {
              arg3_rtx = expand_normal (len);
              result = expand_cmpstrn_or_cmpmem
                (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
                 arg3_rtx, MIN (arg1_align, arg2_align));
            }
        }

      if (result)
        {
          /* Return the value in the proper mode for this function.  */
          machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try
   to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      tree fndecl, fn;
      rtx result;

      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
        len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
        len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant
         lengths, use the smaller.  */

      if (!len1)
        len = len2;
      else if (!len2)
        len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
        len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
        len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
        len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
        len = len1;
      else if (tree_int_cst_lt (len1, len2))
        len = len1;
      else
        len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
        return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                             fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
                                         arg2_rtx, TREE_TYPE (len), arg3_rtx,
                                         MIN (arg1_align, arg2_align));
      if (result)
        {
          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
                                  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
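/* Illustrative sketch (not part of the original source): for
   strncmp (s, "abc", n) the constant string gives len2 = strlen("abc")+1 = 4,
   so the cmpstrn pattern is emitted with length MIN (4, n); comparing past
   the terminating NUL can never change the result.  */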
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
                                      make_tree (TREE_TYPE (valist),
                                                 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
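/* Illustrative sketch (not part of the original source): on a target whose
   va_list is an array type (for example a one-element struct array),
   "va_copy (d, s)" cannot be a plain assignment, so the branch above emits
   a block move of sizeof (va_list) bytes from &s to &d instead.  */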
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          warning (0, "unsupported argument to %qD", fndecl);
          return const0_rtx;
        }

      if (count)
        {
          /* Warn since no effort is made to ensure that any frame
             beyond the current one exists or can be safely reached.  */
          warning (OPT_Wframe_address, "calling %qD with "
                   "a nonzero argument is unsafe", fndecl);
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
                            == BUILT_IN_ALLOCA_WITH_ALIGN);
  bool valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
           ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
           : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
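/* Illustrative sketch (not part of the original source): for
   __builtin_alloca_with_align (n, 128) the second argument is an alignment
   in bits, so ALIGN above becomes 128 and the returned block is 16-byte
   aligned; plain __builtin_alloca (n) uses BIGGEST_ALIGNMENT instead.  */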
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}
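/* Illustrative usage sketch (not part of the original source): in user code

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();            // hypothetical helper

   the hint is consumed by the tree-level branch predictor; by the time the
   call reaches this expander only the first argument (ptr == NULL) is
   re-expanded and returned.  */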
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
        return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
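/* Illustrative arithmetic sketch (not part of the original source): with a
   required alignment of 16 bytes, ADDEND is 15 and MASK is -16, so an
   address of 0x1003 becomes (0x1003 + 15) & -16 == 0x1010, the next
   16-byte boundary.  */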
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
        return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      gcc_assert (imode != BLKmode);
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
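/* Illustrative sketch (not part of the original source): for IEEE single
   precision the sign is bit 31, so with a 32-bit result mode the mask built
   above is 0x80000000 and signbit(x) reduces to (bits_of_x & 0x80000000);
   for a narrower result mode the value is shifted right by 31 first and
   then masked with 1.  */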
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
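/* Illustrative sketch (not part of the original source): for
   __sync_fetch_and_add_4, FCODE_DIFF is 2 (the _4 entry sits two codes past
   the _1 entry), so the requested size is 8 << 2 == 32 bits, i.e. SImode on
   a typical target.  */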
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:
          if (warned_f_a_n)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:
          if (warned_n_a_f)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
                                 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
               "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memodel model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
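/* Illustrative sketch (not part of the original source): a call such as
   __atomic_load_n (p, __ATOMIC_CONSUME) arrives here with the constant 1
   (memory_order_consume); it is promoted to MEMMODEL_ACQUIRE, while a
   non-constant model argument is simply treated as MEMMODEL_SEQ_CST.  */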
/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
               "failure memory model cannot be stronger than success memory "
               "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid failure memory model for "
               "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
/* Expand the __atomic_load intrinsic:
        TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
        void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
        TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
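/* Illustrative sketch (not part of the original source): when
   __atomic_nand_fetch falls back to the __atomic_fetch_nand library routine,
   the library returns the value *before* the operation, so the correction
   above recomputes the "after" value as ~(old & val), matching the NAND
   semantics encoded by CODE == NOT.  */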
/* Expand an atomic clear operation.
        void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
        bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      val = val & -val;
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
        type_align = mode_align;
      else
        type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype)))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
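/* Illustrative sketch (not part of the original source):
   __atomic_always_lock_free (sizeof (int), 0) is folded via this routine;
   with a null ARG1 the typical alignment of the 32-bit integer mode is
   assumed, and the answer is true exactly when the target provides a
   never-failing compare-and-swap pattern for that mode.  */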
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
        void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}
/* Expand the __atomic_signal_fence intrinsic:
        void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}
5745 /* Expand the __sync_synchronize intrinsic. */
5748 expand_builtin_sync_synchronize (void)
5750 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
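
/* Usage sketch (assumption, not GCC source): on targets that provide a
   get_thread_pointer pattern, the call below expands to a single move from
   the thread-pointer register rather than a library call:

     void *tp = __builtin_thread_pointer ();
*/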
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
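
/* Illustrative sketch (assumption, not GCC source): these two helpers back
   __builtin_stack_save/__builtin_stack_restore, which the gimplifier emits
   around variable-length-array scopes, roughly:

     void *sp = __builtin_stack_save ();
     { char vla[n]; use (vla); }
     __builtin_stack_restore (sp);
*/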
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
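
/* Illustrative sketch (assumption, not GCC source): for an ignored call to a
   const builtin such as

     (void) __builtin_labs (i++);

   the code above skips expansion of the builtin itself and only emits the
   side effect of evaluating `i++'.  */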
5917 CASE_FLT_FN (BUILT_IN_FABS
):
5918 case BUILT_IN_FABSD32
:
5919 case BUILT_IN_FABSD64
:
5920 case BUILT_IN_FABSD128
:
5921 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5926 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5927 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5932 /* Just do a normal library call if we were unable to fold
5934 CASE_FLT_FN (BUILT_IN_CABS
):
5937 CASE_FLT_FN (BUILT_IN_EXP
):
5938 CASE_FLT_FN (BUILT_IN_EXP10
):
5939 CASE_FLT_FN (BUILT_IN_POW10
):
5940 CASE_FLT_FN (BUILT_IN_EXP2
):
5941 CASE_FLT_FN (BUILT_IN_EXPM1
):
5942 CASE_FLT_FN (BUILT_IN_LOGB
):
5943 CASE_FLT_FN (BUILT_IN_LOG
):
5944 CASE_FLT_FN (BUILT_IN_LOG10
):
5945 CASE_FLT_FN (BUILT_IN_LOG2
):
5946 CASE_FLT_FN (BUILT_IN_LOG1P
):
5947 CASE_FLT_FN (BUILT_IN_TAN
):
5948 CASE_FLT_FN (BUILT_IN_ASIN
):
5949 CASE_FLT_FN (BUILT_IN_ACOS
):
5950 CASE_FLT_FN (BUILT_IN_ATAN
):
5951 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5952 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5953 because of possible accuracy problems. */
5954 if (! flag_unsafe_math_optimizations
)
5956 CASE_FLT_FN (BUILT_IN_SQRT
):
5957 CASE_FLT_FN (BUILT_IN_FLOOR
):
5958 CASE_FLT_FN (BUILT_IN_CEIL
):
5959 CASE_FLT_FN (BUILT_IN_TRUNC
):
5960 CASE_FLT_FN (BUILT_IN_ROUND
):
5961 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5962 CASE_FLT_FN (BUILT_IN_RINT
):
5963 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5968 CASE_FLT_FN (BUILT_IN_FMA
):
5969 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5974 CASE_FLT_FN (BUILT_IN_ILOGB
):
5975 if (! flag_unsafe_math_optimizations
)
5977 CASE_FLT_FN (BUILT_IN_ISINF
):
5978 CASE_FLT_FN (BUILT_IN_FINITE
):
5979 case BUILT_IN_ISFINITE
:
5980 case BUILT_IN_ISNORMAL
:
5981 target
= expand_builtin_interclass_mathfn (exp
, target
);
5986 CASE_FLT_FN (BUILT_IN_ICEIL
):
5987 CASE_FLT_FN (BUILT_IN_LCEIL
):
5988 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5989 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5990 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5991 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5992 target
= expand_builtin_int_roundingfn (exp
, target
);
5997 CASE_FLT_FN (BUILT_IN_IRINT
):
5998 CASE_FLT_FN (BUILT_IN_LRINT
):
5999 CASE_FLT_FN (BUILT_IN_LLRINT
):
6000 CASE_FLT_FN (BUILT_IN_IROUND
):
6001 CASE_FLT_FN (BUILT_IN_LROUND
):
6002 CASE_FLT_FN (BUILT_IN_LLROUND
):
6003 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6008 CASE_FLT_FN (BUILT_IN_POWI
):
6009 target
= expand_builtin_powi (exp
, target
);
6014 CASE_FLT_FN (BUILT_IN_ATAN2
):
6015 CASE_FLT_FN (BUILT_IN_LDEXP
):
6016 CASE_FLT_FN (BUILT_IN_SCALB
):
6017 CASE_FLT_FN (BUILT_IN_SCALBN
):
6018 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6019 if (! flag_unsafe_math_optimizations
)
6022 CASE_FLT_FN (BUILT_IN_FMOD
):
6023 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6024 CASE_FLT_FN (BUILT_IN_DREM
):
6025 CASE_FLT_FN (BUILT_IN_POW
):
6026 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6031 CASE_FLT_FN (BUILT_IN_CEXPI
):
6032 target
= expand_builtin_cexpi (exp
, target
);
6033 gcc_assert (target
);
6036 CASE_FLT_FN (BUILT_IN_SIN
):
6037 CASE_FLT_FN (BUILT_IN_COS
):
6038 if (! flag_unsafe_math_optimizations
)
6040 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6045 CASE_FLT_FN (BUILT_IN_SINCOS
):
6046 if (! flag_unsafe_math_optimizations
)
6048 target
= expand_builtin_sincos (exp
);
6053 case BUILT_IN_APPLY_ARGS
:
6054 return expand_builtin_apply_args ();
6056 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6057 FUNCTION with a copy of the parameters described by
6058 ARGUMENTS, and ARGSIZE. It returns a block of memory
6059 allocated on the stack into which is stored all the registers
6060 that might possibly be used for returning the result of a
6061 function. ARGUMENTS is the value returned by
6062 __builtin_apply_args. ARGSIZE is the number of bytes of
6063 arguments that must be copied. ??? How should this value be
6064 computed? We'll also need a safe worst case value for varargs
6066 case BUILT_IN_APPLY
:
6067 if (!validate_arglist (exp
, POINTER_TYPE
,
6068 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6069 && !validate_arglist (exp
, REFERENCE_TYPE
,
6070 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6076 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6077 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6078 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6080 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
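
    /* Usage sketch (assumption, not GCC source): __builtin_apply_args and
       __builtin_apply are typically paired to forward the current call, e.g.

	 void *args = __builtin_apply_args ();
	 void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
	 __builtin_return (res);

       where 64 is a caller-chosen worst-case argument size in bytes.  */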
6083 /* __builtin_return (RESULT) causes the function to return the
6084 value described by RESULT. RESULT is address of the block of
6085 memory returned by __builtin_apply. */
6086 case BUILT_IN_RETURN
:
6087 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6088 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6091 case BUILT_IN_SAVEREGS
:
6092 return expand_builtin_saveregs ();
6094 case BUILT_IN_VA_ARG_PACK
:
6095 /* All valid uses of __builtin_va_arg_pack () are removed during
6097 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6100 case BUILT_IN_VA_ARG_PACK_LEN
:
6101 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6103 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6106 /* Return the address of the first anonymous stack arg. */
6107 case BUILT_IN_NEXT_ARG
:
6108 if (fold_builtin_next_arg (exp
, false))
6110 return expand_builtin_next_arg ();
6112 case BUILT_IN_CLEAR_CACHE
:
6113 target
= expand_builtin___clear_cache (exp
);
6118 case BUILT_IN_CLASSIFY_TYPE
:
6119 return expand_builtin_classify_type (exp
);
6121 case BUILT_IN_CONSTANT_P
:
6124 case BUILT_IN_FRAME_ADDRESS
:
6125 case BUILT_IN_RETURN_ADDRESS
:
6126 return expand_builtin_frame_address (fndecl
, exp
);
6128 /* Returns the address of the area where the structure is returned.
6130 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6131 if (call_expr_nargs (exp
) != 0
6132 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6133 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6136 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6138 case BUILT_IN_ALLOCA
:
6139 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6140 /* If the allocation stems from the declaration of a variable-sized
6141 object, it cannot accumulate. */
6142 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6147 case BUILT_IN_STACK_SAVE
:
6148 return expand_stack_save ();
6150 case BUILT_IN_STACK_RESTORE
:
6151 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6154 case BUILT_IN_BSWAP16
:
6155 case BUILT_IN_BSWAP32
:
6156 case BUILT_IN_BSWAP64
:
6157 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6162 CASE_INT_FN (BUILT_IN_FFS
):
6163 target
= expand_builtin_unop (target_mode
, exp
, target
,
6164 subtarget
, ffs_optab
);
6169 CASE_INT_FN (BUILT_IN_CLZ
):
6170 target
= expand_builtin_unop (target_mode
, exp
, target
,
6171 subtarget
, clz_optab
);
6176 CASE_INT_FN (BUILT_IN_CTZ
):
6177 target
= expand_builtin_unop (target_mode
, exp
, target
,
6178 subtarget
, ctz_optab
);
6183 CASE_INT_FN (BUILT_IN_CLRSB
):
6184 target
= expand_builtin_unop (target_mode
, exp
, target
,
6185 subtarget
, clrsb_optab
);
6190 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6191 target
= expand_builtin_unop (target_mode
, exp
, target
,
6192 subtarget
, popcount_optab
);
6197 CASE_INT_FN (BUILT_IN_PARITY
):
6198 target
= expand_builtin_unop (target_mode
, exp
, target
,
6199 subtarget
, parity_optab
);
6204 case BUILT_IN_STRLEN
:
6205 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6210 case BUILT_IN_STRCPY
:
6211 target
= expand_builtin_strcpy (exp
, target
);
6216 case BUILT_IN_STRNCPY
:
6217 target
= expand_builtin_strncpy (exp
, target
);
6222 case BUILT_IN_STPCPY
:
6223 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6228 case BUILT_IN_MEMCPY
:
6229 target
= expand_builtin_memcpy (exp
, target
);
6234 case BUILT_IN_MEMPCPY
:
6235 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6240 case BUILT_IN_MEMSET
:
6241 target
= expand_builtin_memset (exp
, target
, mode
);
6246 case BUILT_IN_BZERO
:
6247 target
= expand_builtin_bzero (exp
);
6252 case BUILT_IN_STRCMP
:
6253 target
= expand_builtin_strcmp (exp
, target
);
6258 case BUILT_IN_STRNCMP
:
6259 target
= expand_builtin_strncmp (exp
, target
, mode
);
6265 case BUILT_IN_MEMCMP
:
6266 target
= expand_builtin_memcmp (exp
, target
);
6271 case BUILT_IN_SETJMP
:
6272 /* This should have been lowered to the builtins below. */
6275 case BUILT_IN_SETJMP_SETUP
:
6276 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6277 and the receiver label. */
6278 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6280 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6281 VOIDmode
, EXPAND_NORMAL
);
6282 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6283 rtx_insn
*label_r
= label_rtx (label
);
6285 /* This is copied from the handling of non-local gotos. */
6286 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6287 nonlocal_goto_handler_labels
6288 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6289 nonlocal_goto_handler_labels
);
6290 /* ??? Do not let expand_label treat us as such since we would
6291 not want to be both on the list of non-local labels and on
6292 the list of forced labels. */
6293 FORCED_LABEL (label
) = 0;
6298 case BUILT_IN_SETJMP_RECEIVER
:
6299 /* __builtin_setjmp_receiver is passed the receiver label. */
6300 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6302 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6303 rtx_insn
*label_r
= label_rtx (label
);
6305 expand_builtin_setjmp_receiver (label_r
);
6310 /* __builtin_longjmp is passed a pointer to an array of five words.
6311 It's similar to the C library longjmp function but works with
6312 __builtin_setjmp above. */
6313 case BUILT_IN_LONGJMP
:
6314 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6316 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6317 VOIDmode
, EXPAND_NORMAL
);
6318 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6320 if (value
!= const1_rtx
)
6322 error ("%<__builtin_longjmp%> second argument must be 1");
6326 expand_builtin_longjmp (buf_addr
, value
);
6331 case BUILT_IN_NONLOCAL_GOTO
:
6332 target
= expand_builtin_nonlocal_goto (exp
);
6337 /* This updates the setjmp buffer that is its argument with the value
6338 of the current stack pointer. */
6339 case BUILT_IN_UPDATE_SETJMP_BUF
:
6340 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6343 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6345 expand_builtin_update_setjmp_buf (buf_addr
);
6351 expand_builtin_trap ();
6354 case BUILT_IN_UNREACHABLE
:
6355 expand_builtin_unreachable ();
6358 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6359 case BUILT_IN_SIGNBITD32
:
6360 case BUILT_IN_SIGNBITD64
:
6361 case BUILT_IN_SIGNBITD128
:
6362 target
= expand_builtin_signbit (exp
, target
);
6367 /* Various hooks for the DWARF 2 __throw routine. */
6368 case BUILT_IN_UNWIND_INIT
:
6369 expand_builtin_unwind_init ();
6371 case BUILT_IN_DWARF_CFA
:
6372 return virtual_cfa_rtx
;
6373 #ifdef DWARF2_UNWIND_INFO
6374 case BUILT_IN_DWARF_SP_COLUMN
:
6375 return expand_builtin_dwarf_sp_column ();
6376 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6377 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6380 case BUILT_IN_FROB_RETURN_ADDR
:
6381 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6382 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6383 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6384 case BUILT_IN_EH_RETURN
:
6385 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6386 CALL_EXPR_ARG (exp
, 1));
6388 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6389 return expand_builtin_eh_return_data_regno (exp
);
6390 case BUILT_IN_EXTEND_POINTER
:
6391 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6392 case BUILT_IN_EH_POINTER
:
6393 return expand_builtin_eh_pointer (exp
);
6394 case BUILT_IN_EH_FILTER
:
6395 return expand_builtin_eh_filter (exp
);
6396 case BUILT_IN_EH_COPY_VALUES
:
6397 return expand_builtin_eh_copy_values (exp
);
6399 case BUILT_IN_VA_START
:
6400 return expand_builtin_va_start (exp
);
6401 case BUILT_IN_VA_END
:
6402 return expand_builtin_va_end (exp
);
6403 case BUILT_IN_VA_COPY
:
6404 return expand_builtin_va_copy (exp
);
6405 case BUILT_IN_EXPECT
:
6406 return expand_builtin_expect (exp
, target
);
6407 case BUILT_IN_ASSUME_ALIGNED
:
6408 return expand_builtin_assume_aligned (exp
, target
);
6409 case BUILT_IN_PREFETCH
:
6410 expand_builtin_prefetch (exp
);
6413 case BUILT_IN_INIT_TRAMPOLINE
:
6414 return expand_builtin_init_trampoline (exp
, true);
6415 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6416 return expand_builtin_init_trampoline (exp
, false);
6417 case BUILT_IN_ADJUST_TRAMPOLINE
:
6418 return expand_builtin_adjust_trampoline (exp
);
6421 case BUILT_IN_EXECL
:
6422 case BUILT_IN_EXECV
:
6423 case BUILT_IN_EXECLP
:
6424 case BUILT_IN_EXECLE
:
6425 case BUILT_IN_EXECVP
:
6426 case BUILT_IN_EXECVE
:
6427 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6432 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6433 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6434 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6435 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6436 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6437 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6438 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
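
    /* Note (illustrative, not GCC source): the _1/_2/_4/_8/_16 variants are
       consecutive built-in codes, so the offset from the _1 enumerator
       selects the machine mode; e.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 gives
       get_builtin_sync_mode (2), the 4-byte integer mode.  */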
6443 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6444 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6445 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6446 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6447 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6448 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6449 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6454 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6455 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6456 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6457 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6458 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6459 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6460 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6465 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6466 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6467 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6468 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6469 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6470 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6471 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6476 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6477 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6478 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6479 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6480 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6481 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6482 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6487 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6488 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6489 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6490 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6491 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6492 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6493 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6498 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6499 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6500 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6501 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6502 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6503 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6504 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6509 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6510 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6511 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6512 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6513 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6514 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6515 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6520 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6521 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6522 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6523 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6524 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6525 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6526 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6531 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6532 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6533 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6534 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6535 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6536 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6537 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6542 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6543 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6544 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6545 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6546 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6547 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6548 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6553 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6554 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6555 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6556 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6557 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6558 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6559 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6566 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6567 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6568 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6569 if (mode
== VOIDmode
)
6570 mode
= TYPE_MODE (boolean_type_node
);
6571 if (!target
|| !register_operand (target
, mode
))
6572 target
= gen_reg_rtx (mode
);
6574 mode
= get_builtin_sync_mode
6575 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6576 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6583 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6584 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6585 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6586 mode
= get_builtin_sync_mode
6587 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6588 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6595 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6596 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6597 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6598 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6599 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6604 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6605 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6606 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6607 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6608 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6609 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6610 expand_builtin_sync_lock_release (mode
, exp
);
6613 case BUILT_IN_SYNC_SYNCHRONIZE
:
6614 expand_builtin_sync_synchronize ();
6617 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6618 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6619 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6620 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6621 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6622 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6623 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6631 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6632 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6634 unsigned int nargs
, z
;
6635 vec
<tree
, va_gc
> *vec
;
6638 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6639 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6643 /* If this is turned into an external library call, the weak parameter
6644 must be dropped to match the expected parameter list. */
6645 nargs
= call_expr_nargs (exp
);
6646 vec_alloc (vec
, nargs
- 1);
6647 for (z
= 0; z
< 3; z
++)
6648 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6649 /* Skip the boolean weak parameter. */
6650 for (z
= 4; z
< 6; z
++)
6651 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6652 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6656 case BUILT_IN_ATOMIC_LOAD_1
:
6657 case BUILT_IN_ATOMIC_LOAD_2
:
6658 case BUILT_IN_ATOMIC_LOAD_4
:
6659 case BUILT_IN_ATOMIC_LOAD_8
:
6660 case BUILT_IN_ATOMIC_LOAD_16
:
6661 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6662 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6667 case BUILT_IN_ATOMIC_STORE_1
:
6668 case BUILT_IN_ATOMIC_STORE_2
:
6669 case BUILT_IN_ATOMIC_STORE_4
:
6670 case BUILT_IN_ATOMIC_STORE_8
:
6671 case BUILT_IN_ATOMIC_STORE_16
:
6672 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6673 target
= expand_builtin_atomic_store (mode
, exp
);
6678 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6679 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6680 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6681 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6682 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6684 enum built_in_function lib
;
6685 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6686 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6687 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6688 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6694 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6695 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6696 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6697 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6698 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6700 enum built_in_function lib
;
6701 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6702 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6703 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6704 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6710 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6711 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6712 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6713 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6714 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6716 enum built_in_function lib
;
6717 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6718 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6719 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6720 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6726 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6727 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6728 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6729 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6730 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6732 enum built_in_function lib
;
6733 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6734 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6735 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6736 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6742 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6743 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6744 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6745 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6746 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6748 enum built_in_function lib
;
6749 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6750 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6751 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6752 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6758 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6759 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6760 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6761 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6762 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6764 enum built_in_function lib
;
6765 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6766 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6767 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6768 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6774 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6775 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6776 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6777 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6778 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6779 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6780 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6781 ignore
, BUILT_IN_NONE
);
6786 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6787 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6788 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6789 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6790 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6791 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6792 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6793 ignore
, BUILT_IN_NONE
);
6798 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6799 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6800 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6801 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6802 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6803 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6804 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6805 ignore
, BUILT_IN_NONE
);
6810 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6811 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6812 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6813 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6814 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6815 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6816 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6817 ignore
, BUILT_IN_NONE
);
6822 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6823 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6824 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6825 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6826 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6827 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6828 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6829 ignore
, BUILT_IN_NONE
);
6834 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6835 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6836 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6837 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6838 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6839 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6840 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6841 ignore
, BUILT_IN_NONE
);
6846 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6847 return expand_builtin_atomic_test_and_set (exp
, target
);
6849 case BUILT_IN_ATOMIC_CLEAR
:
6850 return expand_builtin_atomic_clear (exp
);
6852 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6853 return expand_builtin_atomic_always_lock_free (exp
);
6855 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6856 target
= expand_builtin_atomic_is_lock_free (exp
);
6861 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6862 expand_builtin_atomic_thread_fence (exp
);
6865 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6866 expand_builtin_atomic_signal_fence (exp
);
6869 case BUILT_IN_OBJECT_SIZE
:
6870 return expand_builtin_object_size (exp
);
6872 case BUILT_IN_MEMCPY_CHK
:
6873 case BUILT_IN_MEMPCPY_CHK
:
6874 case BUILT_IN_MEMMOVE_CHK
:
6875 case BUILT_IN_MEMSET_CHK
:
6876 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6881 case BUILT_IN_STRCPY_CHK
:
6882 case BUILT_IN_STPCPY_CHK
:
6883 case BUILT_IN_STRNCPY_CHK
:
6884 case BUILT_IN_STPNCPY_CHK
:
6885 case BUILT_IN_STRCAT_CHK
:
6886 case BUILT_IN_STRNCAT_CHK
:
6887 case BUILT_IN_SNPRINTF_CHK
:
6888 case BUILT_IN_VSNPRINTF_CHK
:
6889 maybe_emit_chk_warning (exp
, fcode
);
6892 case BUILT_IN_SPRINTF_CHK
:
6893 case BUILT_IN_VSPRINTF_CHK
:
6894 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6898 if (warn_free_nonheap_object
)
6899 maybe_emit_free_warning (exp
);
6902 case BUILT_IN_THREAD_POINTER
:
6903 return expand_builtin_thread_pointer (exp
, target
);
6905 case BUILT_IN_SET_THREAD_POINTER
:
6906 expand_builtin_set_thread_pointer (exp
);
6909 case BUILT_IN_CILK_DETACH
:
6910 expand_builtin_cilk_detach (exp
);
6913 case BUILT_IN_CILK_POP_FRAME
:
6914 expand_builtin_cilk_pop_frame (exp
);
6917 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6918 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6919 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6920 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6921 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6922 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6923 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6924 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6925 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6926 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6927 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6928 /* We allow user CHKP builtins if Pointer Bounds
6930 if (!chkp_function_instrumented_p (current_function_decl
))
6932 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6933 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6934 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
6935 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
6936 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
6937 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6938 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6939 return expand_normal (size_zero_node
);
6940 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6941 return expand_normal (size_int (-1));
6947 case BUILT_IN_CHKP_BNDMK
:
6948 case BUILT_IN_CHKP_BNDSTX
:
6949 case BUILT_IN_CHKP_BNDCL
:
6950 case BUILT_IN_CHKP_BNDCU
:
6951 case BUILT_IN_CHKP_BNDLDX
:
6952 case BUILT_IN_CHKP_BNDRET
:
6953 case BUILT_IN_CHKP_INTERSECT
:
6954 case BUILT_IN_CHKP_NARROW
:
6955 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6956 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6957 /* Software implementation of Pointer Bounds Checker is NYI.
6958 Target support is required. */
6959 error ("Your target platform does not support -fcheck-pointer-bounds");
6962 case BUILT_IN_ACC_ON_DEVICE
:
6963 /* Do library call, if we failed to expand the builtin when
6967 default: /* just do library call, if unknown builtin */
6971 /* The switch statement above can drop through to cause the function
6972 to be called normally. */
6973 return expand_call (exp
, target
, ignore
);
6976 /* Similar to expand_builtin but is used for instrumented calls. */
6979 expand_builtin_with_bounds (tree exp
, rtx target
,
6980 rtx subtarget ATTRIBUTE_UNUSED
,
6981 machine_mode mode
, int ignore
)
6983 tree fndecl
= get_callee_fndecl (exp
);
6984 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6986 gcc_assert (CALL_WITH_BOUNDS_P (exp
));
6988 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6989 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6991 gcc_assert (fcode
> BEGIN_CHKP_BUILTINS
6992 && fcode
< END_CHKP_BUILTINS
);
6996 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
:
6997 target
= expand_builtin_memcpy_with_bounds (exp
, target
);
7002 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
:
7003 target
= expand_builtin_mempcpy_with_bounds (exp
, target
, mode
);
7008 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
:
7009 target
= expand_builtin_memset_with_bounds (exp
, target
, mode
);
7018 /* The switch statement above can drop through to cause the function
7019 to be called normally. */
7020 return expand_call (exp
, target
, ignore
);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
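
/* Usage sketch (assumption, not GCC source): a caller matching every
   floating-point variant of sqrt might write

     switch (builtin_mathfn_code (call))
       {
       CASE_FLT_FN (BUILT_IN_SQRT):
	 ...   // sqrtf, sqrt and sqrtl calls with valid argument types
       default:
	 break;
       }
*/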
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;
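
/* Illustrative sketch (assumption, not GCC source):

     __builtin_constant_p (3 * 7)   -> folds to 1 here (constant class)
     __builtin_constant_p ("abc")   -> folds to 1 (address of a STRING_CST)
     __builtin_constant_p (x++)     -> folds to 0 (side effects)

   Anything else is left alone so later optimizations can still prove the
   argument constant.  */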
7139 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7140 return it as a truthvalue. */
7143 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
7146 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7148 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7149 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7150 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7151 pred_type
= TREE_VALUE (arg_types
);
7152 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7154 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7155 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7156 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
7159 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7160 build_int_cst (ret_type
, 0));
7163 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7164 NULL_TREE if no simplification is possible. */
7167 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7169 tree inner
, fndecl
, inner_arg0
;
7170 enum tree_code code
;
7172 /* Distribute the expected value over short-circuiting operators.
7173 See through the cast from truthvalue_type_node to long. */
7175 while (CONVERT_EXPR_P (inner_arg0
)
7176 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7177 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7178 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7180 /* If this is a builtin_expect within a builtin_expect keep the
7181 inner one. See through a comparison against a constant. It
7182 might have been added to create a thruthvalue. */
7185 if (COMPARISON_CLASS_P (inner
)
7186 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7187 inner
= TREE_OPERAND (inner
, 0);
7189 if (TREE_CODE (inner
) == CALL_EXPR
7190 && (fndecl
= get_callee_fndecl (inner
))
7191 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7192 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7196 code
= TREE_CODE (inner
);
7197 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7199 tree op0
= TREE_OPERAND (inner
, 0);
7200 tree op1
= TREE_OPERAND (inner
, 1);
7202 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7203 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7204 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7206 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
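
  /* Illustrative sketch (assumption, not GCC source): the distribution above
     rewrites

       __builtin_expect (a && b, 1)

     into roughly

       __builtin_expect (a, 1) && __builtin_expect (b, 1)

     so each short-circuited condition carries its own prediction.  */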
7209 /* If the argument isn't invariant then there's nothing else we can do. */
7210 if (!TREE_CONSTANT (inner_arg0
))
7213 /* If we expect that a comparison against the argument will fold to
7214 a constant return the constant. In practice, this means a true
7215 constant or the address of a non-weak symbol. */
7218 if (TREE_CODE (inner
) == ADDR_EXPR
)
7222 inner
= TREE_OPERAND (inner
, 0);
7224 while (TREE_CODE (inner
) == COMPONENT_REF
7225 || TREE_CODE (inner
) == ARRAY_REF
);
7226 if ((TREE_CODE (inner
) == VAR_DECL
7227 || TREE_CODE (inner
) == FUNCTION_DECL
)
7228 && DECL_WEAK (inner
))
7232 /* Otherwise, ARG0 already has the proper type for the return value. */
7236 /* Fold a call to __builtin_classify_type with argument ARG. */
7239 fold_builtin_classify_type (tree arg
)
7242 return build_int_cst (integer_type_node
, no_type_class
);
7244 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7247 /* Fold a call to __builtin_strlen with argument ARG. */
7250 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7252 if (!validate_arg (arg
, POINTER_TYPE
))
7256 tree len
= c_strlen (arg
, 0);
7259 return fold_convert_loc (loc
, type
, len
);
7265 /* If ARG is a foldable constant real, use FN to round it to an integer
7266 value and try to represent the result in integer type ITYPE. Return
7267 the value on success, otherwise return null. */
7270 do_real_to_int_conversion (tree itype
, tree arg
,
7271 void (*fn
) (REAL_VALUE_TYPE
*, format_helper
,
7272 const REAL_VALUE_TYPE
*))
7274 if (TREE_CODE (arg
) != REAL_CST
|| TREE_OVERFLOW (arg
))
7277 const REAL_VALUE_TYPE
*value
= TREE_REAL_CST_PTR (arg
);
7278 if (!real_isfinite (value
))
7281 tree ftype
= TREE_TYPE (arg
);
7282 REAL_VALUE_TYPE rounded
;
7283 fn (&rounded
, TYPE_MODE (ftype
), value
);
7286 wide_int ival
= real_to_integer (&rounded
, &fail
, TYPE_PRECISION (itype
));
7290 return wide_int_to_tree (itype
, ival
);
7294 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7297 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7299 REAL_VALUE_TYPE real
;
7301 /* __builtin_inff is intended to be usable to define INFINITY on all
7302 targets. If an infinity is not available, INFINITY expands "to a
7303 positive constant of type float that overflows at translation
7304 time", footnote "In this case, using INFINITY will violate the
7305 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7306 Thus we pedwarn to ensure this constraint violation is
7308 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7309 pedwarn (loc
, 0, "target format does not support infinity");
7312 return build_real (type
, real
);
7315 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7318 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7320 REAL_VALUE_TYPE real
;
7323 if (!validate_arg (arg
, POINTER_TYPE
))
7325 str
= c_getstr (arg
);
7329 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7332 return build_real (type
, real
);
7335 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7336 NULL_TREE if no simplification can be made. */
7339 fold_builtin_sincos (location_t loc
,
7340 tree arg0
, tree arg1
, tree arg2
)
7345 if (!validate_arg (arg0
, REAL_TYPE
)
7346 || !validate_arg (arg1
, POINTER_TYPE
)
7347 || !validate_arg (arg2
, POINTER_TYPE
))
7350 type
= TREE_TYPE (arg0
);
7352 /* Calculate the result when the argument is a constant. */
7353 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7356 /* Canonicalize sincos to cexpi. */
7357 if (!targetm
.libc_has_function (function_c99_math_complex
))
7359 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7363 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7364 call
= builtin_save_expr (call
);
7366 return build2 (COMPOUND_EXPR
, void_type_node
,
7367 build2 (MODIFY_EXPR
, void_type_node
,
7368 build_fold_indirect_ref_loc (loc
, arg1
),
7369 build1 (IMAGPART_EXPR
, type
, call
)),
7370 build2 (MODIFY_EXPR
, void_type_node
,
7371 build_fold_indirect_ref_loc (loc
, arg2
),
7372 build1 (REALPART_EXPR
, type
, call
)));
7375 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7376 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7377 the argument to the call. Return NULL_TREE if no simplification can
7381 fold_builtin_bitop (tree fndecl
, tree arg
)
7383 if (!validate_arg (arg
, INTEGER_TYPE
))
7386 /* Optimize for constant argument. */
7387 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7389 tree type
= TREE_TYPE (arg
);
7392 switch (DECL_FUNCTION_CODE (fndecl
))
7394 CASE_INT_FN (BUILT_IN_FFS
):
7395 result
= wi::ffs (arg
);
7398 CASE_INT_FN (BUILT_IN_CLZ
):
7399 if (wi::ne_p (arg
, 0))
7400 result
= wi::clz (arg
);
7401 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7402 result
= TYPE_PRECISION (type
);
7405 CASE_INT_FN (BUILT_IN_CTZ
):
7406 if (wi::ne_p (arg
, 0))
7407 result
= wi::ctz (arg
);
7408 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7409 result
= TYPE_PRECISION (type
);
7412 CASE_INT_FN (BUILT_IN_CLRSB
):
7413 result
= wi::clrsb (arg
);
7416 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7417 result
= wi::popcount (arg
);
7420 CASE_INT_FN (BUILT_IN_PARITY
):
7421 result
= wi::parity (arg
);
7428 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
7434 /* Fold function call to builtin_bswap and the short, long and long long
7435 variants. Return NULL_TREE if no simplification can be made. */
7437 fold_builtin_bswap (tree fndecl
, tree arg
)
7439 if (! validate_arg (arg
, INTEGER_TYPE
))
7442 /* Optimize constant value. */
7443 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7445 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7447 switch (DECL_FUNCTION_CODE (fndecl
))
7449 case BUILT_IN_BSWAP16
:
7450 case BUILT_IN_BSWAP32
:
7451 case BUILT_IN_BSWAP64
:
7453 signop sgn
= TYPE_SIGN (type
);
7455 wide_int_to_tree (type
,
7456 wide_int::from (arg
, TYPE_PRECISION (type
),
7468 /* Fold a builtin function call to pow, powf, or powl. Return
7469 NULL_TREE if no simplification can be made. */
7471 fold_const_builtin_pow (tree arg0
, tree arg1
, tree type
)
7475 if (!validate_arg (arg0
, REAL_TYPE
)
7476 || !validate_arg (arg1
, REAL_TYPE
))
7479 /* Calculate the result when the argument is a constant. */
7480 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
7483 /* Check for an integer exponent. */
7484 if (TREE_CODE (arg0
) == REAL_CST
7485 && !TREE_OVERFLOW (arg0
)
7486 && TREE_CODE (arg1
) == REAL_CST
7487 && !TREE_OVERFLOW (arg1
))
7489 REAL_VALUE_TYPE cint1
;
7490 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (arg0
);
7491 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (arg1
);
7492 HOST_WIDE_INT n1
= real_to_integer (c1
);
7493 real_from_integer (&cint1
, VOIDmode
, n1
, SIGNED
);
7494 /* Attempt to evaluate pow at compile-time, unless this should
7495 raise an exception. */
7496 if (real_identical (c1
, &cint1
)
7498 || (!flag_trapping_math
&& !flag_errno_math
)
7499 || !real_equal (c0
, &dconst0
)))
7502 bool inexact
= real_powi (&x
, TYPE_MODE (type
), c0
, n1
);
7503 if (flag_unsafe_math_optimizations
|| !inexact
)
7504 return build_real (type
, x
);
7511 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7512 arguments to the call, and TYPE is its return type.
7513 Return NULL_TREE if no simplification can be made. */
7516 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
7518 if (!validate_arg (arg1
, POINTER_TYPE
)
7519 || !validate_arg (arg2
, INTEGER_TYPE
)
7520 || !validate_arg (len
, INTEGER_TYPE
))
7526 if (TREE_CODE (arg2
) != INTEGER_CST
7527 || !tree_fits_uhwi_p (len
))
7530 p1
= c_getstr (arg1
);
7531 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
7537 if (target_char_cast (arg2
, &c
))
7540 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
7543 return build_int_cst (TREE_TYPE (arg1
), 0);
7545 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
7546 return fold_convert_loc (loc
, type
, tem
);
7552 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7553 Return NULL_TREE if no simplification can be made. */
7556 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
7558 const char *p1
, *p2
;
7560 if (!validate_arg (arg1
, POINTER_TYPE
)
7561 || !validate_arg (arg2
, POINTER_TYPE
)
7562 || !validate_arg (len
, INTEGER_TYPE
))
7565 /* If the LEN parameter is zero, return zero. */
7566 if (integer_zerop (len
))
7567 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
7570 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7571 if (operand_equal_p (arg1
, arg2
, 0))
7572 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
7574 p1
= c_getstr (arg1
);
7575 p2
= c_getstr (arg2
);
7577 /* If all arguments are constant, and the value of len is not greater
7578 than the lengths of arg1 and arg2, evaluate at compile-time. */
7579 if (tree_fits_uhwi_p (len
) && p1
&& p2
7580 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
7581 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
7583 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
7586 return integer_one_node
;
7588 return integer_minus_one_node
;
7590 return integer_zero_node
;
7593 /* If len parameter is one, return an expression corresponding to
7594 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7595 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
7597 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7598 tree cst_uchar_ptr_node
7599 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7602 = fold_convert_loc (loc
, integer_type_node
,
7603 build1 (INDIRECT_REF
, cst_uchar_node
,
7604 fold_convert_loc (loc
,
7608 = fold_convert_loc (loc
, integer_type_node
,
7609 build1 (INDIRECT_REF
, cst_uchar_node
,
7610 fold_convert_loc (loc
,
7613 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
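
  /* Illustrative sketch (assumption, not GCC source): with a length of one,

       memcmp (p, q, 1)

     is folded to the byte difference

       (int) *(const unsigned char *) p - (int) *(const unsigned char *) q
  */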
7619 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7620 Return NULL_TREE if no simplification can be made. */
7623 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
7625 const char *p1
, *p2
;
7627 if (!validate_arg (arg1
, POINTER_TYPE
)
7628 || !validate_arg (arg2
, POINTER_TYPE
))
7631 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7632 if (operand_equal_p (arg1
, arg2
, 0))
7633 return integer_zero_node
;
7635 p1
= c_getstr (arg1
);
7636 p2
= c_getstr (arg2
);
7640 const int i
= strcmp (p1
, p2
);
7642 return integer_minus_one_node
;
7644 return integer_one_node
;
7646 return integer_zero_node
;
7649 /* If the second arg is "", return *(const unsigned char*)arg1. */
7650 if (p2
&& *p2
== '\0')
7652 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7653 tree cst_uchar_ptr_node
7654 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7656 return fold_convert_loc (loc
, integer_type_node
,
7657 build1 (INDIRECT_REF
, cst_uchar_node
,
7658 fold_convert_loc (loc
,
7663 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7664 if (p1
&& *p1
== '\0')
7666 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7667 tree cst_uchar_ptr_node
7668 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7671 = fold_convert_loc (loc
, integer_type_node
,
7672 build1 (INDIRECT_REF
, cst_uchar_node
,
7673 fold_convert_loc (loc
,
7676 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
7682 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7683 Return NULL_TREE if no simplification can be made. */
7686 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
7688 const char *p1
, *p2
;
7690 if (!validate_arg (arg1
, POINTER_TYPE
)
7691 || !validate_arg (arg2
, POINTER_TYPE
)
7692 || !validate_arg (len
, INTEGER_TYPE
))
7695 /* If the LEN parameter is zero, return zero. */
7696 if (integer_zerop (len
))
7697 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
7700 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7701 if (operand_equal_p (arg1
, arg2
, 0))
7702 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
7704 p1
= c_getstr (arg1
);
7705 p2
= c_getstr (arg2
);
7707 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
7709 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
7711 return integer_one_node
;
7713 return integer_minus_one_node
;
7715 return integer_zero_node
;
7718 /* If the second arg is "", and the length is greater than zero,
7719 return *(const unsigned char*)arg1. */
7720 if (p2
&& *p2
== '\0'
7721 && TREE_CODE (len
) == INTEGER_CST
7722 && tree_int_cst_sgn (len
) == 1)
7724 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7725 tree cst_uchar_ptr_node
7726 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7728 return fold_convert_loc (loc
, integer_type_node
,
7729 build1 (INDIRECT_REF
, cst_uchar_node
,
7730 fold_convert_loc (loc
,
7735 /* If the first arg is "", and the length is greater than zero,
7736 return -*(const unsigned char*)arg2. */
7737 if (p1
&& *p1
== '\0'
7738 && TREE_CODE (len
) == INTEGER_CST
7739 && tree_int_cst_sgn (len
) == 1)
7741 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7742 tree cst_uchar_ptr_node
7743 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7745 tree temp
= fold_convert_loc (loc
, integer_type_node
,
7746 build1 (INDIRECT_REF
, cst_uchar_node
,
7747 fold_convert_loc (loc
,
7750 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
7753 /* If len parameter is one, return an expression corresponding to
7754 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7755 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
7757 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7758 tree cst_uchar_ptr_node
7759 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7761 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
7762 build1 (INDIRECT_REF
, cst_uchar_node
,
7763 fold_convert_loc (loc
,
7766 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
7767 build1 (INDIRECT_REF
, cst_uchar_node
,
7768 fold_convert_loc (loc
,
7771 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
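/* Editor's note (illustrative example, not part of the original source):
   assuming the target character set maps '0' to 0x30, the fold above
   turns

       isdigit (c)

   into roughly

       (unsigned) c - 0x30 <= 9

   i.e. a single unsigned comparison rather than a ctype table lookup
   or a library call.  */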
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}

/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  /* ??? Only expand to FMA_EXPR if it's directly supported.  */
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE)
      && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
    return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
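/* Editor's note (illustrative example, not part of the original source):
   for a call such as carg (z) with z of type _Complex double, the fold
   above saves z once and rewrites the call to roughly

       atan2 (__imag__ z, __real__ z)

   using whatever atan2 variant mathfn_built_in returns for the type.  */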
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_const_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_const_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_const_builtin_load_exponent (tree arg0, tree arg1,
				  tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (real_equal (&initial_result, &trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
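/* Editor's note (illustrative worked example, not part of the original
   source): with constant operands the folder above evaluates the result
   at compile time, e.g.

       ldexp (1.5, 4)    ->  24.0   (1.5 * 2**4)
       scalbn (3.0, -1)  ->  1.5    (only when the type's radix is 2)

   and it gives up (returns NULL_TREE) when the requested adjustment is
   outside the exponent range or the truncated result does not compare
   equal to the exact one.  */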
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
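/* Editor's note (illustrative worked example, not part of the original
   source): for a constant argument the fold above builds a COMPOUND_EXPR,
   so a call such as

       frac = modf (2.75, &ip);

   becomes roughly (*&ip = 2.0, 0.75), and modf (-3.0, &ip) yields a
   fractional part of -0.0, as required for negative integral inputs.  */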
8159 /* Given a location LOC, an interclass builtin function decl FNDECL
8160 and its single argument ARG, return an folded expression computing
8161 the same, or NULL_TREE if we either couldn't or didn't want to fold
8162 (the latter happen if there's an RTL instruction available). */
8165 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
8169 if (!validate_arg (arg
, REAL_TYPE
))
8172 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
8175 mode
= TYPE_MODE (TREE_TYPE (arg
));
8177 /* If there is no optab, try generic code. */
8178 switch (DECL_FUNCTION_CODE (fndecl
))
8182 CASE_FLT_FN (BUILT_IN_ISINF
):
8184 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8185 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8186 tree
const type
= TREE_TYPE (arg
);
8190 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8191 real_from_string (&r
, buf
);
8192 result
= build_call_expr (isgr_fn
, 2,
8193 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8194 build_real (type
, r
));
8197 CASE_FLT_FN (BUILT_IN_FINITE
):
8198 case BUILT_IN_ISFINITE
:
8200 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8201 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8202 tree
const type
= TREE_TYPE (arg
);
8206 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8207 real_from_string (&r
, buf
);
8208 result
= build_call_expr (isle_fn
, 2,
8209 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8210 build_real (type
, r
));
8211 /*result = fold_build2_loc (loc, UNGT_EXPR,
8212 TREE_TYPE (TREE_TYPE (fndecl)),
8213 fold_build1_loc (loc, ABS_EXPR, type, arg),
8214 build_real (type, r));
8215 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8216 TREE_TYPE (TREE_TYPE (fndecl)),
8220 case BUILT_IN_ISNORMAL
:
8222 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8223 islessequal(fabs(x),DBL_MAX). */
8224 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8225 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
8226 tree
const type
= TREE_TYPE (arg
);
8227 REAL_VALUE_TYPE rmax
, rmin
;
8230 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8231 real_from_string (&rmax
, buf
);
8232 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
8233 real_from_string (&rmin
, buf
);
8234 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
8235 result
= build_call_expr (isle_fn
, 2, arg
,
8236 build_real (type
, rmax
));
8237 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
8238 build_call_expr (isge_fn
, 2, arg
,
8239 build_real (type
, rmin
)));
8249 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8250 ARG is the argument for the call. */
8253 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
8255 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8258 if (!validate_arg (arg
, REAL_TYPE
))
8261 switch (builtin_index
)
8263 case BUILT_IN_ISINF
:
8264 if (!HONOR_INFINITIES (arg
))
8265 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8267 if (TREE_CODE (arg
) == REAL_CST
)
8269 r
= TREE_REAL_CST (arg
);
8270 if (real_isinf (&r
))
8271 return real_compare (GT_EXPR
, &r
, &dconst0
)
8272 ? integer_one_node
: integer_minus_one_node
;
8274 return integer_zero_node
;
8279 case BUILT_IN_ISINF_SIGN
:
8281 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8282 /* In a boolean context, GCC will fold the inner COND_EXPR to
8283 1. So e.g. "if (isinf_sign(x))" would be folded to just
8284 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8285 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
8286 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
8287 tree tmp
= NULL_TREE
;
8289 arg
= builtin_save_expr (arg
);
8291 if (signbit_fn
&& isinf_fn
)
8293 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
8294 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
8296 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
8297 signbit_call
, integer_zero_node
);
8298 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
8299 isinf_call
, integer_zero_node
);
8301 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
8302 integer_minus_one_node
, integer_one_node
);
8303 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8311 case BUILT_IN_ISFINITE
:
8312 if (!HONOR_NANS (arg
)
8313 && !HONOR_INFINITIES (arg
))
8314 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
8316 if (TREE_CODE (arg
) == REAL_CST
)
8318 r
= TREE_REAL_CST (arg
);
8319 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
8324 case BUILT_IN_ISNAN
:
8325 if (!HONOR_NANS (arg
))
8326 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8328 if (TREE_CODE (arg
) == REAL_CST
)
8330 r
= TREE_REAL_CST (arg
);
8331 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
8334 arg
= builtin_save_expr (arg
);
8335 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
8342 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8343 This builtin will generate code to return the appropriate floating
8344 point classification depending on the value of the floating point
8345 number passed in. The possible return values must be supplied as
8346 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8347 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8348 one floating point argument which is "type generic". */
8351 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
8353 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
8354 arg
, type
, res
, tmp
;
8359 /* Verify the required arguments in the original call. */
8361 || !validate_arg (args
[0], INTEGER_TYPE
)
8362 || !validate_arg (args
[1], INTEGER_TYPE
)
8363 || !validate_arg (args
[2], INTEGER_TYPE
)
8364 || !validate_arg (args
[3], INTEGER_TYPE
)
8365 || !validate_arg (args
[4], INTEGER_TYPE
)
8366 || !validate_arg (args
[5], REAL_TYPE
))
8370 fp_infinite
= args
[1];
8371 fp_normal
= args
[2];
8372 fp_subnormal
= args
[3];
8375 type
= TREE_TYPE (arg
);
8376 mode
= TYPE_MODE (type
);
8377 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
8381 (fabs(x) == Inf ? FP_INFINITE :
8382 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8383 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8385 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8386 build_real (type
, dconst0
));
8387 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8388 tmp
, fp_zero
, fp_subnormal
);
8390 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
8391 real_from_string (&r
, buf
);
8392 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
8393 arg
, build_real (type
, r
));
8394 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
8396 if (HONOR_INFINITIES (mode
))
8399 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8400 build_real (type
, r
));
8401 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
8405 if (HONOR_NANS (mode
))
8407 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
8408 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
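/* Editor's note (illustrative example, not part of the original source):
   for __builtin_isgreater (x, y) the caller passes UNLE_EXPR/LE_EXPR, so
   when NaNs are honored the call folds to roughly the negation of an
   unordered-or-less-equal comparison,

       ! (x UNLE y)

   which does not raise the invalid exception on quiet NaN operands;
   when the mode cannot hold NaNs the plain LE_EXPR form is used
   instead.  */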
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
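/* Editor's note (illustrative example, not part of the original source):
   a call such as

       bool ovf = __builtin_add_overflow (a, b, &res);

   is folded above into roughly

       tmp  = .ADD_OVERFLOW (a, b);          (complex integer result)
       *&res = REALPART_EXPR of tmp;
       ovf   = (bool) IMAGPART_EXPR of tmp;

   with the store and the flag chained together via MODIFY_EXPR and
   COMPOUND_EXPR as built at the end of the function.  */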
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
8547 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8548 This function returns NULL_TREE if no simplification was possible. */
8551 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
8553 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8554 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8557 case BUILT_IN_CONSTANT_P
:
8559 tree val
= fold_builtin_constant_p (arg0
);
8561 /* Gimplification will pull the CALL_EXPR for the builtin out of
8562 an if condition. When not optimizing, we'll not CSE it back.
8563 To avoid link error types of regressions, return false now. */
8564 if (!val
&& !optimize
)
8565 val
= integer_zero_node
;
8570 case BUILT_IN_CLASSIFY_TYPE
:
8571 return fold_builtin_classify_type (arg0
);
8573 case BUILT_IN_STRLEN
:
8574 return fold_builtin_strlen (loc
, type
, arg0
);
8576 CASE_FLT_FN (BUILT_IN_FABS
):
8577 case BUILT_IN_FABSD32
:
8578 case BUILT_IN_FABSD64
:
8579 case BUILT_IN_FABSD128
:
8580 return fold_builtin_fabs (loc
, arg0
, type
);
8584 case BUILT_IN_LLABS
:
8585 case BUILT_IN_IMAXABS
:
8586 return fold_builtin_abs (loc
, arg0
, type
);
8588 CASE_FLT_FN (BUILT_IN_CONJ
):
8589 if (validate_arg (arg0
, COMPLEX_TYPE
)
8590 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8591 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
8594 CASE_FLT_FN (BUILT_IN_CREAL
):
8595 if (validate_arg (arg0
, COMPLEX_TYPE
)
8596 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8597 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
8600 CASE_FLT_FN (BUILT_IN_CIMAG
):
8601 if (validate_arg (arg0
, COMPLEX_TYPE
)
8602 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8603 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
8606 CASE_FLT_FN (BUILT_IN_CCOS
):
8607 if (validate_arg (arg0
, COMPLEX_TYPE
)
8608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8609 return do_mpc_arg1 (arg0
, type
, mpc_cos
);
8612 CASE_FLT_FN (BUILT_IN_CCOSH
):
8613 if (validate_arg (arg0
, COMPLEX_TYPE
)
8614 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8615 return do_mpc_arg1 (arg0
, type
, mpc_cosh
);
8618 CASE_FLT_FN (BUILT_IN_CPROJ
):
8619 if (TREE_CODE (arg0
) == COMPLEX_CST
8620 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8622 const REAL_VALUE_TYPE
*real
8623 = TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
8624 const REAL_VALUE_TYPE
*imag
8625 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
8627 if (real_isinf (real
) || real_isinf (imag
))
8628 return build_complex_inf (type
, imag
->sign
);
8634 CASE_FLT_FN (BUILT_IN_CSIN
):
8635 if (validate_arg (arg0
, COMPLEX_TYPE
)
8636 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8637 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
8640 CASE_FLT_FN (BUILT_IN_CSINH
):
8641 if (validate_arg (arg0
, COMPLEX_TYPE
)
8642 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8643 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
8646 CASE_FLT_FN (BUILT_IN_CTAN
):
8647 if (validate_arg (arg0
, COMPLEX_TYPE
)
8648 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8649 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
8652 CASE_FLT_FN (BUILT_IN_CTANH
):
8653 if (validate_arg (arg0
, COMPLEX_TYPE
)
8654 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8655 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
8658 CASE_FLT_FN (BUILT_IN_CLOG
):
8659 if (validate_arg (arg0
, COMPLEX_TYPE
)
8660 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8661 return do_mpc_arg1 (arg0
, type
, mpc_log
);
8664 CASE_FLT_FN (BUILT_IN_CSQRT
):
8665 if (validate_arg (arg0
, COMPLEX_TYPE
)
8666 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8667 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
8670 CASE_FLT_FN (BUILT_IN_CASIN
):
8671 if (validate_arg (arg0
, COMPLEX_TYPE
)
8672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8673 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
8676 CASE_FLT_FN (BUILT_IN_CACOS
):
8677 if (validate_arg (arg0
, COMPLEX_TYPE
)
8678 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8679 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
8682 CASE_FLT_FN (BUILT_IN_CATAN
):
8683 if (validate_arg (arg0
, COMPLEX_TYPE
)
8684 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8685 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
8688 CASE_FLT_FN (BUILT_IN_CASINH
):
8689 if (validate_arg (arg0
, COMPLEX_TYPE
)
8690 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8691 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
8694 CASE_FLT_FN (BUILT_IN_CACOSH
):
8695 if (validate_arg (arg0
, COMPLEX_TYPE
)
8696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8697 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
8700 CASE_FLT_FN (BUILT_IN_CATANH
):
8701 if (validate_arg (arg0
, COMPLEX_TYPE
)
8702 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8703 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
8706 CASE_FLT_FN (BUILT_IN_CABS
):
8707 if (TREE_CODE (arg0
) == COMPLEX_CST
8708 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8709 return do_mpfr_arg2 (TREE_REALPART (arg0
), TREE_IMAGPART (arg0
),
8713 CASE_FLT_FN (BUILT_IN_CARG
):
8714 return fold_builtin_carg (loc
, arg0
, type
);
8716 CASE_FLT_FN (BUILT_IN_SQRT
):
8717 if (validate_arg (arg0
, REAL_TYPE
))
8718 return do_mpfr_arg1 (arg0
, type
, mpfr_sqrt
, &dconst0
, NULL
, true);
8721 CASE_FLT_FN (BUILT_IN_CBRT
):
8722 if (validate_arg (arg0
, REAL_TYPE
))
8723 return do_mpfr_arg1 (arg0
, type
, mpfr_cbrt
, NULL
, NULL
, 0);
8726 CASE_FLT_FN (BUILT_IN_ASIN
):
8727 if (validate_arg (arg0
, REAL_TYPE
))
8728 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
8729 &dconstm1
, &dconst1
, true);
8732 CASE_FLT_FN (BUILT_IN_ACOS
):
8733 if (validate_arg (arg0
, REAL_TYPE
))
8734 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
8735 &dconstm1
, &dconst1
, true);
8738 CASE_FLT_FN (BUILT_IN_ATAN
):
8739 if (validate_arg (arg0
, REAL_TYPE
))
8740 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
8743 CASE_FLT_FN (BUILT_IN_ASINH
):
8744 if (validate_arg (arg0
, REAL_TYPE
))
8745 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
8748 CASE_FLT_FN (BUILT_IN_ACOSH
):
8749 if (validate_arg (arg0
, REAL_TYPE
))
8750 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
8751 &dconst1
, NULL
, true);
8754 CASE_FLT_FN (BUILT_IN_ATANH
):
8755 if (validate_arg (arg0
, REAL_TYPE
))
8756 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
8757 &dconstm1
, &dconst1
, false);
8760 CASE_FLT_FN (BUILT_IN_SIN
):
8761 if (validate_arg (arg0
, REAL_TYPE
))
8762 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
8765 CASE_FLT_FN (BUILT_IN_COS
):
8766 if (validate_arg (arg0
, REAL_TYPE
))
8767 return do_mpfr_arg1 (arg0
, type
, mpfr_cos
, NULL
, NULL
, 0);
8770 CASE_FLT_FN (BUILT_IN_TAN
):
8771 if (validate_arg (arg0
, REAL_TYPE
))
8772 return do_mpfr_arg1 (arg0
, type
, mpfr_tan
, NULL
, NULL
, 0);
8775 CASE_FLT_FN (BUILT_IN_CEXP
):
8776 if (validate_arg (arg0
, COMPLEX_TYPE
)
8777 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8778 return do_mpc_arg1 (arg0
, type
, mpc_exp
);
8781 CASE_FLT_FN (BUILT_IN_CEXPI
):
8782 if (validate_arg (arg0
, REAL_TYPE
))
8783 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
8786 CASE_FLT_FN (BUILT_IN_SINH
):
8787 if (validate_arg (arg0
, REAL_TYPE
))
8788 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
8791 CASE_FLT_FN (BUILT_IN_COSH
):
8792 if (validate_arg (arg0
, REAL_TYPE
))
8793 return do_mpfr_arg1 (arg0
, type
, mpfr_cosh
, NULL
, NULL
, 0);
8796 CASE_FLT_FN (BUILT_IN_TANH
):
8797 if (validate_arg (arg0
, REAL_TYPE
))
8798 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
8801 CASE_FLT_FN (BUILT_IN_ERF
):
8802 if (validate_arg (arg0
, REAL_TYPE
))
8803 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
8806 CASE_FLT_FN (BUILT_IN_ERFC
):
8807 if (validate_arg (arg0
, REAL_TYPE
))
8808 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
8811 CASE_FLT_FN (BUILT_IN_TGAMMA
):
8812 if (validate_arg (arg0
, REAL_TYPE
))
8813 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
8816 CASE_FLT_FN (BUILT_IN_EXP
):
8817 if (validate_arg (arg0
, REAL_TYPE
))
8818 return do_mpfr_arg1 (arg0
, type
, mpfr_exp
, NULL
, NULL
, 0);
8821 CASE_FLT_FN (BUILT_IN_EXP2
):
8822 if (validate_arg (arg0
, REAL_TYPE
))
8823 return do_mpfr_arg1 (arg0
, type
, mpfr_exp2
, NULL
, NULL
, 0);
8826 CASE_FLT_FN (BUILT_IN_EXP10
):
8827 CASE_FLT_FN (BUILT_IN_POW10
):
8828 if (validate_arg (arg0
, REAL_TYPE
))
8829 return do_mpfr_arg1 (arg0
, type
, mpfr_exp10
, NULL
, NULL
, 0);
8832 CASE_FLT_FN (BUILT_IN_EXPM1
):
8833 if (validate_arg (arg0
, REAL_TYPE
))
8834 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
8837 CASE_FLT_FN (BUILT_IN_LOG
):
8838 if (validate_arg (arg0
, REAL_TYPE
))
8839 return do_mpfr_arg1 (arg0
, type
, mpfr_log
, &dconst0
, NULL
, false);
8842 CASE_FLT_FN (BUILT_IN_LOG2
):
8843 if (validate_arg (arg0
, REAL_TYPE
))
8844 return do_mpfr_arg1 (arg0
, type
, mpfr_log2
, &dconst0
, NULL
, false);
8847 CASE_FLT_FN (BUILT_IN_LOG10
):
8848 if (validate_arg (arg0
, REAL_TYPE
))
8849 return do_mpfr_arg1 (arg0
, type
, mpfr_log10
, &dconst0
, NULL
, false);
8852 CASE_FLT_FN (BUILT_IN_LOG1P
):
8853 if (validate_arg (arg0
, REAL_TYPE
))
8854 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
8855 &dconstm1
, NULL
, false);
8858 CASE_FLT_FN (BUILT_IN_J0
):
8859 if (validate_arg (arg0
, REAL_TYPE
))
8860 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
8864 CASE_FLT_FN (BUILT_IN_J1
):
8865 if (validate_arg (arg0
, REAL_TYPE
))
8866 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
8870 CASE_FLT_FN (BUILT_IN_Y0
):
8871 if (validate_arg (arg0
, REAL_TYPE
))
8872 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
8873 &dconst0
, NULL
, false);
8876 CASE_FLT_FN (BUILT_IN_Y1
):
8877 if (validate_arg (arg0
, REAL_TYPE
))
8878 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
8879 &dconst0
, NULL
, false);
8882 CASE_FLT_FN (BUILT_IN_NAN
):
8883 case BUILT_IN_NAND32
:
8884 case BUILT_IN_NAND64
:
8885 case BUILT_IN_NAND128
:
8886 return fold_builtin_nan (arg0
, type
, true);
8888 CASE_FLT_FN (BUILT_IN_NANS
):
8889 return fold_builtin_nan (arg0
, type
, false);
8891 CASE_FLT_FN (BUILT_IN_FLOOR
):
8892 if (TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
))
8894 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg0
);
8895 if (!REAL_VALUE_ISNAN (x
) || !flag_errno_math
)
8897 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8899 real_floor (&r
, TYPE_MODE (type
), &x
);
8900 return build_real (type
, r
);
8905 CASE_FLT_FN (BUILT_IN_CEIL
):
8906 if (TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
))
8908 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg0
);
8909 if (!REAL_VALUE_ISNAN (x
) || !flag_errno_math
)
8911 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8913 real_ceil (&r
, TYPE_MODE (type
), &x
);
8914 return build_real (type
, r
);
8919 CASE_FLT_FN (BUILT_IN_TRUNC
):
8920 if (TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
))
8922 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg0
);
8924 real_trunc (&r
, TYPE_MODE (type
), &x
);
8925 return build_real (type
, r
);
8929 CASE_FLT_FN (BUILT_IN_ROUND
):
8930 if (TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
))
8932 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg0
);
8933 if (!REAL_VALUE_ISNAN (x
) || !flag_errno_math
)
8935 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8937 real_round (&r
, TYPE_MODE (type
), &x
);
8938 return build_real (type
, r
);
8943 CASE_FLT_FN (BUILT_IN_ICEIL
):
8944 CASE_FLT_FN (BUILT_IN_LCEIL
):
8945 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8946 return do_real_to_int_conversion (type
, arg0
, real_ceil
);
8948 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8949 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8950 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8951 return do_real_to_int_conversion (type
, arg0
, real_floor
);
8953 CASE_FLT_FN (BUILT_IN_IROUND
):
8954 CASE_FLT_FN (BUILT_IN_LROUND
):
8955 CASE_FLT_FN (BUILT_IN_LLROUND
):
8956 return do_real_to_int_conversion (type
, arg0
, real_round
);
8958 CASE_FLT_FN (BUILT_IN_IRINT
):
8959 CASE_FLT_FN (BUILT_IN_LRINT
):
8960 CASE_FLT_FN (BUILT_IN_LLRINT
):
8961 /* Not yet folded to a constant. */
8964 case BUILT_IN_BSWAP16
:
8965 case BUILT_IN_BSWAP32
:
8966 case BUILT_IN_BSWAP64
:
8967 return fold_builtin_bswap (fndecl
, arg0
);
8969 CASE_INT_FN (BUILT_IN_FFS
):
8970 CASE_INT_FN (BUILT_IN_CLZ
):
8971 CASE_INT_FN (BUILT_IN_CTZ
):
8972 CASE_INT_FN (BUILT_IN_CLRSB
):
8973 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8974 CASE_INT_FN (BUILT_IN_PARITY
):
8975 return fold_builtin_bitop (fndecl
, arg0
);
8977 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
8978 if (TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
))
8979 return (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
))
8980 ? build_one_cst (type
)
8981 : build_zero_cst (type
));
8984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
8985 return fold_const_builtin_significand (loc
, arg0
, type
);
8987 CASE_FLT_FN (BUILT_IN_ILOGB
):
8988 CASE_FLT_FN (BUILT_IN_LOGB
):
8989 return fold_const_builtin_logb (loc
, arg0
, type
);
8991 case BUILT_IN_ISASCII
:
8992 return fold_builtin_isascii (loc
, arg0
);
8994 case BUILT_IN_TOASCII
:
8995 return fold_builtin_toascii (loc
, arg0
);
8997 case BUILT_IN_ISDIGIT
:
8998 return fold_builtin_isdigit (loc
, arg0
);
9000 CASE_FLT_FN (BUILT_IN_FINITE
):
9001 case BUILT_IN_FINITED32
:
9002 case BUILT_IN_FINITED64
:
9003 case BUILT_IN_FINITED128
:
9004 case BUILT_IN_ISFINITE
:
9006 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
9009 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9012 CASE_FLT_FN (BUILT_IN_ISINF
):
9013 case BUILT_IN_ISINFD32
:
9014 case BUILT_IN_ISINFD64
:
9015 case BUILT_IN_ISINFD128
:
9017 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
9020 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9023 case BUILT_IN_ISNORMAL
:
9024 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9026 case BUILT_IN_ISINF_SIGN
:
9027 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
9029 CASE_FLT_FN (BUILT_IN_ISNAN
):
9030 case BUILT_IN_ISNAND32
:
9031 case BUILT_IN_ISNAND64
:
9032 case BUILT_IN_ISNAND128
:
9033 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
9036 if (integer_zerop (arg0
))
9037 return build_empty_stmt (loc
);
9048 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9049 This function returns NULL_TREE if no simplification was possible. */
9052 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
9054 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9055 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9059 CASE_FLT_FN (BUILT_IN_JN
):
9060 if (validate_arg (arg0
, INTEGER_TYPE
)
9061 && validate_arg (arg1
, REAL_TYPE
))
9062 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
9065 CASE_FLT_FN (BUILT_IN_YN
):
9066 if (validate_arg (arg0
, INTEGER_TYPE
)
9067 && validate_arg (arg1
, REAL_TYPE
))
9068 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
9072 CASE_FLT_FN (BUILT_IN_DREM
):
9073 CASE_FLT_FN (BUILT_IN_REMAINDER
):
9074 if (validate_arg (arg0
, REAL_TYPE
)
9075 && validate_arg (arg1
, REAL_TYPE
))
9076 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
9079 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9080 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9081 if (validate_arg (arg0
, REAL_TYPE
)
9082 && validate_arg (arg1
, POINTER_TYPE
))
9083 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9086 CASE_FLT_FN (BUILT_IN_ATAN2
):
9087 if (validate_arg (arg0
, REAL_TYPE
)
9088 && validate_arg (arg1
, REAL_TYPE
))
9089 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
9092 CASE_FLT_FN (BUILT_IN_FDIM
):
9093 if (validate_arg (arg0
, REAL_TYPE
)
9094 && validate_arg (arg1
, REAL_TYPE
))
9095 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
9098 CASE_FLT_FN (BUILT_IN_HYPOT
):
9099 if (validate_arg (arg0
, REAL_TYPE
)
9100 && validate_arg (arg1
, REAL_TYPE
))
9101 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
);
9104 CASE_FLT_FN (BUILT_IN_CPOW
):
9105 if (validate_arg (arg0
, COMPLEX_TYPE
)
9106 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
9107 && validate_arg (arg1
, COMPLEX_TYPE
)
9108 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
9109 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
9112 CASE_FLT_FN (BUILT_IN_LDEXP
):
9113 return fold_const_builtin_load_exponent (arg0
, arg1
, type
,
9115 CASE_FLT_FN (BUILT_IN_SCALBN
):
9116 CASE_FLT_FN (BUILT_IN_SCALBLN
):
9117 return fold_const_builtin_load_exponent (arg0
, arg1
, type
,
9120 CASE_FLT_FN (BUILT_IN_FREXP
):
9121 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
9123 CASE_FLT_FN (BUILT_IN_MODF
):
9124 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
9126 case BUILT_IN_STRSTR
:
9127 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
9129 case BUILT_IN_STRSPN
:
9130 return fold_builtin_strspn (loc
, arg0
, arg1
);
9132 case BUILT_IN_STRCSPN
:
9133 return fold_builtin_strcspn (loc
, arg0
, arg1
);
9135 case BUILT_IN_STRCHR
:
9136 case BUILT_IN_INDEX
:
9137 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
9139 case BUILT_IN_STRRCHR
:
9140 case BUILT_IN_RINDEX
:
9141 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
9143 case BUILT_IN_STRCMP
:
9144 return fold_builtin_strcmp (loc
, arg0
, arg1
);
9146 case BUILT_IN_STRPBRK
:
9147 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
9149 case BUILT_IN_EXPECT
:
9150 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
9152 CASE_FLT_FN (BUILT_IN_POW
):
9153 return fold_const_builtin_pow (arg0
, arg1
, type
);
9155 CASE_FLT_FN (BUILT_IN_POWI
):
9156 if (TREE_CODE (arg0
) == REAL_CST
9157 && !TREE_OVERFLOW (arg0
)
9158 && tree_fits_shwi_p (arg1
))
9160 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
9162 real_powi (&x
, TYPE_MODE (type
), TREE_REAL_CST_PTR (arg0
), c
);
9163 return build_real (type
, x
);
9167 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
9168 if (TREE_CODE (arg0
) == REAL_CST
9169 && TREE_CODE (arg1
) == REAL_CST
9170 && !TREE_OVERFLOW (arg0
)
9171 && !TREE_OVERFLOW (arg1
))
9173 REAL_VALUE_TYPE c1
= TREE_REAL_CST (arg0
);
9174 real_copysign (&c1
, TREE_REAL_CST_PTR (arg1
));
9175 return build_real (type
, c1
);
9179 CASE_FLT_FN (BUILT_IN_FMIN
):
9180 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9181 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_min
);
9184 CASE_FLT_FN (BUILT_IN_FMAX
):
9185 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9186 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_max
);
9189 case BUILT_IN_ISGREATER
:
9190 return fold_builtin_unordered_cmp (loc
, fndecl
,
9191 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9192 case BUILT_IN_ISGREATEREQUAL
:
9193 return fold_builtin_unordered_cmp (loc
, fndecl
,
9194 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9195 case BUILT_IN_ISLESS
:
9196 return fold_builtin_unordered_cmp (loc
, fndecl
,
9197 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9198 case BUILT_IN_ISLESSEQUAL
:
9199 return fold_builtin_unordered_cmp (loc
, fndecl
,
9200 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9201 case BUILT_IN_ISLESSGREATER
:
9202 return fold_builtin_unordered_cmp (loc
, fndecl
,
9203 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9204 case BUILT_IN_ISUNORDERED
:
9205 return fold_builtin_unordered_cmp (loc
, fndecl
,
9206 arg0
, arg1
, UNORDERED_EXPR
,
9209 /* We do the folding for va_start in the expander. */
9210 case BUILT_IN_VA_START
:
9213 case BUILT_IN_OBJECT_SIZE
:
9214 return fold_builtin_object_size (arg0
, arg1
);
9216 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9217 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
9219 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9220 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
9228 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9230 This function returns NULL_TREE if no simplification was possible. */
9233 fold_builtin_3 (location_t loc
, tree fndecl
,
9234 tree arg0
, tree arg1
, tree arg2
)
9236 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9237 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9241 CASE_FLT_FN (BUILT_IN_SINCOS
):
9242 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
9244 CASE_FLT_FN (BUILT_IN_FMA
):
9245 if (tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
))
9247 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
9249 CASE_FLT_FN (BUILT_IN_REMQUO
):
9250 if (validate_arg (arg0
, REAL_TYPE
)
9251 && validate_arg (arg1
, REAL_TYPE
)
9252 && validate_arg (arg2
, POINTER_TYPE
))
9253 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9256 case BUILT_IN_STRNCMP
:
9257 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
9259 case BUILT_IN_MEMCHR
:
9260 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
9263 case BUILT_IN_MEMCMP
:
9264 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
9266 case BUILT_IN_EXPECT
:
9267 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
9269 case BUILT_IN_ADD_OVERFLOW
:
9270 case BUILT_IN_SUB_OVERFLOW
:
9271 case BUILT_IN_MUL_OVERFLOW
:
9272 case BUILT_IN_SADD_OVERFLOW
:
9273 case BUILT_IN_SADDL_OVERFLOW
:
9274 case BUILT_IN_SADDLL_OVERFLOW
:
9275 case BUILT_IN_SSUB_OVERFLOW
:
9276 case BUILT_IN_SSUBL_OVERFLOW
:
9277 case BUILT_IN_SSUBLL_OVERFLOW
:
9278 case BUILT_IN_SMUL_OVERFLOW
:
9279 case BUILT_IN_SMULL_OVERFLOW
:
9280 case BUILT_IN_SMULLL_OVERFLOW
:
9281 case BUILT_IN_UADD_OVERFLOW
:
9282 case BUILT_IN_UADDL_OVERFLOW
:
9283 case BUILT_IN_UADDLL_OVERFLOW
:
9284 case BUILT_IN_USUB_OVERFLOW
:
9285 case BUILT_IN_USUBL_OVERFLOW
:
9286 case BUILT_IN_USUBLL_OVERFLOW
:
9287 case BUILT_IN_UMUL_OVERFLOW
:
9288 case BUILT_IN_UMULL_OVERFLOW
:
9289 case BUILT_IN_UMULLL_OVERFLOW
:
9290 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
9380 /* A wrapper function for builtin folding that prevents warnings for
9381 "statement without effect" and the like, caused by removing the
9382 call node earlier than the warning is generated. */
9385 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
9387 tree ret
= NULL_TREE
;
9388 tree fndecl
= get_callee_fndecl (exp
);
9390 && TREE_CODE (fndecl
) == FUNCTION_DECL
9391 && DECL_BUILT_IN (fndecl
)
9392 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9393 yet. Defer folding until we see all the arguments
9394 (after inlining). */
9395 && !CALL_EXPR_VA_ARG_PACK (exp
))
9397 int nargs
= call_expr_nargs (exp
);
9399 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9400 instead last argument is __builtin_va_arg_pack (). Defer folding
9401 even in that case, until arguments are finalized. */
9402 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
9404 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
9406 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9407 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9408 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9412 if (avoid_folding_inline_builtin (fndecl
))
9415 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9416 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
9417 CALL_EXPR_ARGP (exp
), ignore
);
9420 tree
*args
= CALL_EXPR_ARGP (exp
);
9421 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
9429 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9430 N arguments are passed in the array ARGARRAY. Return a folded
9431 expression or NULL_TREE if no simplification was possible. */
9434 fold_builtin_call_array (location_t loc
, tree
,
9439 if (TREE_CODE (fn
) != ADDR_EXPR
)
9442 tree fndecl
= TREE_OPERAND (fn
, 0);
9443 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9444 && DECL_BUILT_IN (fndecl
))
9446 /* If last argument is __builtin_va_arg_pack (), arguments to this
9447 function are not finalized yet. Defer folding until they are. */
9448 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
9450 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
9452 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9453 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9454 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9457 if (avoid_folding_inline_builtin (fndecl
))
9459 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9460 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
9462 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
9468 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9469 along with N new arguments specified as the "..." parameters. SKIP
9470 is the number of arguments in EXP to be omitted. This function is used
9471 to do varargs-to-varargs transformations. */
9474 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
9480 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
9481 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
9502 /* This function validates the types of a function call argument list
9503 against a specified list of tree_codes. If the last specifier is a 0,
9504 that represents an ellipses, otherwise the last specifier must be a
9507 This is the GIMPLE version of validate_arglist. Eventually we want to
9508 completely convert builtins.c to work from GIMPLEs and the tree based
9509 validate_arglist will then be removed. */
9512 validate_gimple_arglist (const gcall
*call
, ...)
9514 enum tree_code code
;
9520 va_start (ap
, call
);
9525 code
= (enum tree_code
) va_arg (ap
, int);
9529 /* This signifies an ellipses, any further arguments are all ok. */
9533 /* This signifies an endlink, if no arguments remain, return
9534 true, otherwise return false. */
9535 res
= (i
== gimple_call_num_args (call
));
9538 /* If no parameters remain or the parameter's code does not
9539 match the specified code, return false. Otherwise continue
9540 checking any remaining arguments. */
9541 arg
= gimple_call_arg (call
, i
++);
9542 if (!validate_arg (arg
, code
))
9549 /* We need gotos here since we can only have one VA_CLOSE in a
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     handle itself), so we don't get false positives.  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
9595 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
9596 to the call, and TYPE is its return type.
9598 Return NULL_TREE if no simplification was possible, otherwise return the
9599 simplified form of the call as a tree.
9601 The simplified form may be a constant or other expression which
9602 computes the same value, but in a more efficient manner (including
9603 calls to other builtin functions).
9605 The call may contain arguments which need to be evaluated, but
9606 which are not useful to determine the result of the call. In
9607 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9608 COMPOUND_EXPR will be an argument which must be evaluated.
9609 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9610 COMPOUND_EXPR in the chain will contain the tree for the simplified
9611 form of the builtin function call. */
9614 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
9616 if (!validate_arg (s1
, POINTER_TYPE
)
9617 || !validate_arg (s2
, POINTER_TYPE
))
9622 const char *p1
, *p2
;
9631 const char *r
= strstr (p1
, p2
);
9635 return build_int_cst (TREE_TYPE (s1
), 0);
9637 /* Return an offset into the constant string argument. */
9638 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
9639 return fold_convert_loc (loc
, type
, tem
);
9642 /* The argument is const char *, and the result is char *, so we need
9643 a type conversion here to avoid a warning. */
9645 return fold_convert_loc (loc
, type
, s1
);
9650 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
9654 /* New argument list transforming strstr(s1, s2) to
9655 strchr(s1, s2[0]). */
9656 return build_call_expr_loc (loc
, fn
, 2, s1
,
9657 build_int_cst (integer_type_node
, p2
[0]));
9661 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
9662 the call, and TYPE is its return type.
9664 Return NULL_TREE if no simplification was possible, otherwise return the
9665 simplified form of the call as a tree.
9667 The simplified form may be a constant or other expression which
9668 computes the same value, but in a more efficient manner (including
9669 calls to other builtin functions).
9671 The call may contain arguments which need to be evaluated, but
9672 which are not useful to determine the result of the call. In
9673 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9674 COMPOUND_EXPR will be an argument which must be evaluated.
9675 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9676 COMPOUND_EXPR in the chain will contain the tree for the simplified
9677 form of the builtin function call. */
9680 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
9682 if (!validate_arg (s1
, POINTER_TYPE
)
9683 || !validate_arg (s2
, INTEGER_TYPE
))
9689 if (TREE_CODE (s2
) != INTEGER_CST
)
9699 if (target_char_cast (s2
, &c
))
9705 return build_int_cst (TREE_TYPE (s1
), 0);
9707 /* Return an offset into the constant string argument. */
9708 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
9709 return fold_convert_loc (loc
, type
, tem
);
9715 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
9716 the call, and TYPE is its return type.
9718 Return NULL_TREE if no simplification was possible, otherwise return the
9719 simplified form of the call as a tree.
9721 The simplified form may be a constant or other expression which
9722 computes the same value, but in a more efficient manner (including
9723 calls to other builtin functions).
9725 The call may contain arguments which need to be evaluated, but
9726 which are not useful to determine the result of the call. In
9727 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9728 COMPOUND_EXPR will be an argument which must be evaluated.
9729 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9730 COMPOUND_EXPR in the chain will contain the tree for the simplified
9731 form of the builtin function call. */
9734 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
9736 if (!validate_arg (s1
, POINTER_TYPE
)
9737 || !validate_arg (s2
, INTEGER_TYPE
))
9744 if (TREE_CODE (s2
) != INTEGER_CST
)
9754 if (target_char_cast (s2
, &c
))
9757 r
= strrchr (p1
, c
);
9760 return build_int_cst (TREE_TYPE (s1
), 0);
9762 /* Return an offset into the constant string argument. */
9763 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
9764 return fold_convert_loc (loc
, type
, tem
);
9767 if (! integer_zerop (s2
))
9770 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
9774 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9775 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
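
/* Illustrative sketch (not part of GCC): fold_builtin_strpbrk handles three
   shapes of call.  With both strings constant the result is a constant offset;
   with an empty accept string the result is NULL; with a single-character
   accept string the call degrades to strchr.  A hypothetical host-side
   rendering of the same decision tree:  */

static const char *
example_strpbrk_cases (const char *s1, const char *s2)
{
  if (s2[0] == '\0')
    return NULL;                 /* strpbrk (x, "") == NULL.  */
  if (s2[1] == '\0')
    return strchr (s1, s2[0]);   /* strpbrk (s1, "c") == strchr (s1, 'c').  */
  return strpbrk (s1, s2);       /* Otherwise evaluate directly.  */
}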
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
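
/* Illustrative sketch (not part of GCC): the two special cases handled by the
   strcspn fold above are plain libc identities.  The hypothetical helper below
   checks them on the host; it returns nonzero when both identities hold for
   the given strings.  */

static int
example_strcspn_identities (const char *s1, const char *s2)
{
  /* strcspn ("", s2) == 0, hence the size_zero_node fold above.  */
  int empty_s1_ok = strcspn ("", s2) == 0;
  /* strcspn (s1, "") == strlen (s1), hence the __builtin_strlen rewrite.  */
  int empty_s2_ok = strcspn (s1, "") == strlen (s1);
  return empty_s1_ok && empty_s2_ok;
}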
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can end up with
	     something other than the last argument even though the user
	     used the last argument.  We just warn and set the arg to be
	     the last argument so that we will get wrong-code because of
	     it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
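
/* Illustration (not part of GCC): the checks above fire on user code in which
   va_start does not name the last declared parameter, for example

     void bad (int first, int last, ...)
     { va_list ap; va_start (ap, first); va_end (ap); }

   which is diagnosed with -Wvarargs as
   "second parameter of 'va_start' not last named argument", while a fixed-arg
   function calling va_start at all gets the hard error emitted above.  */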
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
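
/* Illustration (not part of GCC): when the object size cannot be computed,
   __builtin_object_size returns (size_t) -1 for types 0 and 1 (maximum
   estimates) and (size_t) 0 for types 2 and 3 (minimum estimates), which is
   exactly what the constm1_rtx/const0_rtx fallback above expands to.  A
   hypothetical helper spelling out that default:  */

static size_t
example_objsize_unknown_default (int object_size_type)
{
  return object_size_type < 2 ? (size_t) -1 : (size_t) 0;
}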
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
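
/* Illustrative sketch (not part of GCC): __memcpy_chk (dst, src, len, os)
   behaves like memcpy when LEN fits in OS and is meant to fail otherwise; the
   expansion above drops down to plain memcpy exactly when the compiler can
   already see that LEN fits, or when the object size is unknown, i.e.
   (size_t) -1.  A hypothetical host-side model of that contract, with abort
   standing in for the real runtime's __chk_fail:  */

static void *
example_memcpy_chk_model (void *dst, const void *src, size_t len, size_t objsize)
{
  if (objsize != (size_t) -1 && len > objsize)
    abort ();
  return memcpy (dst, src, len);
}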
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
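
/* Illustration (not part of GCC): with -O2 and _FORTIFY_SOURCE, user code such
   as

     char buf[4];
     strcpy (buf, "overflow");

   is routed through __builtin___strcpy_chk with an object size of 4; the
   constant source length (8) exceeds it, so the function above emits
   "call to ... will always overflow destination buffer".  */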
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
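
/* Illustrative sketch (not part of GCC): the length reasoning above covers
   two literal-format shapes.  The hypothetical helper below mirrors it on the
   host, using the plain '%' character where the real code uses the
   target-charset value; (size_t) -1 means "length unknown, no warning".  */

static size_t
example_sprintf_chk_length (const char *fmt_str, const char *first_arg)
{
  if (strchr (fmt_str, '%') == NULL)
    return strlen (fmt_str);            /* No conversions: length is known.  */
  if (strcmp (fmt_str, "%s") == 0 && first_arg != NULL)
    return strlen (first_arg);          /* "%s" with a literal argument.  */
  return (size_t) -1;                   /* Otherwise the length is unknown.  */
}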
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
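
/* Illustration (not part of GCC): the warning above catches user code that
   passes the address of a non-heap object to free, e.g.

     char local[16];
     free (local);

   which -Wfree-nonheap-object reports as
   "attempt to free a non-heap object 'local'".  */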
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
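
/* Illustration (not part of GCC): for an ADDR_EXPR of an object with a known
   size the fold above yields a constant, so for a file-scope

     char buf[32];

   __builtin_object_size (buf, 0) folds to 32 and
   __builtin_object_size (buf + 8, 0) folds to 24, while a pointer the
   compiler knows nothing about falls back to the type-dependent defaults
   ((size_t) -1 or 0) described in the comment above.  */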
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
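
/* Illustrative sketch (not part of GCC): the same clear-flags / compute /
   check pattern used by do_mpfr_arg1 and do_mpfr_ckconv, written against the
   public MPFR API for a host double (assuming the MPFR headers this file
   already pulls in).  A call such as
     double d; example_mpfr_fold_double (0.5, mpfr_sin, &d);
   stores the folded value only if MPFR produced an exact, normal number.  */

static bool
example_mpfr_fold_double (double x,
			  int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  double *out)
{
  mpfr_t m;
  int inexact;
  bool ok;

  mpfr_init2 (m, 53);             /* Precision of an IEEE double.  */
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = func (m, m, GMP_RNDN);
  /* Stricter than GCC proper, which only insists on exactness under
     -frounding-math.  */
  ok = inexact == 0 && mpfr_number_p (m)
       && !mpfr_overflow_p () && !mpfr_underflow_p ();
  if (ok)
    *out = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return ok;
}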
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
							MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}