1 /* Expand builtin functions.
2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
26 #include "coretypes.h"
35 #include "stringpool.h"
37 #include "tree-ssanames.h"
42 #include "diagnostic-core.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
65 #include "stringpool.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
76 #include "gimple-ssa.h"
77 #include "tree-ssa-live.h"
78 #include "tree-outof-ssa.h"
/* Per-target builtin-expansion state; file scope and uninitialized,
   so zero-initialized at startup.  */
80 struct target_builtins default_target_builtins
;
/* The currently active target's builtin state; statically it points at
   default_target_builtins (switchable targets presumably retarget it
   -- TODO confirm against target-switching machinery).  */
82 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
85 /* Define the names of the builtin function types and codes. */
/* Printable name for each built_in_class value, in enum order;
   BUILT_IN_LAST bounds the array.  */
86 const char *const built_in_class_names
[BUILT_IN_LAST
]
87 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
89 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
/* DEF_BUILTIN stringizes each builtin's enum name, so expanding
   builtins.def under it yields one string per builtin, indexed by
   enum built_in_function.  NOTE(review): the initializer braces and
   the trailing #undef do not appear in this copy of the file --
   compare against upstream builtins.c.  */
90 const char * built_in_names
[(int) END_BUILTINS
] =
92 #include "builtins.def"
95 /* Setup an array of builtin_info_type, make sure each element decl is
96 initialized to NULL_TREE. */
/* One record per builtin; as a zero-initialized file-scope array every
   element's decl field starts out NULL_TREE.  */
97 builtin_info_type builtin_info
[(int)END_BUILTINS
];
99 /* Non-zero if __builtin_constant_p should be folded right away. */
/* Despite the "Non-zero" wording this is a bool; defaults to false.  */
100 bool force_folding_builtin_constant_p
;
102 static int target_char_cast (tree
, char *);
103 static rtx
get_memory_rtx (tree
, tree
);
104 static int apply_args_size (void);
105 static int apply_result_size (void);
106 static rtx
result_vector (int, rtx
);
107 static void expand_builtin_prefetch (tree
);
108 static rtx
expand_builtin_apply_args (void);
109 static rtx
expand_builtin_apply_args_1 (void);
110 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
111 static void expand_builtin_return (rtx
);
112 static enum type_class
type_to_class (tree
);
113 static rtx
expand_builtin_classify_type (tree
);
114 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
115 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
116 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
117 static rtx
expand_builtin_sincos (tree
);
118 static rtx
expand_builtin_cexpi (tree
, rtx
);
119 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
120 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
121 static rtx
expand_builtin_next_arg (void);
122 static rtx
expand_builtin_va_start (tree
);
123 static rtx
expand_builtin_va_end (tree
);
124 static rtx
expand_builtin_va_copy (tree
);
125 static rtx
inline_expand_builtin_bytecmp (tree
, rtx
);
126 static rtx
expand_builtin_strcmp (tree
, rtx
);
127 static rtx
expand_builtin_strncmp (tree
, rtx
, machine_mode
);
128 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, scalar_int_mode
);
129 static rtx
expand_builtin_memchr (tree
, rtx
);
130 static rtx
expand_builtin_memcpy (tree
, rtx
);
131 static rtx
expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
132 rtx target
, tree exp
,
135 static rtx
expand_builtin_memmove (tree
, rtx
);
136 static rtx
expand_builtin_mempcpy (tree
, rtx
);
137 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
, tree
, memop_ret
);
138 static rtx
expand_builtin_strcat (tree
);
139 static rtx
expand_builtin_strcpy (tree
, rtx
);
140 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
);
141 static rtx
expand_builtin_stpcpy (tree
, rtx
, machine_mode
);
142 static rtx
expand_builtin_stpncpy (tree
, rtx
);
143 static rtx
expand_builtin_strncat (tree
, rtx
);
144 static rtx
expand_builtin_strncpy (tree
, rtx
);
145 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, scalar_int_mode
);
146 static rtx
expand_builtin_memset (tree
, rtx
, machine_mode
);
147 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, machine_mode
, tree
);
148 static rtx
expand_builtin_bzero (tree
);
149 static rtx
expand_builtin_strlen (tree
, rtx
, machine_mode
);
150 static rtx
expand_builtin_strnlen (tree
, rtx
, machine_mode
);
151 static rtx
expand_builtin_alloca (tree
);
152 static rtx
expand_builtin_unop (machine_mode
, tree
, rtx
, rtx
, optab
);
153 static rtx
expand_builtin_frame_address (tree
, tree
);
154 static tree
stabilize_va_list_loc (location_t
, tree
, int);
155 static rtx
expand_builtin_expect (tree
, rtx
);
156 static rtx
expand_builtin_expect_with_probability (tree
, rtx
);
157 static tree
fold_builtin_constant_p (tree
);
158 static tree
fold_builtin_classify_type (tree
);
159 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
160 static tree
fold_builtin_inf (location_t
, tree
, int);
161 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
162 static bool validate_arg (const_tree
, enum tree_code code
);
163 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
164 static rtx
expand_builtin_signbit (tree
, rtx
);
165 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
166 static tree
fold_builtin_isascii (location_t
, tree
);
167 static tree
fold_builtin_toascii (location_t
, tree
);
168 static tree
fold_builtin_isdigit (location_t
, tree
);
169 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
170 static tree
fold_builtin_abs (location_t
, tree
, tree
);
171 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
173 static tree
fold_builtin_varargs (location_t
, tree
, tree
*, int);
175 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
, tree
);
176 static tree
fold_builtin_strspn (location_t
, tree
, tree
, tree
);
177 static tree
fold_builtin_strcspn (location_t
, tree
, tree
, tree
);
179 static rtx
expand_builtin_object_size (tree
);
180 static rtx
expand_builtin_memory_chk (tree
, rtx
, machine_mode
,
181 enum built_in_function
);
182 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
183 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
184 static void maybe_emit_free_warning (tree
);
185 static tree
fold_builtin_object_size (tree
, tree
);
187 unsigned HOST_WIDE_INT target_newline
;
188 unsigned HOST_WIDE_INT target_percent
;
189 static unsigned HOST_WIDE_INT target_c
;
190 static unsigned HOST_WIDE_INT target_s
;
191 char target_percent_c
[3];
192 char target_percent_s
[3];
193 char target_percent_s_newline
[4];
194 static tree
do_mpfr_remquo (tree
, tree
, tree
);
195 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
196 static void expand_builtin_sync_synchronize (void);
198 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
201 is_builtin_name (const char *name
)
203 if (strncmp (name
, "__builtin_", 10) == 0)
205 if (strncmp (name
, "__sync_", 7) == 0)
207 if (strncmp (name
, "__atomic_", 9) == 0)
/* NOTE(review): the return type line and the "return true"/"return
   false" statements are missing from this copy of the file.  */
212 /* Return true if NODE should be considered for inline expansion regardless
213 of the optimization level. This means whenever a function is invoked with
214 its "internal" name, which normally contains the prefix "__builtin". */
217 called_as_built_in (tree node
)
219 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
220 we want the name used to call the function, not the name it
   will be known by after compilation (comment terminator was missing
   in this copy; tail of the sentence reconstructed -- TODO confirm
   against upstream).  */
222 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
223 return is_builtin_name (name
);
226 /* Compute values M and N such that M divides (address of EXP - N) and such
227 that N < M. If these numbers can be determined, store M in alignp and N in
228 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
229 *alignp and any bit-offset to *bitposp.
231 Note that the address (and thus the alignment) computed here is based
232 on the address to which a symbol resolves, whereas DECL_ALIGN is based
233 on the address at which an object is actually located. These two
234 addresses are not always the same. For example, on ARM targets,
235 the address &foo of a Thumb function foo() has the lowest bit set,
236 whereas foo() itself starts on an even address.
238 If ADDR_P is true we are taking the address of the memory reference EXP
239 and thus cannot rely on the access taking place. */
242 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
243 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
245 poly_int64 bitsize
, bitpos
;
248 int unsignedp
, reversep
, volatilep
;
249 unsigned int align
= BITS_PER_UNIT
;
250 bool known_alignment
= false;
252 /* Get the innermost object and the constant (bitpos) and possibly
253 variable (offset) offset of the access. */
254 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
255 &unsignedp
, &reversep
, &volatilep
);
257 /* Extract alignment information from the innermost object and
258 possibly adjust bitpos and offset. */
259 if (TREE_CODE (exp
) == FUNCTION_DECL
)
261 /* Function addresses can encode extra information besides their
262 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
263 allows the low bit to be used as a virtual bit, we know
264 that the address itself must be at least 2-byte aligned. */
265 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
266 align
= 2 * BITS_PER_UNIT
;
268 else if (TREE_CODE (exp
) == LABEL_DECL
)
270 else if (TREE_CODE (exp
) == CONST_DECL
)
272 /* The alignment of a CONST_DECL is determined by its initializer. */
273 exp
= DECL_INITIAL (exp
);
274 align
= TYPE_ALIGN (TREE_TYPE (exp
));
275 if (CONSTANT_CLASS_P (exp
))
276 align
= targetm
.constant_alignment (exp
, align
);
278 known_alignment
= true;
280 else if (DECL_P (exp
))
282 align
= DECL_ALIGN (exp
);
283 known_alignment
= true;
285 else if (TREE_CODE (exp
) == INDIRECT_REF
286 || TREE_CODE (exp
) == MEM_REF
287 || TREE_CODE (exp
) == TARGET_MEM_REF
)
289 tree addr
= TREE_OPERAND (exp
, 0);
291 unsigned HOST_WIDE_INT ptr_bitpos
;
292 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
294 /* If the address is explicitely aligned, handle that. */
295 if (TREE_CODE (addr
) == BIT_AND_EXPR
296 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
298 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
299 ptr_bitmask
*= BITS_PER_UNIT
;
300 align
= least_bit_hwi (ptr_bitmask
);
301 addr
= TREE_OPERAND (addr
, 0);
305 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
306 align
= MAX (ptr_align
, align
);
308 /* Re-apply explicit alignment to the bitpos. */
309 ptr_bitpos
&= ptr_bitmask
;
311 /* The alignment of the pointer operand in a TARGET_MEM_REF
312 has to take the variable offset parts into account. */
313 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
317 unsigned HOST_WIDE_INT step
= 1;
319 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
320 align
= MIN (align
, least_bit_hwi (step
) * BITS_PER_UNIT
);
322 if (TMR_INDEX2 (exp
))
323 align
= BITS_PER_UNIT
;
324 known_alignment
= false;
327 /* When EXP is an actual memory reference then we can use
328 TYPE_ALIGN of a pointer indirection to derive alignment.
329 Do so only if get_pointer_alignment_1 did not reveal absolute
330 alignment knowledge and if using that alignment would
331 improve the situation. */
333 if (!addr_p
&& !known_alignment
334 && (talign
= min_align_of_type (TREE_TYPE (exp
)) * BITS_PER_UNIT
)
339 /* Else adjust bitpos accordingly. */
340 bitpos
+= ptr_bitpos
;
341 if (TREE_CODE (exp
) == MEM_REF
342 || TREE_CODE (exp
) == TARGET_MEM_REF
)
343 bitpos
+= mem_ref_offset (exp
).force_shwi () * BITS_PER_UNIT
;
346 else if (TREE_CODE (exp
) == STRING_CST
)
348 /* STRING_CST are the only constant objects we allow to be not
349 wrapped inside a CONST_DECL. */
350 align
= TYPE_ALIGN (TREE_TYPE (exp
));
351 if (CONSTANT_CLASS_P (exp
))
352 align
= targetm
.constant_alignment (exp
, align
);
354 known_alignment
= true;
357 /* If there is a non-constant offset part extract the maximum
358 alignment that can prevail. */
361 unsigned int trailing_zeros
= tree_ctz (offset
);
362 if (trailing_zeros
< HOST_BITS_PER_INT
)
364 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
366 align
= MIN (align
, inner
);
370 /* Account for the alignment of runtime coefficients, so that the constant
371 bitpos is guaranteed to be accurate. */
372 unsigned int alt_align
= ::known_alignment (bitpos
- bitpos
.coeffs
[0]);
373 if (alt_align
!= 0 && alt_align
< align
)
376 known_alignment
= false;
380 *bitposp
= bitpos
.coeffs
[0] & (align
- 1);
381 return known_alignment
;
384 /* For a memory reference expression EXP compute values M and N such that M
385 divides (&EXP - N) and such that N < M. If these numbers can be determined,
386 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
387 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
390 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
391 unsigned HOST_WIDE_INT
*bitposp
)
/* Thin wrapper: delegate with ADDR_P false, i.e. EXP is an actual
   access, not merely an address-taken reference.  */
393 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
396 /* Return the alignment in bits of EXP, an object. */
399 get_object_alignment (tree exp
)
401 unsigned HOST_WIDE_INT bitpos
= 0;
404 get_object_alignment_1 (exp
, &align
, &bitpos
);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
/* A nonzero misaligning bit offset caps the provable alignment at its
   lowest set bit.  */
410 align
= least_bit_hwi (bitpos
);
/* NOTE(review): the declaration of ALIGN, the guard around this
   adjustment and the return statement are missing from this copy.  */
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in alignp and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
419 If EXP is not a pointer, false is returned too. */
422 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
423 unsigned HOST_WIDE_INT
*bitposp
)
427 if (TREE_CODE (exp
) == ADDR_EXPR
)
428 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
429 alignp
, bitposp
, true);
430 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
433 unsigned HOST_WIDE_INT bitpos
;
434 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
436 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
437 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
440 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
441 if (trailing_zeros
< HOST_BITS_PER_INT
)
443 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
445 align
= MIN (align
, inner
);
449 *bitposp
= bitpos
& (align
- 1);
452 else if (TREE_CODE (exp
) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp
)))
455 unsigned int ptr_align
, ptr_misalign
;
456 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
458 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
460 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
461 *alignp
= ptr_align
* BITS_PER_UNIT
;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
465 *alignp
= 1u << (HOST_BITS_PER_INT
- 1);
466 /* We cannot really tell whether this result is an approximation. */
472 *alignp
= BITS_PER_UNIT
;
476 else if (TREE_CODE (exp
) == INTEGER_CST
)
478 *alignp
= BIGGEST_ALIGNMENT
;
479 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
480 & (BIGGEST_ALIGNMENT
- 1));
485 *alignp
= BITS_PER_UNIT
;
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
497 get_pointer_alignment (tree exp
)
499 unsigned HOST_WIDE_INT bitpos
= 0;
502 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
/* A nonzero misaligning bit offset caps the provable alignment at its
   lowest set bit.  */
508 align
= least_bit_hwi (bitpos
);
/* NOTE(review): the declaration of ALIGN, the guard around this
   adjustment and the return statement are missing from this copy.  */
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
518 string_length (const void *ptr
, unsigned eltsize
, unsigned maxelts
)
/* Only 1-, 2- and 4-byte elements are supported.  */
520 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
526 /* Optimize the common case of plain char. */
527 for (n
= 0; n
< maxelts
; n
++)
529 const char *elt
= (const char*) ptr
+ n
;
/* Wide-character path: step ELTSIZE bytes at a time and stop at the
   first all-zero element.  */
536 for (n
= 0; n
< maxelts
; n
++)
538 const char *elt
= (const char*) ptr
+ n
* eltsize
;
539 if (!memcmp (elt
, "\0\0\0\0", eltsize
))
/* NOTE(review): the return type, the declaration of N, the loop-body
   termination tests and the return statements are missing from this
   copy of the file.  */
546 /* For a call at LOC to a function FN that expects a string in the argument
547 ARG, issue a diagnostic due to it being called with an argument
548 declared at DECL that is a character array with no terminating NUL. */
551 warn_string_no_nul (location_t loc
, const char *fn
, tree arg
, tree decl
)
/* Bail out if a warning for ARG has already been issued.  */
553 if (TREE_NO_WARNING (arg
))
/* Point the diagnostic at the expansion point when LOC is inside a
   system header.  */
556 loc
= expansion_point_location_if_in_system_header (loc
)
558 if (warning_at (loc
, OPT_Wstringop_overflow_
,
559 "%qs argument missing terminating nul", fn
))
561 inform (DECL_SOURCE_LOCATION (decl
),
562 "referenced argument declared here");
/* Suppress any further warnings for the same argument.  */
563 TREE_NO_WARNING (arg
) = 1;
567 /* For a call EXPR (which may be null) that expects a string argument
568 and SRC as the argument, returns false if SRC is a character array
569 with no terminating NUL. When nonnull, BOUND is the number of
570 characters in which to expect the terminating NUL.
571 When EXPR is nonnull also issues a warning. */
574 check_nul_terminated_array (tree expr
, tree src
, tree bound
/* = NULL_TREE */)
578 tree nonstr
= unterminated_array (src
, &size
, &exact
);
582 /* NONSTR refers to the non-nul terminated constant array and SIZE
583 is the constant size of the array in bytes. EXACT is true when
589 if (TREE_CODE (bound
) == INTEGER_CST
)
590 min
= max
= wi::to_wide (bound
);
593 value_range_kind rng
= get_range_info (bound
, &min
, &max
);
598 if (wi::leu_p (min
, wi::to_wide (size
)))
602 if (expr
&& !TREE_NO_WARNING (expr
))
604 tree fndecl
= get_callee_fndecl (expr
);
605 const char *fname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
606 warn_string_no_nul (EXPR_LOCATION (expr
), fname
, src
, nonstr
);
612 /* If EXP refers to an unterminated constant character array return
613 the declaration of the object of which the array is a member or
614 element and if SIZE is not null, set *SIZE to the size of
615 the unterminated array and set *EXACT if the size is exact or
616 clear it otherwise. Otherwise return null. */
619 unterminated_array (tree exp
, tree
*size
/* = NULL */, bool *exact
/* = NULL */)
621 /* C_STRLEN will return NULL and set DECL in the info
622 structure if EXP references a unterminated array. */
623 c_strlen_data lendata
= { };
624 tree len
= c_strlen (exp
, 1, &lendata
);
625 if (len
== NULL_TREE
&& lendata
.minlen
&& lendata
.decl
)
629 len
= lendata
.minlen
;
632 /* Constant offsets are already accounted for in LENDATA.MINLEN,
633 but not in a SSA_NAME + CST expression. */
634 if (TREE_CODE (lendata
.off
) == INTEGER_CST
)
636 else if (TREE_CODE (lendata
.off
) == PLUS_EXPR
637 && TREE_CODE (TREE_OPERAND (lendata
.off
, 1)) == INTEGER_CST
)
639 /* Subtract the offset from the size of the array. */
641 tree temp
= TREE_OPERAND (lendata
.off
, 1);
642 temp
= fold_convert (ssizetype
, temp
);
643 len
= fold_build2 (MINUS_EXPR
, ssizetype
, len
, temp
);
659 /* Compute the length of a null-terminated character string or wide
660 character string handling character sizes of 1, 2, and 4 bytes.
661 TREE_STRING_LENGTH is not the right way because it evaluates to
662 the size of the character array in bytes (as opposed to characters)
663 and because it can contain a zero byte in the middle.
665 ONLY_VALUE should be nonzero if the result is not going to be emitted
666 into the instruction stream and zero if it is going to be expanded.
667 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
668 is returned, otherwise NULL, since
669 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
670 evaluate the side-effects.
672 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
673 accesses. Note that this implies the result is not going to be emitted
674 into the instruction stream.
676 Additional information about the string accessed may be recorded
677 in DATA. For example, if ARG references an unterminated string,
678 then the declaration will be stored in the DECL field. If the
679 length of the unterminated string can be determined, it'll be
680 stored in the LEN field. Note this length could well be different
681 than what a C strlen call would return.
683 ELTSIZE is 1 for normal single byte character strings, and 2 or
684 4 for wide characer strings. ELTSIZE is by default 1.
686 The value returned is of type `ssizetype'. */
689 c_strlen (tree arg
, int only_value
, c_strlen_data
*data
, unsigned eltsize
)
691 /* If we were not passed a DATA pointer, then get one to a local
692 structure. That avoids having to check DATA for NULL before
693 each time we want to use it. */
694 c_strlen_data local_strlen_data
= { };
696 data
= &local_strlen_data
;
698 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
700 tree src
= STRIP_NOPS (arg
);
701 if (TREE_CODE (src
) == COND_EXPR
702 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
706 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
, data
, eltsize
);
707 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
, data
, eltsize
);
708 if (tree_int_cst_equal (len1
, len2
))
712 if (TREE_CODE (src
) == COMPOUND_EXPR
713 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
714 return c_strlen (TREE_OPERAND (src
, 1), only_value
, data
, eltsize
);
716 location_t loc
= EXPR_LOC_OR_LOC (src
, input_location
);
718 /* Offset from the beginning of the string in bytes. */
722 src
= string_constant (src
, &byteoff
, &memsize
, &decl
);
726 /* Determine the size of the string element. */
727 if (eltsize
!= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src
)))))
730 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
731 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
732 in case the latter is less than the size of the array, such as when
733 SRC refers to a short string literal used to initialize a large array.
734 In that case, the elements of the array after the terminating NUL are
736 HOST_WIDE_INT strelts
= TREE_STRING_LENGTH (src
);
737 strelts
= strelts
/ eltsize
;
739 if (!tree_fits_uhwi_p (memsize
))
742 HOST_WIDE_INT maxelts
= tree_to_uhwi (memsize
) / eltsize
;
744 /* PTR can point to the byte representation of any string type, including
745 char* and wchar_t*. */
746 const char *ptr
= TREE_STRING_POINTER (src
);
748 if (byteoff
&& TREE_CODE (byteoff
) != INTEGER_CST
)
750 /* The code below works only for single byte character types. */
754 /* If the string has an internal NUL character followed by any
755 non-NUL characters (e.g., "foo\0bar"), we can't compute
756 the offset to the following NUL if we don't know where to
757 start searching for it. */
758 unsigned len
= string_length (ptr
, eltsize
, strelts
);
760 /* Return when an embedded null character is found or none at all.
761 In the latter case, set the DECL/LEN field in the DATA structure
762 so that callers may examine them. */
763 if (len
+ 1 < strelts
)
765 else if (len
>= maxelts
)
769 data
->minlen
= ssize_int (len
);
773 /* For empty strings the result should be zero. */
775 return ssize_int (0);
777 /* We don't know the starting offset, but we do know that the string
778 has no internal zero bytes. If the offset falls within the bounds
779 of the string subtract the offset from the length of the string,
780 and return that. Otherwise the length is zero. Take care to
781 use SAVE_EXPR in case the OFFSET has side-effects. */
782 tree offsave
= TREE_SIDE_EFFECTS (byteoff
) ? save_expr (byteoff
)
784 offsave
= fold_convert_loc (loc
, sizetype
, offsave
);
785 tree condexp
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, offsave
,
787 tree lenexp
= fold_build2_loc (loc
, MINUS_EXPR
, sizetype
, size_int (len
),
789 lenexp
= fold_convert_loc (loc
, ssizetype
, lenexp
);
790 return fold_build3_loc (loc
, COND_EXPR
, ssizetype
, condexp
, lenexp
,
791 build_zero_cst (ssizetype
));
794 /* Offset from the beginning of the string in elements. */
795 HOST_WIDE_INT eltoff
;
797 /* We have a known offset into the string. Start searching there for
798 a null character if we can represent it as a single HOST_WIDE_INT. */
801 else if (! tree_fits_uhwi_p (byteoff
) || tree_to_uhwi (byteoff
) % eltsize
)
804 eltoff
= tree_to_uhwi (byteoff
) / eltsize
;
806 /* If the offset is known to be out of bounds, warn, and call strlen at
808 if (eltoff
< 0 || eltoff
>= maxelts
)
810 /* Suppress multiple warnings for propagated constant strings. */
812 && !TREE_NO_WARNING (arg
)
813 && warning_at (loc
, OPT_Warray_bounds
,
814 "offset %qwi outside bounds of constant string",
818 inform (DECL_SOURCE_LOCATION (decl
), "%qE declared here", decl
);
819 TREE_NO_WARNING (arg
) = 1;
824 /* If eltoff is larger than strelts but less than maxelts the
825 string length is zero, since the excess memory will be zero. */
826 if (eltoff
> strelts
)
827 return ssize_int (0);
829 /* Use strlen to search for the first zero byte. Since any strings
830 constructed with build_string will have nulls appended, we win even
831 if we get handed something like (char[4])"abcd".
833 Since ELTOFF is our starting index into the string, no further
834 calculation is needed. */
835 unsigned len
= string_length (ptr
+ eltoff
* eltsize
, eltsize
,
838 /* Don't know what to return if there was no zero termination.
839 Ideally this would turn into a gcc_checking_assert over time.
840 Set DECL/LEN so callers can examine them. */
841 if (len
>= maxelts
- eltoff
)
845 data
->minlen
= ssize_int (len
);
849 return ssize_int (len
);
852 /* Return a constant integer corresponding to target reading
853 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
854 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
855 are assumed to be zero, otherwise it reads as many characters
859 c_readstr (const char *str
, scalar_int_mode mode
,
860 bool null_terminated_p
/*=true*/)
864 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
866 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
867 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
868 / HOST_BITS_PER_WIDE_INT
;
870 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
871 for (i
= 0; i
< len
; i
++)
875 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
878 if (WORDS_BIG_ENDIAN
)
879 j
= GET_MODE_SIZE (mode
) - i
- 1;
880 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
881 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
882 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
885 if (ch
|| !null_terminated_p
)
886 ch
= (unsigned char) str
[i
];
887 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
890 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
891 return immed_wide_int_const (c
, mode
);
894 /* Cast a target constant CST to target CHAR and if that value fits into
895 host char type, return zero and put that value into variable pointed to by
   P (comment terminator was missing in this copy; presumably the tail
   read "*P, otherwise return nonzero" -- TODO confirm upstream).  */
899 target_char_cast (tree cst
, char *p
)
901 unsigned HOST_WIDE_INT val
, hostval
;
/* Reject non-integer constants and target chars wider than a
   HOST_WIDE_INT.  */
903 if (TREE_CODE (cst
) != INTEGER_CST
904 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
907 /* Do not care if it fits or not right here. */
908 val
= TREE_INT_CST_LOW (cst
);
/* Mask VAL down to the target char width.  */
910 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
911 val
&= (HOST_WIDE_INT_1U
<< CHAR_TYPE_SIZE
) - 1;
/* Mask HOSTVAL to the host char width.  NOTE(review): the assignment
   of VAL to HOSTVAL, the fit comparison, the store through P and the
   return statements are missing from this copy of the file.  */
914 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
915 hostval
&= (HOST_WIDE_INT_1U
<< HOST_BITS_PER_CHAR
) - 1;
924 /* Similar to save_expr, but assumes that arbitrary code is not executed
925 in between the multiple evaluations. In particular, we assume that a
926 non-addressable local variable will not be modified. */
929 builtin_save_expr (tree exp
)
/* SSA names and non-addressable parameters or non-static locals cannot
   change between evaluations, so they need no SAVE_EXPR wrapper.
   NOTE(review): the return type and the "return exp" for this early
   case are missing from this copy of the file.  */
931 if (TREE_CODE (exp
) == SSA_NAME
932 || (TREE_ADDRESSABLE (exp
) == 0
933 && (TREE_CODE (exp
) == PARM_DECL
934 || (VAR_P (exp
) && !TREE_STATIC (exp
)))))
937 return save_expr (exp
);
940 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
941 times to get the address of either a higher stack frame, or a return
942 address located within it (depending on FNDECL_CODE). */
945 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
948 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
951 /* For a zero count with __builtin_return_address, we don't care what
952 frame address we return, because target-specific definitions will
953 override us. Therefore frame pointer elimination is OK, and using
954 the soft frame pointer is OK.
956 For a nonzero count, or a zero count with __builtin_frame_address,
957 we require a stable offset from the current frame pointer to the
958 previous one, so we must use the hard frame pointer, and
959 we must disable frame pointer elimination. */
960 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
961 tem
= frame_pointer_rtx
;
964 tem
= hard_frame_pointer_rtx
;
966 /* Tell reload not to eliminate the frame pointer. */
967 crtl
->accesses_prior_frames
= 1;
972 SETUP_FRAME_ADDRESSES ();
974 /* On the SPARC, the return address is not in the frame, it is in a
975 register. There is no way to access it off of the current frame
976 pointer, but it can be accessed off the previous frame pointer by
977 reading the value from the register window save area. */
978 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
981 /* Scan back COUNT frames to the specified frame. */
982 for (i
= 0; i
< count
; i
++)
984 /* Assume the dynamic chain pointer is in the word that the
985 frame address points to, unless otherwise specified. */
986 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
987 tem
= memory_address (Pmode
, tem
);
988 tem
= gen_frame_mem (Pmode
, tem
);
989 tem
= copy_to_reg (tem
);
992 /* For __builtin_frame_address, return what we've got. But, on
993 the SPARC for example, we may have to add a bias. */
994 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
995 return FRAME_ADDR_RTX (tem
);
997 /* For __builtin_return_address, get the return address from that frame. */
998 #ifdef RETURN_ADDR_RTX
999 tem
= RETURN_ADDR_RTX (count
, tem
);
1001 tem
= memory_address (Pmode
,
1002 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
1003 tem
= gen_frame_mem (Pmode
, tem
);
1008 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; lazily created on first use by
   expand_builtin_setjmp_setup via new_alias_set.  */
1009 static alias_set_type setjmp_alias_set
= -1;
1011 /* Construct the leading half of a __builtin_setjmp call. Control will
1012 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1013 exception handling code. */
1016 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
1018 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1022 if (setjmp_alias_set
== -1)
1023 setjmp_alias_set
= new_alias_set ();
1025 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1027 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
1029 /* We store the frame pointer and the address of receiver_label in
1030 the buffer and use the rest of it for the stack save area, which
1031 is machine-dependent. */
1033 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
1034 set_mem_alias_set (mem
, setjmp_alias_set
);
1035 emit_move_insn (mem
, hard_frame_pointer_rtx
);
1037 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1038 GET_MODE_SIZE (Pmode
))),
1039 set_mem_alias_set (mem
, setjmp_alias_set
);
1041 emit_move_insn (validize_mem (mem
),
1042 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
1044 stack_save
= gen_rtx_MEM (sa_mode
,
1045 plus_constant (Pmode
, buf_addr
,
1046 2 * GET_MODE_SIZE (Pmode
)));
1047 set_mem_alias_set (stack_save
, setjmp_alias_set
);
1048 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1050 /* If there is further processing to do, do it. */
1051 if (targetm
.have_builtin_setjmp_setup ())
1052 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
1054 /* We have a nonlocal label. */
1055 cfun
->has_nonlocal_label
= 1;
1058 /* Construct the trailing part of a __builtin_setjmp call. This is
1059 also called directly by the SJLJ exception handling code.
1060 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
1063 expand_builtin_setjmp_receiver (rtx receiver_label
)
1067 /* Mark the FP as used when we get here, so we have to make sure it's
1068 marked as used by this function. */
1069 emit_use (hard_frame_pointer_rtx
);
1071 /* Mark the static chain as clobbered here so life information
1072 doesn't get messed up for it. */
1073 chain
= rtx_for_static_chain (current_function_decl
, true);
1074 if (chain
&& REG_P (chain
))
1075 emit_clobber (chain
);
1077 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
1079 /* If the argument pointer can be eliminated in favor of the
1080 frame pointer, we don't need to restore it. We assume here
1081 that if such an elimination is present, it can always be used.
1082 This is the case on all known machines; if we don't make this
1083 assumption, we do unnecessary saving on many machines. */
1085 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
1087 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
1088 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
1089 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
1092 if (i
== ARRAY_SIZE (elim_regs
))
1094 /* Now restore our arg pointer from the address at which it
1095 was saved in our stack frame. */
1096 emit_move_insn (crtl
->args
.internal_arg_pointer
,
1097 copy_to_reg (get_arg_pointer_save_area ()));
1101 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
1102 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
1103 else if (targetm
.have_nonlocal_goto_receiver ())
1104 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
1108 /* We must not allow the code we just generated to be reordered by
1109 scheduling. Specifically, the update of the frame pointer must
1110 happen immediately, not later. */
1111 emit_insn (gen_blockage ());
1114 /* __builtin_longjmp is passed a pointer to an array of five words (not
1115 all will be used on all machines). It operates similarly to the C
1116 library function of the same name, but is more efficient. Much of
1117 the code below is copied from the handling of non-local gotos. */
1120 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
1123 rtx_insn
*insn
, *last
;
1124 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1126 /* DRAP is needed for stack realign if longjmp is expanded to current
1128 if (SUPPORTS_STACK_ALIGNMENT
)
1129 crtl
->need_drap
= true;
1131 if (setjmp_alias_set
== -1)
1132 setjmp_alias_set
= new_alias_set ();
1134 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1136 buf_addr
= force_reg (Pmode
, buf_addr
);
1138 /* We require that the user must pass a second argument of 1, because
1139 that is what builtin_setjmp will return. */
1140 gcc_assert (value
== const1_rtx
);
1142 last
= get_last_insn ();
1143 if (targetm
.have_builtin_longjmp ())
1144 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
1147 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1148 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1149 GET_MODE_SIZE (Pmode
)));
1151 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1152 2 * GET_MODE_SIZE (Pmode
)));
1153 set_mem_alias_set (fp
, setjmp_alias_set
);
1154 set_mem_alias_set (lab
, setjmp_alias_set
);
1155 set_mem_alias_set (stack
, setjmp_alias_set
);
1157 /* Pick up FP, label, and SP from the block and jump. This code is
1158 from expand_goto in stmt.c; see there for detailed comments. */
1159 if (targetm
.have_nonlocal_goto ())
1160 /* We have to pass a value to the nonlocal_goto pattern that will
1161 get copied into the static_chain pointer, but it does not matter
1162 what that value is, because builtin_setjmp does not use it. */
1163 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
1166 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1167 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1169 lab
= copy_to_reg (lab
);
1171 /* Restore the frame pointer and stack pointer. We must use a
1172 temporary since the setjmp buffer may be a local. */
1173 fp
= copy_to_reg (fp
);
1174 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1176 /* Ensure the frame pointer move is not optimized. */
1177 emit_insn (gen_blockage ());
1178 emit_clobber (hard_frame_pointer_rtx
);
1179 emit_clobber (frame_pointer_rtx
);
1180 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1182 emit_use (hard_frame_pointer_rtx
);
1183 emit_use (stack_pointer_rtx
);
1184 emit_indirect_jump (lab
);
1188 /* Search backwards and mark the jump insn as a non-local goto.
1189 Note that this precludes the use of __builtin_longjmp to a
1190 __builtin_setjmp target in the same function. However, we've
1191 already cautioned the user that these functions are for
1192 internal exception handling use only. */
1193 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1195 gcc_assert (insn
!= last
);
1199 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1202 else if (CALL_P (insn
))
1208 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1210 return (iter
->i
< iter
->n
);
1213 /* This function validates the types of a function call argument list
1214 against a specified list of tree_codes. If the last specifier is a 0,
1215 that represents an ellipsis, otherwise the last specifier must be a
1219 validate_arglist (const_tree callexpr
, ...)
1221 enum tree_code code
;
1224 const_call_expr_arg_iterator iter
;
1227 va_start (ap
, callexpr
);
1228 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1230 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1231 tree fn
= CALL_EXPR_FN (callexpr
);
1232 bitmap argmap
= get_nonnull_args (TREE_TYPE (TREE_TYPE (fn
)));
1234 for (unsigned argno
= 1; ; ++argno
)
1236 code
= (enum tree_code
) va_arg (ap
, int);
1241 /* This signifies an ellipses, any further arguments are all ok. */
1245 /* This signifies an endlink, if no arguments remain, return
1246 true, otherwise return false. */
1247 res
= !more_const_call_expr_args_p (&iter
);
1250 /* The actual argument must be nonnull when either the whole
1251 called function has been declared nonnull, or when the formal
1252 argument corresponding to the actual argument has been. */
1254 && (bitmap_empty_p (argmap
) || bitmap_bit_p (argmap
, argno
)))
1256 arg
= next_const_call_expr_arg (&iter
);
1257 if (!validate_arg (arg
, code
) || integer_zerop (arg
))
1263 /* If no parameters remain or the parameter's code does not
1264 match the specified code, return false. Otherwise continue
1265 checking any remaining arguments. */
1266 arg
= next_const_call_expr_arg (&iter
);
1267 if (!validate_arg (arg
, code
))
1273 /* We need gotos here since we can only have one VA_CLOSE in a
1278 BITMAP_FREE (argmap
);
1283 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1284 and the address of the save area. */
1287 expand_builtin_nonlocal_goto (tree exp
)
1289 tree t_label
, t_save_area
;
1290 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1293 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1296 t_label
= CALL_EXPR_ARG (exp
, 0);
1297 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1299 r_label
= expand_normal (t_label
);
1300 r_label
= convert_memory_address (Pmode
, r_label
);
1301 r_save_area
= expand_normal (t_save_area
);
1302 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1303 /* Copy the address of the save location to a register just in case it was
1304 based on the frame pointer. */
1305 r_save_area
= copy_to_reg (r_save_area
);
1306 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1307 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1308 plus_constant (Pmode
, r_save_area
,
1309 GET_MODE_SIZE (Pmode
)));
1311 crtl
->has_nonlocal_goto
= 1;
1313 /* ??? We no longer need to pass the static chain value, afaik. */
1314 if (targetm
.have_nonlocal_goto ())
1315 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1318 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1319 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1321 r_label
= copy_to_reg (r_label
);
1323 /* Restore the frame pointer and stack pointer. We must use a
1324 temporary since the setjmp buffer may be a local. */
1325 r_fp
= copy_to_reg (r_fp
);
1326 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1328 /* Ensure the frame pointer move is not optimized. */
1329 emit_insn (gen_blockage ());
1330 emit_clobber (hard_frame_pointer_rtx
);
1331 emit_clobber (frame_pointer_rtx
);
1332 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1334 /* USE of hard_frame_pointer_rtx added for consistency;
1335 not clear if really needed. */
1336 emit_use (hard_frame_pointer_rtx
);
1337 emit_use (stack_pointer_rtx
);
1339 /* If the architecture is using a GP register, we must
1340 conservatively assume that the target function makes use of it.
1341 The prologue of functions with nonlocal gotos must therefore
1342 initialize the GP register to the appropriate value, and we
1343 must then make sure that this value is live at the point
1344 of the jump. (Note that this doesn't necessarily apply
1345 to targets with a nonlocal_goto pattern; they are free
1346 to implement it in their own way. Note also that this is
1347 a no-op if the GP register is a global invariant.) */
1348 unsigned regnum
= PIC_OFFSET_TABLE_REGNUM
;
1349 if (regnum
!= INVALID_REGNUM
&& fixed_regs
[regnum
])
1350 emit_use (pic_offset_table_rtx
);
1352 emit_indirect_jump (r_label
);
1355 /* Search backwards to the jump insn and mark it as a
1357 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1361 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1364 else if (CALL_P (insn
))
1371 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1372 (not all will be used on all machines) that was passed to __builtin_setjmp.
1373 It updates the stack pointer in that block to the current value. This is
1374 also called directly by the SJLJ exception handling code. */
1377 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1379 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1380 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1382 = gen_rtx_MEM (sa_mode
,
1385 plus_constant (Pmode
, buf_addr
,
1386 2 * GET_MODE_SIZE (Pmode
))));
1388 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1391 /* Expand a call to __builtin_prefetch. For a target that does not support
1392 data prefetch, evaluate the memory address argument in case it has side
1396 expand_builtin_prefetch (tree exp
)
1398 tree arg0
, arg1
, arg2
;
1402 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1405 arg0
= CALL_EXPR_ARG (exp
, 0);
1407 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1408 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1410 nargs
= call_expr_nargs (exp
);
1412 arg1
= CALL_EXPR_ARG (exp
, 1);
1414 arg1
= integer_zero_node
;
1416 arg2
= CALL_EXPR_ARG (exp
, 2);
1418 arg2
= integer_three_node
;
1420 /* Argument 0 is an address. */
1421 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1423 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1424 if (TREE_CODE (arg1
) != INTEGER_CST
)
1426 error ("second argument to %<__builtin_prefetch%> must be a constant");
1427 arg1
= integer_zero_node
;
1429 op1
= expand_normal (arg1
);
1430 /* Argument 1 must be either zero or one. */
1431 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1433 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1438 /* Argument 2 (locality) must be a compile-time constant int. */
1439 if (TREE_CODE (arg2
) != INTEGER_CST
)
1441 error ("third argument to %<__builtin_prefetch%> must be a constant");
1442 arg2
= integer_zero_node
;
1444 op2
= expand_normal (arg2
);
1445 /* Argument 2 must be 0, 1, 2, or 3. */
1446 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1448 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1452 if (targetm
.have_prefetch ())
1454 class expand_operand ops
[3];
1456 create_address_operand (&ops
[0], op0
);
1457 create_integer_operand (&ops
[1], INTVAL (op1
));
1458 create_integer_operand (&ops
[2], INTVAL (op2
));
1459 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1463 /* Don't do anything with direct references to volatile memory, but
1464 generate code to handle other side effects. */
1465 if (!MEM_P (op0
) && side_effects_p (op0
))
1469 /* Get a MEM rtx for expression EXP which is the address of an operand
1470 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1471 the maximum length of the block of memory that might be accessed or
1475 get_memory_rtx (tree exp
, tree len
)
1477 tree orig_exp
= exp
;
1480 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1481 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1482 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1483 exp
= TREE_OPERAND (exp
, 0);
1485 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1486 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1488 /* Get an expression we can use to find the attributes to assign to MEM.
1489 First remove any nops. */
1490 while (CONVERT_EXPR_P (exp
)
1491 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1492 exp
= TREE_OPERAND (exp
, 0);
1494 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1495 (as builtin stringops may alias with anything). */
1496 exp
= fold_build2 (MEM_REF
,
1497 build_array_type (char_type_node
,
1498 build_range_type (sizetype
,
1499 size_one_node
, len
)),
1500 exp
, build_int_cst (ptr_type_node
, 0));
1502 /* If the MEM_REF has no acceptable address, try to get the base object
1503 from the original address we got, and build an all-aliasing
1504 unknown-sized access to that one. */
1505 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1506 set_mem_attributes (mem
, exp
, 0);
1507 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1508 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1511 exp
= build_fold_addr_expr (exp
);
1512 exp
= fold_build2 (MEM_REF
,
1513 build_array_type (char_type_node
,
1514 build_range_type (sizetype
,
1517 exp
, build_int_cst (ptr_type_node
, 0));
1518 set_mem_attributes (mem
, exp
, 0);
1520 set_mem_alias_set (mem
, 0);
1524 /* Built-in functions to perform an untyped call and return. */
/* Convenience accessors for the per-target arrays recording, for each hard
   register, the mode used to save it for __builtin_apply_args /
   __builtin_apply (see apply_args_size / apply_result_size below).  */
1526 #define apply_args_mode \
1527 (this_target_builtins->x_apply_args_mode)
1528 #define apply_result_mode \
1529 (this_target_builtins->x_apply_result_mode)
1531 /* Return the size required for the block returned by __builtin_apply_args,
1532 and initialize apply_args_mode. */
1535 apply_args_size (void)
1537 static int size
= -1;
1541 /* The values computed by this function never change. */
1544 /* The first value is the incoming arg-pointer. */
1545 size
= GET_MODE_SIZE (Pmode
);
1547 /* The second value is the structure value address unless this is
1548 passed as an "invisible" first argument. */
1549 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1550 size
+= GET_MODE_SIZE (Pmode
);
1552 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1553 if (FUNCTION_ARG_REGNO_P (regno
))
1555 fixed_size_mode mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1557 gcc_assert (mode
!= VOIDmode
);
1559 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1560 if (size
% align
!= 0)
1561 size
= CEIL (size
, align
) * align
;
1562 size
+= GET_MODE_SIZE (mode
);
1563 apply_args_mode
[regno
] = mode
;
1567 apply_args_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1573 /* Return the size required for the block returned by __builtin_apply,
1574 and initialize apply_result_mode. */
1577 apply_result_size (void)
1579 static int size
= -1;
1582 /* The values computed by this function never change. */
1587 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1588 if (targetm
.calls
.function_value_regno_p (regno
))
1590 fixed_size_mode mode
= targetm
.calls
.get_raw_result_mode (regno
);
1592 gcc_assert (mode
!= VOIDmode
);
1594 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1595 if (size
% align
!= 0)
1596 size
= CEIL (size
, align
) * align
;
1597 size
+= GET_MODE_SIZE (mode
);
1598 apply_result_mode
[regno
] = mode
;
1601 apply_result_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1603 /* Allow targets that use untyped_call and untyped_return to override
1604 the size so that machine-specific information can be stored here. */
1605 #ifdef APPLY_RESULT_SIZE
1606 size
= APPLY_RESULT_SIZE
;
1612 /* Create a vector describing the result block RESULT. If SAVEP is true,
1613 the result block is used to save the values; otherwise it is used to
1614 restore the values. */
1617 result_vector (int savep
, rtx result
)
1619 int regno
, size
, align
, nelts
;
1620 fixed_size_mode mode
;
1622 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1625 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1626 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1628 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1629 if (size
% align
!= 0)
1630 size
= CEIL (size
, align
) * align
;
1631 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1632 mem
= adjust_address (result
, mode
, size
);
1633 savevec
[nelts
++] = (savep
1634 ? gen_rtx_SET (mem
, reg
)
1635 : gen_rtx_SET (reg
, mem
));
1636 size
+= GET_MODE_SIZE (mode
);
1638 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1641 /* Save the state required to perform an untyped call with the same
1642 arguments as were passed to the current function. */
1645 expand_builtin_apply_args_1 (void)
1648 int size
, align
, regno
;
1649 fixed_size_mode mode
;
1650 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1652 /* Create a block where the arg-pointer, structure value address,
1653 and argument registers can be saved. */
1654 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1656 /* Walk past the arg-pointer and structure value address. */
1657 size
= GET_MODE_SIZE (Pmode
);
1658 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1659 size
+= GET_MODE_SIZE (Pmode
);
1661 /* Save each register used in calling a function to the block. */
1662 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1663 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1665 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1666 if (size
% align
!= 0)
1667 size
= CEIL (size
, align
) * align
;
1669 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1671 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1672 size
+= GET_MODE_SIZE (mode
);
1675 /* Save the arg pointer to the block. */
1676 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1677 /* We need the pointer as the caller actually passed them to us, not
1678 as we might have pretended they were passed. Make sure it's a valid
1679 operand, as emit_move_insn isn't expected to handle a PLUS. */
1680 if (STACK_GROWS_DOWNWARD
)
1682 = force_operand (plus_constant (Pmode
, tem
,
1683 crtl
->args
.pretend_args_size
),
1685 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1687 size
= GET_MODE_SIZE (Pmode
);
1689 /* Save the structure value address unless this is passed as an
1690 "invisible" first argument. */
1691 if (struct_incoming_value
)
1692 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1693 copy_to_reg (struct_incoming_value
));
1695 /* Return the address of the block. */
1696 return copy_addr_to_reg (XEXP (registers
, 0));
1699 /* __builtin_apply_args returns block of memory allocated on
1700 the stack into which is stored the arg pointer, structure
1701 value address, static chain, and all the registers that might
1702 possibly be used in performing a function call. The code is
1703 moved to the start of the function so the incoming values are
1707 expand_builtin_apply_args (void)
1709 /* Don't do __builtin_apply_args more than once in a function.
1710 Save the result of the first call and reuse it. */
1711 if (apply_args_value
!= 0)
1712 return apply_args_value
;
1714 /* When this function is called, it means that registers must be
1715 saved on entry to this function. So we migrate the
1716 call to the first insn of this function. */
1720 temp
= expand_builtin_apply_args_1 ();
1721 rtx_insn
*seq
= get_insns ();
1724 apply_args_value
= temp
;
1726 /* Put the insns after the NOTE that starts the function.
1727 If this is inside a start_sequence, make the outer-level insn
1728 chain current, so the code is placed at the start of the
1729 function. If internal_arg_pointer is a non-virtual pseudo,
1730 it needs to be placed after the function that initializes
1732 push_topmost_sequence ();
1733 if (REG_P (crtl
->args
.internal_arg_pointer
)
1734 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1735 emit_insn_before (seq
, parm_birth_insn
);
1737 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1738 pop_topmost_sequence ();
1743 /* Perform an untyped call and save the state required to perform an
1744 untyped return of whatever value was returned by the given function. */
1747 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1749 int size
, align
, regno
;
1750 fixed_size_mode mode
;
1751 rtx incoming_args
, result
, reg
, dest
, src
;
1752 rtx_call_insn
*call_insn
;
1753 rtx old_stack_level
= 0;
1754 rtx call_fusage
= 0;
1755 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1757 arguments
= convert_memory_address (Pmode
, arguments
);
1759 /* Create a block where the return registers can be saved. */
1760 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1762 /* Fetch the arg pointer from the ARGUMENTS block. */
1763 incoming_args
= gen_reg_rtx (Pmode
);
1764 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1765 if (!STACK_GROWS_DOWNWARD
)
1766 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1767 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1769 /* Push a new argument block and copy the arguments. Do not allow
1770 the (potential) memcpy call below to interfere with our stack
1772 do_pending_stack_adjust ();
1775 /* Save the stack with nonlocal if available. */
1776 if (targetm
.have_save_stack_nonlocal ())
1777 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1779 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1781 /* Allocate a block of memory onto the stack and copy the memory
1782 arguments to the outgoing arguments address. We can pass TRUE
1783 as the 4th argument because we just saved the stack pointer
1784 and will restore it right after the call. */
1785 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, -1, true);
1787 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1788 may have already set current_function_calls_alloca to true.
1789 current_function_calls_alloca won't be set if argsize is zero,
1790 so we have to guarantee need_drap is true here. */
1791 if (SUPPORTS_STACK_ALIGNMENT
)
1792 crtl
->need_drap
= true;
1794 dest
= virtual_outgoing_args_rtx
;
1795 if (!STACK_GROWS_DOWNWARD
)
1797 if (CONST_INT_P (argsize
))
1798 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1800 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1802 dest
= gen_rtx_MEM (BLKmode
, dest
);
1803 set_mem_align (dest
, PARM_BOUNDARY
);
1804 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1805 set_mem_align (src
, PARM_BOUNDARY
);
1806 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1808 /* Refer to the argument block. */
1810 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1811 set_mem_align (arguments
, PARM_BOUNDARY
);
1813 /* Walk past the arg-pointer and structure value address. */
1814 size
= GET_MODE_SIZE (Pmode
);
1816 size
+= GET_MODE_SIZE (Pmode
);
1818 /* Restore each of the registers previously saved. Make USE insns
1819 for each of these registers for use in making the call. */
1820 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1821 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1823 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1824 if (size
% align
!= 0)
1825 size
= CEIL (size
, align
) * align
;
1826 reg
= gen_rtx_REG (mode
, regno
);
1827 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1828 use_reg (&call_fusage
, reg
);
1829 size
+= GET_MODE_SIZE (mode
);
1832 /* Restore the structure value address unless this is passed as an
1833 "invisible" first argument. */
1834 size
= GET_MODE_SIZE (Pmode
);
1837 rtx value
= gen_reg_rtx (Pmode
);
1838 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1839 emit_move_insn (struct_value
, value
);
1840 if (REG_P (struct_value
))
1841 use_reg (&call_fusage
, struct_value
);
1844 /* All arguments and registers used for the call are set up by now! */
1845 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1847 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1848 and we don't want to load it into a register as an optimization,
1849 because prepare_call_address already did it if it should be done. */
1850 if (GET_CODE (function
) != SYMBOL_REF
)
1851 function
= memory_address (FUNCTION_MODE
, function
);
1853 /* Generate the actual call instruction and save the return value. */
1854 if (targetm
.have_untyped_call ())
1856 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1857 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1858 result_vector (1, result
)));
1860 else if (targetm
.have_call_value ())
1864 /* Locate the unique return register. It is not possible to
1865 express a call that sets more than one return register using
1866 call_value; use untyped_call for that. In fact, untyped_call
1867 only needs to save the return registers in the given block. */
1868 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1869 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1871 gcc_assert (!valreg
); /* have_untyped_call required. */
1873 valreg
= gen_rtx_REG (mode
, regno
);
1876 emit_insn (targetm
.gen_call_value (valreg
,
1877 gen_rtx_MEM (FUNCTION_MODE
, function
),
1878 const0_rtx
, NULL_RTX
, const0_rtx
));
1880 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1885 /* Find the CALL insn we just emitted, and attach the register usage
1887 call_insn
= last_call_insn ();
1888 add_function_usage_to (call_insn
, call_fusage
);
1890 /* Restore the stack. */
1891 if (targetm
.have_save_stack_nonlocal ())
1892 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1894 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1895 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1899 /* Return the address of the result block. */
1900 result
= copy_addr_to_reg (XEXP (result
, 0));
1901 return convert_memory_address (ptr_mode
, result
);
1904 /* Perform an untyped return. */
1907 expand_builtin_return (rtx result
)
1909 int size
, align
, regno
;
1910 fixed_size_mode mode
;
1912 rtx_insn
*call_fusage
= 0;
1914 result
= convert_memory_address (Pmode
, result
);
1916 apply_result_size ();
1917 result
= gen_rtx_MEM (BLKmode
, result
);
1919 if (targetm
.have_untyped_return ())
1921 rtx vector
= result_vector (0, result
);
1922 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1927 /* Restore the return value and note that each value is used. */
1929 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1930 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1932 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1933 if (size
% align
!= 0)
1934 size
= CEIL (size
, align
) * align
;
1935 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1936 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1938 push_to_sequence (call_fusage
);
1940 call_fusage
= get_insns ();
1942 size
+= GET_MODE_SIZE (mode
);
1945 /* Put the USE insns before the return. */
1946 emit_insn (call_fusage
);
1948 /* Return whatever values was restored by jumping directly to the end
1950 expand_naked_return ();
1953 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1955 static enum type_class
1956 type_to_class (tree type
)
1958 switch (TREE_CODE (type
))
1960 case VOID_TYPE
: return void_type_class
;
1961 case INTEGER_TYPE
: return integer_type_class
;
1962 case ENUMERAL_TYPE
: return enumeral_type_class
;
1963 case BOOLEAN_TYPE
: return boolean_type_class
;
1964 case POINTER_TYPE
: return pointer_type_class
;
1965 case REFERENCE_TYPE
: return reference_type_class
;
1966 case OFFSET_TYPE
: return offset_type_class
;
1967 case REAL_TYPE
: return real_type_class
;
1968 case COMPLEX_TYPE
: return complex_type_class
;
1969 case FUNCTION_TYPE
: return function_type_class
;
1970 case METHOD_TYPE
: return method_type_class
;
1971 case RECORD_TYPE
: return record_type_class
;
1973 case QUAL_UNION_TYPE
: return union_type_class
;
1974 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1975 ? string_type_class
: array_type_class
);
1976 case LANG_TYPE
: return lang_type_class
;
1977 default: return no_type_class
;
1981 /* Expand a call EXP to __builtin_classify_type. */
1984 expand_builtin_classify_type (tree exp
)
1986 if (call_expr_nargs (exp
))
1987 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1988 return GEN_INT (no_type_class
);
1991 /* This helper macro, meant to be used in mathfn_built_in below, determines
1992 which among a set of builtin math functions is appropriate for a given type
1993 mode. The `F' (float) and `L' (long double) are automatically generated
1994 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1995 types, there are additional types that are considered with 'F32', 'F64',
1996 'F128', etc. suffixes. */
/* NOTE(review): these macros expand to switch-case labels plus assignments
   to the local fcode* variables of mathfn_built_in_2 below; they are not
   standalone statements and are only meaningful inside that switch.  */
1997 #define CASE_MATHFN(MATHFN) \
1998 CASE_CFN_##MATHFN: \
1999 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2000 fcodel = BUILT_IN_##MATHFN##L ; break;
2001 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2003 #define CASE_MATHFN_FLOATN(MATHFN) \
2004 CASE_CFN_##MATHFN: \
2005 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2006 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2007 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2008 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2009 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2011 /* Similar to above, but appends _R after any F/L suffix. */
2012 #define CASE_MATHFN_REENT(MATHFN) \
2013 case CFN_BUILT_IN_##MATHFN##_R: \
2014 case CFN_BUILT_IN_##MATHFN##F_R: \
2015 case CFN_BUILT_IN_##MATHFN##L_R: \
2016 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2017 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2019 /* Return a function equivalent to FN but operating on floating-point
2020 values of type TYPE, or END_BUILTINS if no such function exists.
2021 This is purely an operation on function codes; it does not guarantee
2022 that the target actually has an implementation of the function. */
2024 static built_in_function
2025 mathfn_built_in_2 (tree type
, combined_fn fn
)
2028 built_in_function fcode
, fcodef
, fcodel
;
2029 built_in_function fcodef16
= END_BUILTINS
;
2030 built_in_function fcodef32
= END_BUILTINS
;
2031 built_in_function fcodef64
= END_BUILTINS
;
2032 built_in_function fcodef128
= END_BUILTINS
;
2033 built_in_function fcodef32x
= END_BUILTINS
;
2034 built_in_function fcodef64x
= END_BUILTINS
;
2035 built_in_function fcodef128x
= END_BUILTINS
;
2047 CASE_MATHFN_FLOATN (CEIL
)
2049 CASE_MATHFN_FLOATN (COPYSIGN
)
2061 CASE_MATHFN_FLOATN (FLOOR
)
2062 CASE_MATHFN_FLOATN (FMA
)
2063 CASE_MATHFN_FLOATN (FMAX
)
2064 CASE_MATHFN_FLOATN (FMIN
)
2068 CASE_MATHFN_REENT (GAMMA
) /* GAMMA_R */
2069 CASE_MATHFN (HUGE_VAL
)
2073 CASE_MATHFN (IFLOOR
)
2076 CASE_MATHFN (IROUND
)
2083 CASE_MATHFN (LFLOOR
)
2084 CASE_MATHFN (LGAMMA
)
2085 CASE_MATHFN_REENT (LGAMMA
) /* LGAMMA_R */
2086 CASE_MATHFN (LLCEIL
)
2087 CASE_MATHFN (LLFLOOR
)
2088 CASE_MATHFN (LLRINT
)
2089 CASE_MATHFN (LLROUND
)
2096 CASE_MATHFN (LROUND
)
2100 CASE_MATHFN_FLOATN (NEARBYINT
)
2101 CASE_MATHFN (NEXTAFTER
)
2102 CASE_MATHFN (NEXTTOWARD
)
2106 CASE_MATHFN (REMAINDER
)
2107 CASE_MATHFN (REMQUO
)
2108 CASE_MATHFN_FLOATN (RINT
)
2109 CASE_MATHFN_FLOATN (ROUND
)
2110 CASE_MATHFN_FLOATN (ROUNDEVEN
)
2112 CASE_MATHFN (SCALBLN
)
2113 CASE_MATHFN (SCALBN
)
2114 CASE_MATHFN (SIGNBIT
)
2115 CASE_MATHFN (SIGNIFICAND
)
2117 CASE_MATHFN (SINCOS
)
2119 CASE_MATHFN_FLOATN (SQRT
)
2122 CASE_MATHFN (TGAMMA
)
2123 CASE_MATHFN_FLOATN (TRUNC
)
2129 return END_BUILTINS
;
2132 mtype
= TYPE_MAIN_VARIANT (type
);
2133 if (mtype
== double_type_node
)
2135 else if (mtype
== float_type_node
)
2137 else if (mtype
== long_double_type_node
)
2139 else if (mtype
== float16_type_node
)
2141 else if (mtype
== float32_type_node
)
2143 else if (mtype
== float64_type_node
)
2145 else if (mtype
== float128_type_node
)
2147 else if (mtype
== float32x_type_node
)
2149 else if (mtype
== float64x_type_node
)
2151 else if (mtype
== float128x_type_node
)
2154 return END_BUILTINS
;
2157 /* Return mathematic function equivalent to FN but operating directly on TYPE,
2158 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2159 otherwise use the explicit declaration. If we can't do the conversion,
2163 mathfn_built_in_1 (tree type
, combined_fn fn
, bool implicit_p
)
2165 built_in_function fcode2
= mathfn_built_in_2 (type
, fn
);
2166 if (fcode2
== END_BUILTINS
)
2169 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
2172 return builtin_decl_explicit (fcode2
);
2175 /* Like mathfn_built_in_1, but always use the implicit array. */
2178 mathfn_built_in (tree type
, combined_fn fn
)
2180 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
2183 /* Like mathfn_built_in_1, but take a built_in_function and
2184 always use the implicit array. */
2187 mathfn_built_in (tree type
, enum built_in_function fn
)
2189 return mathfn_built_in_1 (type
, as_combined_fn (fn
), /*implicit=*/ 1);
2192 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2193 return its code, otherwise return IFN_LAST. Note that this function
2194 only tests whether the function is defined in internals.def, not whether
2195 it is actually available on the target. */
2198 associated_internal_fn (tree fndecl
)
2200 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
);
2201 tree return_type
= TREE_TYPE (TREE_TYPE (fndecl
));
2202 switch (DECL_FUNCTION_CODE (fndecl
))
2204 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2205 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2206 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2208 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2209 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2210 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2211 #include "internal-fn.def"
2213 CASE_FLT_FN (BUILT_IN_POW10
):
2216 CASE_FLT_FN (BUILT_IN_DREM
):
2217 return IFN_REMAINDER
;
2219 CASE_FLT_FN (BUILT_IN_SCALBN
):
2220 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2221 if (REAL_MODE_FORMAT (TYPE_MODE (return_type
))->b
== 2)
2230 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2231 on the current target by a call to an internal function, return the
2232 code of that internal function, otherwise return IFN_LAST. The caller
2233 is responsible for ensuring that any side-effects of the built-in
2234 call are dealt with correctly. E.g. if CALL sets errno, the caller
2235 must decide that the errno result isn't needed or make it available
2236 in some other way. */
2239 replacement_internal_fn (gcall
*call
)
2241 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
2243 internal_fn ifn
= associated_internal_fn (gimple_call_fndecl (call
));
2244 if (ifn
!= IFN_LAST
)
2246 tree_pair types
= direct_internal_fn_types (ifn
, call
);
2247 optimization_type opt_type
= bb_optimization_type (gimple_bb (call
));
2248 if (direct_internal_fn_supported_p (ifn
, types
, opt_type
))
2255 /* Expand a call to the builtin trinary math functions (fma).
2256 Return NULL_RTX if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's
2263 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2265 optab builtin_optab
;
2266 rtx op0
, op1
, op2
, result
;
2268 tree fndecl
= get_callee_fndecl (exp
);
2269 tree arg0
, arg1
, arg2
;
2272 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2275 arg0
= CALL_EXPR_ARG (exp
, 0);
2276 arg1
= CALL_EXPR_ARG (exp
, 1);
2277 arg2
= CALL_EXPR_ARG (exp
, 2);
2279 switch (DECL_FUNCTION_CODE (fndecl
))
2281 CASE_FLT_FN (BUILT_IN_FMA
):
2282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
2283 builtin_optab
= fma_optab
; break;
2288 /* Make a suitable register to place result in. */
2289 mode
= TYPE_MODE (TREE_TYPE (exp
));
2291 /* Before working hard, check whether the instruction is available. */
2292 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2295 result
= gen_reg_rtx (mode
);
2297 /* Always stabilize the argument list. */
2298 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2299 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2300 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2302 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2303 op1
= expand_normal (arg1
);
2304 op2
= expand_normal (arg2
);
2308 /* Compute into RESULT.
2309 Set RESULT to wherever the result comes back. */
2310 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2313 /* If we were unable to expand via the builtin, stop the sequence
2314 (without outputting the insns) and call to the library function
2315 with the stabilized argument list. */
2319 return expand_call (exp
, target
, target
== const0_rtx
);
2322 /* Output the entire sequence. */
2323 insns
= get_insns ();
2330 /* Expand a call to the builtin sin and cos math functions.
2331 Return NULL_RTX if a normal call should be emitted rather than expanding the
2332 function in-line. EXP is the expression that is a call to the builtin
2333 function; if convenient, the result should be placed in TARGET.
2334 SUBTARGET may be used as the target for computing one of EXP's
2338 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2340 optab builtin_optab
;
2343 tree fndecl
= get_callee_fndecl (exp
);
2347 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2350 arg
= CALL_EXPR_ARG (exp
, 0);
2352 switch (DECL_FUNCTION_CODE (fndecl
))
2354 CASE_FLT_FN (BUILT_IN_SIN
):
2355 CASE_FLT_FN (BUILT_IN_COS
):
2356 builtin_optab
= sincos_optab
; break;
2361 /* Make a suitable register to place result in. */
2362 mode
= TYPE_MODE (TREE_TYPE (exp
));
2364 /* Check if sincos insn is available, otherwise fallback
2365 to sin or cos insn. */
2366 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2367 switch (DECL_FUNCTION_CODE (fndecl
))
2369 CASE_FLT_FN (BUILT_IN_SIN
):
2370 builtin_optab
= sin_optab
; break;
2371 CASE_FLT_FN (BUILT_IN_COS
):
2372 builtin_optab
= cos_optab
; break;
2377 /* Before working hard, check whether the instruction is available. */
2378 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2380 rtx result
= gen_reg_rtx (mode
);
2382 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2383 need to expand the argument again. This way, we will not perform
2384 side-effects more the once. */
2385 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2387 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2391 /* Compute into RESULT.
2392 Set RESULT to wherever the result comes back. */
2393 if (builtin_optab
== sincos_optab
)
2397 switch (DECL_FUNCTION_CODE (fndecl
))
2399 CASE_FLT_FN (BUILT_IN_SIN
):
2400 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2402 CASE_FLT_FN (BUILT_IN_COS
):
2403 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2411 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2415 /* Output the entire sequence. */
2416 insns
= get_insns ();
2422 /* If we were unable to expand via the builtin, stop the sequence
2423 (without outputting the insns) and call to the library function
2424 with the stabilized argument list. */
2428 return expand_call (exp
, target
, target
== const0_rtx
);
2431 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2432 return an RTL instruction code that implements the functionality.
2433 If that isn't possible or available return CODE_FOR_nothing. */
2435 static enum insn_code
2436 interclass_mathfn_icode (tree arg
, tree fndecl
)
2438 bool errno_set
= false;
2439 optab builtin_optab
= unknown_optab
;
2442 switch (DECL_FUNCTION_CODE (fndecl
))
2444 CASE_FLT_FN (BUILT_IN_ILOGB
):
2445 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2446 CASE_FLT_FN (BUILT_IN_ISINF
):
2447 builtin_optab
= isinf_optab
; break;
2448 case BUILT_IN_ISNORMAL
:
2449 case BUILT_IN_ISFINITE
:
2450 CASE_FLT_FN (BUILT_IN_FINITE
):
2451 case BUILT_IN_FINITED32
:
2452 case BUILT_IN_FINITED64
:
2453 case BUILT_IN_FINITED128
:
2454 case BUILT_IN_ISINFD32
:
2455 case BUILT_IN_ISINFD64
:
2456 case BUILT_IN_ISINFD128
:
2457 /* These builtins have no optabs (yet). */
2463 /* There's no easy way to detect the case we need to set EDOM. */
2464 if (flag_errno_math
&& errno_set
)
2465 return CODE_FOR_nothing
;
2467 /* Optab mode depends on the mode of the input argument. */
2468 mode
= TYPE_MODE (TREE_TYPE (arg
));
2471 return optab_handler (builtin_optab
, mode
);
2472 return CODE_FOR_nothing
;
2475 /* Expand a call to one of the builtin math functions that operate on
2476 floating point argument and output an integer result (ilogb, isinf,
2478 Return 0 if a normal call should be emitted rather than expanding the
2479 function in-line. EXP is the expression that is a call to the builtin
2480 function; if convenient, the result should be placed in TARGET. */
2483 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2485 enum insn_code icode
= CODE_FOR_nothing
;
2487 tree fndecl
= get_callee_fndecl (exp
);
2491 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2494 arg
= CALL_EXPR_ARG (exp
, 0);
2495 icode
= interclass_mathfn_icode (arg
, fndecl
);
2496 mode
= TYPE_MODE (TREE_TYPE (arg
));
2498 if (icode
!= CODE_FOR_nothing
)
2500 class expand_operand ops
[1];
2501 rtx_insn
*last
= get_last_insn ();
2502 tree orig_arg
= arg
;
2504 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2505 need to expand the argument again. This way, we will not perform
2506 side-effects more the once. */
2507 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2509 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2511 if (mode
!= GET_MODE (op0
))
2512 op0
= convert_to_mode (mode
, op0
, 0);
2514 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2515 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2516 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2517 return ops
[0].value
;
2519 delete_insns_since (last
);
2520 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2526 /* Expand a call to the builtin sincos math function.
2527 Return NULL_RTX if a normal call should be emitted rather than expanding the
2528 function in-line. EXP is the expression that is a call to the builtin
2532 expand_builtin_sincos (tree exp
)
2534 rtx op0
, op1
, op2
, target1
, target2
;
2536 tree arg
, sinp
, cosp
;
2538 location_t loc
= EXPR_LOCATION (exp
);
2539 tree alias_type
, alias_off
;
2541 if (!validate_arglist (exp
, REAL_TYPE
,
2542 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2545 arg
= CALL_EXPR_ARG (exp
, 0);
2546 sinp
= CALL_EXPR_ARG (exp
, 1);
2547 cosp
= CALL_EXPR_ARG (exp
, 2);
2549 /* Make a suitable register to place result in. */
2550 mode
= TYPE_MODE (TREE_TYPE (arg
));
2552 /* Check if sincos insn is available, otherwise emit the call. */
2553 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2556 target1
= gen_reg_rtx (mode
);
2557 target2
= gen_reg_rtx (mode
);
2559 op0
= expand_normal (arg
);
2560 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2561 alias_off
= build_int_cst (alias_type
, 0);
2562 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2564 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2567 /* Compute into target1 and target2.
2568 Set TARGET to wherever the result comes back. */
2569 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2570 gcc_assert (result
);
2572 /* Move target1 and target2 to the memory locations indicated
2574 emit_move_insn (op1
, target1
);
2575 emit_move_insn (op2
, target2
);
2580 /* Expand a call to the internal cexpi builtin to the sincos math function.
2581 EXP is the expression that is a call to the builtin function; if convenient,
2582 the result should be placed in TARGET. */
2585 expand_builtin_cexpi (tree exp
, rtx target
)
2587 tree fndecl
= get_callee_fndecl (exp
);
2591 location_t loc
= EXPR_LOCATION (exp
);
2593 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2596 arg
= CALL_EXPR_ARG (exp
, 0);
2597 type
= TREE_TYPE (arg
);
2598 mode
= TYPE_MODE (TREE_TYPE (arg
));
2600 /* Try expanding via a sincos optab, fall back to emitting a libcall
2601 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2602 is only generated from sincos, cexp or if we have either of them. */
2603 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2605 op1
= gen_reg_rtx (mode
);
2606 op2
= gen_reg_rtx (mode
);
2608 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2610 /* Compute into op1 and op2. */
2611 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2613 else if (targetm
.libc_has_function (function_sincos
))
2615 tree call
, fn
= NULL_TREE
;
2619 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2620 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2621 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2622 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2623 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2624 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2628 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2629 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2630 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2631 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2632 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2633 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2635 /* Make sure not to fold the sincos call again. */
2636 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2637 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2638 call
, 3, arg
, top1
, top2
));
2642 tree call
, fn
= NULL_TREE
, narg
;
2643 tree ctype
= build_complex_type (type
);
2645 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2646 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2647 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2648 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2649 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2650 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2654 /* If we don't have a decl for cexp create one. This is the
2655 friendliest fallback if the user calls __builtin_cexpi
2656 without full target C99 function support. */
2657 if (fn
== NULL_TREE
)
2660 const char *name
= NULL
;
2662 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2664 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2666 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2669 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2670 fn
= build_fn_decl (name
, fntype
);
2673 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2674 build_real (type
, dconst0
), arg
);
2676 /* Make sure not to fold the cexp call again. */
2677 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2678 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2679 target
, VOIDmode
, EXPAND_NORMAL
);
2682 /* Now build the proper return type. */
2683 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2684 make_tree (TREE_TYPE (arg
), op2
),
2685 make_tree (TREE_TYPE (arg
), op1
)),
2686 target
, VOIDmode
, EXPAND_NORMAL
);
2689 /* Conveniently construct a function call expression. FNDECL names the
2690 function to be called, N is the number of arguments, and the "..."
2691 parameters are the argument expressions. Unlike build_call_exr
2692 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2695 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2698 tree fntype
= TREE_TYPE (fndecl
);
2699 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2702 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2704 SET_EXPR_LOCATION (fn
, loc
);
2708 /* Expand a call to one of the builtin rounding functions gcc defines
2709 as an extension (lfloor and lceil). As these are gcc extensions we
2710 do not need to worry about setting errno to EDOM.
2711 If expanding via optab fails, lower expression to (int)(floor(x)).
2712 EXP is the expression that is a call to the builtin function;
2713 if convenient, the result should be placed in TARGET. */
2716 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2718 convert_optab builtin_optab
;
2721 tree fndecl
= get_callee_fndecl (exp
);
2722 enum built_in_function fallback_fn
;
2723 tree fallback_fndecl
;
2727 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2730 arg
= CALL_EXPR_ARG (exp
, 0);
2732 switch (DECL_FUNCTION_CODE (fndecl
))
2734 CASE_FLT_FN (BUILT_IN_ICEIL
):
2735 CASE_FLT_FN (BUILT_IN_LCEIL
):
2736 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2737 builtin_optab
= lceil_optab
;
2738 fallback_fn
= BUILT_IN_CEIL
;
2741 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2742 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2743 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2744 builtin_optab
= lfloor_optab
;
2745 fallback_fn
= BUILT_IN_FLOOR
;
2752 /* Make a suitable register to place result in. */
2753 mode
= TYPE_MODE (TREE_TYPE (exp
));
2755 target
= gen_reg_rtx (mode
);
2757 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2758 need to expand the argument again. This way, we will not perform
2759 side-effects more the once. */
2760 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2762 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2766 /* Compute into TARGET. */
2767 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2769 /* Output the entire sequence. */
2770 insns
= get_insns ();
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns). */
2780 /* Fall back to floating point rounding optab. */
2781 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2783 /* For non-C99 targets we may end up without a fallback fndecl here
2784 if the user called __builtin_lfloor directly. In this case emit
2785 a call to the floor/ceil variants nevertheless. This should result
2786 in the best user experience for not full C99 targets. */
2787 if (fallback_fndecl
== NULL_TREE
)
2790 const char *name
= NULL
;
2792 switch (DECL_FUNCTION_CODE (fndecl
))
2794 case BUILT_IN_ICEIL
:
2795 case BUILT_IN_LCEIL
:
2796 case BUILT_IN_LLCEIL
:
2799 case BUILT_IN_ICEILF
:
2800 case BUILT_IN_LCEILF
:
2801 case BUILT_IN_LLCEILF
:
2804 case BUILT_IN_ICEILL
:
2805 case BUILT_IN_LCEILL
:
2806 case BUILT_IN_LLCEILL
:
2809 case BUILT_IN_IFLOOR
:
2810 case BUILT_IN_LFLOOR
:
2811 case BUILT_IN_LLFLOOR
:
2814 case BUILT_IN_IFLOORF
:
2815 case BUILT_IN_LFLOORF
:
2816 case BUILT_IN_LLFLOORF
:
2819 case BUILT_IN_IFLOORL
:
2820 case BUILT_IN_LFLOORL
:
2821 case BUILT_IN_LLFLOORL
:
2828 fntype
= build_function_type_list (TREE_TYPE (arg
),
2829 TREE_TYPE (arg
), NULL_TREE
);
2830 fallback_fndecl
= build_fn_decl (name
, fntype
);
2833 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2835 tmp
= expand_normal (exp
);
2836 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2838 /* Truncate the result of floating point optab to integer
2839 via expand_fix (). */
2840 target
= gen_reg_rtx (mode
);
2841 expand_fix (target
, tmp
, 0);
2846 /* Expand a call to one of the builtin math functions doing integer
2848 Return 0 if a normal call should be emitted rather than expanding the
2849 function in-line. EXP is the expression that is a call to the builtin
2850 function; if convenient, the result should be placed in TARGET. */
2853 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2855 convert_optab builtin_optab
;
2858 tree fndecl
= get_callee_fndecl (exp
);
2861 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2863 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2866 arg
= CALL_EXPR_ARG (exp
, 0);
2868 switch (DECL_FUNCTION_CODE (fndecl
))
2870 CASE_FLT_FN (BUILT_IN_IRINT
):
2871 fallback_fn
= BUILT_IN_LRINT
;
2873 CASE_FLT_FN (BUILT_IN_LRINT
):
2874 CASE_FLT_FN (BUILT_IN_LLRINT
):
2875 builtin_optab
= lrint_optab
;
2878 CASE_FLT_FN (BUILT_IN_IROUND
):
2879 fallback_fn
= BUILT_IN_LROUND
;
2881 CASE_FLT_FN (BUILT_IN_LROUND
):
2882 CASE_FLT_FN (BUILT_IN_LLROUND
):
2883 builtin_optab
= lround_optab
;
2890 /* There's no easy way to detect the case we need to set EDOM. */
2891 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2894 /* Make a suitable register to place result in. */
2895 mode
= TYPE_MODE (TREE_TYPE (exp
));
2897 /* There's no easy way to detect the case we need to set EDOM. */
2898 if (!flag_errno_math
)
2900 rtx result
= gen_reg_rtx (mode
);
2902 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2903 need to expand the argument again. This way, we will not perform
2904 side-effects more the once. */
2905 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2907 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2911 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2913 /* Output the entire sequence. */
2914 insns
= get_insns ();
2920 /* If we were unable to expand via the builtin, stop the sequence
2921 (without outputting the insns) and call to the library function
2922 with the stabilized argument list. */
2926 if (fallback_fn
!= BUILT_IN_NONE
)
2928 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2929 targets, (int) round (x) should never be transformed into
2930 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2931 a call to lround in the hope that the target provides at least some
2932 C99 functions. This should result in the best user experience for
2933 not full C99 targets. */
2934 tree fallback_fndecl
= mathfn_built_in_1
2935 (TREE_TYPE (arg
), as_combined_fn (fallback_fn
), 0);
2937 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2938 fallback_fndecl
, 1, arg
);
2940 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2941 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2942 return convert_to_mode (mode
, target
, 0);
2945 return expand_call (exp
, target
, target
== const0_rtx
);
2948 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2949 a normal call should be emitted rather than expanding the function
2950 in-line. EXP is the expression that is a call to the builtin
2951 function; if convenient, the result should be placed in TARGET. */
2954 expand_builtin_powi (tree exp
, rtx target
)
2961 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2964 arg0
= CALL_EXPR_ARG (exp
, 0);
2965 arg1
= CALL_EXPR_ARG (exp
, 1);
2966 mode
= TYPE_MODE (TREE_TYPE (exp
));
2968 /* Emit a libcall to libgcc. */
2970 /* Mode of the 2nd argument must match that of an int. */
2971 mode2
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
2973 if (target
== NULL_RTX
)
2974 target
= gen_reg_rtx (mode
);
2976 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2977 if (GET_MODE (op0
) != mode
)
2978 op0
= convert_to_mode (mode
, op0
, 0);
2979 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2980 if (GET_MODE (op1
) != mode2
)
2981 op1
= convert_to_mode (mode2
, op1
, 0);
2983 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2984 target
, LCT_CONST
, mode
,
2985 op0
, mode
, op1
, mode2
);
2990 /* Expand expression EXP which is a call to the strlen builtin. Return
2991 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2992 try to get the result in TARGET, if convenient. */
2995 expand_builtin_strlen (tree exp
, rtx target
,
2996 machine_mode target_mode
)
2998 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3001 class expand_operand ops
[4];
3004 tree src
= CALL_EXPR_ARG (exp
, 0);
3006 rtx_insn
*before_strlen
;
3007 machine_mode insn_mode
;
3008 enum insn_code icode
= CODE_FOR_nothing
;
3011 /* If the length can be computed at compile-time, return it. */
3012 len
= c_strlen (src
, 0);
3014 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3016 /* If the length can be computed at compile-time and is constant
3017 integer, but there are side-effects in src, evaluate
3018 src for side-effects, then return len.
3019 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3020 can be optimized into: i++; x = 3; */
3021 len
= c_strlen (src
, 1);
3022 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3024 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3025 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3028 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
3030 /* If SRC is not a pointer type, don't do this operation inline. */
3034 /* Bail out if we can't compute strlen in the right mode. */
3035 FOR_EACH_MODE_FROM (insn_mode
, target_mode
)
3037 icode
= optab_handler (strlen_optab
, insn_mode
);
3038 if (icode
!= CODE_FOR_nothing
)
3041 if (insn_mode
== VOIDmode
)
3044 /* Make a place to hold the source address. We will not expand
3045 the actual source until we are sure that the expansion will
3046 not fail -- there are trees that cannot be expanded twice. */
3047 src_reg
= gen_reg_rtx (Pmode
);
3049 /* Mark the beginning of the strlen sequence so we can emit the
3050 source operand later. */
3051 before_strlen
= get_last_insn ();
3053 create_output_operand (&ops
[0], target
, insn_mode
);
3054 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3055 create_integer_operand (&ops
[2], 0);
3056 create_integer_operand (&ops
[3], align
);
3057 if (!maybe_expand_insn (icode
, 4, ops
))
3060 /* Check to see if the argument was declared attribute nonstring
3061 and if so, issue a warning since at this point it's not known
3062 to be nul-terminated. */
3063 maybe_warn_nonstring_arg (get_callee_fndecl (exp
), exp
);
3065 /* Now that we are assured of success, expand the source. */
3067 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3070 #ifdef POINTERS_EXTEND_UNSIGNED
3071 if (GET_MODE (pat
) != Pmode
)
3072 pat
= convert_to_mode (Pmode
, pat
,
3073 POINTERS_EXTEND_UNSIGNED
);
3075 emit_move_insn (src_reg
, pat
);
3081 emit_insn_after (pat
, before_strlen
);
3083 emit_insn_before (pat
, get_insns ());
3085 /* Return the value in the proper mode for this function. */
3086 if (GET_MODE (ops
[0].value
) == target_mode
)
3087 target
= ops
[0].value
;
3088 else if (target
!= 0)
3089 convert_move (target
, ops
[0].value
, 0);
3091 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3096 /* Expand call EXP to the strnlen built-in, returning the result
3097 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3100 expand_builtin_strnlen (tree exp
, rtx target
, machine_mode target_mode
)
3102 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3105 tree src
= CALL_EXPR_ARG (exp
, 0);
3106 tree bound
= CALL_EXPR_ARG (exp
, 1);
3111 location_t loc
= UNKNOWN_LOCATION
;
3112 if (EXPR_HAS_LOCATION (exp
))
3113 loc
= EXPR_LOCATION (exp
);
3115 tree maxobjsize
= max_object_size ();
3116 tree func
= get_callee_fndecl (exp
);
3118 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3119 so these conversions aren't necessary. */
3120 c_strlen_data lendata
= { };
3121 tree len
= c_strlen (src
, 0, &lendata
, 1);
3123 len
= fold_convert_loc (loc
, TREE_TYPE (bound
), len
);
3125 if (TREE_CODE (bound
) == INTEGER_CST
)
3127 if (!TREE_NO_WARNING (exp
)
3128 && tree_int_cst_lt (maxobjsize
, bound
)
3129 && warning_at (loc
, OPT_Wstringop_overflow_
,
3130 "%K%qD specified bound %E "
3131 "exceeds maximum object size %E",
3132 exp
, func
, bound
, maxobjsize
))
3133 TREE_NO_WARNING (exp
) = true;
3136 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
3138 /* Clear EXACT if LEN may be less than SRC suggests,
3140 strnlen (&a[i], sizeof a)
3141 where the value of i is unknown. Unless i's value is
3142 zero, the call is unsafe because the bound is greater. */
3143 lendata
.decl
= unterminated_array (src
, &len
, &exact
);
3148 if (lendata
.decl
&& (tree_int_cst_lt (len
, bound
) || !exact
))
3151 = expansion_point_location_if_in_system_header (loc
);
3153 if (!TREE_NO_WARNING (exp
)
3154 && warning_at (warnloc
, OPT_Wstringop_overflow_
,
3156 ? G_("%K%qD specified bound %E exceeds the size "
3157 "%E of unterminated array")
3158 : G_("%K%qD specified bound %E may exceed the "
3159 "size of at most %E of unterminated array"),
3160 exp
, func
, bound
, len
))
3162 inform (DECL_SOURCE_LOCATION (lendata
.decl
),
3163 "referenced argument declared here");
3164 TREE_NO_WARNING (exp
) = true;
3172 len
= fold_build2_loc (loc
, MIN_EXPR
, size_type_node
, len
, bound
);
3173 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3176 if (TREE_CODE (bound
) != SSA_NAME
)
3180 enum value_range_kind rng
= get_range_info (bound
, &min
, &max
);
3181 if (rng
!= VR_RANGE
)
3184 if (!TREE_NO_WARNING (exp
)
3185 && wi::ltu_p (wi::to_wide (maxobjsize
, min
.get_precision ()), min
)
3186 && warning_at (loc
, OPT_Wstringop_overflow_
,
3187 "%K%qD specified bound [%wu, %wu] "
3188 "exceeds maximum object size %E",
3189 exp
, func
, min
.to_uhwi (), max
.to_uhwi (), maxobjsize
))
3190 TREE_NO_WARNING (exp
) = true;
3193 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
3195 lendata
.decl
= unterminated_array (src
, &len
, &exact
);
3201 && !TREE_NO_WARNING (exp
)
3202 && (wi::ltu_p (wi::to_wide (len
), min
)
3206 = expansion_point_location_if_in_system_header (loc
);
3208 if (warning_at (warnloc
, OPT_Wstringop_overflow_
,
3210 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3211 "the size %E of unterminated array")
3212 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3213 "the size of at most %E of unterminated array"),
3214 exp
, func
, min
.to_uhwi (), max
.to_uhwi (), len
))
3216 inform (DECL_SOURCE_LOCATION (lendata
.decl
),
3217 "referenced argument declared here");
3218 TREE_NO_WARNING (exp
) = true;
3225 if (wi::gtu_p (min
, wi::to_wide (len
)))
3226 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3228 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, bound
);
3229 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
/* NOTE(review): this extract is fragmented -- source lines are split across
   physical lines and some interior lines (return type, braces) are missing
   from this view, so only comments are added; code is left byte-identical.
   Purpose (from the visible header): store_by_pieces callback that reads a
   MODE-sized chunk at DATA + OFFSET and returns it as a target constant via
   c_readstr.  */
3232 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3233 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3234 a target constant. */
3237 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3238 scalar_int_mode mode
)
3240 /* The REPresentation pointed to by DATA need not be a nul-terminated
3241 string but the caller guarantees it's large enough for MODE. */
3242 const char *rep
= (const char *) data
;
3244 return c_readstr (rep
+ offset
, mode
, /*nul_terminated=*/false);
/* NOTE(review): fragmented extract (interior lines/braces missing); comments
   only, code byte-identical.  Computes MIN_SIZE/MAX_SIZE/PROBABLE_MAX_SIZE
   for a memcpy/memset length: from the constant LEN_RTX when available,
   otherwise from LEN's type bounds refined by SSA range info (VR_RANGE
   tightens both ends; a 0..N anti-range raises the minimum to N+1; a large
   anti-range is presumably a signed length -- treated as "probably
   non-negative" when bounding PROBABLE_MAX_SIZE -- TODO confirm against the
   missing lines).  */
3247 /* LEN specify length of the block of memcpy/memset operation.
3248 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3249 In some cases we can make very likely guess on max size, then we
3250 set it into PROBABLE_MAX_SIZE. */
3253 determine_block_size (tree len
, rtx len_rtx
,
3254 unsigned HOST_WIDE_INT
*min_size
,
3255 unsigned HOST_WIDE_INT
*max_size
,
3256 unsigned HOST_WIDE_INT
*probable_max_size
)
3258 if (CONST_INT_P (len_rtx
))
3260 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3266 enum value_range_kind range_type
= VR_UNDEFINED
;
3268 /* Determine bounds from the type. */
3269 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3270 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3273 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3274 *probable_max_size
= *max_size
3275 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3277 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3279 if (TREE_CODE (len
) == SSA_NAME
)
3280 range_type
= get_range_info (len
, &min
, &max
);
3281 if (range_type
== VR_RANGE
)
3283 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3284 *min_size
= min
.to_uhwi ();
3285 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3286 *probable_max_size
= *max_size
= max
.to_uhwi ();
3288 else if (range_type
== VR_ANTI_RANGE
)
3290 /* Anti range 0...N lets us to determine minimal size to N+1. */
3293 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3294 *min_size
= max
.to_uhwi () + 1;
3302 Produce anti range allowing negative values of N. We still
3303 can use the information and make a guess that N is not negative.
3305 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3306 *probable_max_size
= min
.to_uhwi () - 1;
3309 gcc_checking_assert (*max_size
<=
3310 (unsigned HOST_WIDE_INT
)
3311 GET_MODE_MASK (GET_MODE (len_rtx
)));
/* NOTE(review): fragmented extract; apart from the two diagnostic-string
   typo fixes below ("epecting" -> "expecting", "expectting" -> "expecting"),
   the code is kept byte-identical.  Issues an -Wstringop-overflow-style
   warning for an access of RANGE bytes against a region of SLEN bytes,
   choosing the reading/expecting message family, singular vs. plural via
   warning_n, and suppressing an invalid (negative) upper bound.  */
3314 /* For an expression EXP issue an access warning controlled by option OPT
3315 with access to a region SLEN bytes in size in the RANGE of sizes. */
3318 warn_for_access (location_t loc
, tree func
, tree exp
, int opt
, tree range
[2],
3319 tree slen
, bool access
)
3321 bool warned
= false;
3325 if (tree_int_cst_equal (range
[0], range
[1]))
3327 ? warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3328 "%K%qD reading %E byte from a region of size %E",
3329 "%K%qD reading %E bytes from a region of size %E",
3330 exp
, func
, range
[0], slen
)
3331 : warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3332 "%Kreading %E byte from a region of size %E",
3333 "%Kreading %E bytes from a region of size %E",
3334 exp
, range
[0], slen
));
3335 else if (tree_int_cst_sign_bit (range
[1]))
3337 /* Avoid printing the upper bound if it's invalid. */
3339 ? warning_at (loc
, opt
,
3340 "%K%qD reading %E or more bytes from a region "
3342 exp
, func
, range
[0], slen
)
3343 : warning_at (loc
, opt
,
3344 "%Kreading %E or more bytes from a region "
3346 exp
, range
[0], slen
));
3350 ? warning_at (loc
, opt
,
3351 "%K%qD reading between %E and %E bytes from "
3352 "a region of size %E",
3353 exp
, func
, range
[0], range
[1], slen
)
3354 : warning_at (loc
, opt
,
3355 "%Kreading between %E and %E bytes from "
3356 "a region of size %E",
3357 exp
, range
[0], range
[1], slen
));
3362 if (tree_int_cst_equal (range
[0], range
[1]))
3364 ? warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3365 "%K%qD expecting %E byte in a region of size %E",
3366 "%K%qD expecting %E bytes in a region of size %E",
3367 exp
, func
, range
[0], slen
)
3368 : warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3369 "%Kexpecting %E byte in a region of size %E",
3370 "%Kexpecting %E bytes in a region of size %E",
3371 exp
, range
[0], slen
));
3372 else if (tree_int_cst_sign_bit (range
[1]))
3374 /* Avoid printing the upper bound if it's invalid. */
3376 ? warning_at (loc
, opt
,
3377 "%K%qD expecting %E or more bytes in a region "
3379 exp
, func
, range
[0], slen
)
3380 : warning_at (loc
, opt
,
3381 "%Kexpecting %E or more bytes in a region "
3383 exp
, range
[0], slen
));
3387 ? warning_at (loc
, opt
,
3388 "%K%qD expecting between %E and %E bytes in "
3389 "a region of size %E",
3390 exp
, func
, range
[0], range
[1], slen
)
3391 : warning_at (loc
, opt
,
3392 "%Kexpecting between %E and %E bytes in "
3393 "a region of size %E",
3394 exp
, range
[0], range
[1], slen
));
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Emits follow-up "inform" notes naming the object a warned-about access
   touches: a DECL (named object) or an SSA_NAME defined by an allocation
   call, with the offset range and (for allocations) a formatted size range,
   mirrored for destination (WRITE) vs. source messages.  */
3398 /* Issue an inform message describing the target of an access REF.
3399 WRITE is set for a write access and clear for a read access. */
3402 inform_access (const access_ref
&ref
, bool write
)
3407 /* Convert offset range and avoid including a zero range since it isn't
3408 necessarily meaningful. */
3409 long long minoff
= 0, maxoff
= 0;
3410 if (wi::fits_shwi_p (ref
.offrng
[0])
3411 && wi::fits_shwi_p (ref
.offrng
[1]))
3413 minoff
= ref
.offrng
[0].to_shwi ();
3414 maxoff
= ref
.offrng
[1].to_shwi ();
3417 /* Convert size range and always include it since all sizes are
3419 unsigned long long minsize
= 0, maxsize
= 0;
3420 if (wi::fits_shwi_p (ref
.sizrng
[0])
3421 && wi::fits_shwi_p (ref
.sizrng
[1]))
3423 minsize
= ref
.sizrng
[0].to_shwi ();
3424 maxsize
= ref
.sizrng
[1].to_shwi ();
3429 tree allocfn
= NULL_TREE
;
3430 if (TREE_CODE (ref
.ref
) == SSA_NAME
)
3432 gimple
*stmt
= SSA_NAME_DEF_STMT (ref
.ref
);
3433 gcc_assert (is_gimple_call (stmt
));
3434 loc
= gimple_location (stmt
);
3435 allocfn
= gimple_call_fndecl (stmt
);
3437 /* Handle calls through pointers to functions. */
3438 allocfn
= gimple_call_fn (stmt
);
3440 /* SIZRNG doesn't necessarily have the same range as the allocation
3441 size determined by gimple_call_alloc_size (). */
3443 if (minsize
== maxsize
)
3444 sprintf (sizestr
, "%llu", minsize
);
3446 sprintf (sizestr
, "[%llu, %llu]", minsize
, maxsize
);
3450 loc
= DECL_SOURCE_LOCATION (ref
.ref
);
3454 if (DECL_P (ref
.ref
))
3456 if (minoff
== maxoff
)
3459 inform (loc
, "destination object %qD", ref
.ref
);
3461 inform (loc
, "at offset %lli into destination object %qD",
3465 inform (loc
, "at offset [%lli, %lli] into destination object %qD",
3466 minoff
, maxoff
, ref
.ref
);
3470 if (minoff
== maxoff
)
3473 inform (loc
, "destination object of size %s allocated by %qE",
3477 "at offset %lli into destination object of size %s "
3478 "allocated by %qE", minoff
, sizestr
, allocfn
);
3482 "at offset [%lli, %lli] into destination object of size %s "
3484 minoff
, maxoff
, sizestr
, allocfn
);
3489 if (DECL_P (ref
.ref
))
3491 if (minoff
== maxoff
)
3494 inform (loc
, "source object %qD", ref
.ref
);
3496 inform (loc
, "at offset %lli into source object %qD",
3500 inform (loc
, "at offset [%lli, %lli] into source object %qD",
3501 minoff
, maxoff
, ref
.ref
);
3505 if (minoff
== maxoff
)
3508 inform (loc
, "source object of size %s allocated by %qE",
3512 "at offset %lli into source object of size %s "
3513 "allocated by %qE", minoff
, sizestr
, allocfn
);
3517 "at offset [%lli, %lli] into source object of size %s "
3519 minoff
, maxoff
, sizestr
, allocfn
);
/* NOTE(review): heavily fragmented extract of a long function; the control
   flow is too dependent on the missing lines to restyle safely, so only
   comments are added and the code is kept byte-identical.  The visible logic
   performs, in order: (1) source-length range computation for string calls,
   (2) DSTWRITE vs. maximum object size check, (3) DSTWRITE vs. destination
   size check with overflow warnings and inform_access notes, (4) MAXREAD vs.
   maximum object size and destination size checks, (5) read-past-end-of-SRC
   check.  TREE_NO_WARNING (exp) is used throughout both to suppress repeat
   diagnostics and to record that one was issued.  */
3522 /* Try to verify that the sizes and lengths of the arguments to a string
3523 manipulation function given by EXP are within valid bounds and that
3524 the operation does not lead to buffer overflow or read past the end.
3525 Arguments other than EXP may be null. When non-null, the arguments
3526 have the following meaning:
3527 DST is the destination of a copy call or NULL otherwise.
3528 SRC is the source of a copy call or NULL otherwise.
3529 DSTWRITE is the number of bytes written into the destination obtained
3530 from the user-supplied size argument to the function (such as in
3531 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3532 MAXREAD is the user-supplied bound on the length of the source sequence
3533 (such as in strncat(d, s, N). It specifies the upper limit on the number
3534 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3535 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3536 expression EXP is a string function call (as opposed to a memory call
3537 like memcpy). As an exception, SRCSTR can also be an integer denoting
3538 the precomputed size of the source string or object (for functions like
3540 DSTSIZE is the size of the destination object specified by the last
3541 argument to the _chk builtins, typically resulting from the expansion
3542 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3545 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3548 ACCESS is true for accesses, false for simple size checks in calls
3549 to functions that neither read from nor write to the region.
3551 When nonnull, PAD points to a more detailed description of the access.
3553 If the call is successfully verified as safe return true, otherwise
3557 check_access (tree exp
, tree
, tree
, tree dstwrite
,
3558 tree maxread
, tree srcstr
, tree dstsize
,
3559 bool access
/* = true */,
3560 const access_data
*pad
/* = NULL */)
3562 int opt
= OPT_Wstringop_overflow_
;
3564 /* The size of the largest object is half the address space, or
3565 PTRDIFF_MAX. (This is way too permissive.) */
3566 tree maxobjsize
= max_object_size ();
3568 /* Either the length of the source string for string functions or
3569 the size of the source object for raw memory functions. */
3570 tree slen
= NULL_TREE
;
3572 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3574 /* Set to true when the exact number of bytes written by a string
3575 function like strcpy is not known and the only thing that is
3576 known is that it must be at least one (for the terminating nul). */
3577 bool at_least_one
= false;
3580 /* SRCSTR is normally a pointer to string but as a special case
3581 it can be an integer denoting the length of a string. */
3582 if (POINTER_TYPE_P (TREE_TYPE (srcstr
)))
3584 /* Try to determine the range of lengths the source string
3585 refers to. If it can be determined and is less than
3586 the upper bound given by MAXREAD add one to it for
3587 the terminating nul. Otherwise, set it to one for
3588 the same reason, or to MAXREAD as appropriate. */
3589 c_strlen_data lendata
= { };
3590 get_range_strlen (srcstr
, &lendata
, /* eltsize = */ 1);
3591 range
[0] = lendata
.minlen
;
3592 range
[1] = lendata
.maxbound
? lendata
.maxbound
: lendata
.maxlen
;
3593 if (range
[0] && (!maxread
|| TREE_CODE (maxread
) == INTEGER_CST
))
3595 if (maxread
&& tree_int_cst_le (maxread
, range
[0]))
3596 range
[0] = range
[1] = maxread
;
3598 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3599 range
[0], size_one_node
);
3601 if (maxread
&& tree_int_cst_le (maxread
, range
[1]))
3603 else if (!integer_all_onesp (range
[1]))
3604 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3605 range
[1], size_one_node
);
3611 at_least_one
= true;
3612 slen
= size_one_node
;
3619 if (!dstwrite
&& !maxread
)
3621 /* When the only available piece of data is the object size
3622 there is nothing to do. */
3626 /* Otherwise, when the length of the source sequence is known
3627 (as with strlen), set DSTWRITE to it. */
3633 dstsize
= maxobjsize
;
3636 get_size_range (dstwrite
, range
);
3638 tree func
= get_callee_fndecl (exp
);
3640 /* First check the number of bytes to be written against the maximum
3643 && TREE_CODE (range
[0]) == INTEGER_CST
3644 && tree_int_cst_lt (maxobjsize
, range
[0]))
3646 if (TREE_NO_WARNING (exp
))
3649 location_t loc
= tree_nonartificial_location (exp
);
3650 loc
= expansion_point_location_if_in_system_header (loc
);
3653 if (range
[0] == range
[1])
3655 ? warning_at (loc
, opt
,
3656 "%K%qD specified size %E "
3657 "exceeds maximum object size %E",
3658 exp
, func
, range
[0], maxobjsize
)
3659 : warning_at (loc
, opt
,
3660 "%Kspecified size %E "
3661 "exceeds maximum object size %E",
3662 exp
, range
[0], maxobjsize
));
3665 ? warning_at (loc
, opt
,
3666 "%K%qD specified size between %E and %E "
3667 "exceeds maximum object size %E",
3669 range
[0], range
[1], maxobjsize
)
3670 : warning_at (loc
, opt
,
3671 "%Kspecified size between %E and %E "
3672 "exceeds maximum object size %E",
3673 exp
, range
[0], range
[1], maxobjsize
));
3675 TREE_NO_WARNING (exp
) = true;
3680 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3681 constant, and in range of unsigned HOST_WIDE_INT. */
3682 bool exactwrite
= dstwrite
&& tree_fits_uhwi_p (dstwrite
);
3684 /* Next check the number of bytes to be written against the destination
3686 if (range
[0] || !exactwrite
|| integer_all_onesp (dstwrite
))
3689 && TREE_CODE (range
[0]) == INTEGER_CST
3690 && ((tree_fits_uhwi_p (dstsize
)
3691 && tree_int_cst_lt (dstsize
, range
[0]))
3693 && tree_fits_uhwi_p (dstwrite
)
3694 && tree_int_cst_lt (dstwrite
, range
[0]))))
3696 if (TREE_NO_WARNING (exp
))
3699 location_t loc
= tree_nonartificial_location (exp
);
3700 loc
= expansion_point_location_if_in_system_header (loc
);
3702 bool warned
= false;
3703 if (dstwrite
== slen
&& at_least_one
)
3705 /* This is a call to strcpy with a destination of 0 size
3706 and a source of unknown length. The call will write
3707 at least one byte past the end of the destination. */
3709 ? warning_at (loc
, opt
,
3710 "%K%qD writing %E or more bytes into "
3711 "a region of size %E overflows "
3713 exp
, func
, range
[0], dstsize
)
3714 : warning_at (loc
, opt
,
3715 "%Kwriting %E or more bytes into "
3716 "a region of size %E overflows "
3718 exp
, range
[0], dstsize
));
3720 else if (tree_int_cst_equal (range
[0], range
[1]))
3722 ? warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3723 "%K%qD writing %E byte into a region "
3724 "of size %E overflows the destination",
3725 "%K%qD writing %E bytes into a region "
3726 "of size %E overflows the destination",
3727 exp
, func
, range
[0], dstsize
)
3728 : warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3729 "%Kwriting %E byte into a region "
3730 "of size %E overflows the destination",
3731 "%Kwriting %E bytes into a region "
3732 "of size %E overflows the destination",
3733 exp
, range
[0], dstsize
));
3734 else if (tree_int_cst_sign_bit (range
[1]))
3736 /* Avoid printing the upper bound if it's invalid. */
3738 ? warning_at (loc
, opt
,
3739 "%K%qD writing %E or more bytes into "
3740 "a region of size %E overflows "
3742 exp
, func
, range
[0], dstsize
)
3743 : warning_at (loc
, opt
,
3744 "%Kwriting %E or more bytes into "
3745 "a region of size %E overflows "
3747 exp
, range
[0], dstsize
));
3751 ? warning_at (loc
, opt
,
3752 "%K%qD writing between %E and %E bytes "
3753 "into a region of size %E overflows "
3755 exp
, func
, range
[0], range
[1],
3757 : warning_at (loc
, opt
,
3758 "%Kwriting between %E and %E bytes "
3759 "into a region of size %E overflows "
3761 exp
, range
[0], range
[1],
3765 TREE_NO_WARNING (exp
) = true;
3767 inform_access (pad
->dst
, true);
3770 /* Return error when an overflow has been detected. */
3775 /* Check the maximum length of the source sequence against the size
3776 of the destination object if known, or against the maximum size
3780 get_size_range (maxread
, range
);
3781 if (range
[0] && dstsize
&& tree_fits_uhwi_p (dstsize
))
3783 location_t loc
= tree_nonartificial_location (exp
);
3784 loc
= expansion_point_location_if_in_system_header (loc
);
3786 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3788 if (TREE_NO_WARNING (exp
))
3791 bool warned
= false;
3793 /* Warn about crazy big sizes first since that's more
3794 likely to be meaningful than saying that the bound
3795 is greater than the object size if both are big. */
3796 if (range
[0] == range
[1])
3798 ? warning_at (loc
, opt
,
3799 "%K%qD specified bound %E "
3800 "exceeds maximum object size %E",
3801 exp
, func
, range
[0], maxobjsize
)
3802 : warning_at (loc
, opt
,
3803 "%Kspecified bound %E "
3804 "exceeds maximum object size %E",
3805 exp
, range
[0], maxobjsize
));
3808 ? warning_at (loc
, opt
,
3809 "%K%qD specified bound between "
3810 "%E and %E exceeds maximum object "
3813 range
[0], range
[1], maxobjsize
)
3814 : warning_at (loc
, opt
,
3815 "%Kspecified bound between "
3816 "%E and %E exceeds maximum object "
3818 exp
, range
[0], range
[1], maxobjsize
));
3820 TREE_NO_WARNING (exp
) = true;
3825 if (dstsize
!= maxobjsize
&& tree_int_cst_lt (dstsize
, range
[0]))
3827 if (TREE_NO_WARNING (exp
))
3830 bool warned
= false;
3832 if (tree_int_cst_equal (range
[0], range
[1]))
3834 ? warning_at (loc
, opt
,
3835 "%K%qD specified bound %E "
3836 "exceeds destination size %E",
3839 : warning_at (loc
, opt
,
3840 "%Kspecified bound %E "
3841 "exceeds destination size %E",
3842 exp
, range
[0], dstsize
));
3845 ? warning_at (loc
, opt
,
3846 "%K%qD specified bound between %E "
3847 "and %E exceeds destination size %E",
3849 range
[0], range
[1], dstsize
)
3850 : warning_at (loc
, opt
,
3851 "%Kspecified bound between %E "
3852 "and %E exceeds destination size %E",
3854 range
[0], range
[1], dstsize
));
3856 TREE_NO_WARNING (exp
) = true;
3863 /* Check for reading past the end of SRC. */
3866 && dstwrite
&& range
[0]
3867 && tree_int_cst_lt (slen
, range
[0]))
3869 if (TREE_NO_WARNING (exp
))
3872 location_t loc
= tree_nonartificial_location (exp
);
3873 loc
= expansion_point_location_if_in_system_header (loc
);
3875 if (warn_for_access (loc
, func
, exp
, opt
, range
, slen
, access
))
3877 TREE_NO_WARNING (exp
) = true;
3879 inform_access (pad
->src
, false);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Determines the allocated size of a call STMT via the alloc_size attribute
   (or alloca-with-align as a special case): reads up to two 1-based argument
   indexes from the attribute, gets ranges of the size argument(s) via
   get_range, and for the two-argument (calloc-like) form multiplies the
   bounds in ADDR_MAX_PRECISION wide_int arithmetic, clamping the product's
   upper bound to SIZE_MAX before returning it as a sizetype constant.  */
3887 /* If STMT is a call to an allocation function, returns the constant
3888 size of the object allocated by the call represented as sizetype.
3889 If nonnull, sets RNG1[] to the range of the size. */
3892 gimple_call_alloc_size (gimple
*stmt
, wide_int rng1
[2] /* = NULL */,
3893 const vr_values
*rvals
/* = NULL */)
3899 if (tree fndecl
= gimple_call_fndecl (stmt
))
3900 allocfntype
= TREE_TYPE (fndecl
);
3902 allocfntype
= gimple_call_fntype (stmt
);
3907 unsigned argidx1
= UINT_MAX
, argidx2
= UINT_MAX
;
3908 tree at
= lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype
));
3911 if (!gimple_call_builtin_p (stmt
, BUILT_IN_ALLOCA_WITH_ALIGN
))
3917 unsigned nargs
= gimple_call_num_args (stmt
);
3919 if (argidx1
== UINT_MAX
)
3921 tree atval
= TREE_VALUE (at
);
3925 argidx1
= TREE_INT_CST_LOW (TREE_VALUE (atval
)) - 1;
3926 if (nargs
<= argidx1
)
3929 atval
= TREE_CHAIN (atval
);
3932 argidx2
= TREE_INT_CST_LOW (TREE_VALUE (atval
)) - 1;
3933 if (nargs
<= argidx2
)
3938 tree size
= gimple_call_arg (stmt
, argidx1
);
3940 wide_int rng1_buf
[2];
3941 /* If RNG1 is not set, use the buffer. */
3945 if (!get_range (size
, rng1
, rvals
))
3948 if (argidx2
> nargs
&& TREE_CODE (size
) == INTEGER_CST
)
3949 return fold_convert (sizetype
, size
);
3951 /* To handle ranges do the math in wide_int and return the product
3952 of the upper bounds as a constant. Ignore anti-ranges. */
3953 tree n
= argidx2
< nargs
? gimple_call_arg (stmt
, argidx2
) : integer_one_node
;
3955 if (!get_range (n
, rng2
, rvals
))
3958 /* Extend to the maximum precision to avoid overflow. */
3959 const int prec
= ADDR_MAX_PRECISION
;
3960 rng1
[0] = wide_int::from (rng1
[0], prec
, UNSIGNED
);
3961 rng1
[1] = wide_int::from (rng1
[1], prec
, UNSIGNED
);
3962 rng2
[0] = wide_int::from (rng2
[0], prec
, UNSIGNED
);
3963 rng2
[1] = wide_int::from (rng2
[1], prec
, UNSIGNED
);
3965 /* Compute products of both bounds for the caller but return the lesser
3966 of SIZE_MAX and the product of the upper bounds as a constant. */
3967 rng1
[0] = rng1
[0] * rng2
[0];
3968 rng1
[1] = rng1
[1] * rng2
[1];
3969 tree size_max
= TYPE_MAX_VALUE (sizetype
);
3970 if (wi::gtu_p (rng1
[1], wi::to_wide (size_max
, prec
)))
3972 rng1
[1] = wi::to_wide (size_max
);
3976 return wide_int_to_tree (sizetype
, rng1
[1]);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Thin adapter: calls the wide_int get_range overload and converts both
   bounds to offset_int with the requested signedness SGN.  */
3979 /* Wrapper around the wide_int overload of get_range. Returns the same
3980 result but accepts offset_int instead. */
3983 get_range (tree x
, signop sgn
, offset_int r
[2],
3984 const vr_values
*rvals
/* = NULL */)
3987 if (!get_range (x
, wr
, rvals
))
3990 r
[0] = offset_int::from (wr
[0], sgn
);
3991 r
[1] = offset_int::from (wr
[1], sgn
);
/* NOTE(review): fragmented extract of the recursive compute_objsize helper;
   the code is kept byte-identical (only comment typos fixed: "Hande" ->
   "Handle", "Execpt" -> "Except", "the the" -> "the").  Visible structure:
   handles ADDR_EXPR/DECL, COMPONENT_REF (adds member byte_position to the
   offset range), ARRAY_REF/MEM_REF (recurses into the base and folds the
   index range scaled by the element size into offset/size ranges), SSA_NAME
   defined by an allocation call or a POINTER_PLUS_EXPR/ADDR_EXPR assignment
   (recursing on the RHS), and finally fixed-size array types not at the end
   of a struct.  Diagnostics-only by design per the header comment.  */
3995 /* Helper to compute the size of the object referenced by the PTR
3996 expression which must have pointer type, using Object Size type
3997 OSTYPE (only the least significant 2 bits are used).
3998 On success, sets PREF->REF to the DECL of the referenced object
3999 if it's unique, otherwise to null, PREF->OFFRNG to the range of
4000 offsets into it, and PREF->SIZRNG to the range of sizes of
4002 VISITED is used to avoid visiting the same PHI operand multiple
4003 times, and, when nonnull, RVALS to determine range information.
4004 Returns true on success, false when the size cannot be determined.
4006 The function is intended for diagnostics and should not be used
4007 to influence code generation or optimization. */
4010 compute_objsize (tree ptr
, int ostype
, access_ref
*pref
,
4011 bitmap
*visited
, const vr_values
*rvals
/* = NULL */)
4013 const bool addr
= TREE_CODE (ptr
) == ADDR_EXPR
;
4015 ptr
= TREE_OPERAND (ptr
, 0);
4019 /* Bail if the reference is to the pointer itself (as opposed
4020 to what it points to). */
4021 if (!addr
&& POINTER_TYPE_P (TREE_TYPE (ptr
)))
4024 tree size
= decl_init_size (ptr
, false);
4025 if (!size
|| TREE_CODE (size
) != INTEGER_CST
)
4029 pref
->sizrng
[0] = pref
->sizrng
[1] = wi::to_offset (size
);
4033 const tree_code code
= TREE_CODE (ptr
);
4035 if (code
== COMPONENT_REF
)
4037 tree field
= TREE_OPERAND (ptr
, 1);
4041 /* For raw memory functions like memcpy bail if the size
4042 of the enclosing object cannot be determined. */
4043 tree ref
= TREE_OPERAND (ptr
, 0);
4044 if (!compute_objsize (ref
, ostype
, pref
, visited
, rvals
)
4048 /* Otherwise, use the size of the enclosing object and add
4049 the offset of the member to the offset computed so far. */
4050 tree offset
= byte_position (field
);
4051 if (TREE_CODE (offset
) != INTEGER_CST
)
4053 offset_int off
= wi::to_offset (offset
);
4054 pref
->offrng
[0] += off
;
4055 pref
->offrng
[1] += off
;
4059 /* Bail if the reference is to the pointer itself (as opposed
4060 to what it points to). */
4061 if (!addr
&& POINTER_TYPE_P (TREE_TYPE (field
)))
4065 /* Only return constant sizes for now while callers depend
4066 on it. INT0LEN is true for interior zero-length arrays. */
4067 bool int0len
= false;
4068 tree size
= component_ref_size (ptr
, &int0len
);
4071 pref
->sizrng
[0] = pref
->sizrng
[1] = 0;
4075 if (!size
|| TREE_CODE (size
) != INTEGER_CST
)
4078 pref
->sizrng
[0] = pref
->sizrng
[1] = wi::to_offset (size
);
4082 if (code
== ARRAY_REF
|| code
== MEM_REF
)
4084 tree ref
= TREE_OPERAND (ptr
, 0);
4085 tree reftype
= TREE_TYPE (ref
);
4086 if (code
== ARRAY_REF
4087 && TREE_CODE (TREE_TYPE (reftype
)) == POINTER_TYPE
)
4088 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4092 if (code
== MEM_REF
&& TREE_CODE (reftype
) == POINTER_TYPE
)
4094 /* Give up for MEM_REFs of vector types; those may be synthesized
4095 from multiple assignments to consecutive data members. See PR
4097 FIXME: Deal with this more generally, e.g., by marking up such
4098 MEM_REFs at the time they're created. */
4099 reftype
= TREE_TYPE (reftype
);
4100 if (TREE_CODE (reftype
) == VECTOR_TYPE
)
4104 if (!compute_objsize (ref
, ostype
, pref
, visited
, rvals
))
4108 tree off
= TREE_OPERAND (ptr
, 1);
4109 if (!get_range (off
, SIGNED
, orng
, rvals
))
4110 /* Fail unless the size of the object is zero. */
4111 return pref
->sizrng
[0] == 0 && pref
->sizrng
[0] == pref
->sizrng
[1];
4113 if (TREE_CODE (ptr
) == ARRAY_REF
)
4115 /* Convert the array index range determined above to a byte
4117 tree lowbnd
= array_ref_low_bound (ptr
);
4118 if (!integer_zerop (lowbnd
) && tree_fits_uhwi_p (lowbnd
))
4120 /* Adjust the index by the low bound of the array domain
4121 (normally zero but 1 in Fortran). */
4122 unsigned HOST_WIDE_INT lb
= tree_to_uhwi (lowbnd
);
4127 tree eltype
= TREE_TYPE (ptr
);
4128 tree tpsize
= TYPE_SIZE_UNIT (eltype
);
4129 if (!tpsize
|| TREE_CODE (tpsize
) != INTEGER_CST
)
4132 offset_int sz
= wi::to_offset (tpsize
);
4136 if (ostype
&& TREE_CODE (eltype
) == ARRAY_TYPE
)
4138 /* Except for the permissive raw memory functions which
4139 use the size of the whole object determined above,
4140 use the size of the referenced array. */
4141 pref
->sizrng
[0] = pref
->offrng
[0] + orng
[0] + sz
;
4142 pref
->sizrng
[1] = pref
->offrng
[1] + orng
[1] + sz
;
4146 pref
->offrng
[0] += orng
[0];
4147 pref
->offrng
[1] += orng
[1];
4152 if (TREE_CODE (ptr
) == SSA_NAME
)
4154 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
4155 if (is_gimple_call (stmt
))
4157 /* If STMT is a call to an allocation function get the size
4158 from its argument(s). If successful, also set *PDECL to
4159 PTR for the caller to include in diagnostics. */
4161 if (gimple_call_alloc_size (stmt
, wr
, rvals
))
4164 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
4165 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
4171 /* TODO: Handle PHI. */
4173 if (!is_gimple_assign (stmt
))
4176 ptr
= gimple_assign_rhs1 (stmt
);
4178 tree_code code
= gimple_assign_rhs_code (stmt
);
4179 if (TREE_CODE (TREE_TYPE (ptr
)) != POINTER_TYPE
)
4180 /* Avoid conversions from non-pointers. */
4183 if (code
== POINTER_PLUS_EXPR
)
4185 /* If the offset in the expression can be determined use
4186 it to adjust the overall offset. Otherwise, set the overall
4187 offset to the maximum. */
4189 tree off
= gimple_assign_rhs2 (stmt
);
4190 if (!get_range (off
, SIGNED
, orng
, rvals
)
4191 || !wi::les_p (orng
[0], orng
[1]))
4193 orng
[0] = wi::to_offset (TYPE_MIN_VALUE (ptrdiff_type_node
));
4194 orng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
4197 pref
->offrng
[0] += orng
[0];
4198 pref
->offrng
[1] += orng
[1];
4200 else if (code
!= ADDR_EXPR
)
4203 return compute_objsize (ptr
, ostype
, pref
, visited
, rvals
);
4206 tree type
= TREE_TYPE (ptr
);
4207 type
= TYPE_MAIN_VARIANT (type
);
4208 if (TREE_CODE (ptr
) == ADDR_EXPR
)
4209 ptr
= TREE_OPERAND (ptr
, 0);
4211 if (TREE_CODE (type
) == ARRAY_TYPE
4212 && !array_at_struct_end_p (ptr
))
4214 if (tree size
= TYPE_SIZE_UNIT (type
))
4215 return get_range (size
, UNSIGNED
, pref
->sizrng
, rvals
);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Wrapper that owns the VISITED bitmap for the recursive helper, frees it,
   clamps a partially-negative offset range to zero, returns size_zero_node
   for wholly-negative offsets or offsets past the size, and otherwise
   returns sizrng[1] - offrng[0] as a sizetype constant.  */
4221 /* Convenience wrapper around the above. */
4224 compute_objsize (tree ptr
, int ostype
, access_ref
*pref
,
4225 const vr_values
*rvals
= NULL
)
4227 bitmap visited
= NULL
;
4230 = compute_objsize (ptr
, ostype
, pref
, &visited
, rvals
);
4233 BITMAP_FREE (visited
);
4238 if (pref
->offrng
[0] < 0)
4240 if (pref
->offrng
[1] < 0)
4241 return size_zero_node
;
4243 pref
->offrng
[0] = 0;
4246 if (pref
->sizrng
[1] < pref
->offrng
[0])
4247 return size_zero_node
;
4249 return wide_int_to_tree (sizetype
, pref
->sizrng
[1] - pref
->offrng
[0]);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Legacy-signature overload kept for callers that still pass PDECL/POFF;
   delegates to the access_ref overload and converts the resulting offset
   (the negative bound when one exists, per the offrng[0] < 0 index trick)
   to a ptrdiff_type_node constant for *POFF.  */
4252 /* Transitional wrapper around the above. The function should be removed
4253 once callers transition to one of the two above. */
4256 compute_objsize (tree ptr
, int ostype
, tree
*pdecl
/* = NULL */,
4257 tree
*poff
/* = NULL */, const vr_values
*rvals
/* = NULL */)
4259 /* Set the initial offsets to zero and size to negative to indicate
4260 none has been computed yet. */
4262 tree size
= compute_objsize (ptr
, ostype
, &ref
, rvals
);
4270 *poff
= wide_int_to_tree (ptrdiff_type_node
, ref
.offrng
[ref
.offrng
[0] < 0]);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Computes type-0 object sizes for SRC (when non-null) and DEST and
   forwards everything to check_access with MAXREAD = NULL_TREE.  */
4275 /* Helper to determine and check the sizes of the source and the destination
4276 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
4277 call expression, DEST is the destination argument, SRC is the source
4278 argument or null, and LEN is the number of bytes. Use Object Size type-0
4279 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4280 (no overflow or invalid sizes), false otherwise. */
4283 check_memop_access (tree exp
, tree dest
, tree src
, tree size
)
4285 /* For functions like memset and memcpy that operate on raw memory
4286 try to determine the size of the largest source and destination
4287 object using type-0 Object Size regardless of the object size
4288 type specified by the option. */
4290 tree srcsize
= src
? compute_objsize (src
, 0, &data
.src
) : NULL_TREE
;
4291 tree dstsize
= compute_objsize (dest
, 0, &data
.dst
);
4293 return check_access (exp
, dest
, src
, size
, /*maxread=*/NULL_TREE
,
4294 srcsize
, dstsize
, true, &data
);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Diagnostic-only expander: validates the memchr arglist and, when
   -Wstringop-overflow is enabled, warns if the length argument exceeds the
   size of the searched object; never produces RTL itself (presumably
   returns NULL_RTX so a normal call is emitted -- the return lines are
   missing from this extract, TODO confirm).  */
4297 /* Validate memchr arguments without performing any expansion.
4301 expand_builtin_memchr (tree exp
, rtx
)
4303 if (!validate_arglist (exp
,
4304 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4307 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4308 tree len
= CALL_EXPR_ARG (exp
, 2);
4310 /* Diagnose calls where the specified length exceeds the size
4312 if (warn_stringop_overflow
)
4315 tree size
= compute_objsize (arg1
, 0, &data
.src
);
4316 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
4317 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
,
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Validates the arglist, runs the overflow diagnostics via
   check_memop_access, then expands as a RETURN_BEGIN copy (result is the
   destination pointer; final argument false distinguishes it from the
   memmove expansion below, presumably "might_overlap" -- TODO confirm).  */
4324 /* Expand a call EXP to the memcpy builtin.
4325 Return NULL_RTX if we failed, the caller should emit a normal call,
4326 otherwise try to get the result in TARGET, if convenient (and in
4327 mode MODE if that's convenient). */
4330 expand_builtin_memcpy (tree exp
, rtx target
)
4332 if (!validate_arglist (exp
,
4333 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4336 tree dest
= CALL_EXPR_ARG (exp
, 0);
4337 tree src
= CALL_EXPR_ARG (exp
, 1);
4338 tree len
= CALL_EXPR_ARG (exp
, 2);
4340 check_memop_access (exp
, dest
, src
, len
);
4342 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
4343 /*retmode=*/ RETURN_BEGIN
, false);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Mirrors expand_builtin_memcpy but passes true as the final argument to
   expand_builtin_memory_copy_args (operands may overlap, per memmove
   semantics -- consistent with the false passed for memcpy above).  */
4346 /* Check a call EXP to the memmove built-in for validity.
4347 Return NULL_RTX on both success and failure. */
4350 expand_builtin_memmove (tree exp
, rtx target
)
4352 if (!validate_arglist (exp
,
4353 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4356 tree dest
= CALL_EXPR_ARG (exp
, 0);
4357 tree src
= CALL_EXPR_ARG (exp
, 1);
4358 tree len
= CALL_EXPR_ARG (exp
, 2);
4360 check_memop_access (exp
, dest
, src
, len
);
4362 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
4363 /*retmode=*/ RETURN_BEGIN
, true);
/* NOTE(review): fragmented extract; comments only, code byte-identical.
   Like memcpy expansion but with RETURN_END (result points past the last
   byte written); deliberately skips inline expansion when
   check_memop_access reports an overflow so the diagnostic is not repeated
   when the call is later expanded as memcpy.  */
4366 /* Expand a call EXP to the mempcpy builtin.
4367 Return NULL_RTX if we failed; the caller should emit a normal call,
4368 otherwise try to get the result in TARGET, if convenient (and in
4369 mode MODE if that's convenient). */
4372 expand_builtin_mempcpy (tree exp
, rtx target
)
4374 if (!validate_arglist (exp
,
4375 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4378 tree dest
= CALL_EXPR_ARG (exp
, 0);
4379 tree src
= CALL_EXPR_ARG (exp
, 1);
4380 tree len
= CALL_EXPR_ARG (exp
, 2);
4382 /* Policy does not generally allow using compute_objsize (which
4383 is used internally by check_memop_size) to change code generation
4384 or drive optimization decisions.
4386 In this instance it is safe because the code we generate has
4387 the same semantics regardless of the return value of
4388 check_memop_sizes. Exactly the same amount of data is copied
4389 and the return value is exactly the same in both cases.
4391 Furthermore, check_memop_size always uses mode 0 for the call to
4392 compute_objsize, so the imprecise nature of compute_objsize is
4395 /* Avoid expanding mempcpy into memcpy when the call is determined
4396 to overflow the buffer. This also prevents the same overflow
4397 from being diagnosed again when expanding memcpy. */
4398 if (!check_memop_access (exp
, dest
, src
, len
))
4401 return expand_builtin_mempcpy_args (dest
, src
, len
,
4402 target
, exp
, /*retmode=*/ RETURN_END
);
4405 /* Helper function to do the actual work for expand of memory copy family
4406 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
4407 of memory from SRC to DEST and assign to TARGET if convenient. Return
4408 value is based on RETMODE argument. */
4411 expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
4412 rtx target
, tree exp
, memop_ret retmode
,
4415 unsigned int src_align
= get_pointer_alignment (src
);
4416 unsigned int dest_align
= get_pointer_alignment (dest
);
4417 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
4418 HOST_WIDE_INT expected_size
= -1;
4419 unsigned int expected_align
= 0;
4420 unsigned HOST_WIDE_INT min_size
;
4421 unsigned HOST_WIDE_INT max_size
;
4422 unsigned HOST_WIDE_INT probable_max_size
;
4426 /* If DEST is not a pointer type, call the normal function. */
4427 if (dest_align
== 0)
4430 /* If either SRC is not a pointer type, don't do this
4431 operation in-line. */
4435 if (currently_expanding_gimple_stmt
)
4436 stringop_block_profile (currently_expanding_gimple_stmt
,
4437 &expected_align
, &expected_size
);
4439 if (expected_align
< dest_align
)
4440 expected_align
= dest_align
;
4441 dest_mem
= get_memory_rtx (dest
, len
);
4442 set_mem_align (dest_mem
, dest_align
);
4443 len_rtx
= expand_normal (len
);
4444 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4445 &probable_max_size
);
4447 /* Try to get the byte representation of the constant SRC points to,
4448 with its byte size in NBYTES. */
4449 unsigned HOST_WIDE_INT nbytes
;
4450 const char *rep
= c_getstr (src
, &nbytes
);
4452 /* If the function's constant bound LEN_RTX is less than or equal
4453 to the byte size of the representation of the constant argument,
4454 and if block move would be done by pieces, we can avoid loading
4455 the bytes from memory and only store the computed constant.
4456 This works in the overlap (memmove) case as well because
4457 store_by_pieces just generates a series of stores of constants
4458 from the representation returned by c_getstr(). */
4460 && CONST_INT_P (len_rtx
)
4461 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= nbytes
4462 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
4463 CONST_CAST (char *, rep
),
4466 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
4467 builtin_memcpy_read_str
,
4468 CONST_CAST (char *, rep
),
4469 dest_align
, false, retmode
);
4470 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
4471 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4475 src_mem
= get_memory_rtx (src
, len
);
4476 set_mem_align (src_mem
, src_align
);
4478 /* Copy word part most expediently. */
4479 enum block_op_methods method
= BLOCK_OP_NORMAL
;
4480 if (CALL_EXPR_TAILCALL (exp
)
4481 && (retmode
== RETURN_BEGIN
|| target
== const0_rtx
))
4482 method
= BLOCK_OP_TAILCALL
;
4483 bool use_mempcpy_call
= (targetm
.libc_has_fast_function (BUILT_IN_MEMPCPY
)
4484 && retmode
== RETURN_END
4486 && target
!= const0_rtx
);
4487 if (use_mempcpy_call
)
4488 method
= BLOCK_OP_NO_LIBCALL_RET
;
4489 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
, method
,
4490 expected_align
, expected_size
,
4491 min_size
, max_size
, probable_max_size
,
4492 use_mempcpy_call
, &is_move_done
,
4495 /* Bail out when a mempcpy call would be expanded as libcall and when
4496 we have a target that provides a fast implementation
4497 of mempcpy routine. */
4501 if (dest_addr
== pc_rtx
)
4506 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
4507 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4510 if (retmode
!= RETURN_BEGIN
&& target
!= const0_rtx
)
4512 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
4513 /* stpcpy pointer to last byte. */
4514 if (retmode
== RETURN_END_MINUS_ONE
)
4515 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
4522 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
4523 rtx target
, tree orig_exp
, memop_ret retmode
)
4525 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, orig_exp
,
4529 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4530 we failed, the caller should emit a normal call, otherwise try to
4531 get the result in TARGET, if convenient.
4532 Return value is based on RETMODE argument. */
4535 expand_movstr (tree dest
, tree src
, rtx target
, memop_ret retmode
)
4537 class expand_operand ops
[3];
4541 if (!targetm
.have_movstr ())
4544 dest_mem
= get_memory_rtx (dest
, NULL
);
4545 src_mem
= get_memory_rtx (src
, NULL
);
4546 if (retmode
== RETURN_BEGIN
)
4548 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
4549 dest_mem
= replace_equiv_address (dest_mem
, target
);
4552 create_output_operand (&ops
[0],
4553 retmode
!= RETURN_BEGIN
? target
: NULL_RTX
, Pmode
);
4554 create_fixed_operand (&ops
[1], dest_mem
);
4555 create_fixed_operand (&ops
[2], src_mem
);
4556 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
4559 if (retmode
!= RETURN_BEGIN
&& target
!= const0_rtx
)
4561 target
= ops
[0].value
;
4562 /* movstr is supposed to set end to the address of the NUL
4563 terminator. If the caller requested a mempcpy-like return value,
4565 if (retmode
== RETURN_END
)
4567 rtx tem
= plus_constant (GET_MODE (target
),
4568 gen_lowpart (GET_MODE (target
), target
), 1);
4569 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
4575 /* Do some very basic size validation of a call to the strcpy builtin
4576 given by EXP. Return NULL_RTX to have the built-in expand to a call
4577 to the library function. */
4580 expand_builtin_strcat (tree exp
)
4582 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
)
4583 || !warn_stringop_overflow
)
4586 tree dest
= CALL_EXPR_ARG (exp
, 0);
4587 tree src
= CALL_EXPR_ARG (exp
, 1);
4589 /* Detect unterminated source (only). */
4590 if (!check_nul_terminated_array (exp
, src
))
4593 /* There is no way here to determine the length of the string in
4594 the destination to which the SRC string is being appended so
4595 just diagnose cases when the souce string is longer than
4596 the destination object. */
4599 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1, &data
.dst
);
4601 check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
, src
,
4602 destsize
, true, &data
);
4607 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4608 NULL_RTX if we failed the caller should emit a normal call, otherwise
4609 try to get the result in TARGET, if convenient (and in mode MODE if that's
4613 expand_builtin_strcpy (tree exp
, rtx target
)
4615 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4618 tree dest
= CALL_EXPR_ARG (exp
, 0);
4619 tree src
= CALL_EXPR_ARG (exp
, 1);
4621 if (warn_stringop_overflow
)
4624 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1,
4626 check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
,
4627 src
, destsize
, true, &data
);
4630 if (rtx ret
= expand_builtin_strcpy_args (exp
, dest
, src
, target
))
4632 /* Check to see if the argument was declared attribute nonstring
4633 and if so, issue a warning since at this point it's not known
4634 to be nul-terminated. */
4635 tree fndecl
= get_callee_fndecl (exp
);
4636 maybe_warn_nonstring_arg (fndecl
, exp
);
4643 /* Helper function to do the actual work for expand_builtin_strcpy. The
4644 arguments to the builtin_strcpy call DEST and SRC are broken out
4645 so that this can also be called without constructing an actual CALL_EXPR.
4646 The other arguments and return value are the same as for
4647 expand_builtin_strcpy. */
4650 expand_builtin_strcpy_args (tree exp
, tree dest
, tree src
, rtx target
)
4652 /* Detect strcpy calls with unterminated arrays.. */
4653 if (tree nonstr
= unterminated_array (src
))
4655 /* NONSTR refers to the non-nul terminated constant array. */
4656 if (!TREE_NO_WARNING (exp
))
4657 warn_string_no_nul (EXPR_LOCATION (exp
), "strcpy", src
, nonstr
);
4661 return expand_movstr (dest
, src
, target
, /*retmode=*/ RETURN_BEGIN
);
4664 /* Expand a call EXP to the stpcpy builtin.
4665 Return NULL_RTX if we failed the caller should emit a normal call,
4666 otherwise try to get the result in TARGET, if convenient (and in
4667 mode MODE if that's convenient). */
4670 expand_builtin_stpcpy_1 (tree exp
, rtx target
, machine_mode mode
)
4673 location_t loc
= EXPR_LOCATION (exp
);
4675 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4678 dst
= CALL_EXPR_ARG (exp
, 0);
4679 src
= CALL_EXPR_ARG (exp
, 1);
4681 if (warn_stringop_overflow
)
4684 tree destsize
= compute_objsize (dst
, warn_stringop_overflow
- 1,
4686 check_access (exp
, dst
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
,
4687 src
, destsize
, true, &data
);
4690 /* If return value is ignored, transform stpcpy into strcpy. */
4691 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
4693 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
4694 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
4695 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4702 /* Ensure we get an actual string whose length can be evaluated at
4703 compile-time, not an expression containing a string. This is
4704 because the latter will potentially produce pessimized code
4705 when used to produce the return value. */
4706 c_strlen_data lendata
= { };
4707 if (!c_getstr (src
, NULL
)
4708 || !(len
= c_strlen (src
, 0, &lendata
, 1)))
4709 return expand_movstr (dst
, src
, target
,
4710 /*retmode=*/ RETURN_END_MINUS_ONE
);
4712 if (lendata
.decl
&& !TREE_NO_WARNING (exp
))
4713 warn_string_no_nul (EXPR_LOCATION (exp
), "stpcpy", src
, lendata
.decl
);
4715 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
4716 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
4718 /*retmode=*/ RETURN_END_MINUS_ONE
);
4723 if (TREE_CODE (len
) == INTEGER_CST
)
4725 rtx len_rtx
= expand_normal (len
);
4727 if (CONST_INT_P (len_rtx
))
4729 ret
= expand_builtin_strcpy_args (exp
, dst
, src
, target
);
4735 if (mode
!= VOIDmode
)
4736 target
= gen_reg_rtx (mode
);
4738 target
= gen_reg_rtx (GET_MODE (ret
));
4740 if (GET_MODE (target
) != GET_MODE (ret
))
4741 ret
= gen_lowpart (GET_MODE (target
), ret
);
4743 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
4744 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
4752 return expand_movstr (dst
, src
, target
,
4753 /*retmode=*/ RETURN_END_MINUS_ONE
);
4757 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4758 arguments while being careful to avoid duplicate warnings (which could
4759 be issued if the expander were to expand the call, resulting in it
4760 being emitted in expand_call(). */
4763 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
4765 if (rtx ret
= expand_builtin_stpcpy_1 (exp
, target
, mode
))
4767 /* The call has been successfully expanded. Check for nonstring
4768 arguments and issue warnings as appropriate. */
4769 maybe_warn_nonstring_arg (get_callee_fndecl (exp
), exp
);
4776 /* Check a call EXP to the stpncpy built-in for validity.
4777 Return NULL_RTX on both success and failure. */
4780 expand_builtin_stpncpy (tree exp
, rtx
)
4782 if (!validate_arglist (exp
,
4783 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4784 || !warn_stringop_overflow
)
4787 /* The source and destination of the call. */
4788 tree dest
= CALL_EXPR_ARG (exp
, 0);
4789 tree src
= CALL_EXPR_ARG (exp
, 1);
4791 /* The exact number of bytes to write (not the maximum). */
4792 tree len
= CALL_EXPR_ARG (exp
, 2);
4793 if (!check_nul_terminated_array (exp
, src
, len
))
4797 /* The size of the destination object. */
4798 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1, &data
.dst
);
4800 check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
, src
, destsize
,
4806 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4807 bytes from constant string DATA + OFFSET and return it as target
4811 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
4812 scalar_int_mode mode
)
4814 const char *str
= (const char *) data
;
4816 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
4819 return c_readstr (str
+ offset
, mode
);
4822 /* Helper to check the sizes of sequences and the destination of calls
4823 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4824 success (no overflow or invalid sizes), false otherwise. */
4827 check_strncat_sizes (tree exp
, tree objsize
)
4829 tree dest
= CALL_EXPR_ARG (exp
, 0);
4830 tree src
= CALL_EXPR_ARG (exp
, 1);
4831 tree maxread
= CALL_EXPR_ARG (exp
, 2);
4833 /* Try to determine the range of lengths that the source expression
4835 c_strlen_data lendata
= { };
4836 get_range_strlen (src
, &lendata
, /* eltsize = */ 1);
4838 /* Try to verify that the destination is big enough for the shortest
4842 if (!objsize
&& warn_stringop_overflow
)
4844 /* If it hasn't been provided by __strncat_chk, try to determine
4845 the size of the destination object into which the source is
4847 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1, &data
.dst
);
4850 /* Add one for the terminating nul. */
4851 tree srclen
= (lendata
.minlen
4852 ? fold_build2 (PLUS_EXPR
, size_type_node
, lendata
.minlen
,
4856 /* The strncat function copies at most MAXREAD bytes and always appends
4857 the terminating nul so the specified upper bound should never be equal
4858 to (or greater than) the size of the destination. */
4859 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (objsize
)
4860 && tree_int_cst_equal (objsize
, maxread
))
4862 location_t loc
= tree_nonartificial_location (exp
);
4863 loc
= expansion_point_location_if_in_system_header (loc
);
4865 warning_at (loc
, OPT_Wstringop_overflow_
,
4866 "%K%qD specified bound %E equals destination size",
4867 exp
, get_callee_fndecl (exp
), maxread
);
4873 || (maxread
&& tree_fits_uhwi_p (maxread
)
4874 && tree_fits_uhwi_p (srclen
)
4875 && tree_int_cst_lt (maxread
, srclen
)))
4878 /* The number of bytes to write is LEN but check_access will alsoa
4879 check SRCLEN if LEN's value isn't known. */
4880 return check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, maxread
, srclen
,
4881 objsize
, true, &data
);
4884 /* Similar to expand_builtin_strcat, do some very basic size validation
4885 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
4886 the built-in expand to a call to the library function. */
4889 expand_builtin_strncat (tree exp
, rtx
)
4891 if (!validate_arglist (exp
,
4892 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4893 || !warn_stringop_overflow
)
4896 tree dest
= CALL_EXPR_ARG (exp
, 0);
4897 tree src
= CALL_EXPR_ARG (exp
, 1);
4898 /* The upper bound on the number of bytes to write. */
4899 tree maxread
= CALL_EXPR_ARG (exp
, 2);
4901 /* Detect unterminated source (only). */
4902 if (!check_nul_terminated_array (exp
, src
, maxread
))
4905 /* The length of the source sequence. */
4906 tree slen
= c_strlen (src
, 1);
4908 /* Try to determine the range of lengths that the source expression
4909 refers to. Since the lengths are only used for warning and not
4910 for code generation disable strict mode below. */
4914 c_strlen_data lendata
= { };
4915 get_range_strlen (src
, &lendata
, /* eltsize = */ 1);
4916 maxlen
= lendata
.maxbound
;
4920 /* Try to verify that the destination is big enough for the shortest
4921 string. First try to determine the size of the destination object
4922 into which the source is being copied. */
4923 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1, &data
.dst
);
4925 /* Add one for the terminating nul. */
4926 tree srclen
= (maxlen
4927 ? fold_build2 (PLUS_EXPR
, size_type_node
, maxlen
,
4931 /* The strncat function copies at most MAXREAD bytes and always appends
4932 the terminating nul so the specified upper bound should never be equal
4933 to (or greater than) the size of the destination. */
4934 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (destsize
)
4935 && tree_int_cst_equal (destsize
, maxread
))
4937 location_t loc
= tree_nonartificial_location (exp
);
4938 loc
= expansion_point_location_if_in_system_header (loc
);
4940 warning_at (loc
, OPT_Wstringop_overflow_
,
4941 "%K%qD specified bound %E equals destination size",
4942 exp
, get_callee_fndecl (exp
), maxread
);
4948 || (maxread
&& tree_fits_uhwi_p (maxread
)
4949 && tree_fits_uhwi_p (srclen
)
4950 && tree_int_cst_lt (maxread
, srclen
)))
4953 check_access (exp
, dest
, src
, NULL_TREE
, maxread
, srclen
, destsize
,
4959 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4960 NULL_RTX if we failed the caller should emit a normal call. */
4963 expand_builtin_strncpy (tree exp
, rtx target
)
4965 location_t loc
= EXPR_LOCATION (exp
);
4967 if (!validate_arglist (exp
,
4968 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4970 tree dest
= CALL_EXPR_ARG (exp
, 0);
4971 tree src
= CALL_EXPR_ARG (exp
, 1);
4972 /* The number of bytes to write (not the maximum). */
4973 tree len
= CALL_EXPR_ARG (exp
, 2);
4975 if (!check_nul_terminated_array (exp
, src
, len
))
4978 /* The length of the source sequence. */
4979 tree slen
= c_strlen (src
, 1);
4981 if (warn_stringop_overflow
)
4984 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1,
4987 /* The number of bytes to write is LEN but check_access will also
4988 check SLEN if LEN's value isn't known. */
4989 check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
, src
,
4990 destsize
, true, &data
);
4993 /* We must be passed a constant len and src parameter. */
4994 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
4997 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
4999 /* We're required to pad with trailing zeros if the requested
5000 len is greater than strlen(s2)+1. In that case try to
5001 use store_by_pieces, if it fails, punt. */
5002 if (tree_int_cst_lt (slen
, len
))
5004 unsigned int dest_align
= get_pointer_alignment (dest
);
5005 const char *p
= c_getstr (src
);
5008 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
5009 || !can_store_by_pieces (tree_to_uhwi (len
),
5010 builtin_strncpy_read_str
,
5011 CONST_CAST (char *, p
),
5015 dest_mem
= get_memory_rtx (dest
, len
);
5016 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
5017 builtin_strncpy_read_str
,
5018 CONST_CAST (char *, p
), dest_align
, false,
5020 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
5021 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
5028 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
5029 bytes from constant string DATA + OFFSET and return it as target
5033 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
5034 scalar_int_mode mode
)
5036 const char *c
= (const char *) data
;
5037 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
5039 memset (p
, *c
, GET_MODE_SIZE (mode
));
5041 return c_readstr (p
, mode
);
5044 /* Callback routine for store_by_pieces. Return the RTL of a register
5045 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5046 char value given in the RTL register data. For example, if mode is
5047 4 bytes wide, return the RTL for 0x01010101*data. */
5050 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
5051 scalar_int_mode mode
)
5057 size
= GET_MODE_SIZE (mode
);
5061 p
= XALLOCAVEC (char, size
);
5062 memset (p
, 1, size
);
5063 coeff
= c_readstr (p
, mode
);
5065 target
= convert_to_mode (mode
, (rtx
) data
, 1);
5066 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
5067 return force_reg (mode
, target
);
5070 /* Expand expression EXP, which is a call to the memset builtin. Return
5071 NULL_RTX if we failed the caller should emit a normal call, otherwise
5072 try to get the result in TARGET, if convenient (and in mode MODE if that's
5076 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
5078 if (!validate_arglist (exp
,
5079 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5082 tree dest
= CALL_EXPR_ARG (exp
, 0);
5083 tree val
= CALL_EXPR_ARG (exp
, 1);
5084 tree len
= CALL_EXPR_ARG (exp
, 2);
5086 check_memop_access (exp
, dest
, NULL_TREE
, len
);
5088 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
5091 /* Helper function to do the actual work for expand_builtin_memset. The
5092 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5093 so that this can also be called without constructing an actual CALL_EXPR.
5094 The other arguments and return value are the same as for
5095 expand_builtin_memset. */
5098 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
5099 rtx target
, machine_mode mode
, tree orig_exp
)
5102 enum built_in_function fcode
;
5103 machine_mode val_mode
;
5105 unsigned int dest_align
;
5106 rtx dest_mem
, dest_addr
, len_rtx
;
5107 HOST_WIDE_INT expected_size
= -1;
5108 unsigned int expected_align
= 0;
5109 unsigned HOST_WIDE_INT min_size
;
5110 unsigned HOST_WIDE_INT max_size
;
5111 unsigned HOST_WIDE_INT probable_max_size
;
5113 dest_align
= get_pointer_alignment (dest
);
5115 /* If DEST is not a pointer type, don't do this operation in-line. */
5116 if (dest_align
== 0)
5119 if (currently_expanding_gimple_stmt
)
5120 stringop_block_profile (currently_expanding_gimple_stmt
,
5121 &expected_align
, &expected_size
);
5123 if (expected_align
< dest_align
)
5124 expected_align
= dest_align
;
5126 /* If the LEN parameter is zero, return DEST. */
5127 if (integer_zerop (len
))
5129 /* Evaluate and ignore VAL in case it has side-effects. */
5130 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5131 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
5134 /* Stabilize the arguments in case we fail. */
5135 dest
= builtin_save_expr (dest
);
5136 val
= builtin_save_expr (val
);
5137 len
= builtin_save_expr (len
);
5139 len_rtx
= expand_normal (len
);
5140 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
5141 &probable_max_size
);
5142 dest_mem
= get_memory_rtx (dest
, len
);
5143 val_mode
= TYPE_MODE (unsigned_char_type_node
);
5145 if (TREE_CODE (val
) != INTEGER_CST
)
5149 val_rtx
= expand_normal (val
);
5150 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
5152 /* Assume that we can memset by pieces if we can store
5153 * the coefficients by pieces (in the required modes).
5154 * We can't pass builtin_memset_gen_str as that emits RTL. */
5156 if (tree_fits_uhwi_p (len
)
5157 && can_store_by_pieces (tree_to_uhwi (len
),
5158 builtin_memset_read_str
, &c
, dest_align
,
5161 val_rtx
= force_reg (val_mode
, val_rtx
);
5162 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
5163 builtin_memset_gen_str
, val_rtx
, dest_align
,
5164 true, RETURN_BEGIN
);
5166 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
5167 dest_align
, expected_align
,
5168 expected_size
, min_size
, max_size
,
5172 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
5173 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
5177 if (target_char_cast (val
, &c
))
5182 if (tree_fits_uhwi_p (len
)
5183 && can_store_by_pieces (tree_to_uhwi (len
),
5184 builtin_memset_read_str
, &c
, dest_align
,
5186 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
5187 builtin_memset_read_str
, &c
, dest_align
, true,
5189 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
5190 gen_int_mode (c
, val_mode
),
5191 dest_align
, expected_align
,
5192 expected_size
, min_size
, max_size
,
5196 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
5197 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
5201 set_mem_align (dest_mem
, dest_align
);
5202 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
5203 CALL_EXPR_TAILCALL (orig_exp
)
5204 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
5205 expected_align
, expected_size
,
5211 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
5212 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
5218 fndecl
= get_callee_fndecl (orig_exp
);
5219 fcode
= DECL_FUNCTION_CODE (fndecl
);
5220 if (fcode
== BUILT_IN_MEMSET
)
5221 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
5223 else if (fcode
== BUILT_IN_BZERO
)
5224 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
5228 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
5229 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
5230 return expand_call (fn
, target
, target
== const0_rtx
);
5233 /* Expand expression EXP, which is a call to the bzero builtin. Return
5234 NULL_RTX if we failed the caller should emit a normal call. */
5237 expand_builtin_bzero (tree exp
)
5239 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5242 tree dest
= CALL_EXPR_ARG (exp
, 0);
5243 tree size
= CALL_EXPR_ARG (exp
, 1);
5245 check_memop_access (exp
, dest
, NULL_TREE
, size
);
5247 /* New argument list transforming bzero(ptr x, int y) to
5248 memset(ptr x, int 0, size_t y). This is done this way
5249 so that if it isn't expanded inline, we fallback to
5250 calling bzero instead of memset. */
5252 location_t loc
= EXPR_LOCATION (exp
);
5254 return expand_builtin_memset_args (dest
, integer_zero_node
,
5255 fold_convert_loc (loc
,
5256 size_type_node
, size
),
5257 const0_rtx
, VOIDmode
, exp
);
5260 /* Try to expand cmpstr operation ICODE with the given operands.
5261 Return the result rtx on success, otherwise return null. */
5264 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
5265 HOST_WIDE_INT align
)
5267 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
5269 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
5272 class expand_operand ops
[4];
5273 create_output_operand (&ops
[0], target
, insn_mode
);
5274 create_fixed_operand (&ops
[1], arg1_rtx
);
5275 create_fixed_operand (&ops
[2], arg2_rtx
);
5276 create_integer_operand (&ops
[3], align
);
5277 if (maybe_expand_insn (icode
, 4, ops
))
5278 return ops
[0].value
;
5282 /* Expand expression EXP, which is a call to the memcmp built-in function.
5283 Return NULL_RTX if we failed and the caller should emit a normal call,
5284 otherwise try to get the result in TARGET, if convenient.
5285 RESULT_EQ is true if we can relax the returned value to be either zero
5286 or nonzero, without caring about the sign. */
5289 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
5291 if (!validate_arglist (exp
,
5292 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5295 tree arg1
= CALL_EXPR_ARG (exp
, 0);
5296 tree arg2
= CALL_EXPR_ARG (exp
, 1);
5297 tree len
= CALL_EXPR_ARG (exp
, 2);
5298 enum built_in_function fcode
= DECL_FUNCTION_CODE (get_callee_fndecl (exp
));
5299 bool no_overflow
= true;
5301 /* Diagnose calls where the specified length exceeds the size of either
5304 tree size
= compute_objsize (arg1
, 0, &data
.src
);
5305 no_overflow
= check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
,
5306 len
, /*maxread=*/NULL_TREE
, size
,
5307 /*objsize=*/NULL_TREE
, true, &data
);
5311 size
= compute_objsize (arg2
, 0, &data
.src
);
5312 no_overflow
= check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
,
5313 len
, /*maxread=*/NULL_TREE
, size
,
5314 /*objsize=*/NULL_TREE
, true, &data
);
5317 /* If the specified length exceeds the size of either object,
5318 call the function. */
5322 /* Due to the performance benefit, always inline the calls first
5323 when result_eq is false. */
5324 rtx result
= NULL_RTX
;
5326 if (!result_eq
&& fcode
!= BUILT_IN_BCMP
)
5328 result
= inline_expand_builtin_bytecmp (exp
, target
);
5333 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
5334 location_t loc
= EXPR_LOCATION (exp
);
5336 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
5337 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
5339 /* If we don't have POINTER_TYPE, call the function. */
5340 if (arg1_align
== 0 || arg2_align
== 0)
5343 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
5344 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
5345 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
5347 /* Set MEM_SIZE as appropriate. */
5348 if (CONST_INT_P (len_rtx
))
5350 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
5351 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
5354 by_pieces_constfn constfn
= NULL
;
5356 /* Try to get the byte representation of the constant ARG2 (or, only
5357 when the function's result is used for equality to zero, ARG1)
5358 points to, with its byte size in NBYTES. */
5359 unsigned HOST_WIDE_INT nbytes
;
5360 const char *rep
= c_getstr (arg2
, &nbytes
);
5361 if (result_eq
&& rep
== NULL
)
5363 /* For equality to zero the arguments are interchangeable. */
5364 rep
= c_getstr (arg1
, &nbytes
);
5366 std::swap (arg1_rtx
, arg2_rtx
);
5369 /* If the function's constant bound LEN_RTX is less than or equal
5370 to the byte size of the representation of the constant argument,
5371 and if block move would be done by pieces, we can avoid loading
5372 the bytes from memory and only store the computed constant result. */
5374 && CONST_INT_P (len_rtx
)
5375 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= nbytes
)
5376 constfn
= builtin_memcpy_read_str
;
5378 result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
5379 TREE_TYPE (len
), target
,
5381 CONST_CAST (char *, rep
));
5385 /* Return the value in the proper mode for this function. */
5386 if (GET_MODE (result
) == mode
)
5391 convert_move (target
, result
, 0);
5395 return convert_to_mode (mode
, result
, 0);
/* NOTE(review): this fragment is missing interior source lines (gaps in the
   embedded original line numbers, e.g. 5403->5406, 5466->5473); the opening
   and closing braces and several statements are absent.  Code below is kept
   byte-identical — restore from upstream GCC builtins.c before editing.  */
5401 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5402 if we failed, the caller should emit a normal call, otherwise try to get
5403 the result in TARGET, if convenient. */
5406 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
/* Bail out (emit a library call) unless the call has (ptr, ptr) arguments.  */
5408 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5411 tree arg1
= CALL_EXPR_ARG (exp
, 0);
5412 tree arg2
= CALL_EXPR_ARG (exp
, 1);
/* Diagnose arguments that are not nul-terminated arrays.  */
5414 if (!check_nul_terminated_array (exp
, arg1
)
5415 || !check_nul_terminated_array (exp
, arg2
))
5418 /* Due to the performance benefit, always inline the calls first. */
5419 rtx result
= NULL_RTX
;
5420 result
= inline_expand_builtin_bytecmp (exp
, target
);
/* Query the target for cmpstr/cmpstrn patterns in SImode.  */
5424 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
5425 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
5426 if (cmpstr_icode
== CODE_FOR_nothing
&& cmpstrn_icode
== CODE_FOR_nothing
)
5429 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
5430 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
5432 /* If we don't have POINTER_TYPE, call the function. */
5433 if (arg1_align
== 0 || arg2_align
== 0)
5436 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5437 arg1
= builtin_save_expr (arg1
);
5438 arg2
= builtin_save_expr (arg2
);
5440 rtx arg1_rtx
= get_memory_rtx (arg1
, NULL
);
5441 rtx arg2_rtx
= get_memory_rtx (arg2
, NULL
);
5443 /* Try to call cmpstrsi. */
5444 if (cmpstr_icode
!= CODE_FOR_nothing
)
5445 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
5446 MIN (arg1_align
, arg2_align
));
5448 /* Try to determine at least one length and call cmpstrnsi. */
5449 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
5454 tree len1
= c_strlen (arg1
, 1);
5455 tree len2
= c_strlen (arg2
, 1);
/* Use strlen + 1 so the terminating nul participates in the compare.  */
5458 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
5460 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
5462 /* If we don't have a constant length for the first, use the length
5463 of the second, if we know it. We don't require a constant for
5464 this case; some cost analysis could be done if both are available
5465 but neither is constant. For now, assume they're equally cheap,
5466 unless one has side effects. If both strings have constant lengths,
5473 else if (TREE_SIDE_EFFECTS (len1
))
5475 else if (TREE_SIDE_EFFECTS (len2
))
5477 else if (TREE_CODE (len1
) != INTEGER_CST
)
5479 else if (TREE_CODE (len2
) != INTEGER_CST
)
5481 else if (tree_int_cst_lt (len1
, len2
))
5486 /* If both arguments have side effects, we cannot optimize. */
5487 if (len
&& !TREE_SIDE_EFFECTS (len
))
5489 arg3_rtx
= expand_normal (len
);
5490 result
= expand_cmpstrn_or_cmpmem
5491 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
5492 arg3_rtx
, MIN (arg1_align
, arg2_align
));
5496 tree fndecl
= get_callee_fndecl (exp
);
5499 /* Check to see if the argument was declared attribute nonstring
5500 and if so, issue a warning since at this point it's not known
5501 to be nul-terminated. */
5502 maybe_warn_nonstring_arg (fndecl
, exp
);
5504 /* Return the value in the proper mode for this function. */
5505 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
5506 if (GET_MODE (result
) == mode
)
5509 return convert_to_mode (mode
, result
, 0);
5510 convert_move (target
, result
, 0);
5514 /* Expand the library call ourselves using a stabilized argument
5515 list to avoid re-evaluating the function's arguments twice. */
5516 tree fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
5517 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
/* Preserve the original call's tail-call flag on the replacement call.  */
5518 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5519 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): fragment with missing interior lines (embedded numbering
   jumps, e.g. 5577->5586); kept byte-identical.  Restore from upstream GCC
   builtins.c before editing.  */
5522 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5523 NULL_RTX if we failed, the caller should emit a normal call, otherwise try to get
5524 the result in TARGET, if convenient. */
5527 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
5528 ATTRIBUTE_UNUSED machine_mode mode
)
/* Bail out unless the call has (ptr, ptr, int) arguments.  */
5530 if (!validate_arglist (exp
,
5531 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5534 tree arg1
= CALL_EXPR_ARG (exp
, 0);
5535 tree arg2
= CALL_EXPR_ARG (exp
, 1);
5536 tree arg3
= CALL_EXPR_ARG (exp
, 2);
/* Diagnose unterminated arrays, bounded by the arg3 count.  */
5538 if (!check_nul_terminated_array (exp
, arg1
, arg3
)
5539 || !check_nul_terminated_array (exp
, arg2
, arg3
))
5542 /* Due to the performance benefit, always inline the calls first. */
5543 rtx result
= NULL_RTX
;
5544 result
= inline_expand_builtin_bytecmp (exp
, target
);
5548 /* If c_strlen can determine an expression for one of the string
5549 lengths, and it doesn't have side effects, then emit cmpstrnsi
5550 using length MIN(strlen(string)+1, arg3). */
5551 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
5552 if (cmpstrn_icode
== CODE_FOR_nothing
)
5557 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
5558 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
5560 tree len1
= c_strlen (arg1
, 1);
5561 tree len2
= c_strlen (arg2
, 1);
5563 location_t loc
= EXPR_LOCATION (exp
);
/* Use strlen + 1 so the terminating nul participates in the compare.  */
5566 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
5568 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
5570 tree len3
= fold_convert_loc (loc
, sizetype
, arg3
);
5572 /* If we don't have a constant length for the first, use the length
5573 of the second, if we know it. If neither string is constant length,
5574 use the given length argument. We don't require a constant for
5575 this case; some cost analysis could be done if both are available
5576 but neither is constant. For now, assume they're equally cheap,
5577 unless one has side effects. If both strings have constant lengths,
5586 else if (TREE_SIDE_EFFECTS (len1
))
5588 else if (TREE_SIDE_EFFECTS (len2
))
5590 else if (TREE_CODE (len1
) != INTEGER_CST
)
5592 else if (TREE_CODE (len2
) != INTEGER_CST
)
5594 else if (tree_int_cst_lt (len1
, len2
))
5599 /* If we are not using the given length, we must incorporate it here.
5600 The actual new length parameter will be MIN(len,arg3) in this case. */
5603 len
= fold_convert_loc (loc
, sizetype
, len
);
5604 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
5606 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
5607 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
5608 rtx arg3_rtx
= expand_normal (len
);
5609 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
5610 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
5611 MIN (arg1_align
, arg2_align
));
5613 tree fndecl
= get_callee_fndecl (exp
);
5616 /* Check to see if the argument was declared attribute nonstring
5617 and if so, issue a warning since at this point it's not known
5618 to be nul-terminated. */
5619 maybe_warn_nonstring_arg (fndecl
, exp
);
5621 /* Return the value in the proper mode for this function. */
5622 mode
= TYPE_MODE (TREE_TYPE (exp
));
5623 if (GET_MODE (result
) == mode
)
5626 return convert_to_mode (mode
, result
, 0);
5627 convert_move (target
, result
, 0);
5631 /* Expand the library call ourselves using a stabilized argument
5632 list to avoid re-evaluating the function's arguments twice. */
5633 tree fn
= build_call_nofold_loc (loc
, fndecl
, 3, arg1
, arg2
, len
);
5634 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
/* Preserve the original call's tail-call flag on the replacement call.  */
5635 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5636 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): fragment with missing interior lines (declaration of `val'
   and `seq', start_sequence/get_insns, final return are absent); kept
   byte-identical.  */
5639 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5640 if that's convenient. */
5643 expand_builtin_saveregs (void)
5648 /* Don't do __builtin_saveregs more than once in a function.
5649 Save the result of the first call and reuse it. */
5650 if (saveregs_value
!= 0)
5651 return saveregs_value
;
5653 /* When this function is called, it means that registers must be
5654 saved on entry to this function. So we migrate the call to the
5655 first insn of this function. */
5659 /* Do whatever the machine needs done in this case. */
5660 val
= targetm
.calls
.expand_builtin_saveregs ();
/* Cache the result for any subsequent __builtin_saveregs in this function.  */
5665 saveregs_value
= val
;
5667 /* Put the insns after the NOTE that starts the function. If this
5668 is inside a start_sequence, make the outer-level insn chain current, so
5669 the code is placed at the start of the function. */
5670 push_topmost_sequence ();
5671 emit_insn_after (seq
, entry_of_function ());
5672 pop_topmost_sequence ();
/* NOTE(review): fragment; surrounding braces not visible in this extraction.
   Kept byte-identical.  */
5677 /* Expand a call to __builtin_next_arg. */
5680 expand_builtin_next_arg (void)
5682 /* Checking arguments is already done in fold_builtin_next_arg
5683 that must be called before this function. */
/* next_arg = internal_arg_pointer + arg_offset, computed in ptr_mode.  */
5684 return expand_binop (ptr_mode
, add_optab
,
5685 crtl
->args
.internal_arg_pointer
,
5686 crtl
->args
.arg_offset_rtx
,
5687 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
/* NOTE(review): fragment with missing interior lines (e.g. the branch taken
   when NEEDS_LVALUE is false); kept byte-identical.  */
5690 /* Make it easier for the backends by protecting the valist argument
5691 from multiple evaluations. */
5694 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
5696 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
5698 /* The current way of determining the type of valist is completely
5699 bogus. We should have the information on the va builtin instead. */
5701 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
5703 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
5705 if (TREE_SIDE_EFFECTS (valist
))
5706 valist
= save_expr (valist
);
5708 /* For this case, the backends will be expecting a pointer to
5709 vatype, but it's possible we've actually been given an array
5710 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5712 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
5714 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
5715 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
5720 tree pt
= build_pointer_type (vatype
);
5724 if (! TREE_SIDE_EFFECTS (valist
))
5727 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
/* Mark the address expression volatile-ish so it is not CSEd away.  */
5728 TREE_SIDE_EFFECTS (valist
) = 1;
5731 if (TREE_SIDE_EFFECTS (valist
))
5732 valist
= save_expr (valist
);
5733 valist
= fold_build2_loc (loc
, MEM_REF
,
5734 vatype
, valist
, build_int_cst (pt
, 0));
5740 /* The "standard" definition of va_list is void*. */
5743 std_build_builtin_va_list (void)
5745 return ptr_type_node
;
5748 /* The "standard" abi va_list is va_list_type_node. */
5751 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
5753 return va_list_type_node
;
/* NOTE(review): fragment; the declaration/initialization of `htype' and the
   final return path are missing from this extraction.  Kept byte-identical.  */
5756 /* The "standard" type of va_list is va_list_type_node. */
5759 std_canonical_va_list_type (tree type
)
5763 wtype
= va_list_type_node
;
5766 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
5768 /* If va_list is an array type, the argument may have decayed
5769 to a pointer type, e.g. by being passed to another function.
5770 In that case, unwrap both types so that we can compare the
5771 underlying records. */
5772 if (TREE_CODE (htype
) == ARRAY_TYPE
5773 || POINTER_TYPE_P (htype
))
5775 wtype
= TREE_TYPE (wtype
);
5776 htype
= TREE_TYPE (htype
);
5779 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
5780 return va_list_type_node
;
5785 /* The "standard" implementation of va_start: just assign `nextarg' to
5789 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
/* Expand VALIST as an lvalue and store NEXTARG into it.  */
5791 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5792 convert_move (va_r
, nextarg
, 0);
/* NOTE(review): fragment; declarations of `valist'/`nextarg', the early
   returns and closing braces are missing here.  Kept byte-identical.  */
5795 /* Expand EXP, a call to __builtin_va_start. */
5798 expand_builtin_va_start (tree exp
)
5802 location_t loc
= EXPR_LOCATION (exp
);
5804 if (call_expr_nargs (exp
) < 2)
5806 error_at (loc
, "too few arguments to function %<va_start%>");
5810 if (fold_builtin_next_arg (exp
, true))
5813 nextarg
= expand_builtin_next_arg ();
5814 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
/* Prefer the target hook; fall back to the standard expansion.  */
5816 if (targetm
.expand_builtin_va_start
)
5817 targetm
.expand_builtin_va_start (valist
, nextarg
);
5819 std_expand_builtin_va_start (valist
, nextarg
);
5824 /* Expand EXP, a call to __builtin_va_end. */
5827 expand_builtin_va_end (tree exp
)
5829 tree valist
= CALL_EXPR_ARG (exp
, 0);
5831 /* Evaluate for side effects, if needed. I hate macros that don't
5833 if (TREE_SIDE_EFFECTS (valist
))
/* Discard the value; only the side effects of VALIST matter here.  */
5834 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): fragment; declarations of `dst'/`src'/`t', braces and the
   final return are missing from this extraction.  Kept byte-identical.  */
5839 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5840 builtin rather than just as an assignment in stdarg.h because of the
5841 nastiness of array-type va_list types. */
5844 expand_builtin_va_copy (tree exp
)
5847 location_t loc
= EXPR_LOCATION (exp
)
;
5849 dst
= CALL_EXPR_ARG (exp
, 0);
5850 src
= CALL_EXPR_ARG (exp
, 1);
5852 dst
= stabilize_va_list_loc (loc
, dst
, 1);
5853 src
= stabilize_va_list_loc (loc
, src
, 0);
5855 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
/* Non-array va_list: a simple assignment suffices.  */
5857 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
5859 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
5860 TREE_SIDE_EFFECTS (t
) = 1;
5861 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5865 rtx dstb
, srcb
, size
;
5867 /* Evaluate to pointers. */
5868 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5869 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5870 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
5871 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
5873 dstb
= convert_memory_address (Pmode
, dstb
);
5874 srcb
= convert_memory_address (Pmode
, srcb
);
5876 /* "Dereference" to BLKmode memories. */
5877 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
5878 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
5879 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5880 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
5881 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
5882 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
/* Array-type va_list: copy the whole object as a block move.  */
5885 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
/* NOTE(review): fragment; several returns, braces and the conditions around
   the warnings are missing from this extraction.  Kept byte-identical.  */
5891 /* Expand a call to one of the builtin functions __builtin_frame_address or
5892 __builtin_return_address. */
5895 expand_builtin_frame_address (tree fndecl
, tree exp
)
5897 /* The argument must be a nonnegative integer constant.
5898 It counts the number of frames to scan up the stack.
5899 The value is either the frame pointer value or the return
5900 address saved in that frame. */
5901 if (call_expr_nargs (exp
) == 0)
5902 /* Warning about missing arg was already issued. */
5904 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
5906 error ("invalid argument to %qD", fndecl
);
5911 /* Number of frames to scan up the stack. */
5912 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
5914 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
5916 /* Some ports cannot access arbitrary stack frames. */
5919 warning (0, "unsupported argument to %qD", fndecl
);
5925 /* Warn since no effort is made to ensure that any frame
5926 beyond the current one exists or can be safely reached. */
5927 warning (OPT_Wframe_address
, "calling %qD with "
5928 "a nonzero argument is unsafe", fndecl
);
5931 /* For __builtin_frame_address, return what we've got. */
5932 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5936 && ! CONSTANT_P (tem
))
/* Force the address into a register before handing it back.  */
5937 tem
= copy_addr_to_reg (tem
);
/* NOTE(review): fragment; declarations of `op0'/`align'/`result', the
   `valid_arglist' check and early return, and the final return are missing
   from this extraction.  Kept byte-identical.  */
5942 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5943 failed and the caller should emit a normal call. */
5946 expand_builtin_alloca (tree exp
)
5951 tree fndecl
= get_callee_fndecl (exp
);
5952 HOST_WIDE_INT max_size
;
5953 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5954 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
/* Validate the arglist shape expected by each alloca flavor.  */
5956 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5957 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
5959 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
5960 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5961 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
5967 && warn_vla_limit
>= HOST_WIDE_INT_MAX
5968 && warn_alloc_size_limit
< warn_vla_limit
5970 && warn_alloca_limit
>= HOST_WIDE_INT_MAX
5971 && warn_alloc_size_limit
< warn_alloca_limit
5974 /* -Walloca-larger-than and -Wvla-larger-than settings of
5975 less than HOST_WIDE_INT_MAX override the more general
5976 -Walloc-size-larger-than so unless either of the former
5977 options is smaller than the last one (which would imply
5978 that the call was already checked), check the alloca
5979 arguments for overflow. */
5980 tree args
[] = { CALL_EXPR_ARG (exp
, 0), NULL_TREE
};
5981 int idx
[] = { 0, -1 };
5982 maybe_warn_alloc_args_overflow (fndecl
, exp
, args
, idx
);
5985 /* Compute the argument. */
5986 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5988 /* Compute the alignment. */
5989 align
= (fcode
== BUILT_IN_ALLOCA
5991 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
5993 /* Compute the maximum size. */
5994 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5995 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
5998 /* Allocate the desired space. If the allocation stems from the declaration
5999 of a variable-sized object, it cannot accumulate. */
6001 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
6002 result
= convert_memory_address (ptr_mode
, result
);
6004 /* Dynamic allocations for variables are recorded during gimplification. */
6005 if (!alloca_for_var
&& (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
))
6006 record_dynamic_alloc (exp
);
/* NOTE(review): fragment; the OPTAB arguments of the binops and the final
   return are cut off in this extraction.  Kept byte-identical.  */
6011 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
6012 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
6013 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
6014 handle_builtin_stack_restore function. */
6017 expand_asan_emit_allocas_unpoison (tree exp
)
6019 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6020 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6021 rtx top
= expand_expr (arg0
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
6022 rtx bot
= expand_expr (arg1
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
/* off = virtual_stack_dynamic - stack_pointer (STACK_DYNAMIC_OFFSET).  */
6023 rtx off
= expand_simple_binop (Pmode
, MINUS
, virtual_stack_dynamic_rtx
,
6024 stack_pointer_rtx
, NULL_RTX
, 0,
6026 off
= convert_modes (ptr_mode
, Pmode
, off
, 0);
6027 bot
= expand_simple_binop (ptr_mode
, PLUS
, bot
, off
, NULL_RTX
, 0,
/* Emit the library call __asan_allocas_unpoison (top, bot).  */
6029 rtx ret
= init_one_libfunc ("__asan_allocas_unpoison");
6030 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
,
6031 top
, ptr_mode
, bot
, ptr_mode
);
/* NOTE(review): fragment; the `subtarget' parameter line and local
   declarations are missing in this extraction.  Kept byte-identical.  */
6035 /* Expand a call to bswap builtin in EXP.
6036 Return NULL_RTX if a normal call should be emitted rather than expanding the
6037 function in-line. If convenient, the result should be placed in TARGET.
6038 SUBTARGET may be used as the target for computing one of EXP's operands. */
6041 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
6047 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
6050 arg
= CALL_EXPR_ARG (exp
, 0);
6051 op0
= expand_expr (arg
,
6052 subtarget
&& GET_MODE (subtarget
) == target_mode
6053 ? subtarget
: NULL_RTX
,
6054 target_mode
, EXPAND_NORMAL
);
6055 if (GET_MODE (op0
) != target_mode
)
6056 op0
= convert_to_mode (target_mode
, op0
, 1);
/* Emit the byte-swap via the bswap optab.  */
6058 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
6060 gcc_assert (target
);
6062 return convert_to_mode (target_mode
, target
, 1);
/* NOTE(review): fragment with missing interior lines; kept byte-identical.  */
6065 /* Expand a call to a unary builtin in EXP.
6066 Return NULL_RTX if a normal call should be emitted rather than expanding the
6067 function in-line. If convenient, the result should be placed in TARGET.
6068 SUBTARGET may be used as the target for computing one of EXP's operands. */
6071 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
6072 rtx subtarget
, optab op_optab
)
6076 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
6079 /* Compute the argument. */
6080 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
6082 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
6083 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
6084 VOIDmode
, EXPAND_NORMAL
);
6085 /* Compute op, into TARGET if possible.
6086 Set TARGET to wherever the result comes back. */
6087 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
6088 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
6089 gcc_assert (target
);
6091 return convert_to_mode (target_mode
, target
, 0);
6094 /* Expand a call to __builtin_expect. We just return our argument
6095 as the builtin_expect semantic should've been already executed by
6096 tree branch prediction pass. */
6099 expand_builtin_expect (tree exp
, rtx target
)
6103 if (call_expr_nargs (exp
) < 2)
6105 arg
= CALL_EXPR_ARG (exp
, 0);
/* Expand and return the first argument; the hint itself is gone by now.  */
6107 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
6108 /* When guessing was done, the hints should be already stripped away. */
6109 gcc_assert (!flag_guess_branch_prob
6110 || optimize
== 0 || seen_error ());
6114 /* Expand a call to __builtin_expect_with_probability. We just return our
6115 argument as the builtin_expect semantic should've been already executed by
6116 tree branch prediction pass. */
6119 expand_builtin_expect_with_probability (tree exp
, rtx target
)
6123 if (call_expr_nargs (exp
) < 3)
6125 arg
= CALL_EXPR_ARG (exp
, 0);
/* Expand and return the first argument; the hint itself is gone by now.  */
6127 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
6128 /* When guessing was done, the hints should be already stripped away. */
6129 gcc_assert (!flag_guess_branch_prob
6130 || optimize
== 0 || seen_error ());
6135 /* Expand a call to __builtin_assume_aligned. We just return our first
6136 argument as the builtin_assume_aligned semantic should've been already
6140 expand_builtin_assume_aligned (tree exp
, rtx target
)
6142 if (call_expr_nargs (exp
) < 2)
6144 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
/* The alignment (and optional misalignment) operands must have no side
   effects — they were only hints for the middle end.  */
6146 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
6147 && (call_expr_nargs (exp
) < 3
6148 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
/* Emit a trap: use the target's trap insn when available, otherwise fall
   back to calling abort().  (NOTE(review): fragment; the else keyword and
   braces are missing in this extraction — kept byte-identical.)  */
6153 expand_builtin_trap (void)
6155 if (targetm
.have_trap ())
6157 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
6158 /* For trap insns when not accumulating outgoing args force
6159 REG_ARGS_SIZE note to prevent crossjumping of calls with
6160 different args sizes. */
6161 if (!ACCUMULATE_OUTGOING_ARGS
)
6162 add_args_size_note (insn
, stack_pointer_delta
);
6166 tree fn
= builtin_decl_implicit (BUILT_IN_ABORT
);
6167 tree call_expr
= build_call_expr (fn
, 0);
6168 expand_call (call_expr
, NULL_RTX
, false);
6174 /* Expand a call to __builtin_unreachable. We do nothing except emit
6175 a barrier saying that control flow will not pass here.
6177 It is the responsibility of the program being compiled to ensure
6178 that control flow does never reach __builtin_unreachable. */
6180 expand_builtin_unreachable (void)
6185 /* Expand EXP, a call to fabs, fabsf or fabsl.
6186 Return NULL_RTX if a normal call should be emitted rather than expanding
6187 the function inline. If convenient, the result should be placed
6188 in TARGET. SUBTARGET may be used as the target for computing
6192 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
6198 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
6201 arg
= CALL_EXPR_ARG (exp
, 0);
/* Stabilize the argument so expand_abs may evaluate it more than once.  */
6202 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
6203 mode
= TYPE_MODE (TREE_TYPE (arg
));
6204 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
6205 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
6208 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6209 Return NULL if a normal call should be emitted rather than expanding the
6210 function inline. If convenient, the result should be placed in TARGET.
6211 SUBTARGET may be used as the target for computing the operand. */
6214 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
6219 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
6222 arg
= CALL_EXPR_ARG (exp
, 0);
6223 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
6225 arg
= CALL_EXPR_ARG (exp
, 1);
6226 op1
= expand_normal (arg
);
6228 return expand_copysign (op0
, op1
, target
);
/* NOTE(review): fragment; several returns and the surrounding braces of the
   preprocessor-conditional region are missing here.  Kept byte-identical.  */
6231 /* Expand a call to __builtin___clear_cache. */
6234 expand_builtin___clear_cache (tree exp
)
6236 if (!targetm
.code_for_clear_cache
)
6238 #ifdef CLEAR_INSN_CACHE
6239 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6240 does something. Just do the default expansion to a call to
6244 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6245 does nothing. There is no need to call it. Do nothing. */
6247 #endif /* CLEAR_INSN_CACHE */
6250 /* We have a "clear_cache" insn, and it will handle everything. */
6252 rtx begin_rtx
, end_rtx
;
6254 /* We must not expand to a library call. If we did, any
6255 fallback library function in libgcc that might contain a call to
6256 __builtin___clear_cache() would recurse infinitely. */
6257 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6259 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6263 if (targetm
.have_clear_cache ())
6265 class expand_operand ops
[2];
6267 begin
= CALL_EXPR_ARG (exp
, 0);
6268 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
6270 end
= CALL_EXPR_ARG (exp
, 1);
6271 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
6273 create_address_operand (&ops
[0], begin_rtx
);
6274 create_address_operand (&ops
[1], end_rtx
);
6275 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
/* NOTE(review): fragment; early return and the final `return tramp;' are
   missing in this extraction.  Kept byte-identical.  */
6281 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6284 round_trampoline_addr (rtx tramp
)
6286 rtx temp
, addend
, mask
;
6288 /* If we don't need too much alignment, we'll have been guaranteed
6289 proper alignment by get_trampoline_type. */
6290 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
6293 /* Round address up to desired boundary. */
6294 temp
= gen_reg_rtx (Pmode
);
6295 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
6296 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
/* tramp = (tramp + align-1) & -align — the classic round-up idiom.  */
6298 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
6299 temp
, 0, OPTAB_LIB_WIDEN
);
6300 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
6301 temp
, 0, OPTAB_LIB_WIDEN
);
/* Expand a call to __builtin_init_trampoline / __builtin_init_heap_trampoline;
   ONSTACK distinguishes the stack variant.  (NOTE(review): fragment with
   missing interior lines; kept byte-identical.)  */
6307 expand_builtin_init_trampoline (tree exp
, bool onstack
)
6309 tree t_tramp
, t_func
, t_chain
;
6310 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
6312 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
6313 POINTER_TYPE
, VOID_TYPE
))
6316 t_tramp
= CALL_EXPR_ARG (exp
, 0);
6317 t_func
= CALL_EXPR_ARG (exp
, 1);
6318 t_chain
= CALL_EXPR_ARG (exp
, 2);
6320 r_tramp
= expand_normal (t_tramp
);
6321 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
6322 MEM_NOTRAP_P (m_tramp
) = 1;
6324 /* If ONSTACK, the TRAMP argument should be the address of a field
6325 within the local function's FRAME decl. Either way, let's see if
6326 we can fill in the MEM_ATTRs for this memory. */
6327 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
6328 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
6330 /* Creator of a heap trampoline is responsible for making sure the
6331 address is aligned to at least STACK_BOUNDARY. Normally malloc
6332 will ensure this anyhow. */
6333 tmp
= round_trampoline_addr (r_tramp
);
6336 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
6337 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
6338 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
6341 /* The FUNC argument should be the address of the nested function.
6342 Extract the actual function decl to pass to the hook. */
6343 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
6344 t_func
= TREE_OPERAND (t_func
, 0);
6345 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
6347 r_chain
= expand_normal (t_chain
);
6349 /* Generate insns to initialize the trampoline. */
6350 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
6354 trampolines_created
= 1;
6356 if (targetm
.calls
.custom_function_descriptors
!= 0)
6357 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
6358 "trampoline generated for nested function %qD", t_func
);
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address and let the target adjust it for indirect calls.
   (NOTE(review): fragment; declaration of `tramp', early return and the
   final return are missing — kept byte-identical.)  */
6365 expand_builtin_adjust_trampoline (tree exp
)
6369 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6372 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
6373 tramp
= round_trampoline_addr (tramp
);
6374 if (targetm
.calls
.trampoline_adjust_address
)
6375 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
/* NOTE(review): fragment with missing interior lines; kept byte-identical.  */
6380 /* Expand a call to the builtin descriptor initialization routine.
6381 A descriptor is made up of a couple of pointers to the static
6382 chain and the code entry in this order. */
6385 expand_builtin_init_descriptor (tree exp
)
6387 tree t_descr
, t_func
, t_chain
;
6388 rtx m_descr
, r_descr
, r_func
, r_chain
;
6390 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
6394 t_descr
= CALL_EXPR_ARG (exp
, 0);
6395 t_func
= CALL_EXPR_ARG (exp
, 1);
6396 t_chain
= CALL_EXPR_ARG (exp
, 2);
6398 r_descr
= expand_normal (t_descr
);
6399 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
6400 MEM_NOTRAP_P (m_descr
) = 1;
6401 set_mem_align (m_descr
, GET_MODE_ALIGNMENT (ptr_mode
));
6403 r_func
= expand_normal (t_func
);
6404 r_chain
= expand_normal (t_chain
);
6406 /* Generate insns to initialize the descriptor. */
6407 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
6408 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
6409 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
6414 /* Expand a call to the builtin descriptor adjustment routine. */
6417 expand_builtin_adjust_descriptor (tree exp
)
6421 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6424 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
6426 /* Unalign the descriptor to allow runtime identification. */
6427 tramp
= plus_constant (ptr_mode
, tramp
,
6428 targetm
.calls
.custom_function_descriptors
);
6430 return force_operand (tramp
, NULL_RTX
);
/* NOTE(review): fragment with many missing interior lines (embedded
   numbering jumps, e.g. 6493->6498, 6524->6528); kept byte-identical —
   restore from upstream GCC builtins.c before editing.  */
6433 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6434 function. The function first checks whether the back end provides
6435 an insn to implement signbit for the respective mode. If not, it
6436 checks whether the floating point format of the value is such that
6437 the sign bit can be extracted. If that is not the case, error out.
6438 EXP is the expression that is a call to the builtin function; if
6439 convenient, the result should be placed in TARGET. */
6441 expand_builtin_signbit (tree exp
, rtx target
)
6443 const struct real_format
*fmt
;
6444 scalar_float_mode fmode
;
6445 scalar_int_mode rmode
, imode
;
6448 enum insn_code icode
;
6450 location_t loc
= EXPR_LOCATION (exp
);
6452 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
6455 arg
= CALL_EXPR_ARG (exp
, 0);
6456 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
6457 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
6458 fmt
= REAL_MODE_FORMAT (fmode
);
6460 arg
= builtin_save_expr (arg
);
6462 /* Expand the argument yielding a RTX expression. */
6463 temp
= expand_normal (arg
);
6465 /* Check if the back end provides an insn that handles signbit for the
6467 icode
= optab_handler (signbit_optab
, fmode
);
6468 if (icode
!= CODE_FOR_nothing
)
6470 rtx_insn
*last
= get_last_insn ();
6471 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
6472 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
/* The insn pattern failed — roll back any partially emitted insns.  */
6474 delete_insns_since (last
);
6477 /* For floating point formats without a sign bit, implement signbit
6479 bitpos
= fmt
->signbit_ro
;
6482 /* But we can't do this if the format supports signed zero. */
6483 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
/* No sign bit: signbit(x) is simply x < 0.  */
6485 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
6486 build_real (TREE_TYPE (arg
), dconst0
));
6487 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
6490 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
6492 imode
= int_mode_for_mode (fmode
).require ();
6493 temp
= gen_lowpart (imode
, temp
);
6498 /* Handle targets with different FP word orders. */
6499 if (FLOAT_WORDS_BIG_ENDIAN
)
6500 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
6502 word
= bitpos
/ BITS_PER_WORD
;
6503 temp
= operand_subword_force (temp
, word
, fmode
);
6504 bitpos
= bitpos
% BITS_PER_WORD
;
6507 /* Force the intermediate word_mode (or narrower) result into a
6508 register. This avoids attempting to create paradoxical SUBREGs
6509 of floating point modes below. */
6510 temp
= force_reg (imode
, temp
);
6512 /* If the bitpos is within the "result mode" lowpart, the operation
6513 can be implement with a single bitwise AND. Otherwise, we need
6514 a right shift and an AND. */
6516 if (bitpos
< GET_MODE_BITSIZE (rmode
))
6518 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
6520 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
6521 temp
= gen_lowpart (rmode
, temp
);
6522 temp
= expand_binop (rmode
, and_optab
, temp
,
6523 immed_wide_int_const (mask
, rmode
),
6524 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
6528 /* Perform a logical right shift to place the signbit in the least
6529 significant bit, then truncate the result to the desired mode
6530 and mask just this bit. */
6531 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
6532 temp
= gen_lowpart (rmode
, temp
);
6533 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
6534 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
/* NOTE(review): fragment; switch braces, `break' statements, the
   BUILT_IN_FORK case label and declarations of `id'/`decl'/`call' are
   missing from this extraction.  Kept byte-identical.  */
6540 /* Expand fork or exec calls. TARGET is the desired target of the
6541 call. EXP is the call. FN is the
6542 identificator of the actual function. IGNORE is nonzero if the
6543 value is to be ignored. */
6546 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
6551 if (DECL_FUNCTION_CODE (fn
) != BUILT_IN_FORK
)
6553 /* Detect unterminated path. */
6554 if (!check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 0)))
6557 /* Also detect unterminated first argument. */
6558 switch (DECL_FUNCTION_CODE (fn
))
6560 case BUILT_IN_EXECL
:
6561 case BUILT_IN_EXECLE
:
6562 case BUILT_IN_EXECLP
:
6563 if (!check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 0)))
6571 /* If we are not profiling, just call the function. */
6572 if (!profile_arc_flag
)
6575 /* Otherwise call the wrapper. This should be equivalent for the rest of
6576 compiler, so the code does not diverge, and the wrapper may run the
6577 code necessary for keeping the profiling sane. */
6579 switch (DECL_FUNCTION_CODE (fn
))
6582 id
= get_identifier ("__gcov_fork");
6585 case BUILT_IN_EXECL
:
6586 id
= get_identifier ("__gcov_execl");
6589 case BUILT_IN_EXECV
:
6590 id
= get_identifier ("__gcov_execv");
6593 case BUILT_IN_EXECLP
:
6594 id
= get_identifier ("__gcov_execlp");
6597 case BUILT_IN_EXECLE
:
6598 id
= get_identifier ("__gcov_execle");
6601 case BUILT_IN_EXECVP
:
6602 id
= get_identifier ("__gcov_execvp");
6605 case BUILT_IN_EXECVE
:
6606 id
= get_identifier ("__gcov_execve");
/* Build an external decl for the __gcov_* wrapper with FN's type.  */
6613 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
6614 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
6615 DECL_EXTERNAL (decl
) = 1;
6616 TREE_PUBLIC (decl
) = 1;
6617 DECL_ARTIFICIAL (decl
) = 1;
6618 TREE_NOTHROW (decl
) = 1;
6619 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
6620 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
/* Re-target the original call expression at the wrapper and expand it.  */
6621 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
6622 return expand_call (call
, target
, ignore
);
6627 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6628 the pointer in these functions is void*, the tree optimizers may remove
6629 casts. The mode computed in expand_builtin isn't reliable either, due
6630 to __sync_bool_compare_and_swap.
6632 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6633 group of builtins. This gives us log2 of the mode size. */
6635 static inline machine_mode
6636 get_builtin_sync_mode (int fcode_diff
)
6638 /* The size is not negotiable, so ask not to get BLKmode in return
6639 if the target indicates that a smaller size would be better. */
6640 return int_mode_for_size (BITS_PER_UNIT
<< fcode_diff
, 0).require ();
6643 /* Expand the memory expression LOC and return the appropriate memory operand
6644 for the builtin_sync operations. */
6647 get_builtin_sync_mem (tree loc
, machine_mode mode
)
6650 int addr_space
= TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc
))
6651 ? TREE_TYPE (TREE_TYPE (loc
))
6653 scalar_int_mode addr_mode
= targetm
.addr_space
.address_mode (addr_space
);
6655 addr
= expand_expr (loc
, NULL_RTX
, addr_mode
, EXPAND_SUM
);
6656 addr
= convert_memory_address (addr_mode
, addr
);
6658 /* Note that we explicitly do not want any alias information for this
6659 memory, so that we kill all other live memories. Otherwise we don't
6660 satisfy the full barrier semantics of the intrinsic. */
6661 mem
= gen_rtx_MEM (mode
, addr
);
6663 set_mem_addr_space (mem
, addr_space
);
6665 mem
= validize_mem (mem
);
6667 /* The alignment needs to be at least according to that of the mode. */
6668 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
6669 get_pointer_alignment (loc
)));
6670 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
6671 MEM_VOLATILE_P (mem
) = 1;
6676 /* Make sure an argument is in the right mode.
6677 EXP is the tree argument.
6678 MODE is the mode it should be in. */
6681 expand_expr_force_mode (tree exp
, machine_mode mode
)
6684 machine_mode old_mode
;
6686 if (TREE_CODE (exp
) == SSA_NAME
6687 && TYPE_MODE (TREE_TYPE (exp
)) != mode
)
6689 /* Undo argument promotion if possible, as combine might not
6690 be able to do it later due to MEM_VOLATILE_P uses in the
6692 gimple
*g
= get_gimple_for_ssa_name (exp
);
6693 if (g
&& gimple_assign_cast_p (g
))
6695 tree rhs
= gimple_assign_rhs1 (g
);
6696 tree_code code
= gimple_assign_rhs_code (g
);
6697 if (CONVERT_EXPR_CODE_P (code
)
6698 && TYPE_MODE (TREE_TYPE (rhs
)) == mode
6699 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6700 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6701 && (TYPE_PRECISION (TREE_TYPE (exp
))
6702 > TYPE_PRECISION (TREE_TYPE (rhs
))))
6707 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
6708 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6709 of CONST_INTs, where we know the old_mode only from the call argument. */
6711 old_mode
= GET_MODE (val
);
6712 if (old_mode
== VOIDmode
)
6713 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
6714 val
= convert_modes (mode
, old_mode
, val
, 1);
6719 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6720 EXP is the CALL_EXPR. CODE is the rtx code
6721 that corresponds to the arithmetic or logical operation from the name;
6722 an exception here is that NOT actually means NAND. TARGET is an optional
6723 place for us to store the results; AFTER is true if this is the
6724 fetch_and_xxx form. */
6727 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
6728 enum rtx_code code
, bool after
,
6732 location_t loc
= EXPR_LOCATION (exp
);
6734 if (code
== NOT
&& warn_sync_nand
)
6736 tree fndecl
= get_callee_fndecl (exp
);
6737 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6739 static bool warned_f_a_n
, warned_n_a_f
;
6743 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6744 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6745 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6746 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6747 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6751 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
6752 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
6753 warned_f_a_n
= true;
6756 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6757 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6758 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6759 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6760 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6764 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
6765 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
6766 warned_n_a_f
= true;
6774 /* Expand the operands. */
6775 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6776 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6778 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
6782 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6783 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6784 true if this is the boolean form. TARGET is a place for us to store the
6785 results; this is NOT optional if IS_BOOL is true. */
6788 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
6789 bool is_bool
, rtx target
)
6791 rtx old_val
, new_val
, mem
;
6794 /* Expand the operands. */
6795 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6796 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6797 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
6799 pbool
= poval
= NULL
;
6800 if (target
!= const0_rtx
)
6807 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
6808 false, MEMMODEL_SYNC_SEQ_CST
,
6809 MEMMODEL_SYNC_SEQ_CST
))
6815 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6816 general form is actually an atomic exchange, and some targets only
6817 support a reduced form with the second argument being a constant 1.
6818 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6822 expand_builtin_sync_lock_test_and_set (machine_mode mode
, tree exp
,
6827 /* Expand the operands. */
6828 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6829 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6831 return expand_sync_lock_test_and_set (target
, mem
, val
);
6834 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6837 expand_builtin_sync_lock_release (machine_mode mode
, tree exp
)
6841 /* Expand the operands. */
6842 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6844 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_SYNC_RELEASE
, true);
6847 /* Given an integer representing an ``enum memmodel'', verify its
6848 correctness and return the memory model enum. */
6850 static enum memmodel
6851 get_memmodel (tree exp
)
6854 unsigned HOST_WIDE_INT val
;
6856 = expansion_point_location_if_in_system_header (input_location
);
6858 /* If the parameter is not a constant, it's a run time value so we'll just
6859 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6860 if (TREE_CODE (exp
) != INTEGER_CST
)
6861 return MEMMODEL_SEQ_CST
;
6863 op
= expand_normal (exp
);
6866 if (targetm
.memmodel_check
)
6867 val
= targetm
.memmodel_check (val
);
6868 else if (val
& ~MEMMODEL_MASK
)
6870 warning_at (loc
, OPT_Winvalid_memory_model
,
6871 "unknown architecture specifier in memory model to builtin");
6872 return MEMMODEL_SEQ_CST
;
6875 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
6876 if (memmodel_base (val
) >= MEMMODEL_LAST
)
6878 warning_at (loc
, OPT_Winvalid_memory_model
,
6879 "invalid memory model argument to builtin");
6880 return MEMMODEL_SEQ_CST
;
6883 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6884 be conservative and promote consume to acquire. */
6885 if (val
== MEMMODEL_CONSUME
)
6886 val
= MEMMODEL_ACQUIRE
;
6888 return (enum memmodel
) val
;
6891 /* Expand the __atomic_exchange intrinsic:
6892 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6893 EXP is the CALL_EXPR.
6894 TARGET is an optional place for us to store the results. */
6897 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
6900 enum memmodel model
;
6902 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6904 if (!flag_inline_atomics
)
6907 /* Expand the operands. */
6908 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6909 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6911 return expand_atomic_exchange (target
, mem
, val
, model
);
6914 /* Expand the __atomic_compare_exchange intrinsic:
6915 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6916 TYPE desired, BOOL weak,
6917 enum memmodel success,
6918 enum memmodel failure)
6919 EXP is the CALL_EXPR.
6920 TARGET is an optional place for us to store the results. */
6923 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
6926 rtx expect
, desired
, mem
, oldval
;
6927 rtx_code_label
*label
;
6928 enum memmodel success
, failure
;
6932 = expansion_point_location_if_in_system_header (input_location
);
6934 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
6935 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
6937 if (failure
> success
)
6939 warning_at (loc
, OPT_Winvalid_memory_model
,
6940 "failure memory model cannot be stronger than success "
6941 "memory model for %<__atomic_compare_exchange%>");
6942 success
= MEMMODEL_SEQ_CST
;
6945 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6947 warning_at (loc
, OPT_Winvalid_memory_model
,
6948 "invalid failure memory model for "
6949 "%<__atomic_compare_exchange%>");
6950 failure
= MEMMODEL_SEQ_CST
;
6951 success
= MEMMODEL_SEQ_CST
;
6955 if (!flag_inline_atomics
)
6958 /* Expand the operands. */
6959 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6961 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6962 expect
= convert_memory_address (Pmode
, expect
);
6963 expect
= gen_rtx_MEM (mode
, expect
);
6964 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
6966 weak
= CALL_EXPR_ARG (exp
, 3);
6968 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
6971 if (target
== const0_rtx
)
6974 /* Lest the rtl backend create a race condition with an imporoper store
6975 to memory, always create a new pseudo for OLDVAL. */
6978 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
6979 is_weak
, success
, failure
))
6982 /* Conditionally store back to EXPECT, lest we create a race condition
6983 with an improper store to memory. */
6984 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6985 the normal case where EXPECT is totally private, i.e. a register. At
6986 which point the store can be unconditional. */
6987 label
= gen_label_rtx ();
6988 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
6989 GET_MODE (target
), 1, label
);
6990 emit_move_insn (expect
, oldval
);
6996 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6997 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6998 call. The weak parameter must be dropped to match the expected parameter
6999 list and the expected argument changed from value to pointer to memory
7003 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
7006 vec
<tree
, va_gc
> *vec
;
7009 vec
->quick_push (gimple_call_arg (call
, 0));
7010 tree expected
= gimple_call_arg (call
, 1);
7011 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
7012 TREE_TYPE (expected
));
7013 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
7015 emit_move_insn (x
, expd
);
7016 tree v
= make_tree (TREE_TYPE (expected
), x
);
7017 vec
->quick_push (build1 (ADDR_EXPR
,
7018 build_pointer_type (TREE_TYPE (expected
)), v
));
7019 vec
->quick_push (gimple_call_arg (call
, 2));
7020 /* Skip the boolean weak parameter. */
7021 for (z
= 4; z
< 6; z
++)
7022 vec
->quick_push (gimple_call_arg (call
, z
));
7023 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
7024 unsigned int bytes_log2
= exact_log2 (GET_MODE_SIZE (mode
).to_constant ());
7025 gcc_assert (bytes_log2
< 5);
7026 built_in_function fncode
7027 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
7029 tree fndecl
= builtin_decl_explicit (fncode
);
7030 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
7032 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
7033 tree lhs
= gimple_call_lhs (call
);
7034 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
7037 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
7038 if (GET_MODE (boolret
) != mode
)
7039 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
7040 x
= force_reg (mode
, x
);
7041 write_complex_part (target
, boolret
, true);
7042 write_complex_part (target
, x
, false);
7046 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7049 expand_ifn_atomic_compare_exchange (gcall
*call
)
7051 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
7052 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
7053 machine_mode mode
= int_mode_for_size (BITS_PER_UNIT
* size
, 0).require ();
7054 rtx expect
, desired
, mem
, oldval
, boolret
;
7055 enum memmodel success
, failure
;
7059 = expansion_point_location_if_in_system_header (gimple_location (call
));
7061 success
= get_memmodel (gimple_call_arg (call
, 4));
7062 failure
= get_memmodel (gimple_call_arg (call
, 5));
7064 if (failure
> success
)
7066 warning_at (loc
, OPT_Winvalid_memory_model
,
7067 "failure memory model cannot be stronger than success "
7068 "memory model for %<__atomic_compare_exchange%>");
7069 success
= MEMMODEL_SEQ_CST
;
7072 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
7074 warning_at (loc
, OPT_Winvalid_memory_model
,
7075 "invalid failure memory model for "
7076 "%<__atomic_compare_exchange%>");
7077 failure
= MEMMODEL_SEQ_CST
;
7078 success
= MEMMODEL_SEQ_CST
;
7081 if (!flag_inline_atomics
)
7083 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
7087 /* Expand the operands. */
7088 mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
7090 expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
7091 desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
7093 is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
7098 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
7099 is_weak
, success
, failure
))
7101 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
7105 lhs
= gimple_call_lhs (call
);
7108 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
7109 if (GET_MODE (boolret
) != mode
)
7110 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
7111 write_complex_part (target
, boolret
, true);
7112 write_complex_part (target
, oldval
, false);
7116 /* Expand the __atomic_load intrinsic:
7117 TYPE __atomic_load (TYPE *object, enum memmodel)
7118 EXP is the CALL_EXPR.
7119 TARGET is an optional place for us to store the results. */
7122 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
7125 enum memmodel model
;
7127 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
7128 if (is_mm_release (model
) || is_mm_acq_rel (model
))
7131 = expansion_point_location_if_in_system_header (input_location
);
7132 warning_at (loc
, OPT_Winvalid_memory_model
,
7133 "invalid memory model for %<__atomic_load%>");
7134 model
= MEMMODEL_SEQ_CST
;
7137 if (!flag_inline_atomics
)
7140 /* Expand the operand. */
7141 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
7143 return expand_atomic_load (target
, mem
, model
);
7147 /* Expand the __atomic_store intrinsic:
7148 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7149 EXP is the CALL_EXPR.
7150 TARGET is an optional place for us to store the results. */
7153 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
7156 enum memmodel model
;
7158 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
7159 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
7160 || is_mm_release (model
)))
7163 = expansion_point_location_if_in_system_header (input_location
);
7164 warning_at (loc
, OPT_Winvalid_memory_model
,
7165 "invalid memory model for %<__atomic_store%>");
7166 model
= MEMMODEL_SEQ_CST
;
7169 if (!flag_inline_atomics
)
7172 /* Expand the operands. */
7173 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
7174 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
7176 return expand_atomic_store (mem
, val
, model
, false);
7179 /* Expand the __atomic_fetch_XXX intrinsic:
7180 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7181 EXP is the CALL_EXPR.
7182 TARGET is an optional place for us to store the results.
7183 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
7184 FETCH_AFTER is true if returning the result of the operation.
7185 FETCH_AFTER is false if returning the value before the operation.
7186 IGNORE is true if the result is not used.
7187 EXT_CALL is the correct builtin for an external call if this cannot be
7188 resolved to an instruction sequence. */
7191 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
7192 enum rtx_code code
, bool fetch_after
,
7193 bool ignore
, enum built_in_function ext_call
)
7196 enum memmodel model
;
7200 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
7202 /* Expand the operands. */
7203 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
7204 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
7206 /* Only try generating instructions if inlining is turned on. */
7207 if (flag_inline_atomics
)
7209 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
7214 /* Return if a different routine isn't needed for the library call. */
7215 if (ext_call
== BUILT_IN_NONE
)
7218 /* Change the call to the specified function. */
7219 fndecl
= get_callee_fndecl (exp
);
7220 addr
= CALL_EXPR_FN (exp
);
7223 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
7224 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
7226 /* If we will emit code after the call, the call cannot be a tail call.
7227 If it is emitted as a tail call, a barrier is emitted after it, and
7228 then all trailing code is removed. */
7230 CALL_EXPR_TAILCALL (exp
) = 0;
7232 /* Expand the call here so we can emit trailing code. */
7233 ret
= expand_call (exp
, target
, ignore
);
7235 /* Replace the original function just in case it matters. */
7236 TREE_OPERAND (addr
, 0) = fndecl
;
7238 /* Then issue the arithmetic correction to return the right result. */
7243 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
7245 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
7248 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
7254 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7257 expand_ifn_atomic_bit_test_and (gcall
*call
)
7259 tree ptr
= gimple_call_arg (call
, 0);
7260 tree bit
= gimple_call_arg (call
, 1);
7261 tree flag
= gimple_call_arg (call
, 2);
7262 tree lhs
= gimple_call_lhs (call
);
7263 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
7264 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
7267 class expand_operand ops
[5];
7269 gcc_assert (flag_inline_atomics
);
7271 if (gimple_call_num_args (call
) == 4)
7272 model
= get_memmodel (gimple_call_arg (call
, 3));
7274 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
7275 rtx val
= expand_expr_force_mode (bit
, mode
);
7277 switch (gimple_call_internal_fn (call
))
7279 case IFN_ATOMIC_BIT_TEST_AND_SET
:
7281 optab
= atomic_bit_test_and_set_optab
;
7283 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
7285 optab
= atomic_bit_test_and_complement_optab
;
7287 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
7289 optab
= atomic_bit_test_and_reset_optab
;
7295 if (lhs
== NULL_TREE
)
7297 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
7298 val
, NULL_RTX
, true, OPTAB_DIRECT
);
7300 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
7301 expand_atomic_fetch_op (const0_rtx
, mem
, val
, code
, model
, false);
7305 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
7306 enum insn_code icode
= direct_optab_handler (optab
, mode
);
7307 gcc_assert (icode
!= CODE_FOR_nothing
);
7308 create_output_operand (&ops
[0], target
, mode
);
7309 create_fixed_operand (&ops
[1], mem
);
7310 create_convert_operand_to (&ops
[2], val
, mode
, true);
7311 create_integer_operand (&ops
[3], model
);
7312 create_integer_operand (&ops
[4], integer_onep (flag
));
7313 if (maybe_expand_insn (icode
, 5, ops
))
7317 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
7318 val
, NULL_RTX
, true, OPTAB_DIRECT
);
7321 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
7322 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
7323 code
, model
, false);
7324 if (integer_onep (flag
))
7326 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
7327 NULL_RTX
, true, OPTAB_DIRECT
);
7328 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
7329 true, OPTAB_DIRECT
);
7332 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
7334 if (result
!= target
)
7335 emit_move_insn (target
, result
);
7338 /* Expand an atomic clear operation.
7339 void _atomic_clear (BOOL *obj, enum memmodel)
7340 EXP is the call expression. */
7343 expand_builtin_atomic_clear (tree exp
)
7347 enum memmodel model
;
7349 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
7350 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
7351 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
7353 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
7356 = expansion_point_location_if_in_system_header (input_location
);
7357 warning_at (loc
, OPT_Winvalid_memory_model
,
7358 "invalid memory model for %<__atomic_store%>");
7359 model
= MEMMODEL_SEQ_CST
;
7362 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7363 Failing that, a store is issued by __atomic_store. The only way this can
7364 fail is if the bool type is larger than a word size. Unlikely, but
7365 handle it anyway for completeness. Assume a single threaded model since
7366 there is no atomic support in this case, and no barriers are required. */
7367 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
7369 emit_move_insn (mem
, const0_rtx
);
7373 /* Expand an atomic test_and_set operation.
7374 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
7375 EXP is the call expression. */
7378 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
7381 enum memmodel model
;
7384 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
7385 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
7386 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
7388 return expand_atomic_test_and_set (target
, mem
, model
);
7392 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7393 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7396 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
7400 unsigned int mode_align
, type_align
;
7402 if (TREE_CODE (arg0
) != INTEGER_CST
)
7405 /* We need a corresponding integer mode for the access to be lock-free. */
7406 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
7407 if (!int_mode_for_size (size
, 0).exists (&mode
))
7408 return boolean_false_node
;
7410 mode_align
= GET_MODE_ALIGNMENT (mode
);
7412 if (TREE_CODE (arg1
) == INTEGER_CST
)
7414 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
7416 /* Either this argument is null, or it's a fake pointer encoding
7417 the alignment of the object. */
7418 val
= least_bit_hwi (val
);
7419 val
*= BITS_PER_UNIT
;
7421 if (val
== 0 || mode_align
< val
)
7422 type_align
= mode_align
;
7428 tree ttype
= TREE_TYPE (arg1
);
7430 /* This function is usually invoked and folded immediately by the front
7431 end before anything else has a chance to look at it. The pointer
7432 parameter at this point is usually cast to a void *, so check for that
7433 and look past the cast. */
7434 if (CONVERT_EXPR_P (arg1
)
7435 && POINTER_TYPE_P (ttype
)
7436 && VOID_TYPE_P (TREE_TYPE (ttype
))
7437 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
7438 arg1
= TREE_OPERAND (arg1
, 0);
7440 ttype
= TREE_TYPE (arg1
);
7441 gcc_assert (POINTER_TYPE_P (ttype
));
7443 /* Get the underlying type of the object. */
7444 ttype
= TREE_TYPE (ttype
);
7445 type_align
= TYPE_ALIGN (ttype
);
7448 /* If the object has smaller alignment, the lock free routines cannot
7450 if (type_align
< mode_align
)
7451 return boolean_false_node
;
7453 /* Check if a compare_and_swap pattern exists for the mode which represents
7454 the required size. The pattern is not allowed to fail, so the existence
7455 of the pattern indicates support is present. Also require that an
7456 atomic load exists for the required size. */
7457 if (can_compare_and_swap_p (mode
, true) && can_atomic_load_p (mode
))
7458 return boolean_true_node
;
7460 return boolean_false_node
;
7463 /* Return true if the parameters to call EXP represent an object which will
7464 always generate lock free instructions. The first argument represents the
7465 size of the object, and the second parameter is a pointer to the object
7466 itself. If NULL is passed for the object, then the result is based on
7467 typical alignment for an object of the specified size. Otherwise return
7471 expand_builtin_atomic_always_lock_free (tree exp
)
7474 tree arg0
= CALL_EXPR_ARG (exp
, 0);
7475 tree arg1
= CALL_EXPR_ARG (exp
, 1);
7477 if (TREE_CODE (arg0
) != INTEGER_CST
)
7479 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7483 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
7484 if (size
== boolean_true_node
)
7489 /* Return a one or zero if it can be determined that object ARG1 of size ARG
7490 is lock free on this architecture. */
7493 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
7495 if (!flag_inline_atomics
)
7498 /* If it isn't always lock free, don't generate a result. */
7499 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
7500 return boolean_true_node
;
7505 /* Return true if the parameters to call EXP represent an object which will
7506 always generate lock free instructions. The first argument represents the
7507 size of the object, and the second parameter is a pointer to the object
7508 itself. If NULL is passed for the object, then the result is based on
7509 typical alignment for an object of the specified size. Otherwise return
7513 expand_builtin_atomic_is_lock_free (tree exp
)
7516 tree arg0
= CALL_EXPR_ARG (exp
, 0);
7517 tree arg1
= CALL_EXPR_ARG (exp
, 1);
7519 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
7521 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7525 if (!flag_inline_atomics
)
7528 /* If the value is known at compile time, return the RTX for it. */
7529 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
7530 if (size
== boolean_true_node
)
7536 /* Expand the __atomic_thread_fence intrinsic:
7537 void __atomic_thread_fence (enum memmodel)
7538 EXP is the CALL_EXPR. */
7541 expand_builtin_atomic_thread_fence (tree exp
)
7543 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
7544 expand_mem_thread_fence (model
);
7547 /* Expand the __atomic_signal_fence intrinsic:
7548 void __atomic_signal_fence (enum memmodel)
7549 EXP is the CALL_EXPR. */
7552 expand_builtin_atomic_signal_fence (tree exp
)
7554 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
7555 expand_mem_signal_fence (model
);
7558 /* Expand the __sync_synchronize intrinsic. */
7561 expand_builtin_sync_synchronize (void)
7563 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
7567 expand_builtin_thread_pointer (tree exp
, rtx target
)
7569 enum insn_code icode
;
7570 if (!validate_arglist (exp
, VOID_TYPE
))
7572 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
7573 if (icode
!= CODE_FOR_nothing
)
7575 class expand_operand op
;
7576 /* If the target is not sutitable then create a new target. */
7577 if (target
== NULL_RTX
7579 || GET_MODE (target
) != Pmode
)
7580 target
= gen_reg_rtx (Pmode
);
7581 create_output_operand (&op
, target
, Pmode
);
7582 expand_insn (icode
, 1, &op
);
7585 error ("%<__builtin_thread_pointer%> is not supported on this target");
7590 expand_builtin_set_thread_pointer (tree exp
)
7592 enum insn_code icode
;
7593 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7595 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
7596 if (icode
!= CODE_FOR_nothing
)
7598 class expand_operand op
;
7599 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
7600 Pmode
, EXPAND_NORMAL
);
7601 create_input_operand (&op
, val
, Pmode
);
7602 expand_insn (icode
, 1, &op
);
7605 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7609 /* Emit code to restore the current value of stack. */
7612 expand_stack_restore (tree var
)
7615 rtx sa
= expand_normal (var
);
7617 sa
= convert_memory_address (Pmode
, sa
);
7619 prev
= get_last_insn ();
7620 emit_stack_restore (SAVE_BLOCK
, sa
);
7622 record_new_stack_level ();
7624 fixup_args_size_notes (prev
, get_last_insn (), 0);
7627 /* Emit code to save the current value of stack. */
7630 expand_stack_save (void)
7634 emit_stack_save (SAVE_BLOCK
, &ret
);
7638 /* Emit code to get the openacc gang, worker or vector id or size. */
7641 expand_builtin_goacc_parlevel_id_size (tree exp
, rtx target
, int ignore
)
7644 rtx fallback_retval
;
7645 rtx_insn
*(*gen_fn
) (rtx
, rtx
);
7646 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp
)))
7648 case BUILT_IN_GOACC_PARLEVEL_ID
:
7649 name
= "__builtin_goacc_parlevel_id";
7650 fallback_retval
= const0_rtx
;
7651 gen_fn
= targetm
.gen_oacc_dim_pos
;
7653 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
7654 name
= "__builtin_goacc_parlevel_size";
7655 fallback_retval
= const1_rtx
;
7656 gen_fn
= targetm
.gen_oacc_dim_size
;
7662 if (oacc_get_fn_attrib (current_function_decl
) == NULL_TREE
)
7664 error ("%qs only supported in OpenACC code", name
);
7668 tree arg
= CALL_EXPR_ARG (exp
, 0);
7669 if (TREE_CODE (arg
) != INTEGER_CST
)
7671 error ("non-constant argument 0 to %qs", name
);
7675 int dim
= TREE_INT_CST_LOW (arg
);
7679 case GOMP_DIM_WORKER
:
7680 case GOMP_DIM_VECTOR
:
7683 error ("illegal argument 0 to %qs", name
);
7690 if (target
== NULL_RTX
)
7691 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7693 if (!targetm
.have_oacc_dim_size ())
7695 emit_move_insn (target
, fallback_retval
);
7699 rtx reg
= MEM_P (target
) ? gen_reg_rtx (GET_MODE (target
)) : target
;
7700 emit_insn (gen_fn (reg
, GEN_INT (dim
)));
7702 emit_move_insn (target
, reg
);
7707 /* Expand a string compare operation using a sequence of char comparison
7708 to get rid of the calling overhead, with result going to TARGET if
7711 VAR_STR is the variable string source;
7712 CONST_STR is the constant string source;
7713 LENGTH is the number of chars to compare;
7714 CONST_STR_N indicates which source string is the constant string;
7715 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7717 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7719 target = (int) (unsigned char) var_str[0]
7720 - (int) (unsigned char) const_str[0];
7724 target = (int) (unsigned char) var_str[length - 2]
7725 - (int) (unsigned char) const_str[length - 2];
7728 target = (int) (unsigned char) var_str[length - 1]
7729 - (int) (unsigned char) const_str[length - 1];
7734 inline_string_cmp (rtx target
, tree var_str
, const char *const_str
,
7735 unsigned HOST_WIDE_INT length
,
7736 int const_str_n
, machine_mode mode
)
7738 HOST_WIDE_INT offset
= 0;
7740 = get_memory_rtx (var_str
, build_int_cst (unsigned_type_node
,length
));
7741 rtx var_rtx
= NULL_RTX
;
7742 rtx const_rtx
= NULL_RTX
;
7743 rtx result
= target
? target
: gen_reg_rtx (mode
);
7744 rtx_code_label
*ne_label
= gen_label_rtx ();
7745 tree unit_type_node
= unsigned_char_type_node
;
7746 scalar_int_mode unit_mode
7747 = as_a
<scalar_int_mode
> TYPE_MODE (unit_type_node
);
7751 for (unsigned HOST_WIDE_INT i
= 0; i
< length
; i
++)
7754 = adjust_address (var_rtx_array
, TYPE_MODE (unit_type_node
), offset
);
7755 const_rtx
= c_readstr (const_str
+ offset
, unit_mode
);
7756 rtx op0
= (const_str_n
== 1) ? const_rtx
: var_rtx
;
7757 rtx op1
= (const_str_n
== 1) ? var_rtx
: const_rtx
;
7759 op0
= convert_modes (mode
, unit_mode
, op0
, 1);
7760 op1
= convert_modes (mode
, unit_mode
, op1
, 1);
7761 result
= expand_simple_binop (mode
, MINUS
, op0
, op1
,
7762 result
, 1, OPTAB_WIDEN
);
7764 emit_cmp_and_jump_insns (result
, CONST0_RTX (mode
), NE
, NULL_RTX
,
7765 mode
, true, ne_label
);
7766 offset
+= GET_MODE_SIZE (unit_mode
);
7769 emit_label (ne_label
);
7770 rtx_insn
*insns
= get_insns ();
7777 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7778 to TARGET if that's convenient.
7779 If the call is not been inlined, return NULL_RTX. */
7782 inline_expand_builtin_bytecmp (tree exp
, rtx target
)
7784 tree fndecl
= get_callee_fndecl (exp
);
7785 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7786 bool is_ncmp
= (fcode
== BUILT_IN_STRNCMP
|| fcode
== BUILT_IN_MEMCMP
);
7788 /* Do NOT apply this inlining expansion when optimizing for size or
7789 optimization level below 2. */
7790 if (optimize
< 2 || optimize_insn_for_size_p ())
7793 gcc_checking_assert (fcode
== BUILT_IN_STRCMP
7794 || fcode
== BUILT_IN_STRNCMP
7795 || fcode
== BUILT_IN_MEMCMP
);
7797 /* On a target where the type of the call (int) has same or narrower presicion
7798 than unsigned char, give up the inlining expansion. */
7799 if (TYPE_PRECISION (unsigned_char_type_node
)
7800 >= TYPE_PRECISION (TREE_TYPE (exp
)))
7803 tree arg1
= CALL_EXPR_ARG (exp
, 0);
7804 tree arg2
= CALL_EXPR_ARG (exp
, 1);
7805 tree len3_tree
= is_ncmp
? CALL_EXPR_ARG (exp
, 2) : NULL_TREE
;
7807 unsigned HOST_WIDE_INT len1
= 0;
7808 unsigned HOST_WIDE_INT len2
= 0;
7809 unsigned HOST_WIDE_INT len3
= 0;
7811 /* Get the object representation of the initializers of ARG1 and ARG2
7812 as strings, provided they refer to constant objects, with their byte
7813 sizes in LEN1 and LEN2, respectively. */
7814 const char *bytes1
= c_getstr (arg1
, &len1
);
7815 const char *bytes2
= c_getstr (arg2
, &len2
);
7817 /* Fail if neither argument refers to an initialized constant. */
7818 if (!bytes1
&& !bytes2
)
7823 /* Fail if the memcmp/strncmp bound is not a constant. */
7824 if (!tree_fits_uhwi_p (len3_tree
))
7827 len3
= tree_to_uhwi (len3_tree
);
7829 if (fcode
== BUILT_IN_MEMCMP
)
7831 /* Fail if the memcmp bound is greater than the size of either
7832 of the two constant objects. */
7833 if ((bytes1
&& len1
< len3
)
7834 || (bytes2
&& len2
< len3
))
7839 if (fcode
!= BUILT_IN_MEMCMP
)
7841 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7842 and LEN2 to the length of the nul-terminated string stored
7845 len1
= strnlen (bytes1
, len1
) + 1;
7847 len2
= strnlen (bytes2
, len2
) + 1;
7850 /* See inline_string_cmp. */
7856 else if (len2
> len1
)
7861 /* For strncmp only, compute the new bound as the smallest of
7862 the lengths of the two strings (plus 1) and the bound provided
7864 unsigned HOST_WIDE_INT bound
= (const_str_n
== 1) ? len1
: len2
;
7865 if (is_ncmp
&& len3
< bound
)
7868 /* If the bound of the comparison is larger than the threshold,
7870 if (bound
> (unsigned HOST_WIDE_INT
) param_builtin_string_cmp_inline_length
)
7873 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
7875 /* Now, start inline expansion the call. */
7876 return inline_string_cmp (target
, (const_str_n
== 1) ? arg2
: arg1
,
7877 (const_str_n
== 1) ? bytes1
: bytes2
, bound
,
7881 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7882 represents the size of the first argument to that call, or VOIDmode
7883 if the argument is a pointer. IGNORE will be true if the result
7886 expand_speculation_safe_value (machine_mode mode
, tree exp
, rtx target
,
7890 unsigned nargs
= call_expr_nargs (exp
);
7892 tree arg0
= CALL_EXPR_ARG (exp
, 0);
7894 if (mode
== VOIDmode
)
7896 mode
= TYPE_MODE (TREE_TYPE (arg0
));
7897 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
7900 val
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
7902 /* An optional second argument can be used as a failsafe value on
7903 some machines. If it isn't present, then the failsafe value is
7907 tree arg1
= CALL_EXPR_ARG (exp
, 1);
7908 failsafe
= expand_expr (arg1
, NULL_RTX
, mode
, EXPAND_NORMAL
);
7911 failsafe
= const0_rtx
;
7913 /* If the result isn't used, the behavior is undefined. It would be
7914 nice to emit a warning here, but path splitting means this might
7915 happen with legitimate code. So simply drop the builtin
7916 expansion in that case; we've handled any side-effects above. */
7920 /* If we don't have a suitable target, create one to hold the result. */
7921 if (target
== NULL
|| GET_MODE (target
) != mode
)
7922 target
= gen_reg_rtx (mode
);
7924 if (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
)
7925 val
= convert_modes (mode
, VOIDmode
, val
, false);
7927 return targetm
.speculation_safe_value (mode
, target
, val
, failsafe
);
7930 /* Expand an expression EXP that calls a built-in function,
7931 with result going to TARGET if that's convenient
7932 (and in mode MODE if that's convenient).
7933 SUBTARGET may be used as the target for computing one of EXP's operands.
7934 IGNORE is nonzero if the value is to be ignored. */
7937 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
7940 tree fndecl
= get_callee_fndecl (exp
);
7941 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
7944 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7945 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7947 /* When ASan is enabled, we don't want to expand some memory/string
7948 builtins and rely on libsanitizer's hooks. This allows us to avoid
7949 redundant checks and be sure, that possible overflow will be detected
7952 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7953 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
7954 return expand_call (exp
, target
, ignore
);
7956 /* When not optimizing, generate calls to library functions for a certain
7959 && !called_as_built_in (fndecl
)
7960 && fcode
!= BUILT_IN_FORK
7961 && fcode
!= BUILT_IN_EXECL
7962 && fcode
!= BUILT_IN_EXECV
7963 && fcode
!= BUILT_IN_EXECLP
7964 && fcode
!= BUILT_IN_EXECLE
7965 && fcode
!= BUILT_IN_EXECVP
7966 && fcode
!= BUILT_IN_EXECVE
7967 && !ALLOCA_FUNCTION_CODE_P (fcode
)
7968 && fcode
!= BUILT_IN_FREE
)
7969 return expand_call (exp
, target
, ignore
);
7971 /* The built-in function expanders test for target == const0_rtx
7972 to determine whether the function's result will be ignored. */
7974 target
= const0_rtx
;
7976 /* If the result of a pure or const built-in function is ignored, and
7977 none of its arguments are volatile, we can avoid expanding the
7978 built-in call and just evaluate the arguments for side-effects. */
7979 if (target
== const0_rtx
7980 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
7981 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
7983 bool volatilep
= false;
7985 call_expr_arg_iterator iter
;
7987 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7988 if (TREE_THIS_VOLATILE (arg
))
7996 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7997 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
8004 CASE_FLT_FN (BUILT_IN_FABS
):
8005 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
8006 case BUILT_IN_FABSD32
:
8007 case BUILT_IN_FABSD64
:
8008 case BUILT_IN_FABSD128
:
8009 target
= expand_builtin_fabs (exp
, target
, subtarget
);
8014 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
8015 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
8016 target
= expand_builtin_copysign (exp
, target
, subtarget
);
8021 /* Just do a normal library call if we were unable to fold
8023 CASE_FLT_FN (BUILT_IN_CABS
):
8026 CASE_FLT_FN (BUILT_IN_FMA
):
8027 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
8028 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
8033 CASE_FLT_FN (BUILT_IN_ILOGB
):
8034 if (! flag_unsafe_math_optimizations
)
8037 CASE_FLT_FN (BUILT_IN_ISINF
):
8038 CASE_FLT_FN (BUILT_IN_FINITE
):
8039 case BUILT_IN_ISFINITE
:
8040 case BUILT_IN_ISNORMAL
:
8041 target
= expand_builtin_interclass_mathfn (exp
, target
);
8046 CASE_FLT_FN (BUILT_IN_ICEIL
):
8047 CASE_FLT_FN (BUILT_IN_LCEIL
):
8048 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8049 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8050 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8051 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8052 target
= expand_builtin_int_roundingfn (exp
, target
);
8057 CASE_FLT_FN (BUILT_IN_IRINT
):
8058 CASE_FLT_FN (BUILT_IN_LRINT
):
8059 CASE_FLT_FN (BUILT_IN_LLRINT
):
8060 CASE_FLT_FN (BUILT_IN_IROUND
):
8061 CASE_FLT_FN (BUILT_IN_LROUND
):
8062 CASE_FLT_FN (BUILT_IN_LLROUND
):
8063 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
8068 CASE_FLT_FN (BUILT_IN_POWI
):
8069 target
= expand_builtin_powi (exp
, target
);
8074 CASE_FLT_FN (BUILT_IN_CEXPI
):
8075 target
= expand_builtin_cexpi (exp
, target
);
8076 gcc_assert (target
);
8079 CASE_FLT_FN (BUILT_IN_SIN
):
8080 CASE_FLT_FN (BUILT_IN_COS
):
8081 if (! flag_unsafe_math_optimizations
)
8083 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
8088 CASE_FLT_FN (BUILT_IN_SINCOS
):
8089 if (! flag_unsafe_math_optimizations
)
8091 target
= expand_builtin_sincos (exp
);
8096 case BUILT_IN_APPLY_ARGS
:
8097 return expand_builtin_apply_args ();
8099 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8100 FUNCTION with a copy of the parameters described by
8101 ARGUMENTS, and ARGSIZE. It returns a block of memory
8102 allocated on the stack into which is stored all the registers
8103 that might possibly be used for returning the result of a
8104 function. ARGUMENTS is the value returned by
8105 __builtin_apply_args. ARGSIZE is the number of bytes of
8106 arguments that must be copied. ??? How should this value be
8107 computed? We'll also need a safe worst case value for varargs
8109 case BUILT_IN_APPLY
:
8110 if (!validate_arglist (exp
, POINTER_TYPE
,
8111 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
8112 && !validate_arglist (exp
, REFERENCE_TYPE
,
8113 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
8119 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
8120 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
8121 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
8123 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
8126 /* __builtin_return (RESULT) causes the function to return the
8127 value described by RESULT. RESULT is address of the block of
8128 memory returned by __builtin_apply. */
8129 case BUILT_IN_RETURN
:
8130 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
8131 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
8134 case BUILT_IN_SAVEREGS
:
8135 return expand_builtin_saveregs ();
8137 case BUILT_IN_VA_ARG_PACK
:
8138 /* All valid uses of __builtin_va_arg_pack () are removed during
8140 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
8143 case BUILT_IN_VA_ARG_PACK_LEN
:
8144 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8146 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
8149 /* Return the address of the first anonymous stack arg. */
8150 case BUILT_IN_NEXT_ARG
:
8151 if (fold_builtin_next_arg (exp
, false))
8153 return expand_builtin_next_arg ();
8155 case BUILT_IN_CLEAR_CACHE
:
8156 target
= expand_builtin___clear_cache (exp
);
8161 case BUILT_IN_CLASSIFY_TYPE
:
8162 return expand_builtin_classify_type (exp
);
8164 case BUILT_IN_CONSTANT_P
:
8167 case BUILT_IN_FRAME_ADDRESS
:
8168 case BUILT_IN_RETURN_ADDRESS
:
8169 return expand_builtin_frame_address (fndecl
, exp
);
8171 /* Returns the address of the area where the structure is returned.
8173 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
8174 if (call_expr_nargs (exp
) != 0
8175 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
8176 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
8179 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
8181 CASE_BUILT_IN_ALLOCA
:
8182 target
= expand_builtin_alloca (exp
);
8187 case BUILT_IN_ASAN_ALLOCAS_UNPOISON
:
8188 return expand_asan_emit_allocas_unpoison (exp
);
8190 case BUILT_IN_STACK_SAVE
:
8191 return expand_stack_save ();
8193 case BUILT_IN_STACK_RESTORE
:
8194 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
8197 case BUILT_IN_BSWAP16
:
8198 case BUILT_IN_BSWAP32
:
8199 case BUILT_IN_BSWAP64
:
8200 case BUILT_IN_BSWAP128
:
8201 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
8206 CASE_INT_FN (BUILT_IN_FFS
):
8207 target
= expand_builtin_unop (target_mode
, exp
, target
,
8208 subtarget
, ffs_optab
);
8213 CASE_INT_FN (BUILT_IN_CLZ
):
8214 target
= expand_builtin_unop (target_mode
, exp
, target
,
8215 subtarget
, clz_optab
);
8220 CASE_INT_FN (BUILT_IN_CTZ
):
8221 target
= expand_builtin_unop (target_mode
, exp
, target
,
8222 subtarget
, ctz_optab
);
8227 CASE_INT_FN (BUILT_IN_CLRSB
):
8228 target
= expand_builtin_unop (target_mode
, exp
, target
,
8229 subtarget
, clrsb_optab
);
8234 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8235 target
= expand_builtin_unop (target_mode
, exp
, target
,
8236 subtarget
, popcount_optab
);
8241 CASE_INT_FN (BUILT_IN_PARITY
):
8242 target
= expand_builtin_unop (target_mode
, exp
, target
,
8243 subtarget
, parity_optab
);
8248 case BUILT_IN_STRLEN
:
8249 target
= expand_builtin_strlen (exp
, target
, target_mode
);
8254 case BUILT_IN_STRNLEN
:
8255 target
= expand_builtin_strnlen (exp
, target
, target_mode
);
8260 case BUILT_IN_STRCAT
:
8261 target
= expand_builtin_strcat (exp
);
8266 case BUILT_IN_GETTEXT
:
8268 case BUILT_IN_PUTS_UNLOCKED
:
8269 case BUILT_IN_STRDUP
:
8270 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
8271 check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 0));
8274 case BUILT_IN_INDEX
:
8275 case BUILT_IN_RINDEX
:
8276 case BUILT_IN_STRCHR
:
8277 case BUILT_IN_STRRCHR
:
8278 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
8279 check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 0));
8282 case BUILT_IN_FPUTS
:
8283 case BUILT_IN_FPUTS_UNLOCKED
:
8284 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
8285 check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 0));
8288 case BUILT_IN_STRNDUP
:
8289 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
8290 check_nul_terminated_array (exp
,
8291 CALL_EXPR_ARG (exp
, 0),
8292 CALL_EXPR_ARG (exp
, 1));
8295 case BUILT_IN_STRCASECMP
:
8296 case BUILT_IN_STRSTR
:
8297 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
8299 check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 0));
8300 check_nul_terminated_array (exp
, CALL_EXPR_ARG (exp
, 1));
8304 case BUILT_IN_STRCPY
:
8305 target
= expand_builtin_strcpy (exp
, target
);
8310 case BUILT_IN_STRNCAT
:
8311 target
= expand_builtin_strncat (exp
, target
);
8316 case BUILT_IN_STRNCPY
:
8317 target
= expand_builtin_strncpy (exp
, target
);
8322 case BUILT_IN_STPCPY
:
8323 target
= expand_builtin_stpcpy (exp
, target
, mode
);
8328 case BUILT_IN_STPNCPY
:
8329 target
= expand_builtin_stpncpy (exp
, target
);
8334 case BUILT_IN_MEMCHR
:
8335 target
= expand_builtin_memchr (exp
, target
);
8340 case BUILT_IN_MEMCPY
:
8341 target
= expand_builtin_memcpy (exp
, target
);
8346 case BUILT_IN_MEMMOVE
:
8347 target
= expand_builtin_memmove (exp
, target
);
8352 case BUILT_IN_MEMPCPY
:
8353 target
= expand_builtin_mempcpy (exp
, target
);
8358 case BUILT_IN_MEMSET
:
8359 target
= expand_builtin_memset (exp
, target
, mode
);
8364 case BUILT_IN_BZERO
:
8365 target
= expand_builtin_bzero (exp
);
8370 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8371 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
8372 when changing it to a strcmp call. */
8373 case BUILT_IN_STRCMP_EQ
:
8374 target
= expand_builtin_memcmp (exp
, target
, true);
8378 /* Change this call back to a BUILT_IN_STRCMP. */
8379 TREE_OPERAND (exp
, 1)
8380 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP
));
8382 /* Delete the last parameter. */
8384 vec
<tree
, va_gc
> *arg_vec
;
8385 vec_alloc (arg_vec
, 2);
8386 for (i
= 0; i
< 2; i
++)
8387 arg_vec
->quick_push (CALL_EXPR_ARG (exp
, i
));
8388 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), arg_vec
);
8391 case BUILT_IN_STRCMP
:
8392 target
= expand_builtin_strcmp (exp
, target
);
8397 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8398 back to a BUILT_IN_STRNCMP. */
8399 case BUILT_IN_STRNCMP_EQ
:
8400 target
= expand_builtin_memcmp (exp
, target
, true);
8404 /* Change it back to a BUILT_IN_STRNCMP. */
8405 TREE_OPERAND (exp
, 1)
8406 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP
));
8409 case BUILT_IN_STRNCMP
:
8410 target
= expand_builtin_strncmp (exp
, target
, mode
);
8416 case BUILT_IN_MEMCMP
:
8417 case BUILT_IN_MEMCMP_EQ
:
8418 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
8421 if (fcode
== BUILT_IN_MEMCMP_EQ
)
8423 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
8424 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
8428 case BUILT_IN_SETJMP
:
8429 /* This should have been lowered to the builtins below. */
8432 case BUILT_IN_SETJMP_SETUP
:
8433 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8434 and the receiver label. */
8435 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
8437 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
8438 VOIDmode
, EXPAND_NORMAL
);
8439 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
8440 rtx_insn
*label_r
= label_rtx (label
);
8442 /* This is copied from the handling of non-local gotos. */
8443 expand_builtin_setjmp_setup (buf_addr
, label_r
);
8444 nonlocal_goto_handler_labels
8445 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
8446 nonlocal_goto_handler_labels
);
8447 /* ??? Do not let expand_label treat us as such since we would
8448 not want to be both on the list of non-local labels and on
8449 the list of forced labels. */
8450 FORCED_LABEL (label
) = 0;
8455 case BUILT_IN_SETJMP_RECEIVER
:
8456 /* __builtin_setjmp_receiver is passed the receiver label. */
8457 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
8459 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
8460 rtx_insn
*label_r
= label_rtx (label
);
8462 expand_builtin_setjmp_receiver (label_r
);
8467 /* __builtin_longjmp is passed a pointer to an array of five words.
8468 It's similar to the C library longjmp function but works with
8469 __builtin_setjmp above. */
8470 case BUILT_IN_LONGJMP
:
8471 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
8473 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
8474 VOIDmode
, EXPAND_NORMAL
);
8475 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
8477 if (value
!= const1_rtx
)
8479 error ("%<__builtin_longjmp%> second argument must be 1");
8483 expand_builtin_longjmp (buf_addr
, value
);
8488 case BUILT_IN_NONLOCAL_GOTO
:
8489 target
= expand_builtin_nonlocal_goto (exp
);
8494 /* This updates the setjmp buffer that is its argument with the value
8495 of the current stack pointer. */
8496 case BUILT_IN_UPDATE_SETJMP_BUF
:
8497 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
8500 = expand_normal (CALL_EXPR_ARG (exp
, 0));
8502 expand_builtin_update_setjmp_buf (buf_addr
);
8508 expand_builtin_trap ();
8511 case BUILT_IN_UNREACHABLE
:
8512 expand_builtin_unreachable ();
8515 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
8516 case BUILT_IN_SIGNBITD32
:
8517 case BUILT_IN_SIGNBITD64
:
8518 case BUILT_IN_SIGNBITD128
:
8519 target
= expand_builtin_signbit (exp
, target
);
8524 /* Various hooks for the DWARF 2 __throw routine. */
8525 case BUILT_IN_UNWIND_INIT
:
8526 expand_builtin_unwind_init ();
8528 case BUILT_IN_DWARF_CFA
:
8529 return virtual_cfa_rtx
;
8530 #ifdef DWARF2_UNWIND_INFO
8531 case BUILT_IN_DWARF_SP_COLUMN
:
8532 return expand_builtin_dwarf_sp_column ();
8533 case BUILT_IN_INIT_DWARF_REG_SIZES
:
8534 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
8537 case BUILT_IN_FROB_RETURN_ADDR
:
8538 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
8539 case BUILT_IN_EXTRACT_RETURN_ADDR
:
8540 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
8541 case BUILT_IN_EH_RETURN
:
8542 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
8543 CALL_EXPR_ARG (exp
, 1));
8545 case BUILT_IN_EH_RETURN_DATA_REGNO
:
8546 return expand_builtin_eh_return_data_regno (exp
);
8547 case BUILT_IN_EXTEND_POINTER
:
8548 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
8549 case BUILT_IN_EH_POINTER
:
8550 return expand_builtin_eh_pointer (exp
);
8551 case BUILT_IN_EH_FILTER
:
8552 return expand_builtin_eh_filter (exp
);
8553 case BUILT_IN_EH_COPY_VALUES
:
8554 return expand_builtin_eh_copy_values (exp
);
8556 case BUILT_IN_VA_START
:
8557 return expand_builtin_va_start (exp
);
8558 case BUILT_IN_VA_END
:
8559 return expand_builtin_va_end (exp
);
8560 case BUILT_IN_VA_COPY
:
8561 return expand_builtin_va_copy (exp
);
8562 case BUILT_IN_EXPECT
:
8563 return expand_builtin_expect (exp
, target
);
8564 case BUILT_IN_EXPECT_WITH_PROBABILITY
:
8565 return expand_builtin_expect_with_probability (exp
, target
);
8566 case BUILT_IN_ASSUME_ALIGNED
:
8567 return expand_builtin_assume_aligned (exp
, target
);
8568 case BUILT_IN_PREFETCH
:
8569 expand_builtin_prefetch (exp
);
8572 case BUILT_IN_INIT_TRAMPOLINE
:
8573 return expand_builtin_init_trampoline (exp
, true);
8574 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
8575 return expand_builtin_init_trampoline (exp
, false);
8576 case BUILT_IN_ADJUST_TRAMPOLINE
:
8577 return expand_builtin_adjust_trampoline (exp
);
8579 case BUILT_IN_INIT_DESCRIPTOR
:
8580 return expand_builtin_init_descriptor (exp
);
8581 case BUILT_IN_ADJUST_DESCRIPTOR
:
8582 return expand_builtin_adjust_descriptor (exp
);
8585 case BUILT_IN_EXECL
:
8586 case BUILT_IN_EXECV
:
8587 case BUILT_IN_EXECLP
:
8588 case BUILT_IN_EXECLE
:
8589 case BUILT_IN_EXECVP
:
8590 case BUILT_IN_EXECVE
:
8591 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
8596 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
8597 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
8598 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
8599 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
8600 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
8601 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
8602 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
8607 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
8608 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
8609 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
8610 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
8611 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
8612 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
8613 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
8618 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
8619 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
8620 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
8621 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
8622 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
8623 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
8624 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
8629 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
8630 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
8631 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
8632 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
8633 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
8634 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
8635 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
8640 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
8641 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
8642 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
8643 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
8644 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
8645 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
8646 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
8651 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
8652 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
8653 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
8654 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
8655 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
8656 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
8657 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
8662 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
8663 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
8664 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
8665 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
8666 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
8667 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
8668 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
8673 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
8674 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
8675 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
8676 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
8677 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
8678 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
8679 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
8684 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
8685 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
8686 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
8687 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
8688 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
8689 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
8690 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
8695 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
8696 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
8697 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
8698 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
8699 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
8700 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
8701 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
8706 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
8707 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
8708 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
8709 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
8710 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
8711 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
8712 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
8717 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
8718 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
8719 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
8720 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
8721 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
8722 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
8723 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
8728 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
8729 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
8730 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
8731 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
8732 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
8733 if (mode
== VOIDmode
)
8734 mode
= TYPE_MODE (boolean_type_node
);
8735 if (!target
|| !register_operand (target
, mode
))
8736 target
= gen_reg_rtx (mode
);
8738 mode
= get_builtin_sync_mode
8739 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
8740 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
8745 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
8746 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
8747 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
8748 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
8749 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
8750 mode
= get_builtin_sync_mode
8751 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
8752 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
8757 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
8758 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
8759 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
8760 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
8761 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
8762 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
8763 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
8768 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
8769 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
8770 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
8771 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
8772 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
8773 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
8774 expand_builtin_sync_lock_release (mode
, exp
);
8777 case BUILT_IN_SYNC_SYNCHRONIZE
:
8778 expand_builtin_sync_synchronize ();
8781 case BUILT_IN_ATOMIC_EXCHANGE_1
:
8782 case BUILT_IN_ATOMIC_EXCHANGE_2
:
8783 case BUILT_IN_ATOMIC_EXCHANGE_4
:
8784 case BUILT_IN_ATOMIC_EXCHANGE_8
:
8785 case BUILT_IN_ATOMIC_EXCHANGE_16
:
8786 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
8787 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
8792 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
8793 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
8794 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
8795 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
8796 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
8798 unsigned int nargs
, z
;
8799 vec
<tree
, va_gc
> *vec
;
8802 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
8803 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
8807 /* If this is turned into an external library call, the weak parameter
8808 must be dropped to match the expected parameter list. */
8809 nargs
= call_expr_nargs (exp
);
8810 vec_alloc (vec
, nargs
- 1);
8811 for (z
= 0; z
< 3; z
++)
8812 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
8813 /* Skip the boolean weak parameter. */
8814 for (z
= 4; z
< 6; z
++)
8815 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
8816 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
8820 case BUILT_IN_ATOMIC_LOAD_1
:
8821 case BUILT_IN_ATOMIC_LOAD_2
:
8822 case BUILT_IN_ATOMIC_LOAD_4
:
8823 case BUILT_IN_ATOMIC_LOAD_8
:
8824 case BUILT_IN_ATOMIC_LOAD_16
:
8825 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
8826 target
= expand_builtin_atomic_load (mode
, exp
, target
);
8831 case BUILT_IN_ATOMIC_STORE_1
:
8832 case BUILT_IN_ATOMIC_STORE_2
:
8833 case BUILT_IN_ATOMIC_STORE_4
:
8834 case BUILT_IN_ATOMIC_STORE_8
:
8835 case BUILT_IN_ATOMIC_STORE_16
:
8836 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
8837 target
= expand_builtin_atomic_store (mode
, exp
);
8842 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
8843 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
8844 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
8845 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
8846 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
8848 enum built_in_function lib
;
8849 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
8850 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
8851 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
8852 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
8858 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
8859 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
8860 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
8861 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
8862 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
8864 enum built_in_function lib
;
8865 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
8866 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
8867 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
8868 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
8874 case BUILT_IN_ATOMIC_AND_FETCH_1
:
8875 case BUILT_IN_ATOMIC_AND_FETCH_2
:
8876 case BUILT_IN_ATOMIC_AND_FETCH_4
:
8877 case BUILT_IN_ATOMIC_AND_FETCH_8
:
8878 case BUILT_IN_ATOMIC_AND_FETCH_16
:
8880 enum built_in_function lib
;
8881 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
8882 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
8883 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
8884 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
8890 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
8891 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
8892 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
8893 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
8894 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
8896 enum built_in_function lib
;
8897 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
8898 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
8899 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
8900 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
8906 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
8907 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
8908 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
8909 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
8910 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
8912 enum built_in_function lib
;
8913 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
8914 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
8915 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
8916 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
8922 case BUILT_IN_ATOMIC_OR_FETCH_1
:
8923 case BUILT_IN_ATOMIC_OR_FETCH_2
:
8924 case BUILT_IN_ATOMIC_OR_FETCH_4
:
8925 case BUILT_IN_ATOMIC_OR_FETCH_8
:
8926 case BUILT_IN_ATOMIC_OR_FETCH_16
:
8928 enum built_in_function lib
;
8929 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
8930 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
8931 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
8932 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
8938 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
8939 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
8940 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
8941 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
8942 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
8943 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
8944 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
8945 ignore
, BUILT_IN_NONE
);
8950 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
8951 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
8952 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
8953 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
8954 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
8955 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
8956 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
8957 ignore
, BUILT_IN_NONE
);
8962 case BUILT_IN_ATOMIC_FETCH_AND_1
:
8963 case BUILT_IN_ATOMIC_FETCH_AND_2
:
8964 case BUILT_IN_ATOMIC_FETCH_AND_4
:
8965 case BUILT_IN_ATOMIC_FETCH_AND_8
:
8966 case BUILT_IN_ATOMIC_FETCH_AND_16
:
8967 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
8968 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
8969 ignore
, BUILT_IN_NONE
);
8974 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
8975 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
8976 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
8977 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
8978 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
8979 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
8980 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
8981 ignore
, BUILT_IN_NONE
);
8986 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
8987 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
8988 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
8989 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
8990 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
8991 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
8992 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
8993 ignore
, BUILT_IN_NONE
);
8998 case BUILT_IN_ATOMIC_FETCH_OR_1
:
8999 case BUILT_IN_ATOMIC_FETCH_OR_2
:
9000 case BUILT_IN_ATOMIC_FETCH_OR_4
:
9001 case BUILT_IN_ATOMIC_FETCH_OR_8
:
9002 case BUILT_IN_ATOMIC_FETCH_OR_16
:
9003 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
9004 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
9005 ignore
, BUILT_IN_NONE
);
9010 case BUILT_IN_ATOMIC_TEST_AND_SET
:
9011 return expand_builtin_atomic_test_and_set (exp
, target
);
9013 case BUILT_IN_ATOMIC_CLEAR
:
9014 return expand_builtin_atomic_clear (exp
);
9016 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9017 return expand_builtin_atomic_always_lock_free (exp
);
9019 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9020 target
= expand_builtin_atomic_is_lock_free (exp
);
9025 case BUILT_IN_ATOMIC_THREAD_FENCE
:
9026 expand_builtin_atomic_thread_fence (exp
);
9029 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
9030 expand_builtin_atomic_signal_fence (exp
);
9033 case BUILT_IN_OBJECT_SIZE
:
9034 return expand_builtin_object_size (exp
);
9036 case BUILT_IN_MEMCPY_CHK
:
9037 case BUILT_IN_MEMPCPY_CHK
:
9038 case BUILT_IN_MEMMOVE_CHK
:
9039 case BUILT_IN_MEMSET_CHK
:
9040 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
9045 case BUILT_IN_STRCPY_CHK
:
9046 case BUILT_IN_STPCPY_CHK
:
9047 case BUILT_IN_STRNCPY_CHK
:
9048 case BUILT_IN_STPNCPY_CHK
:
9049 case BUILT_IN_STRCAT_CHK
:
9050 case BUILT_IN_STRNCAT_CHK
:
9051 case BUILT_IN_SNPRINTF_CHK
:
9052 case BUILT_IN_VSNPRINTF_CHK
:
9053 maybe_emit_chk_warning (exp
, fcode
);
9056 case BUILT_IN_SPRINTF_CHK
:
9057 case BUILT_IN_VSPRINTF_CHK
:
9058 maybe_emit_sprintf_chk_warning (exp
, fcode
);
9062 if (warn_free_nonheap_object
)
9063 maybe_emit_free_warning (exp
);
9066 case BUILT_IN_THREAD_POINTER
:
9067 return expand_builtin_thread_pointer (exp
, target
);
9069 case BUILT_IN_SET_THREAD_POINTER
:
9070 expand_builtin_set_thread_pointer (exp
);
9073 case BUILT_IN_ACC_ON_DEVICE
:
9074 /* Do library call, if we failed to expand the builtin when
9078 case BUILT_IN_GOACC_PARLEVEL_ID
:
9079 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
9080 return expand_builtin_goacc_parlevel_id_size (exp
, target
, ignore
);
9082 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR
:
9083 return expand_speculation_safe_value (VOIDmode
, exp
, target
, ignore
);
9085 case BUILT_IN_SPECULATION_SAFE_VALUE_1
:
9086 case BUILT_IN_SPECULATION_SAFE_VALUE_2
:
9087 case BUILT_IN_SPECULATION_SAFE_VALUE_4
:
9088 case BUILT_IN_SPECULATION_SAFE_VALUE_8
:
9089 case BUILT_IN_SPECULATION_SAFE_VALUE_16
:
9090 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SPECULATION_SAFE_VALUE_1
);
9091 return expand_speculation_safe_value (mode
, exp
, target
, ignore
);
9093 default: /* just do library call, if unknown builtin */
9097 /* The switch statement above can drop through to cause the function
9098 to be called normally. */
9099 return expand_call (exp
, target
, ignore
);
9102 /* Determine whether a tree node represents a call to a built-in
9103 function. If the tree T is a call to a built-in function with
9104 the right number of arguments of the appropriate types, return
9105 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9106 Otherwise the return value is END_BUILTINS. */
9108 enum built_in_function
9109 builtin_mathfn_code (const_tree t
)
9111 const_tree fndecl
, arg
, parmlist
;
9112 const_tree argtype
, parmtype
;
9113 const_call_expr_arg_iterator iter
;
9115 if (TREE_CODE (t
) != CALL_EXPR
)
9116 return END_BUILTINS
;
9118 fndecl
= get_callee_fndecl (t
);
9119 if (fndecl
== NULL_TREE
|| !fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
9120 return END_BUILTINS
;
9122 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
9123 init_const_call_expr_arg_iterator (t
, &iter
);
9124 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
9126 /* If a function doesn't take a variable number of arguments,
9127 the last element in the list will have type `void'. */
9128 parmtype
= TREE_VALUE (parmlist
);
9129 if (VOID_TYPE_P (parmtype
))
9131 if (more_const_call_expr_args_p (&iter
))
9132 return END_BUILTINS
;
9133 return DECL_FUNCTION_CODE (fndecl
);
9136 if (! more_const_call_expr_args_p (&iter
))
9137 return END_BUILTINS
;
9139 arg
= next_const_call_expr_arg (&iter
);
9140 argtype
= TREE_TYPE (arg
);
9142 if (SCALAR_FLOAT_TYPE_P (parmtype
))
9144 if (! SCALAR_FLOAT_TYPE_P (argtype
))
9145 return END_BUILTINS
;
9147 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
9149 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
9150 return END_BUILTINS
;
9152 else if (POINTER_TYPE_P (parmtype
))
9154 if (! POINTER_TYPE_P (argtype
))
9155 return END_BUILTINS
;
9157 else if (INTEGRAL_TYPE_P (parmtype
))
9159 if (! INTEGRAL_TYPE_P (argtype
))
9160 return END_BUILTINS
;
9163 return END_BUILTINS
;
9166 /* Variable-length argument list. */
9167 return DECL_FUNCTION_CODE (fndecl
);
9170 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9171 evaluate to a constant. */
9174 fold_builtin_constant_p (tree arg
)
9176 /* We return 1 for a numeric type that's known to be a constant
9177 value at compile-time or for an aggregate type that's a
9178 literal constant. */
9181 /* If we know this is a constant, emit the constant of one. */
9182 if (CONSTANT_CLASS_P (arg
)
9183 || (TREE_CODE (arg
) == CONSTRUCTOR
9184 && TREE_CONSTANT (arg
)))
9185 return integer_one_node
;
9186 if (TREE_CODE (arg
) == ADDR_EXPR
)
9188 tree op
= TREE_OPERAND (arg
, 0);
9189 if (TREE_CODE (op
) == STRING_CST
9190 || (TREE_CODE (op
) == ARRAY_REF
9191 && integer_zerop (TREE_OPERAND (op
, 1))
9192 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
9193 return integer_one_node
;
9196 /* If this expression has side effects, show we don't know it to be a
9197 constant. Likewise if it's a pointer or aggregate type since in
9198 those case we only want literals, since those are only optimized
9199 when generating RTL, not later.
9200 And finally, if we are compiling an initializer, not code, we
9201 need to return a definite result now; there's not going to be any
9202 more optimization done. */
9203 if (TREE_SIDE_EFFECTS (arg
)
9204 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
9205 || POINTER_TYPE_P (TREE_TYPE (arg
))
9207 || folding_initializer
9208 || force_folding_builtin_constant_p
)
9209 return integer_zero_node
;
9214 /* Create builtin_expect or builtin_expect_with_probability
9215 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9216 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
9217 builtin_expect_with_probability instead uses third argument as PROBABILITY
9221 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
9222 tree predictor
, tree probability
)
9224 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
9226 fn
= builtin_decl_explicit (probability
== NULL_TREE
? BUILT_IN_EXPECT
9227 : BUILT_IN_EXPECT_WITH_PROBABILITY
);
9228 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
9229 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
9230 pred_type
= TREE_VALUE (arg_types
);
9231 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
9233 pred
= fold_convert_loc (loc
, pred_type
, pred
);
9234 expected
= fold_convert_loc (loc
, expected_type
, expected
);
9237 call_expr
= build_call_expr_loc (loc
, fn
, 3, pred
, expected
, probability
);
9239 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
9242 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
9243 build_int_cst (ret_type
, 0));
9246 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9247 NULL_TREE if no simplification is possible. */
9250 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
,
9253 tree inner
, fndecl
, inner_arg0
;
9254 enum tree_code code
;
9256 /* Distribute the expected value over short-circuiting operators.
9257 See through the cast from truthvalue_type_node to long. */
9259 while (CONVERT_EXPR_P (inner_arg0
)
9260 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
9261 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
9262 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
9264 /* If this is a builtin_expect within a builtin_expect keep the
9265 inner one. See through a comparison against a constant. It
9266 might have been added to create a thruthvalue. */
9269 if (COMPARISON_CLASS_P (inner
)
9270 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
9271 inner
= TREE_OPERAND (inner
, 0);
9273 if (TREE_CODE (inner
) == CALL_EXPR
9274 && (fndecl
= get_callee_fndecl (inner
))
9275 && (fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
9276 || fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT_WITH_PROBABILITY
)))
9280 code
= TREE_CODE (inner
);
9281 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
9283 tree op0
= TREE_OPERAND (inner
, 0);
9284 tree op1
= TREE_OPERAND (inner
, 1);
9285 arg1
= save_expr (arg1
);
9287 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
, arg3
);
9288 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
, arg3
);
9289 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
9291 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
9294 /* If the argument isn't invariant then there's nothing else we can do. */
9295 if (!TREE_CONSTANT (inner_arg0
))
9298 /* If we expect that a comparison against the argument will fold to
9299 a constant return the constant. In practice, this means a true
9300 constant or the address of a non-weak symbol. */
9303 if (TREE_CODE (inner
) == ADDR_EXPR
)
9307 inner
= TREE_OPERAND (inner
, 0);
9309 while (TREE_CODE (inner
) == COMPONENT_REF
9310 || TREE_CODE (inner
) == ARRAY_REF
);
9311 if (VAR_OR_FUNCTION_DECL_P (inner
) && DECL_WEAK (inner
))
9315 /* Otherwise, ARG0 already has the proper type for the return value. */
9319 /* Fold a call to __builtin_classify_type with argument ARG. */
9322 fold_builtin_classify_type (tree arg
)
9325 return build_int_cst (integer_type_node
, no_type_class
);
9327 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
9330 /* Fold a call to __builtin_strlen with argument ARG. */
9333 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
9335 if (!validate_arg (arg
, POINTER_TYPE
))
9339 c_strlen_data lendata
= { };
9340 tree len
= c_strlen (arg
, 0, &lendata
);
9343 return fold_convert_loc (loc
, type
, len
);
9346 c_strlen (arg
, 1, &lendata
);
9350 if (EXPR_HAS_LOCATION (arg
))
9351 loc
= EXPR_LOCATION (arg
);
9352 else if (loc
== UNKNOWN_LOCATION
)
9353 loc
= input_location
;
9354 warn_string_no_nul (loc
, "strlen", arg
, lendata
.decl
);
9361 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9364 fold_builtin_inf (location_t loc
, tree type
, int warn
)
9366 REAL_VALUE_TYPE real
;
9368 /* __builtin_inff is intended to be usable to define INFINITY on all
9369 targets. If an infinity is not available, INFINITY expands "to a
9370 positive constant of type float that overflows at translation
9371 time", footnote "In this case, using INFINITY will violate the
9372 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9373 Thus we pedwarn to ensure this constraint violation is
9375 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
9376 pedwarn (loc
, 0, "target format does not support infinity");
9379 return build_real (type
, real
);
9382 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9383 NULL_TREE if no simplification can be made. */
9386 fold_builtin_sincos (location_t loc
,
9387 tree arg0
, tree arg1
, tree arg2
)
9390 tree fndecl
, call
= NULL_TREE
;
9392 if (!validate_arg (arg0
, REAL_TYPE
)
9393 || !validate_arg (arg1
, POINTER_TYPE
)
9394 || !validate_arg (arg2
, POINTER_TYPE
))
9397 type
= TREE_TYPE (arg0
);
9399 /* Calculate the result when the argument is a constant. */
9400 built_in_function fn
= mathfn_built_in_2 (type
, CFN_BUILT_IN_CEXPI
);
9401 if (fn
== END_BUILTINS
)
9404 /* Canonicalize sincos to cexpi. */
9405 if (TREE_CODE (arg0
) == REAL_CST
)
9407 tree complex_type
= build_complex_type (type
);
9408 call
= fold_const_call (as_combined_fn (fn
), complex_type
, arg0
);
9412 if (!targetm
.libc_has_function (function_c99_math_complex
)
9413 || !builtin_decl_implicit_p (fn
))
9415 fndecl
= builtin_decl_explicit (fn
);
9416 call
= build_call_expr_loc (loc
, fndecl
, 1, arg0
);
9417 call
= builtin_save_expr (call
);
9420 tree ptype
= build_pointer_type (type
);
9421 arg1
= fold_convert (ptype
, arg1
);
9422 arg2
= fold_convert (ptype
, arg2
);
9423 return build2 (COMPOUND_EXPR
, void_type_node
,
9424 build2 (MODIFY_EXPR
, void_type_node
,
9425 build_fold_indirect_ref_loc (loc
, arg1
),
9426 fold_build1_loc (loc
, IMAGPART_EXPR
, type
, call
)),
9427 build2 (MODIFY_EXPR
, void_type_node
,
9428 build_fold_indirect_ref_loc (loc
, arg2
),
9429 fold_build1_loc (loc
, REALPART_EXPR
, type
, call
)));
9432 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9433 Return NULL_TREE if no simplification can be made. */
9436 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9438 if (!validate_arg (arg1
, POINTER_TYPE
)
9439 || !validate_arg (arg2
, POINTER_TYPE
)
9440 || !validate_arg (len
, INTEGER_TYPE
))
9443 /* If the LEN parameter is zero, return zero. */
9444 if (integer_zerop (len
))
9445 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9448 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9449 if (operand_equal_p (arg1
, arg2
, 0))
9450 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9452 /* If len parameter is one, return an expression corresponding to
9453 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9454 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
9456 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9457 tree cst_uchar_ptr_node
9458 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9461 = fold_convert_loc (loc
, integer_type_node
,
9462 build1 (INDIRECT_REF
, cst_uchar_node
,
9463 fold_convert_loc (loc
,
9467 = fold_convert_loc (loc
, integer_type_node
,
9468 build1 (INDIRECT_REF
, cst_uchar_node
,
9469 fold_convert_loc (loc
,
9472 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9478 /* Fold a call to builtin isascii with argument ARG. */
9481 fold_builtin_isascii (location_t loc
, tree arg
)
9483 if (!validate_arg (arg
, INTEGER_TYPE
))
9487 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9488 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9489 build_int_cst (integer_type_node
,
9490 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9491 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9492 arg
, integer_zero_node
);
9496 /* Fold a call to builtin toascii with argument ARG. */
9499 fold_builtin_toascii (location_t loc
, tree arg
)
9501 if (!validate_arg (arg
, INTEGER_TYPE
))
9504 /* Transform toascii(c) -> (c & 0x7f). */
9505 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9506 build_int_cst (integer_type_node
, 0x7f));
9509 /* Fold a call to builtin isdigit with argument ARG. */
9512 fold_builtin_isdigit (location_t loc
, tree arg
)
9514 if (!validate_arg (arg
, INTEGER_TYPE
))
9518 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9519 /* According to the C standard, isdigit is unaffected by locale.
9520 However, it definitely is affected by the target character set. */
9521 unsigned HOST_WIDE_INT target_digit0
9522 = lang_hooks
.to_target_charset ('0');
9524 if (target_digit0
== 0)
9527 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9528 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9529 build_int_cst (unsigned_type_node
, target_digit0
));
9530 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9531 build_int_cst (unsigned_type_node
, 9));
9535 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9538 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9540 if (!validate_arg (arg
, REAL_TYPE
))
9543 arg
= fold_convert_loc (loc
, type
, arg
);
9544 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9547 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9550 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9552 if (!validate_arg (arg
, INTEGER_TYPE
))
9555 arg
= fold_convert_loc (loc
, type
, arg
);
9556 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9559 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9562 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9564 if (validate_arg (arg
, COMPLEX_TYPE
)
9565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9567 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9571 tree new_arg
= builtin_save_expr (arg
);
9572 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9573 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9574 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9581 /* Fold a call to builtin frexp, we can assume the base is 2. */
9584 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9586 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9591 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9594 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9596 /* Proceed if a valid pointer type was passed in. */
9597 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9599 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9605 /* For +-0, return (*exp = 0, +-0). */
9606 exp
= integer_zero_node
;
9611 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9612 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9615 /* Since the frexp function always expects base 2, and in
9616 GCC normalized significands are already in the range
9617 [0.5, 1.0), we have exactly what frexp wants. */
9618 REAL_VALUE_TYPE frac_rvt
= *value
;
9619 SET_REAL_EXP (&frac_rvt
, 0);
9620 frac
= build_real (rettype
, frac_rvt
);
9621 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9628 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9629 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9630 TREE_SIDE_EFFECTS (arg1
) = 1;
9631 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9637 /* Fold a call to builtin modf. */
9640 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9642 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9647 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9650 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9652 /* Proceed if a valid pointer type was passed in. */
9653 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9655 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9656 REAL_VALUE_TYPE trunc
, frac
;
9662 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9663 trunc
= frac
= *value
;
9666 /* For +-Inf, return (*arg1 = arg0, +-0). */
9668 frac
.sign
= value
->sign
;
9672 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9673 real_trunc (&trunc
, VOIDmode
, value
);
9674 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9675 /* If the original number was negative and already
9676 integral, then the fractional part is -0.0. */
9677 if (value
->sign
&& frac
.cl
== rvc_zero
)
9678 frac
.sign
= value
->sign
;
9682 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9683 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9684 build_real (rettype
, trunc
));
9685 TREE_SIDE_EFFECTS (arg1
) = 1;
9686 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9687 build_real (rettype
, frac
));
9693 /* Given a location LOC, an interclass builtin function decl FNDECL
9694 and its single argument ARG, return an folded expression computing
9695 the same, or NULL_TREE if we either couldn't or didn't want to fold
9696 (the latter happen if there's an RTL instruction available). */
9699 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9703 if (!validate_arg (arg
, REAL_TYPE
))
9706 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9709 mode
= TYPE_MODE (TREE_TYPE (arg
));
9711 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
9713 /* If there is no optab, try generic code. */
9714 switch (DECL_FUNCTION_CODE (fndecl
))
9718 CASE_FLT_FN (BUILT_IN_ISINF
):
9720 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9721 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9722 tree type
= TREE_TYPE (arg
);
9726 if (is_ibm_extended
)
9728 /* NaN and Inf are encoded in the high-order double value
9729 only. The low-order value is not significant. */
9730 type
= double_type_node
;
9732 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
9734 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
), false);
9735 real_from_string (&r
, buf
);
9736 result
= build_call_expr (isgr_fn
, 2,
9737 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9738 build_real (type
, r
));
9741 CASE_FLT_FN (BUILT_IN_FINITE
):
9742 case BUILT_IN_ISFINITE
:
9744 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9745 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9746 tree type
= TREE_TYPE (arg
);
9750 if (is_ibm_extended
)
9752 /* NaN and Inf are encoded in the high-order double value
9753 only. The low-order value is not significant. */
9754 type
= double_type_node
;
9756 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
9758 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
), false);
9759 real_from_string (&r
, buf
);
9760 result
= build_call_expr (isle_fn
, 2,
9761 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9762 build_real (type
, r
));
9763 /*result = fold_build2_loc (loc, UNGT_EXPR,
9764 TREE_TYPE (TREE_TYPE (fndecl)),
9765 fold_build1_loc (loc, ABS_EXPR, type, arg),
9766 build_real (type, r));
9767 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9768 TREE_TYPE (TREE_TYPE (fndecl)),
9772 case BUILT_IN_ISNORMAL
:
9774 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9775 islessequal(fabs(x),DBL_MAX). */
9776 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9777 tree type
= TREE_TYPE (arg
);
9778 tree orig_arg
, max_exp
, min_exp
;
9779 machine_mode orig_mode
= mode
;
9780 REAL_VALUE_TYPE rmax
, rmin
;
9783 orig_arg
= arg
= builtin_save_expr (arg
);
9784 if (is_ibm_extended
)
9786 /* Use double to test the normal range of IBM extended
9787 precision. Emin for IBM extended precision is
9788 different to emin for IEEE double, being 53 higher
9789 since the low double exponent is at least 53 lower
9790 than the high double exponent. */
9791 type
= double_type_node
;
9793 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
9795 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9797 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
), false);
9798 real_from_string (&rmax
, buf
);
9799 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
9800 real_from_string (&rmin
, buf
);
9801 max_exp
= build_real (type
, rmax
);
9802 min_exp
= build_real (type
, rmin
);
9804 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
9805 if (is_ibm_extended
)
9807 /* Testing the high end of the range is done just using
9808 the high double, using the same test as isfinite().
9809 For the subnormal end of the range we first test the
9810 high double, then if its magnitude is equal to the
9811 limit of 0x1p-969, we test whether the low double is
9812 non-zero and opposite sign to the high double. */
9813 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
9814 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9815 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
9816 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
9818 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
9819 complex_double_type_node
, orig_arg
);
9820 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
9821 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
9822 tree zero
= build_real (type
, dconst0
);
9823 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
9824 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
9825 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
9826 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
9827 fold_build3 (COND_EXPR
,
9830 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
9832 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
9838 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9839 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
9841 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
9852 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9853 ARG is the argument for the call. */
9856 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9858 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9860 if (!validate_arg (arg
, REAL_TYPE
))
9863 switch (builtin_index
)
9865 case BUILT_IN_ISINF
:
9866 if (!HONOR_INFINITIES (arg
))
9867 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9871 case BUILT_IN_ISINF_SIGN
:
9873 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9874 /* In a boolean context, GCC will fold the inner COND_EXPR to
9875 1. So e.g. "if (isinf_sign(x))" would be folded to just
9876 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9877 tree signbit_fn
= builtin_decl_explicit (BUILT_IN_SIGNBIT
);
9878 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9879 tree tmp
= NULL_TREE
;
9881 arg
= builtin_save_expr (arg
);
9883 if (signbit_fn
&& isinf_fn
)
9885 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9886 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9888 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9889 signbit_call
, integer_zero_node
);
9890 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9891 isinf_call
, integer_zero_node
);
9893 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9894 integer_minus_one_node
, integer_one_node
);
9895 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9903 case BUILT_IN_ISFINITE
:
9904 if (!HONOR_NANS (arg
)
9905 && !HONOR_INFINITIES (arg
))
9906 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9910 case BUILT_IN_ISNAN
:
9911 if (!HONOR_NANS (arg
))
9912 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9915 bool is_ibm_extended
= MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg
)));
9916 if (is_ibm_extended
)
9918 /* NaN and Inf are encoded in the high-order double value
9919 only. The low-order value is not significant. */
9920 arg
= fold_build1_loc (loc
, NOP_EXPR
, double_type_node
, arg
);
9923 arg
= builtin_save_expr (arg
);
9924 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9931 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9932 This builtin will generate code to return the appropriate floating
9933 point classification depending on the value of the floating point
9934 number passed in. The possible return values must be supplied as
9935 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9936 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9937 one floating point argument which is "type generic". */
9940 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9942 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9943 arg
, type
, res
, tmp
;
9948 /* Verify the required arguments in the original call. */
9950 || !validate_arg (args
[0], INTEGER_TYPE
)
9951 || !validate_arg (args
[1], INTEGER_TYPE
)
9952 || !validate_arg (args
[2], INTEGER_TYPE
)
9953 || !validate_arg (args
[3], INTEGER_TYPE
)
9954 || !validate_arg (args
[4], INTEGER_TYPE
)
9955 || !validate_arg (args
[5], REAL_TYPE
))
9959 fp_infinite
= args
[1];
9960 fp_normal
= args
[2];
9961 fp_subnormal
= args
[3];
9964 type
= TREE_TYPE (arg
);
9965 mode
= TYPE_MODE (type
);
9966 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9970 (fabs(x) == Inf ? FP_INFINITE :
9971 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9972 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9974 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9975 build_real (type
, dconst0
));
9976 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9977 tmp
, fp_zero
, fp_subnormal
);
9979 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9980 real_from_string (&r
, buf
);
9981 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9982 arg
, build_real (type
, r
));
9983 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9985 if (HONOR_INFINITIES (mode
))
9988 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9989 build_real (type
, r
));
9990 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9994 if (HONOR_NANS (mode
))
9996 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9997 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10003 /* Fold a call to an unordered comparison function such as
10004 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10005 being called and ARG0 and ARG1 are the arguments for the call.
10006 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10007 the opposite of the desired result. UNORDERED_CODE is used
10008 for modes that can hold NaNs and ORDERED_CODE is used for
10012 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10013 enum tree_code unordered_code
,
10014 enum tree_code ordered_code
)
10016 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10017 enum tree_code code
;
10019 enum tree_code code0
, code1
;
10020 tree cmp_type
= NULL_TREE
;
10022 type0
= TREE_TYPE (arg0
);
10023 type1
= TREE_TYPE (arg1
);
10025 code0
= TREE_CODE (type0
);
10026 code1
= TREE_CODE (type1
);
10028 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10029 /* Choose the wider of two real types. */
10030 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10032 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10034 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10037 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10038 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10040 if (unordered_code
== UNORDERED_EXPR
)
10042 if (!HONOR_NANS (arg0
))
10043 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10044 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
10047 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
10048 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10049 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10052 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10053 arithmetics if it can never overflow, or into internal functions that
10054 return both result of arithmetics and overflowed boolean flag in
10055 a complex integer result, or some other check for overflow.
10056 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10057 checking part of that. */
10060 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
10061 tree arg0
, tree arg1
, tree arg2
)
10063 enum internal_fn ifn
= IFN_LAST
;
10064 /* The code of the expression corresponding to the built-in. */
10065 enum tree_code opcode
= ERROR_MARK
;
10066 bool ovf_only
= false;
10070 case BUILT_IN_ADD_OVERFLOW_P
:
10073 case BUILT_IN_ADD_OVERFLOW
:
10074 case BUILT_IN_SADD_OVERFLOW
:
10075 case BUILT_IN_SADDL_OVERFLOW
:
10076 case BUILT_IN_SADDLL_OVERFLOW
:
10077 case BUILT_IN_UADD_OVERFLOW
:
10078 case BUILT_IN_UADDL_OVERFLOW
:
10079 case BUILT_IN_UADDLL_OVERFLOW
:
10080 opcode
= PLUS_EXPR
;
10081 ifn
= IFN_ADD_OVERFLOW
;
10083 case BUILT_IN_SUB_OVERFLOW_P
:
10086 case BUILT_IN_SUB_OVERFLOW
:
10087 case BUILT_IN_SSUB_OVERFLOW
:
10088 case BUILT_IN_SSUBL_OVERFLOW
:
10089 case BUILT_IN_SSUBLL_OVERFLOW
:
10090 case BUILT_IN_USUB_OVERFLOW
:
10091 case BUILT_IN_USUBL_OVERFLOW
:
10092 case BUILT_IN_USUBLL_OVERFLOW
:
10093 opcode
= MINUS_EXPR
;
10094 ifn
= IFN_SUB_OVERFLOW
;
10096 case BUILT_IN_MUL_OVERFLOW_P
:
10099 case BUILT_IN_MUL_OVERFLOW
:
10100 case BUILT_IN_SMUL_OVERFLOW
:
10101 case BUILT_IN_SMULL_OVERFLOW
:
10102 case BUILT_IN_SMULLL_OVERFLOW
:
10103 case BUILT_IN_UMUL_OVERFLOW
:
10104 case BUILT_IN_UMULL_OVERFLOW
:
10105 case BUILT_IN_UMULLL_OVERFLOW
:
10106 opcode
= MULT_EXPR
;
10107 ifn
= IFN_MUL_OVERFLOW
;
10110 gcc_unreachable ();
10113 /* For the "generic" overloads, the first two arguments can have different
10114 types and the last argument determines the target type to use to check
10115 for overflow. The arguments of the other overloads all have the same
10117 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
10119 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10120 arguments are constant, attempt to fold the built-in call into a constant
10121 expression indicating whether or not it detected an overflow. */
10123 && TREE_CODE (arg0
) == INTEGER_CST
10124 && TREE_CODE (arg1
) == INTEGER_CST
)
10125 /* Perform the computation in the target type and check for overflow. */
10126 return omit_one_operand_loc (loc
, boolean_type_node
,
10127 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
10128 ? boolean_true_node
: boolean_false_node
,
10131 tree intres
, ovfres
;
10132 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
10134 intres
= fold_binary_loc (loc
, opcode
, type
,
10135 fold_convert_loc (loc
, type
, arg0
),
10136 fold_convert_loc (loc
, type
, arg1
));
10137 if (TREE_OVERFLOW (intres
))
10138 intres
= drop_tree_overflow (intres
);
10139 ovfres
= (arith_overflowed_p (opcode
, type
, arg0
, arg1
)
10140 ? boolean_true_node
: boolean_false_node
);
10144 tree ctype
= build_complex_type (type
);
10145 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
, 2,
10147 tree tgt
= save_expr (call
);
10148 intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
10149 ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
10150 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
10154 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
10156 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
10158 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
10159 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
10162 /* Fold a call to __builtin_FILE to a constant string. */
10165 fold_builtin_FILE (location_t loc
)
10167 if (const char *fname
= LOCATION_FILE (loc
))
10169 /* The documentation says this builtin is equivalent to the preprocessor
10170 __FILE__ macro so it appears appropriate to use the same file prefix
10172 fname
= remap_macro_filename (fname
);
10173 return build_string_literal (strlen (fname
) + 1, fname
);
10176 return build_string_literal (1, "");
10179 /* Fold a call to __builtin_FUNCTION to a constant string. */
10182 fold_builtin_FUNCTION ()
10184 const char *name
= "";
10186 if (current_function_decl
)
10187 name
= lang_hooks
.decl_printable_name (current_function_decl
, 0);
10189 return build_string_literal (strlen (name
) + 1, name
);
10192 /* Fold a call to __builtin_LINE to an integer constant. */
10195 fold_builtin_LINE (location_t loc
, tree type
)
10197 return build_int_cst (type
, LOCATION_LINE (loc
));
10200 /* Fold a call to built-in function FNDECL with 0 arguments.
10201 This function returns NULL_TREE if no simplification was possible. */
10204 fold_builtin_0 (location_t loc
, tree fndecl
)
10206 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10207 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10210 case BUILT_IN_FILE
:
10211 return fold_builtin_FILE (loc
);
10213 case BUILT_IN_FUNCTION
:
10214 return fold_builtin_FUNCTION ();
10216 case BUILT_IN_LINE
:
10217 return fold_builtin_LINE (loc
, type
);
10219 CASE_FLT_FN (BUILT_IN_INF
):
10220 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
10221 case BUILT_IN_INFD32
:
10222 case BUILT_IN_INFD64
:
10223 case BUILT_IN_INFD128
:
10224 return fold_builtin_inf (loc
, type
, true);
10226 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10227 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
10228 return fold_builtin_inf (loc
, type
, false);
10230 case BUILT_IN_CLASSIFY_TYPE
:
10231 return fold_builtin_classify_type (NULL_TREE
);
10239 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10240 This function returns NULL_TREE if no simplification was possible. */
10243 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
10245 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10246 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10248 if (TREE_CODE (arg0
) == ERROR_MARK
)
10251 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
10256 case BUILT_IN_CONSTANT_P
:
10258 tree val
= fold_builtin_constant_p (arg0
);
10260 /* Gimplification will pull the CALL_EXPR for the builtin out of
10261 an if condition. When not optimizing, we'll not CSE it back.
10262 To avoid link error types of regressions, return false now. */
10263 if (!val
&& !optimize
)
10264 val
= integer_zero_node
;
10269 case BUILT_IN_CLASSIFY_TYPE
:
10270 return fold_builtin_classify_type (arg0
);
10272 case BUILT_IN_STRLEN
:
10273 return fold_builtin_strlen (loc
, type
, arg0
);
10275 CASE_FLT_FN (BUILT_IN_FABS
):
10276 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
10277 case BUILT_IN_FABSD32
:
10278 case BUILT_IN_FABSD64
:
10279 case BUILT_IN_FABSD128
:
10280 return fold_builtin_fabs (loc
, arg0
, type
);
10283 case BUILT_IN_LABS
:
10284 case BUILT_IN_LLABS
:
10285 case BUILT_IN_IMAXABS
:
10286 return fold_builtin_abs (loc
, arg0
, type
);
10288 CASE_FLT_FN (BUILT_IN_CONJ
):
10289 if (validate_arg (arg0
, COMPLEX_TYPE
)
10290 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10291 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10294 CASE_FLT_FN (BUILT_IN_CREAL
):
10295 if (validate_arg (arg0
, COMPLEX_TYPE
)
10296 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10297 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
10300 CASE_FLT_FN (BUILT_IN_CIMAG
):
10301 if (validate_arg (arg0
, COMPLEX_TYPE
)
10302 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10303 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10306 CASE_FLT_FN (BUILT_IN_CARG
):
10307 return fold_builtin_carg (loc
, arg0
, type
);
10309 case BUILT_IN_ISASCII
:
10310 return fold_builtin_isascii (loc
, arg0
);
10312 case BUILT_IN_TOASCII
:
10313 return fold_builtin_toascii (loc
, arg0
);
10315 case BUILT_IN_ISDIGIT
:
10316 return fold_builtin_isdigit (loc
, arg0
);
10318 CASE_FLT_FN (BUILT_IN_FINITE
):
10319 case BUILT_IN_FINITED32
:
10320 case BUILT_IN_FINITED64
:
10321 case BUILT_IN_FINITED128
:
10322 case BUILT_IN_ISFINITE
:
10324 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10327 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10330 CASE_FLT_FN (BUILT_IN_ISINF
):
10331 case BUILT_IN_ISINFD32
:
10332 case BUILT_IN_ISINFD64
:
10333 case BUILT_IN_ISINFD128
:
10335 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10338 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10341 case BUILT_IN_ISNORMAL
:
10342 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10344 case BUILT_IN_ISINF_SIGN
:
10345 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10347 CASE_FLT_FN (BUILT_IN_ISNAN
):
10348 case BUILT_IN_ISNAND32
:
10349 case BUILT_IN_ISNAND64
:
10350 case BUILT_IN_ISNAND128
:
10351 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10353 case BUILT_IN_FREE
:
10354 if (integer_zerop (arg0
))
10355 return build_empty_stmt (loc
);
10366 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10367 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10368 if no simplification was possible. */
10371 fold_builtin_2 (location_t loc
, tree expr
, tree fndecl
, tree arg0
, tree arg1
)
10373 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10374 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10376 if (TREE_CODE (arg0
) == ERROR_MARK
10377 || TREE_CODE (arg1
) == ERROR_MARK
)
10380 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
10385 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10386 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10387 if (validate_arg (arg0
, REAL_TYPE
)
10388 && validate_arg (arg1
, POINTER_TYPE
))
10389 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10392 CASE_FLT_FN (BUILT_IN_FREXP
):
10393 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10395 CASE_FLT_FN (BUILT_IN_MODF
):
10396 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10398 case BUILT_IN_STRSPN
:
10399 return fold_builtin_strspn (loc
, expr
, arg0
, arg1
);
10401 case BUILT_IN_STRCSPN
:
10402 return fold_builtin_strcspn (loc
, expr
, arg0
, arg1
);
10404 case BUILT_IN_STRPBRK
:
10405 return fold_builtin_strpbrk (loc
, expr
, arg0
, arg1
, type
);
10407 case BUILT_IN_EXPECT
:
10408 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
, NULL_TREE
);
10410 case BUILT_IN_ISGREATER
:
10411 return fold_builtin_unordered_cmp (loc
, fndecl
,
10412 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10413 case BUILT_IN_ISGREATEREQUAL
:
10414 return fold_builtin_unordered_cmp (loc
, fndecl
,
10415 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10416 case BUILT_IN_ISLESS
:
10417 return fold_builtin_unordered_cmp (loc
, fndecl
,
10418 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10419 case BUILT_IN_ISLESSEQUAL
:
10420 return fold_builtin_unordered_cmp (loc
, fndecl
,
10421 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10422 case BUILT_IN_ISLESSGREATER
:
10423 return fold_builtin_unordered_cmp (loc
, fndecl
,
10424 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10425 case BUILT_IN_ISUNORDERED
:
10426 return fold_builtin_unordered_cmp (loc
, fndecl
,
10427 arg0
, arg1
, UNORDERED_EXPR
,
10430 /* We do the folding for va_start in the expander. */
10431 case BUILT_IN_VA_START
:
10434 case BUILT_IN_OBJECT_SIZE
:
10435 return fold_builtin_object_size (arg0
, arg1
);
10437 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10438 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10440 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10441 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10449 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10451 This function returns NULL_TREE if no simplification was possible. */
10454 fold_builtin_3 (location_t loc
, tree fndecl
,
10455 tree arg0
, tree arg1
, tree arg2
)
10457 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10458 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10460 if (TREE_CODE (arg0
) == ERROR_MARK
10461 || TREE_CODE (arg1
) == ERROR_MARK
10462 || TREE_CODE (arg2
) == ERROR_MARK
)
10465 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
10472 CASE_FLT_FN (BUILT_IN_SINCOS
):
10473 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10475 CASE_FLT_FN (BUILT_IN_REMQUO
):
10476 if (validate_arg (arg0
, REAL_TYPE
)
10477 && validate_arg (arg1
, REAL_TYPE
)
10478 && validate_arg (arg2
, POINTER_TYPE
))
10479 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10482 case BUILT_IN_MEMCMP
:
10483 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);
10485 case BUILT_IN_EXPECT
:
10486 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
, NULL_TREE
);
10488 case BUILT_IN_EXPECT_WITH_PROBABILITY
:
10489 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
, arg2
);
10491 case BUILT_IN_ADD_OVERFLOW
:
10492 case BUILT_IN_SUB_OVERFLOW
:
10493 case BUILT_IN_MUL_OVERFLOW
:
10494 case BUILT_IN_ADD_OVERFLOW_P
:
10495 case BUILT_IN_SUB_OVERFLOW_P
:
10496 case BUILT_IN_MUL_OVERFLOW_P
:
10497 case BUILT_IN_SADD_OVERFLOW
:
10498 case BUILT_IN_SADDL_OVERFLOW
:
10499 case BUILT_IN_SADDLL_OVERFLOW
:
10500 case BUILT_IN_SSUB_OVERFLOW
:
10501 case BUILT_IN_SSUBL_OVERFLOW
:
10502 case BUILT_IN_SSUBLL_OVERFLOW
:
10503 case BUILT_IN_SMUL_OVERFLOW
:
10504 case BUILT_IN_SMULL_OVERFLOW
:
10505 case BUILT_IN_SMULLL_OVERFLOW
:
10506 case BUILT_IN_UADD_OVERFLOW
:
10507 case BUILT_IN_UADDL_OVERFLOW
:
10508 case BUILT_IN_UADDLL_OVERFLOW
:
10509 case BUILT_IN_USUB_OVERFLOW
:
10510 case BUILT_IN_USUBL_OVERFLOW
:
10511 case BUILT_IN_USUBLL_OVERFLOW
:
10512 case BUILT_IN_UMUL_OVERFLOW
:
10513 case BUILT_IN_UMULL_OVERFLOW
:
10514 case BUILT_IN_UMULLL_OVERFLOW
:
10515 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
10523 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10524 ARGS is an array of NARGS arguments. IGNORE is true if the result
10525 of the function call is ignored. This function returns NULL_TREE
10526 if no simplification was possible. */
10529 fold_builtin_n (location_t loc
, tree expr
, tree fndecl
, tree
*args
,
10532 tree ret
= NULL_TREE
;
10537 ret
= fold_builtin_0 (loc
, fndecl
);
10540 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
10543 ret
= fold_builtin_2 (loc
, expr
, fndecl
, args
[0], args
[1]);
10546 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
10549 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
10554 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10555 SET_EXPR_LOCATION (ret
, loc
);
10561 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10562 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10563 of arguments in ARGS to be omitted. OLDNARGS is the number of
10564 elements in ARGS. */
10567 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
10568 int skip
, tree fndecl
, int n
, va_list newargs
)
10570 int nargs
= oldnargs
- skip
+ n
;
10577 buffer
= XALLOCAVEC (tree
, nargs
);
10578 for (i
= 0; i
< n
; i
++)
10579 buffer
[i
] = va_arg (newargs
, tree
);
10580 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10581 buffer
[i
] = args
[j
];
10584 buffer
= args
+ skip
;
10586 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
10589 /* Return true if FNDECL shouldn't be folded right now.
10590 If a built-in function has an inline attribute always_inline
10591 wrapper, defer folding it after always_inline functions have
10592 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10593 might not be performed. */
10596 avoid_folding_inline_builtin (tree fndecl
)
10598 return (DECL_DECLARED_INLINE_P (fndecl
)
10599 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
10601 && !cfun
->always_inline_functions_inlined
10602 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
10605 /* A wrapper function for builtin folding that prevents warnings for
10606 "statement without effect" and the like, caused by removing the
10607 call node earlier than the warning is generated. */
10610 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10612 tree ret
= NULL_TREE
;
10613 tree fndecl
= get_callee_fndecl (exp
);
10614 if (fndecl
&& fndecl_built_in_p (fndecl
)
10615 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10616 yet. Defer folding until we see all the arguments
10617 (after inlining). */
10618 && !CALL_EXPR_VA_ARG_PACK (exp
))
10620 int nargs
= call_expr_nargs (exp
);
10622 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10623 instead last argument is __builtin_va_arg_pack (). Defer folding
10624 even in that case, until arguments are finalized. */
10625 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10627 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10628 if (fndecl2
&& fndecl_built_in_p (fndecl2
, BUILT_IN_VA_ARG_PACK
))
10632 if (avoid_folding_inline_builtin (fndecl
))
10635 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10636 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10637 CALL_EXPR_ARGP (exp
), ignore
);
10640 tree
*args
= CALL_EXPR_ARGP (exp
);
10641 ret
= fold_builtin_n (loc
, exp
, fndecl
, args
, nargs
, ignore
);
10649 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10650 N arguments are passed in the array ARGARRAY. Return a folded
10651 expression or NULL_TREE if no simplification was possible. */
10654 fold_builtin_call_array (location_t loc
, tree
,
10659 if (TREE_CODE (fn
) != ADDR_EXPR
)
10662 tree fndecl
= TREE_OPERAND (fn
, 0);
10663 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10664 && fndecl_built_in_p (fndecl
))
10666 /* If last argument is __builtin_va_arg_pack (), arguments to this
10667 function are not finalized yet. Defer folding until they are. */
10668 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10670 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10671 if (fndecl2
&& fndecl_built_in_p (fndecl2
, BUILT_IN_VA_ARG_PACK
))
10674 if (avoid_folding_inline_builtin (fndecl
))
10676 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10677 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10679 return fold_builtin_n (loc
, NULL_TREE
, fndecl
, argarray
, n
, false);
10685 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10686 along with N new arguments specified as the "..." parameters. SKIP
10687 is the number of arguments in EXP to be omitted. This function is used
10688 to do varargs-to-varargs transformations. */
10691 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
10697 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
10698 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
10704 /* Validate a single argument ARG against a tree code CODE representing
10705 a type. Return true when argument is valid. */
10708 validate_arg (const_tree arg
, enum tree_code code
)
10712 else if (code
== POINTER_TYPE
)
10713 return POINTER_TYPE_P (TREE_TYPE (arg
));
10714 else if (code
== INTEGER_TYPE
)
10715 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
10716 return code
== TREE_CODE (TREE_TYPE (arg
));
10719 /* This function validates the types of a function call argument list
10720 against a specified list of tree_codes. If the last specifier is a 0,
10721 that represents an ellipses, otherwise the last specifier must be a
10724 This is the GIMPLE version of validate_arglist. Eventually we want to
10725 completely convert builtins.c to work from GIMPLEs and the tree based
10726 validate_arglist will then be removed. */
10729 validate_gimple_arglist (const gcall
*call
, ...)
10731 enum tree_code code
;
10737 va_start (ap
, call
);
10742 code
= (enum tree_code
) va_arg (ap
, int);
10746 /* This signifies an ellipses, any further arguments are all ok. */
10750 /* This signifies an endlink, if no arguments remain, return
10751 true, otherwise return false. */
10752 res
= (i
== gimple_call_num_args (call
));
10755 /* If no parameters remain or the parameter's code does not
10756 match the specified code, return false. Otherwise continue
10757 checking any remaining arguments. */
10758 arg
= gimple_call_arg (call
, i
++);
10759 if (!validate_arg (arg
, code
))
10766 /* We need gotos here since we can only have one VA_CLOSE in a
10774 /* Default target-specific builtin expander that does nothing. */
10777 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10778 rtx target ATTRIBUTE_UNUSED
,
10779 rtx subtarget ATTRIBUTE_UNUSED
,
10780 machine_mode mode ATTRIBUTE_UNUSED
,
10781 int ignore ATTRIBUTE_UNUSED
)
10786 /* Returns true is EXP represents data that would potentially reside
10787 in a readonly section. */
10790 readonly_data_expr (tree exp
)
10794 if (TREE_CODE (exp
) != ADDR_EXPR
)
10797 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10801 /* Make sure we call decl_readonly_section only for trees it
10802 can handle (since it returns true for everything it doesn't
10804 if (TREE_CODE (exp
) == STRING_CST
10805 || TREE_CODE (exp
) == CONSTRUCTOR
10806 || (VAR_P (exp
) && TREE_STATIC (exp
)))
10807 return decl_readonly_section (exp
, 0);
10812 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10813 to the call, and TYPE is its return type.
10815 Return NULL_TREE if no simplification was possible, otherwise return the
10816 simplified form of the call as a tree.
10818 The simplified form may be a constant or other expression which
10819 computes the same value, but in a more efficient manner (including
10820 calls to other builtin functions).
10822 The call may contain arguments which need to be evaluated, but
10823 which are not useful to determine the result of the call. In
10824 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10825 COMPOUND_EXPR will be an argument which must be evaluated.
10826 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10827 COMPOUND_EXPR in the chain will contain the tree for the simplified
10828 form of the builtin function call. */
10831 fold_builtin_strpbrk (location_t loc
, tree expr
, tree s1
, tree s2
, tree type
)
10833 if (!validate_arg (s1
, POINTER_TYPE
)
10834 || !validate_arg (s2
, POINTER_TYPE
))
10837 if (!check_nul_terminated_array (expr
, s1
)
10838 || !check_nul_terminated_array (expr
, s2
))
10842 const char *p1
, *p2
;
10844 p2
= c_getstr (s2
);
10848 p1
= c_getstr (s1
);
10851 const char *r
= strpbrk (p1
, p2
);
10855 return build_int_cst (TREE_TYPE (s1
), 0);
10857 /* Return an offset into the constant string argument. */
10858 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10859 return fold_convert_loc (loc
, type
, tem
);
10863 /* strpbrk(x, "") == NULL.
10864 Evaluate and ignore s1 in case it had side-effects. */
10865 return omit_one_operand_loc (loc
, type
, integer_zero_node
, s1
);
10868 return NULL_TREE
; /* Really call strpbrk. */
10870 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10874 /* New argument list transforming strpbrk(s1, s2) to
10875 strchr(s1, s2[0]). */
10876 return build_call_expr_loc (loc
, fn
, 2, s1
,
10877 build_int_cst (integer_type_node
, p2
[0]));
10880 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10883 Return NULL_TREE if no simplification was possible, otherwise return the
10884 simplified form of the call as a tree.
10886 The simplified form may be a constant or other expression which
10887 computes the same value, but in a more efficient manner (including
10888 calls to other builtin functions).
10890 The call may contain arguments which need to be evaluated, but
10891 which are not useful to determine the result of the call. In
10892 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10893 COMPOUND_EXPR will be an argument which must be evaluated.
10894 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10895 COMPOUND_EXPR in the chain will contain the tree for the simplified
10896 form of the builtin function call. */
10899 fold_builtin_strspn (location_t loc
, tree expr
, tree s1
, tree s2
)
10901 if (!validate_arg (s1
, POINTER_TYPE
)
10902 || !validate_arg (s2
, POINTER_TYPE
))
10905 if (!check_nul_terminated_array (expr
, s1
)
10906 || !check_nul_terminated_array (expr
, s2
))
10909 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10911 /* If either argument is "", return NULL_TREE. */
10912 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10913 /* Evaluate and ignore both arguments in case either one has
10915 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
10920 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10923 Return NULL_TREE if no simplification was possible, otherwise return the
10924 simplified form of the call as a tree.
10926 The simplified form may be a constant or other expression which
10927 computes the same value, but in a more efficient manner (including
10928 calls to other builtin functions).
10930 The call may contain arguments which need to be evaluated, but
10931 which are not useful to determine the result of the call. In
10932 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10933 COMPOUND_EXPR will be an argument which must be evaluated.
10934 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10935 COMPOUND_EXPR in the chain will contain the tree for the simplified
10936 form of the builtin function call. */
10939 fold_builtin_strcspn (location_t loc
, tree expr
, tree s1
, tree s2
)
10941 if (!validate_arg (s1
, POINTER_TYPE
)
10942 || !validate_arg (s2
, POINTER_TYPE
))
10945 if (!check_nul_terminated_array (expr
, s1
)
10946 || !check_nul_terminated_array (expr
, s2
))
10949 /* If the first argument is "", return NULL_TREE. */
10950 const char *p1
= c_getstr (s1
);
10951 if (p1
&& *p1
== '\0')
10953 /* Evaluate and ignore argument s2 in case it has
10955 return omit_one_operand_loc (loc
, size_type_node
,
10956 size_zero_node
, s2
);
10959 /* If the second argument is "", return __builtin_strlen(s1). */
10960 const char *p2
= c_getstr (s2
);
10961 if (p2
&& *p2
== '\0')
10963 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
10965 /* If the replacement _DECL isn't initialized, don't do the
10970 return build_call_expr_loc (loc
, fn
, 1, s1
);
10975 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10976 produced. False otherwise. This is done so that we don't output the error
10977 or warning twice or three times. */
10980 fold_builtin_next_arg (tree exp
, bool va_start_p
)
10982 tree fntype
= TREE_TYPE (current_function_decl
);
10983 int nargs
= call_expr_nargs (exp
);
10985 /* There is good chance the current input_location points inside the
10986 definition of the va_start macro (perhaps on the token for
10987 builtin) in a system header, so warnings will not be emitted.
10988 Use the location in real source code. */
10989 location_t current_location
=
10990 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
10993 if (!stdarg_p (fntype
))
10995 error ("%<va_start%> used in function with fixed arguments");
11001 if (va_start_p
&& (nargs
!= 2))
11003 error ("wrong number of arguments to function %<va_start%>");
11006 arg
= CALL_EXPR_ARG (exp
, 1);
11008 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11009 when we checked the arguments and if needed issued a warning. */
11014 /* Evidently an out of date version of <stdarg.h>; can't validate
11015 va_start's second argument, but can still work as intended. */
11016 warning_at (current_location
,
11018 "%<__builtin_next_arg%> called without an argument");
11021 else if (nargs
> 1)
11023 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11026 arg
= CALL_EXPR_ARG (exp
, 0);
11029 if (TREE_CODE (arg
) == SSA_NAME
)
11030 arg
= SSA_NAME_VAR (arg
);
11032 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11033 or __builtin_next_arg (0) the first time we see it, after checking
11034 the arguments and if needed issuing a warning. */
11035 if (!integer_zerop (arg
))
11037 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11039 /* Strip off all nops for the sake of the comparison. This
11040 is not quite the same as STRIP_NOPS. It does more.
11041 We must also strip off INDIRECT_EXPR for C++ reference
11043 while (CONVERT_EXPR_P (arg
)
11044 || TREE_CODE (arg
) == INDIRECT_REF
)
11045 arg
= TREE_OPERAND (arg
, 0);
11046 if (arg
!= last_parm
)
11048 /* FIXME: Sometimes with the tree optimizers we can get the
11049 not the last argument even though the user used the last
11050 argument. We just warn and set the arg to be the last
11051 argument so that we will get wrong-code because of
11053 warning_at (current_location
,
11055 "second parameter of %<va_start%> not last named argument");
11058 /* Undefined by C99 7.15.1.4p4 (va_start):
11059 "If the parameter parmN is declared with the register storage
11060 class, with a function or array type, or with a type that is
11061 not compatible with the type that results after application of
11062 the default argument promotions, the behavior is undefined."
11064 else if (DECL_REGISTER (arg
))
11066 warning_at (current_location
,
11068 "undefined behavior when second parameter of "
11069 "%<va_start%> is declared with %<register%> storage");
11072 /* We want to verify the second parameter just once before the tree
11073 optimizers are run and then avoid keeping it in the tree,
11074 as otherwise we could warn even for correct code like:
11075 void foo (int i, ...)
11076 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11078 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11080 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11086 /* Expand a call EXP to __builtin_object_size. */
11089 expand_builtin_object_size (tree exp
)
11092 int object_size_type
;
11093 tree fndecl
= get_callee_fndecl (exp
);
11095 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11097 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
11099 expand_builtin_trap ();
11103 ost
= CALL_EXPR_ARG (exp
, 1);
11106 if (TREE_CODE (ost
) != INTEGER_CST
11107 || tree_int_cst_sgn (ost
) < 0
11108 || compare_tree_int (ost
, 3) > 0)
11110 error ("%Klast argument of %qD is not integer constant between 0 and 3",
11112 expand_builtin_trap ();
11116 object_size_type
= tree_to_shwi (ost
);
11118 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11121 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11122 FCODE is the BUILT_IN_* to use.
11123 Return NULL_RTX if we failed; the caller should emit a normal call,
11124 otherwise try to get the result in TARGET, if convenient (and in
11125 mode MODE if that's convenient). */
11128 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
11129 enum built_in_function fcode
)
11131 if (!validate_arglist (exp
,
11133 fcode
== BUILT_IN_MEMSET_CHK
11134 ? INTEGER_TYPE
: POINTER_TYPE
,
11135 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11138 tree dest
= CALL_EXPR_ARG (exp
, 0);
11139 tree src
= CALL_EXPR_ARG (exp
, 1);
11140 tree len
= CALL_EXPR_ARG (exp
, 2);
11141 tree size
= CALL_EXPR_ARG (exp
, 3);
11143 bool sizes_ok
= check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
,
11144 /*str=*/NULL_TREE
, size
);
11146 if (!tree_fits_uhwi_p (size
))
11149 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
11151 /* Avoid transforming the checking call to an ordinary one when
11152 an overflow has been detected or when the call couldn't be
11153 validated because the size is not constant. */
11154 if (!sizes_ok
&& !integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11157 tree fn
= NULL_TREE
;
11158 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11159 mem{cpy,pcpy,move,set} is available. */
11162 case BUILT_IN_MEMCPY_CHK
:
11163 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
11165 case BUILT_IN_MEMPCPY_CHK
:
11166 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
11168 case BUILT_IN_MEMMOVE_CHK
:
11169 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
11171 case BUILT_IN_MEMSET_CHK
:
11172 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
11181 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
11182 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11183 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11184 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11186 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11190 unsigned int dest_align
= get_pointer_alignment (dest
);
11192 /* If DEST is not a pointer type, call the normal function. */
11193 if (dest_align
== 0)
11196 /* If SRC and DEST are the same (and not volatile), do nothing. */
11197 if (operand_equal_p (src
, dest
, 0))
11201 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11203 /* Evaluate and ignore LEN in case it has side-effects. */
11204 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11205 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11208 expr
= fold_build_pointer_plus (dest
, len
);
11209 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11212 /* __memmove_chk special case. */
11213 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11215 unsigned int src_align
= get_pointer_alignment (src
);
11217 if (src_align
== 0)
11220 /* If src is categorized for a readonly section we can use
11221 normal __memcpy_chk. */
11222 if (readonly_data_expr (src
))
11224 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
11227 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
11228 dest
, src
, len
, size
);
11229 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11230 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11231 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11238 /* Emit warning if a buffer overflow is detected at compile time. */
11241 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11243 /* The source string. */
11244 tree srcstr
= NULL_TREE
;
11245 /* The size of the destination object. */
11246 tree objsize
= NULL_TREE
;
11247 /* The string that is being concatenated with (as in __strcat_chk)
11248 or null if it isn't. */
11249 tree catstr
= NULL_TREE
;
11250 /* The maximum length of the source sequence in a bounded operation
11251 (such as __strncat_chk) or null if the operation isn't bounded
11252 (such as __strcat_chk). */
11253 tree maxread
= NULL_TREE
;
11254 /* The exact size of the access (such as in __strncpy_chk). */
11255 tree size
= NULL_TREE
;
11259 case BUILT_IN_STRCPY_CHK
:
11260 case BUILT_IN_STPCPY_CHK
:
11261 srcstr
= CALL_EXPR_ARG (exp
, 1);
11262 objsize
= CALL_EXPR_ARG (exp
, 2);
11265 case BUILT_IN_STRCAT_CHK
:
11266 /* For __strcat_chk the warning will be emitted only if overflowing
11267 by at least strlen (dest) + 1 bytes. */
11268 catstr
= CALL_EXPR_ARG (exp
, 0);
11269 srcstr
= CALL_EXPR_ARG (exp
, 1);
11270 objsize
= CALL_EXPR_ARG (exp
, 2);
11273 case BUILT_IN_STRNCAT_CHK
:
11274 catstr
= CALL_EXPR_ARG (exp
, 0);
11275 srcstr
= CALL_EXPR_ARG (exp
, 1);
11276 maxread
= CALL_EXPR_ARG (exp
, 2);
11277 objsize
= CALL_EXPR_ARG (exp
, 3);
11280 case BUILT_IN_STRNCPY_CHK
:
11281 case BUILT_IN_STPNCPY_CHK
:
11282 srcstr
= CALL_EXPR_ARG (exp
, 1);
11283 size
= CALL_EXPR_ARG (exp
, 2);
11284 objsize
= CALL_EXPR_ARG (exp
, 3);
11287 case BUILT_IN_SNPRINTF_CHK
:
11288 case BUILT_IN_VSNPRINTF_CHK
:
11289 maxread
= CALL_EXPR_ARG (exp
, 1);
11290 objsize
= CALL_EXPR_ARG (exp
, 3);
11293 gcc_unreachable ();
11296 if (catstr
&& maxread
)
11298 /* Check __strncat_chk. There is no way to determine the length
11299 of the string to which the source string is being appended so
11300 just warn when the length of the source string is not known. */
11301 check_strncat_sizes (exp
, objsize
);
11305 /* The destination argument is the first one for all built-ins above. */
11306 tree dst
= CALL_EXPR_ARG (exp
, 0);
11308 check_access (exp
, dst
, srcstr
, size
, maxread
, srcstr
, objsize
);
11311 /* Emit warning if a buffer overflow is detected at compile time
11312 in __sprintf_chk/__vsprintf_chk calls. */
11315 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11317 tree size
, len
, fmt
;
11318 const char *fmt_str
;
11319 int nargs
= call_expr_nargs (exp
);
11321 /* Verify the required arguments in the original call. */
11325 size
= CALL_EXPR_ARG (exp
, 2);
11326 fmt
= CALL_EXPR_ARG (exp
, 3);
11328 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11331 /* Check whether the format is a literal string constant. */
11332 fmt_str
= c_getstr (fmt
);
11333 if (fmt_str
== NULL
)
11336 if (!init_target_chars ())
11339 /* If the format doesn't contain % args or %%, we know its size. */
11340 if (strchr (fmt_str
, target_percent
) == 0)
11341 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11342 /* If the format is "%s" and first ... argument is a string literal,
11344 else if (fcode
== BUILT_IN_SPRINTF_CHK
11345 && strcmp (fmt_str
, target_percent_s
) == 0)
11351 arg
= CALL_EXPR_ARG (exp
, 4);
11352 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11355 len
= c_strlen (arg
, 1);
11356 if (!len
|| ! tree_fits_uhwi_p (len
))
11362 /* Add one for the terminating nul. */
11363 len
= fold_build2 (PLUS_EXPR
, TREE_TYPE (len
), len
, size_one_node
);
11365 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, /*size=*/NULL_TREE
,
11366 /*maxread=*/NULL_TREE
, len
, size
);
11369 /* Emit warning if a free is called with address of a variable. */
11372 maybe_emit_free_warning (tree exp
)
11374 if (call_expr_nargs (exp
) != 1)
11377 tree arg
= CALL_EXPR_ARG (exp
, 0);
11380 if (TREE_CODE (arg
) != ADDR_EXPR
)
11383 arg
= get_base_address (TREE_OPERAND (arg
, 0));
11384 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
11387 if (SSA_VAR_P (arg
))
11388 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11389 "%Kattempt to free a non-heap object %qD", exp
, arg
);
11391 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11392 "%Kattempt to free a non-heap object", exp
);
11395 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11399 fold_builtin_object_size (tree ptr
, tree ost
)
11401 unsigned HOST_WIDE_INT bytes
;
11402 int object_size_type
;
11404 if (!validate_arg (ptr
, POINTER_TYPE
)
11405 || !validate_arg (ost
, INTEGER_TYPE
))
11410 if (TREE_CODE (ost
) != INTEGER_CST
11411 || tree_int_cst_sgn (ost
) < 0
11412 || compare_tree_int (ost
, 3) > 0)
11415 object_size_type
= tree_to_shwi (ost
);
11417 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11418 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11419 and (size_t) 0 for types 2 and 3. */
11420 if (TREE_SIDE_EFFECTS (ptr
))
11421 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11423 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11425 compute_builtin_object_size (ptr
, object_size_type
, &bytes
);
11426 if (wi::fits_to_tree_p (bytes
, size_type_node
))
11427 return build_int_cstu (size_type_node
, bytes
);
11429 else if (TREE_CODE (ptr
) == SSA_NAME
)
11431 /* If object size is not known yet, delay folding until
11432 later. Maybe subsequent passes will help determining
11434 if (compute_builtin_object_size (ptr
, object_size_type
, &bytes
)
11435 && wi::fits_to_tree_p (bytes
, size_type_node
))
11436 return build_int_cstu (size_type_node
, bytes
);
11442 /* Builtins with folding operations that operate on "..." arguments
11443 need special handling; we need to store the arguments in a convenient
11444 data structure before attempting any folding. Fortunately there are
11445 only a few builtins that fall into this category. FNDECL is the
11446 function, EXP is the CALL_EXPR for the call. */
11449 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
11451 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11452 tree ret
= NULL_TREE
;
11456 case BUILT_IN_FPCLASSIFY
:
11457 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
11465 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11466 SET_EXPR_LOCATION (ret
, loc
);
11467 TREE_NO_WARNING (ret
) = 1;
11473 /* Initialize format string characters in the target charset. */
11476 init_target_chars (void)
11481 target_newline
= lang_hooks
.to_target_charset ('\n');
11482 target_percent
= lang_hooks
.to_target_charset ('%');
11483 target_c
= lang_hooks
.to_target_charset ('c');
11484 target_s
= lang_hooks
.to_target_charset ('s');
11485 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
11489 target_percent_c
[0] = target_percent
;
11490 target_percent_c
[1] = target_c
;
11491 target_percent_c
[2] = '\0';
11493 target_percent_s
[0] = target_percent
;
11494 target_percent_s
[1] = target_s
;
11495 target_percent_s
[2] = '\0';
11497 target_percent_s_newline
[0] = target_percent
;
11498 target_percent_s_newline
[1] = target_s
;
11499 target_percent_s_newline
[2] = target_newline
;
11500 target_percent_s_newline
[3] = '\0';
11507 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11508 and no overflow/underflow occurred. INEXACT is true if M was not
11509 exactly calculated. TYPE is the tree type for the result. This
11510 function assumes that you cleared the MPFR flags and then
11511 calculated M to see if anything subsequently set a flag prior to
11512 entering this function. Return NULL_TREE if any checks fail. */
11515 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
11517 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11518 overflow/underflow occurred. If -frounding-math, proceed iff the
11519 result of calling FUNC was exact. */
11520 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11521 && (!flag_rounding_math
|| !inexact
))
11523 REAL_VALUE_TYPE rr
;
11525 real_from_mpfr (&rr
, m
, type
, MPFR_RNDN
);
11526 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11527 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11528 but the mpft_t is not, then we underflowed in the
11530 if (real_isfinite (&rr
)
11531 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
11533 REAL_VALUE_TYPE rmode
;
11535 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
11536 /* Proceed iff the specified mode can hold the value. */
11537 if (real_identical (&rmode
, &rr
))
11538 return build_real (type
, rmode
);
11544 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11545 number and no overflow/underflow occurred. INEXACT is true if M
11546 was not exactly calculated. TYPE is the tree type for the result.
11547 This function assumes that you cleared the MPFR flags and then
11548 calculated M to see if anything subsequently set a flag prior to
11549 entering this function. Return NULL_TREE if any checks fail, if
11550 FORCE_CONVERT is true, then bypass the checks. */
11553 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
11555 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11556 overflow/underflow occurred. If -frounding-math, proceed iff the
11557 result of calling FUNC was exact. */
11559 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
11560 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11561 && (!flag_rounding_math
|| !inexact
)))
11563 REAL_VALUE_TYPE re
, im
;
11565 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), MPFR_RNDN
);
11566 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), MPFR_RNDN
);
11567 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11568 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11569 but the mpft_t is not, then we underflowed in the
11572 || (real_isfinite (&re
) && real_isfinite (&im
)
11573 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
11574 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
11576 REAL_VALUE_TYPE re_mode
, im_mode
;
11578 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
11579 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
11580 /* Proceed iff the specified mode can hold the value. */
11582 || (real_identical (&re_mode
, &re
)
11583 && real_identical (&im_mode
, &im
)))
11584 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
11585 build_real (TREE_TYPE (type
), im_mode
));
11591 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11592 the pointer *(ARG_QUO) and return the result. The type is taken
11593 from the type of ARG0 and is used for setting the precision of the
11594 calculation and results. */
11597 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
11599 tree
const type
= TREE_TYPE (arg0
);
11600 tree result
= NULL_TREE
;
11605 /* To proceed, MPFR must exactly represent the target floating point
11606 format, which only happens when the target base equals two. */
11607 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11608 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
11609 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
11611 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
11612 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
11614 if (real_isfinite (ra0
) && real_isfinite (ra1
))
11616 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11617 const int prec
= fmt
->p
;
11618 const mpfr_rnd_t rnd
= fmt
->round_towards_zero
? MPFR_RNDZ
: MPFR_RNDN
;
11623 mpfr_inits2 (prec
, m0
, m1
, NULL
);
11624 mpfr_from_real (m0
, ra0
, MPFR_RNDN
);
11625 mpfr_from_real (m1
, ra1
, MPFR_RNDN
);
11626 mpfr_clear_flags ();
11627 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
11628 /* Remquo is independent of the rounding mode, so pass
11629 inexact=0 to do_mpfr_ckconv(). */
11630 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
11631 mpfr_clears (m0
, m1
, NULL
);
11634 /* MPFR calculates quo in the host's long so it may
11635 return more bits in quo than the target int can hold
11636 if sizeof(host long) > sizeof(target int). This can
11637 happen even for native compilers in LP64 mode. In
11638 these cases, modulo the quo value with the largest
11639 number that the target int can hold while leaving one
11640 bit for the sign. */
11641 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
11642 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
11644 /* Dereference the quo pointer argument. */
11645 arg_quo
= build_fold_indirect_ref (arg_quo
);
11646 /* Proceed iff a valid pointer type was passed in. */
11647 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
11649 /* Set the value. */
11651 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
11652 build_int_cst (TREE_TYPE (arg_quo
),
11654 TREE_SIDE_EFFECTS (result_quo
) = 1;
11655 /* Combine the quo assignment with the rem. */
11656 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11657 result_quo
, result_rem
));
11665 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11666 resulting value as a tree with type TYPE. The mpfr precision is
11667 set to the precision of TYPE. We assume that this mpfr function
11668 returns zero if the result could be calculated exactly within the
11669 requested precision. In addition, the integer pointer represented
11670 by ARG_SG will be dereferenced and set to the appropriate signgam
11674 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
11676 tree result
= NULL_TREE
;
11680 /* To proceed, MPFR must exactly represent the target floating point
11681 format, which only happens when the target base equals two. Also
11682 verify ARG is a constant and that ARG_SG is an int pointer. */
11683 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11684 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
11685 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
11686 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
11688 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
11690 /* In addition to NaN and Inf, the argument cannot be zero or a
11691 negative integer. */
11692 if (real_isfinite (ra
)
11693 && ra
->cl
!= rvc_zero
11694 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
11696 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11697 const int prec
= fmt
->p
;
11698 const mpfr_rnd_t rnd
= fmt
->round_towards_zero
? MPFR_RNDZ
: MPFR_RNDN
;
11703 mpfr_init2 (m
, prec
);
11704 mpfr_from_real (m
, ra
, MPFR_RNDN
);
11705 mpfr_clear_flags ();
11706 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
11707 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
11713 /* Dereference the arg_sg pointer argument. */
11714 arg_sg
= build_fold_indirect_ref (arg_sg
);
11715 /* Assign the signgam value into *arg_sg. */
11716 result_sg
= fold_build2 (MODIFY_EXPR
,
11717 TREE_TYPE (arg_sg
), arg_sg
,
11718 build_int_cst (TREE_TYPE (arg_sg
), sg
));
11719 TREE_SIDE_EFFECTS (result_sg
) = 1;
11720 /* Combine the signgam assignment with the lgamma result. */
11721 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11722 result_sg
, result_lg
));
11730 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11731 mpc function FUNC on it and return the resulting value as a tree
11732 with type TYPE. The mpfr precision is set to the precision of
11733 TYPE. We assume that function FUNC returns zero if the result
11734 could be calculated exactly within the requested precision. If
11735 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11736 in the arguments and/or results. */
11739 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
11740 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
11742 tree result
= NULL_TREE
;
11747 /* To proceed, MPFR must exactly represent the target floating point
11748 format, which only happens when the target base equals two. */
11749 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
11750 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
11751 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
11752 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
11753 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
11755 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
11756 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
11757 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
11758 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
11761 || (real_isfinite (re0
) && real_isfinite (im0
)
11762 && real_isfinite (re1
) && real_isfinite (im1
)))
11764 const struct real_format
*const fmt
=
11765 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11766 const int prec
= fmt
->p
;
11767 const mpfr_rnd_t rnd
= fmt
->round_towards_zero
11768 ? MPFR_RNDZ
: MPFR_RNDN
;
11769 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11773 mpc_init2 (m0
, prec
);
11774 mpc_init2 (m1
, prec
);
11775 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
11776 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
11777 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
11778 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
11779 mpfr_clear_flags ();
11780 inexact
= func (m0
, m0
, m1
, crnd
);
11781 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
11790 /* A wrapper function for builtin folding that prevents warnings for
11791 "statement without effect" and the like, caused by removing the
11792 call node earlier than the warning is generated. */
11795 fold_call_stmt (gcall
*stmt
, bool ignore
)
11797 tree ret
= NULL_TREE
;
11798 tree fndecl
= gimple_call_fndecl (stmt
);
11799 location_t loc
= gimple_location (stmt
);
11800 if (fndecl
&& fndecl_built_in_p (fndecl
)
11801 && !gimple_call_va_arg_pack_p (stmt
))
11803 int nargs
= gimple_call_num_args (stmt
);
11804 tree
*args
= (nargs
> 0
11805 ? gimple_call_arg_ptr (stmt
, 0)
11806 : &error_mark_node
);
11808 if (avoid_folding_inline_builtin (fndecl
))
11810 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11812 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
11816 ret
= fold_builtin_n (loc
, NULL_TREE
, fndecl
, args
, nargs
, ignore
);
11819 /* Propagate location information from original call to
11820 expansion of builtin. Otherwise things like
11821 maybe_emit_chk_warning, that operate on the expansion
11822 of a builtin, will use the wrong location information. */
11823 if (gimple_has_location (stmt
))
11825 tree realret
= ret
;
11826 if (TREE_CODE (ret
) == NOP_EXPR
)
11827 realret
= TREE_OPERAND (ret
, 0);
11828 if (CAN_HAVE_LOCATION_P (realret
)
11829 && !EXPR_HAS_LOCATION (realret
))
11830 SET_EXPR_LOCATION (realret
, loc
);
11840 /* Look up the function in builtin_decl that corresponds to DECL
11841 and set ASMSPEC as its user assembler name. DECL must be a
11842 function decl that declares a builtin. */
11845 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
11847 gcc_assert (fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
11850 tree builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
11851 set_user_assembler_name (builtin
, asmspec
);
11853 if (DECL_FUNCTION_CODE (decl
) == BUILT_IN_FFS
11854 && INT_TYPE_SIZE
< BITS_PER_WORD
)
11856 scalar_int_mode mode
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
11857 set_user_assembler_libfunc ("ffs", asmspec
);
11858 set_optab_libfunc (ffs_optab
, mode
, "ffs");
11862 /* Return true if DECL is a builtin that expands to a constant or similarly
11865 is_simple_builtin (tree decl
)
11867 if (decl
&& fndecl_built_in_p (decl
, BUILT_IN_NORMAL
))
11868 switch (DECL_FUNCTION_CODE (decl
))
11870 /* Builtins that expand to constants. */
11871 case BUILT_IN_CONSTANT_P
:
11872 case BUILT_IN_EXPECT
:
11873 case BUILT_IN_OBJECT_SIZE
:
11874 case BUILT_IN_UNREACHABLE
:
11875 /* Simple register moves or loads from stack. */
11876 case BUILT_IN_ASSUME_ALIGNED
:
11877 case BUILT_IN_RETURN_ADDRESS
:
11878 case BUILT_IN_EXTRACT_RETURN_ADDR
:
11879 case BUILT_IN_FROB_RETURN_ADDR
:
11880 case BUILT_IN_RETURN
:
11881 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
11882 case BUILT_IN_FRAME_ADDRESS
:
11883 case BUILT_IN_VA_END
:
11884 case BUILT_IN_STACK_SAVE
:
11885 case BUILT_IN_STACK_RESTORE
:
11886 /* Exception state returns or moves registers around. */
11887 case BUILT_IN_EH_FILTER
:
11888 case BUILT_IN_EH_POINTER
:
11889 case BUILT_IN_EH_COPY_VALUES
:
11899 /* Return true if DECL is a builtin that is not expensive, i.e., they are
11900 most probably expanded inline into reasonably simple code. This is a
11901 superset of is_simple_builtin. */
11903 is_inexpensive_builtin (tree decl
)
11907 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
11909 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11910 switch (DECL_FUNCTION_CODE (decl
))
11913 CASE_BUILT_IN_ALLOCA
:
11914 case BUILT_IN_BSWAP16
:
11915 case BUILT_IN_BSWAP32
:
11916 case BUILT_IN_BSWAP64
:
11917 case BUILT_IN_BSWAP128
:
11919 case BUILT_IN_CLZIMAX
:
11920 case BUILT_IN_CLZL
:
11921 case BUILT_IN_CLZLL
:
11923 case BUILT_IN_CTZIMAX
:
11924 case BUILT_IN_CTZL
:
11925 case BUILT_IN_CTZLL
:
11927 case BUILT_IN_FFSIMAX
:
11928 case BUILT_IN_FFSL
:
11929 case BUILT_IN_FFSLL
:
11930 case BUILT_IN_IMAXABS
:
11931 case BUILT_IN_FINITE
:
11932 case BUILT_IN_FINITEF
:
11933 case BUILT_IN_FINITEL
:
11934 case BUILT_IN_FINITED32
:
11935 case BUILT_IN_FINITED64
:
11936 case BUILT_IN_FINITED128
:
11937 case BUILT_IN_FPCLASSIFY
:
11938 case BUILT_IN_ISFINITE
:
11939 case BUILT_IN_ISINF_SIGN
:
11940 case BUILT_IN_ISINF
:
11941 case BUILT_IN_ISINFF
:
11942 case BUILT_IN_ISINFL
:
11943 case BUILT_IN_ISINFD32
:
11944 case BUILT_IN_ISINFD64
:
11945 case BUILT_IN_ISINFD128
:
11946 case BUILT_IN_ISNAN
:
11947 case BUILT_IN_ISNANF
:
11948 case BUILT_IN_ISNANL
:
11949 case BUILT_IN_ISNAND32
:
11950 case BUILT_IN_ISNAND64
:
11951 case BUILT_IN_ISNAND128
:
11952 case BUILT_IN_ISNORMAL
:
11953 case BUILT_IN_ISGREATER
:
11954 case BUILT_IN_ISGREATEREQUAL
:
11955 case BUILT_IN_ISLESS
:
11956 case BUILT_IN_ISLESSEQUAL
:
11957 case BUILT_IN_ISLESSGREATER
:
11958 case BUILT_IN_ISUNORDERED
:
11959 case BUILT_IN_VA_ARG_PACK
:
11960 case BUILT_IN_VA_ARG_PACK_LEN
:
11961 case BUILT_IN_VA_COPY
:
11962 case BUILT_IN_TRAP
:
11963 case BUILT_IN_SAVEREGS
:
11964 case BUILT_IN_POPCOUNTL
:
11965 case BUILT_IN_POPCOUNTLL
:
11966 case BUILT_IN_POPCOUNTIMAX
:
11967 case BUILT_IN_POPCOUNT
:
11968 case BUILT_IN_PARITYL
:
11969 case BUILT_IN_PARITYLL
:
11970 case BUILT_IN_PARITYIMAX
:
11971 case BUILT_IN_PARITY
:
11972 case BUILT_IN_LABS
:
11973 case BUILT_IN_LLABS
:
11974 case BUILT_IN_PREFETCH
:
11975 case BUILT_IN_ACC_ON_DEVICE
:
11979 return is_simple_builtin (decl
);
11985 /* Return true if T is a constant and the value cast to a target char
11986 can be represented by a host char.
11987 Store the casted char constant in *P if so. */
11990 target_char_cst_p (tree t
, char *p
)
11992 if (!tree_fits_uhwi_p (t
) || CHAR_TYPE_SIZE
!= HOST_BITS_PER_CHAR
)
11995 *p
= (char)tree_to_uhwi (t
);
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which doesn't guarantee it is not (thus the
   list of handled builtins below may be incomplete).  */
12004 builtin_with_linkage_p (tree decl
)
12006 if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
12007 switch (DECL_FUNCTION_CODE (decl
))
12009 CASE_FLT_FN (BUILT_IN_ACOS
):
12010 CASE_FLT_FN (BUILT_IN_ACOSH
):
12011 CASE_FLT_FN (BUILT_IN_ASIN
):
12012 CASE_FLT_FN (BUILT_IN_ASINH
):
12013 CASE_FLT_FN (BUILT_IN_ATAN
):
12014 CASE_FLT_FN (BUILT_IN_ATANH
):
12015 CASE_FLT_FN (BUILT_IN_ATAN2
):
12016 CASE_FLT_FN (BUILT_IN_CBRT
):
12017 CASE_FLT_FN (BUILT_IN_CEIL
):
12018 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL
):
12019 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
12020 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
12021 CASE_FLT_FN (BUILT_IN_COS
):
12022 CASE_FLT_FN (BUILT_IN_COSH
):
12023 CASE_FLT_FN (BUILT_IN_ERF
):
12024 CASE_FLT_FN (BUILT_IN_ERFC
):
12025 CASE_FLT_FN (BUILT_IN_EXP
):
12026 CASE_FLT_FN (BUILT_IN_EXP2
):
12027 CASE_FLT_FN (BUILT_IN_EXPM1
):
12028 CASE_FLT_FN (BUILT_IN_FABS
):
12029 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
12030 CASE_FLT_FN (BUILT_IN_FDIM
):
12031 CASE_FLT_FN (BUILT_IN_FLOOR
):
12032 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR
):
12033 CASE_FLT_FN (BUILT_IN_FMA
):
12034 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
12035 CASE_FLT_FN (BUILT_IN_FMAX
):
12036 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX
):
12037 CASE_FLT_FN (BUILT_IN_FMIN
):
12038 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN
):
12039 CASE_FLT_FN (BUILT_IN_FMOD
):
12040 CASE_FLT_FN (BUILT_IN_FREXP
):
12041 CASE_FLT_FN (BUILT_IN_HYPOT
):
12042 CASE_FLT_FN (BUILT_IN_ILOGB
):
12043 CASE_FLT_FN (BUILT_IN_LDEXP
):
12044 CASE_FLT_FN (BUILT_IN_LGAMMA
):
12045 CASE_FLT_FN (BUILT_IN_LLRINT
):
12046 CASE_FLT_FN (BUILT_IN_LLROUND
):
12047 CASE_FLT_FN (BUILT_IN_LOG
):
12048 CASE_FLT_FN (BUILT_IN_LOG10
):
12049 CASE_FLT_FN (BUILT_IN_LOG1P
):
12050 CASE_FLT_FN (BUILT_IN_LOG2
):
12051 CASE_FLT_FN (BUILT_IN_LOGB
):
12052 CASE_FLT_FN (BUILT_IN_LRINT
):
12053 CASE_FLT_FN (BUILT_IN_LROUND
):
12054 CASE_FLT_FN (BUILT_IN_MODF
):
12055 CASE_FLT_FN (BUILT_IN_NAN
):
12056 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
12057 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT
):
12058 CASE_FLT_FN (BUILT_IN_NEXTAFTER
):
12059 CASE_FLT_FN (BUILT_IN_NEXTTOWARD
):
12060 CASE_FLT_FN (BUILT_IN_POW
):
12061 CASE_FLT_FN (BUILT_IN_REMAINDER
):
12062 CASE_FLT_FN (BUILT_IN_REMQUO
):
12063 CASE_FLT_FN (BUILT_IN_RINT
):
12064 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT
):
12065 CASE_FLT_FN (BUILT_IN_ROUND
):
12066 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND
):
12067 CASE_FLT_FN (BUILT_IN_SCALBLN
):
12068 CASE_FLT_FN (BUILT_IN_SCALBN
):
12069 CASE_FLT_FN (BUILT_IN_SIN
):
12070 CASE_FLT_FN (BUILT_IN_SINH
):
12071 CASE_FLT_FN (BUILT_IN_SINCOS
):
12072 CASE_FLT_FN (BUILT_IN_SQRT
):
12073 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT
):
12074 CASE_FLT_FN (BUILT_IN_TAN
):
12075 CASE_FLT_FN (BUILT_IN_TANH
):
12076 CASE_FLT_FN (BUILT_IN_TGAMMA
):
12077 CASE_FLT_FN (BUILT_IN_TRUNC
):
12078 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC
):