1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 /* Define the names of the builtin function types and codes. */
60 const char *const built_in_class_names
[4]
61 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
63 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
64 const char * built_in_names
[(int) END_BUILTINS
] =
66 #include "builtins.def"
70 /* Setup an array of _DECL trees, make sure each element is
71 initialized to NULL_TREE. */
72 tree built_in_decls
[(int) END_BUILTINS
];
73 /* Declarations used when constructing the builtin implicitly in the compiler.
74 It may be NULL_TREE when this is invalid (for instance runtime is not
75 required to implement the function call in all cases). */
76 tree implicit_built_in_decls
[(int) END_BUILTINS
];
78 static const char *c_getstr (tree
);
79 static rtx
c_readstr (const char *, enum machine_mode
);
80 static int target_char_cast (tree
, char *);
81 static rtx
get_memory_rtx (tree
, tree
);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx
result_vector (int, rtx
);
87 static void expand_builtin_update_setjmp_buf (rtx
);
88 static void expand_builtin_prefetch (tree
);
89 static rtx
expand_builtin_apply_args (void);
90 static rtx
expand_builtin_apply_args_1 (void);
91 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
92 static void expand_builtin_return (rtx
);
93 static enum type_class
type_to_class (tree
);
94 static rtx
expand_builtin_classify_type (tree
);
95 static void expand_errno_check (tree
, rtx
);
96 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
97 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
98 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_sincos (tree
);
101 static rtx
expand_builtin_cexpi (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_int_roundingfn (tree
, rtx
, rtx
);
103 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
, rtx
);
104 static rtx
expand_builtin_args_info (tree
);
105 static rtx
expand_builtin_next_arg (void);
106 static rtx
expand_builtin_va_start (tree
);
107 static rtx
expand_builtin_va_end (tree
);
108 static rtx
expand_builtin_va_copy (tree
);
109 static rtx
expand_builtin_memchr (tree
, rtx
, enum machine_mode
);
110 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
111 static rtx
expand_builtin_strcmp (tree
, rtx
, enum machine_mode
);
112 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
113 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
114 static rtx
expand_builtin_strcat (tree
, tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_strncat (tree
, rtx
, enum machine_mode
);
116 static rtx
expand_builtin_strspn (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_strcspn (tree
, rtx
, enum machine_mode
);
118 static rtx
expand_builtin_memcpy (tree
, rtx
, enum machine_mode
);
119 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
120 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, tree
, rtx
,
121 enum machine_mode
, int);
122 static rtx
expand_builtin_memmove (tree
, rtx
, enum machine_mode
, int);
123 static rtx
expand_builtin_memmove_args (tree
, tree
, tree
, tree
, rtx
,
124 enum machine_mode
, int);
125 static rtx
expand_builtin_bcopy (tree
, int);
126 static rtx
expand_builtin_strcpy (tree
, tree
, rtx
, enum machine_mode
);
127 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
129 static rtx
builtin_strncpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
130 static rtx
expand_builtin_strncpy (tree
, rtx
, enum machine_mode
);
131 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
132 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
133 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
134 static rtx
expand_builtin_bzero (tree
);
135 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
136 static rtx
expand_builtin_strstr (tree
, rtx
, enum machine_mode
);
137 static rtx
expand_builtin_strpbrk (tree
, rtx
, enum machine_mode
);
138 static rtx
expand_builtin_strchr (tree
, rtx
, enum machine_mode
);
139 static rtx
expand_builtin_strrchr (tree
, rtx
, enum machine_mode
);
140 static rtx
expand_builtin_alloca (tree
, rtx
);
141 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
142 static rtx
expand_builtin_frame_address (tree
, tree
);
143 static rtx
expand_builtin_fputs (tree
, rtx
, bool);
144 static rtx
expand_builtin_printf (tree
, rtx
, enum machine_mode
, bool);
145 static rtx
expand_builtin_fprintf (tree
, rtx
, enum machine_mode
, bool);
146 static rtx
expand_builtin_sprintf (tree
, rtx
, enum machine_mode
);
147 static tree
stabilize_va_list (tree
, int);
148 static rtx
expand_builtin_expect (tree
, rtx
);
149 static tree
fold_builtin_constant_p (tree
);
150 static tree
fold_builtin_expect (tree
);
151 static tree
fold_builtin_classify_type (tree
);
152 static tree
fold_builtin_strlen (tree
);
153 static tree
fold_builtin_inf (tree
, int);
154 static tree
fold_builtin_nan (tree
, tree
, int);
155 static tree
rewrite_call_expr (tree
, int, tree
, int, ...);
156 static bool validate_arg (tree
, enum tree_code code
);
157 static bool integer_valued_real_p (tree
);
158 static tree
fold_trunc_transparent_mathfn (tree
, tree
);
159 static bool readonly_data_expr (tree
);
160 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
161 static rtx
expand_builtin_signbit (tree
, rtx
);
162 static tree
fold_builtin_sqrt (tree
, tree
);
163 static tree
fold_builtin_cbrt (tree
, tree
);
164 static tree
fold_builtin_pow (tree
, tree
, tree
, tree
);
165 static tree
fold_builtin_powi (tree
, tree
, tree
, tree
);
166 static tree
fold_builtin_cos (tree
, tree
, tree
);
167 static tree
fold_builtin_cosh (tree
, tree
, tree
);
168 static tree
fold_builtin_tan (tree
, tree
);
169 static tree
fold_builtin_trunc (tree
, tree
);
170 static tree
fold_builtin_floor (tree
, tree
);
171 static tree
fold_builtin_ceil (tree
, tree
);
172 static tree
fold_builtin_round (tree
, tree
);
173 static tree
fold_builtin_int_roundingfn (tree
, tree
);
174 static tree
fold_builtin_bitop (tree
, tree
);
175 static tree
fold_builtin_memory_op (tree
, tree
, tree
, tree
, bool, int);
176 static tree
fold_builtin_strchr (tree
, tree
, tree
);
177 static tree
fold_builtin_memchr (tree
, tree
, tree
, tree
);
178 static tree
fold_builtin_memcmp (tree
, tree
, tree
);
179 static tree
fold_builtin_strcmp (tree
, tree
);
180 static tree
fold_builtin_strncmp (tree
, tree
, tree
);
181 static tree
fold_builtin_signbit (tree
, tree
);
182 static tree
fold_builtin_copysign (tree
, tree
, tree
, tree
);
183 static tree
fold_builtin_isascii (tree
);
184 static tree
fold_builtin_toascii (tree
);
185 static tree
fold_builtin_isdigit (tree
);
186 static tree
fold_builtin_fabs (tree
, tree
);
187 static tree
fold_builtin_abs (tree
, tree
);
188 static tree
fold_builtin_unordered_cmp (tree
, tree
, tree
, enum tree_code
,
190 static tree
fold_builtin_n (tree
, tree
*, int, bool);
191 static tree
fold_builtin_0 (tree
, bool);
192 static tree
fold_builtin_1 (tree
, tree
, bool);
193 static tree
fold_builtin_2 (tree
, tree
, tree
, bool);
194 static tree
fold_builtin_3 (tree
, tree
, tree
, tree
, bool);
195 static tree
fold_builtin_4 (tree
, tree
, tree
, tree
, tree
, bool);
196 static tree
fold_builtin_varargs (tree
, tree
, bool);
198 static tree
fold_builtin_strpbrk (tree
, tree
, tree
);
199 static tree
fold_builtin_strstr (tree
, tree
, tree
);
200 static tree
fold_builtin_strrchr (tree
, tree
, tree
);
201 static tree
fold_builtin_strcat (tree
, tree
);
202 static tree
fold_builtin_strncat (tree
, tree
, tree
);
203 static tree
fold_builtin_strspn (tree
, tree
);
204 static tree
fold_builtin_strcspn (tree
, tree
);
205 static tree
fold_builtin_sprintf (tree
, tree
, tree
, int);
207 static rtx
expand_builtin_object_size (tree
);
208 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
209 enum built_in_function
);
210 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
211 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
212 static tree
fold_builtin_object_size (tree
, tree
);
213 static tree
fold_builtin_strcat_chk (tree
, tree
, tree
, tree
);
214 static tree
fold_builtin_strncat_chk (tree
, tree
, tree
, tree
, tree
);
215 static tree
fold_builtin_sprintf_chk (tree
, enum built_in_function
);
216 static tree
fold_builtin_printf (tree
, tree
, tree
, bool, enum built_in_function
);
217 static tree
fold_builtin_fprintf (tree
, tree
, tree
, tree
, bool,
218 enum built_in_function
);
219 static bool init_target_chars (void);
221 static unsigned HOST_WIDE_INT target_newline
;
222 static unsigned HOST_WIDE_INT target_percent
;
223 static unsigned HOST_WIDE_INT target_c
;
224 static unsigned HOST_WIDE_INT target_s
;
225 static char target_percent_c
[3];
226 static char target_percent_s
[3];
227 static char target_percent_s_newline
[4];
228 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
229 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
230 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
231 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
232 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
233 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
234 static tree
do_mpfr_sincos (tree
, tree
, tree
);
235 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
236 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
237 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
238 const REAL_VALUE_TYPE
*, bool);
239 static tree
do_mpfr_remquo (tree
, tree
, tree
);
240 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
243 /* This array records the insn_code of insns to imlement the signbit
245 enum insn_code signbit_optab
[NUM_MACHINE_MODES
];
248 /* Return true if NODE should be considered for inline expansion regardless
249 of the optimization level. This means whenever a function is invoked with
250 its "internal" name, which normally contains the prefix "__builtin". */
/* NOTE(review): this extraction is garbled -- original source line numbers
   are fused into the text, and interior lines are missing (the embedded
   numbering jumps 255->257; braces and return statements are absent).
   Code left byte-identical; only comments added.  */
252 static bool called_as_built_in (tree node
)
254 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
/* Check the decl's identifier for the reserved "__builtin_" prefix.  */
255 if (strncmp (name
, "__builtin_", 10) == 0)
/* Likewise the "__sync_" prefix used by the atomic builtins.  */
257 if (strncmp (name
, "__sync_", 7) == 0)
262 /* Return the alignment in bits of EXP, a pointer valued expression.
263 But don't return more than MAX_ALIGN no matter what.
264 The alignment returned is, by default, the alignment of the thing that
265 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
267 Otherwise, look at the expression to see if we can do better, i.e., if the
268 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): garbled extraction -- statements are split across lines and
   interior lines are missing (embedded numbering jumps, e.g. 268->271,
   283->287).  Code left byte-identical; only comments added.  */
271 get_pointer_alignment (tree exp
, unsigned int max_align
)
273 unsigned int align
, inner
;
275 /* We rely on TER to compute accurate alignment information. */
276 if (!(optimize
&& flag_tree_ter
))
279 if (!POINTER_TYPE_P (TREE_TYPE (exp
)))
/* Start from the declared alignment of the pointed-to type, capped
   at MAX_ALIGN.  */
282 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
283 align
= MIN (align
, max_align
);
287 switch (TREE_CODE (exp
))
291 case NON_LVALUE_EXPR
:
292 exp
= TREE_OPERAND (exp
, 0);
293 if (! POINTER_TYPE_P (TREE_TYPE (exp
)))
296 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
297 align
= MIN (inner
, max_align
);
300 case POINTER_PLUS_EXPR
:
301 /* If sum of pointer + int, restrict our maximum alignment to that
302 imposed by the integer. If not, we can't do any better than
304 if (! host_integerp (TREE_OPERAND (exp
, 1), 1))
/* Shrink MAX_ALIGN until the constant addend is a multiple of it
   (bitpos & -bitpos style power-of-two reasoning).  */
307 while (((tree_low_cst (TREE_OPERAND (exp
, 1), 1))
308 & (max_align
/ BITS_PER_UNIT
- 1))
312 exp
= TREE_OPERAND (exp
, 0);
316 /* See what we are pointing at and look at its alignment. */
317 exp
= TREE_OPERAND (exp
, 0);
319 if (handled_component_p (exp
))
321 HOST_WIDE_INT bitsize
, bitpos
;
323 enum machine_mode mode
;
324 int unsignedp
, volatilep
;
/* Decompose the reference; BITPOS's lowest set bit bounds the
   alignment the component access can guarantee.  */
326 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
327 &mode
, &unsignedp
, &volatilep
, true);
329 inner
= MIN (inner
, (unsigned) (bitpos
& -bitpos
));
330 if (offset
&& TREE_CODE (offset
) == PLUS_EXPR
331 && host_integerp (TREE_OPERAND (offset
, 1), 1))
333 /* Any overflow in calculating offset_bits won't change
336 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
340 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
341 offset
= TREE_OPERAND (offset
, 0);
343 if (offset
&& TREE_CODE (offset
) == MULT_EXPR
344 && host_integerp (TREE_OPERAND (offset
, 1), 1))
346 /* Any overflow in calculating offset_factor won't change
348 unsigned offset_factor
349 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
353 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
/* A fully variable offset can only guarantee byte alignment.  */
356 inner
= MIN (inner
, BITS_PER_UNIT
);
359 align
= MIN (inner
, DECL_ALIGN (exp
));
360 #ifdef CONSTANT_ALIGNMENT
361 else if (CONSTANT_CLASS_P (exp
))
362 align
= MIN (inner
, (unsigned)CONSTANT_ALIGNMENT (exp
, align
));
364 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
365 || TREE_CODE (exp
) == INDIRECT_REF
)
366 align
= MIN (TYPE_ALIGN (TREE_TYPE (exp
)), inner
);
368 align
= MIN (align
, inner
);
369 return MIN (align
, max_align
);
377 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
378 way, because it could contain a zero byte in the middle.
379 TREE_STRING_LENGTH is the size of the character array, not the string.
381 ONLY_VALUE should be nonzero if the result is not going to be emitted
382 into the instruction stream and zero if it is going to be expanded.
383 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
384 is returned, otherwise NULL, since
385 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
386 evaluate the side-effects.
388 The value returned is of type `ssizetype'.
390 Unfortunately, string_constant can't access the values of const char
391 arrays with initializers, so neither can we do so here. */
/* NOTE(review): garbled extraction -- interior lines are missing (embedded
   numbering jumps 391->394, 409->413, etc.; return type, braces and some
   declarations absent).  Code left byte-identical; only comments added.  */
394 c_strlen (tree src
, int only_value
)
397 HOST_WIDE_INT offset
;
/* A COND_EXPR whose two arms have equal known lengths can itself be
   given that length (side-effect check honors ONLY_VALUE).  */
402 if (TREE_CODE (src
) == COND_EXPR
403 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
407 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
408 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
409 if (tree_int_cst_equal (len1
, len2
))
/* For a COMPOUND_EXPR the length is that of the second operand.  */
413 if (TREE_CODE (src
) == COMPOUND_EXPR
414 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
415 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
417 src
= string_constant (src
, &offset_node
);
421 max
= TREE_STRING_LENGTH (src
) - 1;
422 ptr
= TREE_STRING_POINTER (src
);
424 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
426 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
427 compute the offset to the following null if we don't know where to
428 start searching for it. */
431 for (i
= 0; i
< max
; i
++)
435 /* We don't know the starting offset, but we do know that the string
436 has no internal zero bytes. We can assume that the offset falls
437 within the bounds of the string; otherwise, the programmer deserves
438 what he gets. Subtract the offset from the length of the string,
439 and return that. This would perhaps not be valid if we were dealing
440 with named arrays in addition to literal string constants. */
442 return size_diffop (size_int (max
), offset_node
);
445 /* We have a known offset into the string. Start searching there for
446 a null character if we can represent it as a single HOST_WIDE_INT. */
447 if (offset_node
== 0)
449 else if (! host_integerp (offset_node
, 0))
452 offset
= tree_low_cst (offset_node
, 0);
454 /* If the offset is known to be out of bounds, warn, and call strlen at
456 if (offset
< 0 || offset
> max
)
458 warning (0, "offset outside bounds of constant string");
462 /* Use strlen to search for the first zero byte. Since any strings
463 constructed with build_string will have nulls appended, we win even
464 if we get handed something like (char[4])"abcd".
466 Since OFFSET is our starting index into the string, no further
467 calculation is needed. */
468 return ssize_int (strlen (ptr
+ offset
));
471 /* Return a char pointer for a C string if it is a string constant
472 or sum of string constant and integer constant. */
/* NOTE(review): garbled extraction -- the function signature itself
   (original lines 473-478, `static const char *c_getstr (tree src)` per
   the forward declaration above) is missing here, along with braces and
   failure-path returns.  Code left byte-identical; only comments added.  */
479 src
= string_constant (src
, &offset_node
);
/* No offset: the string starts at the constant's first byte.  */
483 if (offset_node
== 0)
484 return TREE_STRING_POINTER (src
);
/* Reject offsets that are non-constant or past the final NUL.  */
485 else if (!host_integerp (offset_node
, 1)
486 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
489 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
492 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
493 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): garbled extraction -- return type, braces and the
   declarations of `c`, `ch`, `i`, `j` are missing (embedded numbering
   jumps 496->502->507).  Code left byte-identical; only comments added.  */
496 c_readstr (const char *str
, enum machine_mode mode
)
502 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
/* Walk the bytes of the mode, mapping host byte index I to target bit
   position J, honoring word and byte endianness.  */
507 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
510 if (WORDS_BIG_ENDIAN
)
511 j
= GET_MODE_SIZE (mode
) - i
- 1;
512 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
513 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
514 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
516 gcc_assert (j
<= 2 * HOST_BITS_PER_WIDE_INT
);
/* Accumulate each byte into the low/high HOST_WIDE_INT halves.  */
519 ch
= (unsigned char) str
[i
];
520 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
522 return immed_double_const (c
[0], c
[1], mode
);
525 /* Cast a target constant CST to target CHAR and if that value fits into
526 host char type, return zero and put that value into variable pointed to by
530 target_char_cast (tree cst
, char *p
)
/* NOTE(review): garbled extraction -- braces, the assignment to HOSTVAL,
   the fit check and the return statements are missing (embedded numbering
   jumps 540->543, 544->553).  Code left byte-identical; comments added.  */
532 unsigned HOST_WIDE_INT val
, hostval
;
/* Bail out if CST is not a constant fitting an unsigned HWI, or the
   target char is wider than we can mask.  */
534 if (!host_integerp (cst
, 1)
535 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
538 val
= tree_low_cst (cst
, 1);
/* Truncate to the target's char width...  */
539 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
540 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
/* ...and to the host's char width for the fits-on-host comparison.  */
543 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
544 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
553 /* Similar to save_expr, but assumes that arbitrary code is not executed
554 in between the multiple evaluations. In particular, we assume that a
555 non-addressable local variable will not be modified. */
/* NOTE(review): garbled extraction -- return type, braces and the
   early-return for the safe case are missing (embedded numbering jumps
   562->565).  Code left byte-identical; only comments added.  */
558 builtin_save_expr (tree exp
)
/* A non-addressable PARM_DECL or non-static VAR_DECL cannot change
   between evaluations, so no SAVE_EXPR wrapper is needed for it.  */
560 if (TREE_ADDRESSABLE (exp
) == 0
561 && (TREE_CODE (exp
) == PARM_DECL
562 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
565 return save_expr (exp
);
568 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
569 times to get the address of either a higher stack frame, or a return
570 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): garbled extraction -- braces, several #else/#endif lines
   and fallback branches are missing (embedded numbering jumps 578->582,
   598->602, 636->641, 647->652).  Code left byte-identical; comments
   added only.  */
573 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
577 #ifdef INITIAL_FRAME_ADDRESS_RTX
578 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
582 /* For a zero count with __builtin_return_address, we don't care what
583 frame address we return, because target-specific definitions will
584 override us. Therefore frame pointer elimination is OK, and using
585 the soft frame pointer is OK.
587 For a nonzero count, or a zero count with __builtin_frame_address,
588 we require a stable offset from the current frame pointer to the
589 previous one, so we must use the hard frame pointer, and
590 we must disable frame pointer elimination. */
591 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
592 tem
= frame_pointer_rtx
;
595 tem
= hard_frame_pointer_rtx
;
597 /* Tell reload not to eliminate the frame pointer. */
598 current_function_accesses_prior_frames
= 1;
602 /* Some machines need special handling before we can access
603 arbitrary frames. For example, on the SPARC, we must first flush
604 all register windows to the stack. */
605 #ifdef SETUP_FRAME_ADDRESSES
607 SETUP_FRAME_ADDRESSES ();
610 /* On the SPARC, the return address is not in the frame, it is in a
611 register. There is no way to access it off of the current frame
612 pointer, but it can be accessed off the previous frame pointer by
613 reading the value from the register window save area. */
614 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
615 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
619 /* Scan back COUNT frames to the specified frame. */
620 for (i
= 0; i
< count
; i
++)
622 /* Assume the dynamic chain pointer is in the word that the
623 frame address points to, unless otherwise specified. */
624 #ifdef DYNAMIC_CHAIN_ADDRESS
625 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
/* Dereference one link of the dynamic chain: load the saved frame
   pointer out of the frame TEM points at.  */
627 tem
= memory_address (Pmode
, tem
);
628 tem
= gen_frame_mem (Pmode
, tem
);
629 tem
= copy_to_reg (tem
);
632 /* For __builtin_frame_address, return what we've got. But, on
633 the SPARC for example, we may have to add a bias. */
634 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
635 #ifdef FRAME_ADDR_RTX
636 return FRAME_ADDR_RTX (tem
);
641 /* For __builtin_return_address, get the return address from that frame. */
642 #ifdef RETURN_ADDR_RTX
643 tem
= RETURN_ADDR_RTX (count
, tem
);
/* Default: the return address sits one word above the frame address.  */
645 tem
= memory_address (Pmode
,
646 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
647 tem
= gen_frame_mem (Pmode
, tem
);
652 /* Alias set used for setjmp buffer. */
/* Lazily-initialized alias set; -1 means "not allocated yet".  */
653 static HOST_WIDE_INT setjmp_alias_set
= -1;
655 /* Construct the leading half of a __builtin_setjmp call. Control will
656 return to RECEIVER_LABEL. This is also called directly by the SJLJ
657 exception handling code. */
/* NOTE(review): garbled extraction -- return type, braces and the
   declarations of `mem` and `stack_save` are missing (embedded numbering
   jumps 662->666, 696->699).  Code left byte-identical; comments added.  */
660 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
662 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
666 if (setjmp_alias_set
== -1)
667 setjmp_alias_set
= new_alias_set ();
669 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
671 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
673 /* We store the frame pointer and the address of receiver_label in
674 the buffer and use the rest of it for the stack save area, which
675 is machine-dependent. */
/* Word 0 of the buffer: the frame value.  */
677 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
678 set_mem_alias_set (mem
, setjmp_alias_set
);
679 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
/* Word 1 of the buffer: the receiver label's address.  */
681 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
682 set_mem_alias_set (mem
, setjmp_alias_set
);
684 emit_move_insn (validize_mem (mem
),
685 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
/* Words 2+ of the buffer: the machine-dependent stack save area.  */
687 stack_save
= gen_rtx_MEM (sa_mode
,
688 plus_constant (buf_addr
,
689 2 * GET_MODE_SIZE (Pmode
)));
690 set_mem_alias_set (stack_save
, setjmp_alias_set
);
691 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
693 /* If there is further processing to do, do it. */
694 #ifdef HAVE_builtin_setjmp_setup
695 if (HAVE_builtin_setjmp_setup
)
696 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
699 /* Tell optimize_save_area_alloca that extra work is going to
700 need to go on during alloca. */
701 current_function_calls_setjmp
= 1;
703 /* We have a nonlocal label. */
704 current_function_has_nonlocal_label
= 1;
707 /* Construct the trailing part of a __builtin_setjmp call. This is
708 also called directly by the SJLJ exception handling code. */
/* NOTE(review): garbled extraction -- return type, braces, several #endif
   lines and the loop variable declaration are missing (embedded numbering
   jumps 751->756, 763->768).  Code left byte-identical; comments added.  */
711 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
713 /* Clobber the FP when we get here, so we have to make sure it's
714 marked as used by this function. */
715 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
717 /* Mark the static chain as clobbered here so life information
718 doesn't get messed up for it. */
719 emit_insn (gen_rtx_CLOBBER (VOIDmode
, static_chain_rtx
));
721 /* Now put in the code to restore the frame pointer, and argument
722 pointer, if needed. */
723 #ifdef HAVE_nonlocal_goto
724 if (! HAVE_nonlocal_goto
)
727 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
728 /* This might change the hard frame pointer in ways that aren't
729 apparent to early optimization passes, so force a clobber. */
730 emit_insn (gen_rtx_CLOBBER (VOIDmode
, hard_frame_pointer_rtx
));
733 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
734 if (fixed_regs
[ARG_POINTER_REGNUM
])
736 #ifdef ELIMINABLE_REGS
/* Only restore the arg pointer if it is not eliminable into the
   hard frame pointer on this target.  */
738 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
740 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
741 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
742 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
745 if (i
== ARRAY_SIZE (elim_regs
))
748 /* Now restore our arg pointer from the address at which it
749 was saved in our stack frame. */
750 emit_move_insn (virtual_incoming_args_rtx
,
751 copy_to_reg (get_arg_pointer_save_area (cfun
)));
756 #ifdef HAVE_builtin_setjmp_receiver
757 if (HAVE_builtin_setjmp_receiver
)
758 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
761 #ifdef HAVE_nonlocal_goto_receiver
762 if (HAVE_nonlocal_goto_receiver
)
763 emit_insn (gen_nonlocal_goto_receiver ());
768 /* We must not allow the code we just generated to be reordered by
769 scheduling. Specifically, the update of the frame pointer must
770 happen immediately, not later. */
771 emit_insn (gen_blockage ())
774 /* __builtin_longjmp is passed a pointer to an array of five words (not
775 all will be used on all machines). It operates similarly to the C
776 library function of the same name, but is more efficient. Much of
777 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): garbled extraction -- return type, braces, the #else
   arms and parts of the trailing loop body are missing (embedded
   numbering jumps 802->806, 841->845, 856->860).  Code left
   byte-identical; only comments added.  */
780 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
782 rtx fp
, lab
, stack
, insn
, last
;
783 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
785 if (setjmp_alias_set
== -1)
786 setjmp_alias_set
= new_alias_set ();
788 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
790 buf_addr
= force_reg (Pmode
, buf_addr
);
792 /* We used to store value in static_chain_rtx, but that fails if pointers
793 are smaller than integers. We instead require that the user must pass
794 a second argument of 1, because that is what builtin_setjmp will
795 return. This also makes EH slightly more efficient, since we are no
796 longer copying around a value that we don't care about. */
797 gcc_assert (value
== const1_rtx
);
799 last
= get_last_insn ();
800 #ifdef HAVE_builtin_longjmp
801 if (HAVE_builtin_longjmp
)
802 emit_insn (gen_builtin_longjmp (buf_addr
));
/* Layout matches expand_builtin_setjmp_setup: word 0 = FP,
   word 1 = label, words 2+ = stack save area.  */
806 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
807 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
808 GET_MODE_SIZE (Pmode
)));
810 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
811 2 * GET_MODE_SIZE (Pmode
)));
812 set_mem_alias_set (fp
, setjmp_alias_set
);
813 set_mem_alias_set (lab
, setjmp_alias_set
);
814 set_mem_alias_set (stack
, setjmp_alias_set
);
816 /* Pick up FP, label, and SP from the block and jump. This code is
817 from expand_goto in stmt.c; see there for detailed comments. */
818 #ifdef HAVE_nonlocal_goto
819 if (HAVE_nonlocal_goto
)
820 /* We have to pass a value to the nonlocal_goto pattern that will
821 get copied into the static_chain pointer, but it does not matter
822 what that value is, because builtin_setjmp does not use it. */
823 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
827 lab
= copy_to_reg (lab
);
/* Clobber all of memory and the frame before restoring state, so
   nothing is scheduled across the restore.  */
829 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
830 gen_rtx_MEM (BLKmode
,
831 gen_rtx_SCRATCH (VOIDmode
))));
832 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
833 gen_rtx_MEM (BLKmode
,
834 hard_frame_pointer_rtx
)));
836 emit_move_insn (hard_frame_pointer_rtx
, fp
);
837 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
839 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
840 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
841 emit_indirect_jump (lab
);
845 /* Search backwards and mark the jump insn as a non-local goto.
846 Note that this precludes the use of __builtin_longjmp to a
847 __builtin_setjmp target in the same function. However, we've
848 already cautioned the user that these functions are for
849 internal exception handling use only. */
850 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
852 gcc_assert (insn
!= last
);
856 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
, const0_rtx
,
860 else if (CALL_P (insn
))
865 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
866 and the address of the save area. */
/* NOTE(review): garbled extraction -- return type, braces, the #else arm
   and the tail of the insn-marking loop (including the return of
   const0_rtx) are missing (embedded numbering jumps 893->897, 932->939).
   Code left byte-identical; only comments added.  */
869 expand_builtin_nonlocal_goto (tree exp
)
871 tree t_label
, t_save_area
;
872 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
874 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
877 t_label
= CALL_EXPR_ARG (exp
, 0);
878 t_save_area
= CALL_EXPR_ARG (exp
, 1);
880 r_label
= expand_normal (t_label
);
881 r_label
= convert_memory_address (Pmode
, r_label
);
882 r_save_area
= expand_normal (t_save_area
);
883 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
/* Save area layout: word 0 = frame pointer, word 1 = stack pointer.  */
884 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
885 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
886 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
888 current_function_has_nonlocal_goto
= 1;
890 #ifdef HAVE_nonlocal_goto
891 /* ??? We no longer need to pass the static chain value, afaik. */
892 if (HAVE_nonlocal_goto
)
893 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
897 r_label
= copy_to_reg (r_label
);
/* Clobber memory and the frame so no access is scheduled across the
   frame/stack restore below.  */
899 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
900 gen_rtx_MEM (BLKmode
,
901 gen_rtx_SCRATCH (VOIDmode
))));
903 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
904 gen_rtx_MEM (BLKmode
,
905 hard_frame_pointer_rtx
)));
907 /* Restore frame pointer for containing function.
908 This sets the actual hard register used for the frame pointer
909 to the location of the function's incoming static chain info.
910 The non-local goto handler will then adjust it to contain the
911 proper value and reload the argument pointer, if needed. */
912 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
913 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
915 /* USE of hard_frame_pointer_rtx added for consistency;
916 not clear if really needed. */
917 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
918 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
919 emit_indirect_jump (r_label
);
922 /* Search backwards to the jump insn and mark it as a
924 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
928 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
,
929 const0_rtx
, REG_NOTES (insn
));
932 else if (CALL_P (insn
))
939 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
940 (not all will be used on all machines) that was passed to __builtin_setjmp.
941 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): garbled extraction -- the end of this comment, the
   braces, the declaration and assignment target of `stack_save`, and
   the #else/#endif lines are missing (embedded numbering jumps 956->960,
   963->967).  Code left byte-identical; only comments added.  */
945 expand_builtin_update_setjmp_buf (rtx buf_addr
)
/* Default the save-area mode to Pmode, then refine it from the
   target's save_stack_nonlocal pattern or STACK_SAVEAREA_MODE.  */
947 enum machine_mode sa_mode
= Pmode
;
951 #ifdef HAVE_save_stack_nonlocal
952 if (HAVE_save_stack_nonlocal
)
953 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
955 #ifdef STACK_SAVEAREA_MODE
956 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
/* Point at the stack-save slot: two words into the setjmp buffer,
   matching expand_builtin_setjmp_setup's layout.  */
960 = gen_rtx_MEM (sa_mode
,
963 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
967 emit_insn (gen_setjmp ());
970 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
973 /* Expand a call to __builtin_prefetch. For a target that does not support
974 data prefetch, evaluate the memory address argument in case it has side
978 expand_builtin_prefetch (tree exp
)
980 tree arg0
, arg1
, arg2
;
984 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
987 arg0
= CALL_EXPR_ARG (exp
, 0);
989 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
990 zero (read) and argument 2 (locality) defaults to 3 (high degree of
992 nargs
= call_expr_nargs (exp
);
994 arg1
= CALL_EXPR_ARG (exp
, 1);
996 arg1
= integer_zero_node
;
998 arg2
= CALL_EXPR_ARG (exp
, 2);
1000 arg2
= build_int_cst (NULL_TREE
, 3);
1002 /* Argument 0 is an address. */
1003 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1005 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1006 if (TREE_CODE (arg1
) != INTEGER_CST
)
1008 error ("second argument to %<__builtin_prefetch%> must be a constant");
1009 arg1
= integer_zero_node
;
1011 op1
= expand_normal (arg1
);
1012 /* Argument 1 must be either zero or one. */
1013 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1015 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1020 /* Argument 2 (locality) must be a compile-time constant int. */
1021 if (TREE_CODE (arg2
) != INTEGER_CST
)
1023 error ("third argument to %<__builtin_prefetch%> must be a constant");
1024 arg2
= integer_zero_node
;
1026 op2
= expand_normal (arg2
);
1027 /* Argument 2 must be 0, 1, 2, or 3. */
1028 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1030 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1034 #ifdef HAVE_prefetch
1037 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1039 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1040 || (GET_MODE (op0
) != Pmode
))
1042 op0
= convert_memory_address (Pmode
, op0
);
1043 op0
= force_reg (Pmode
, op0
);
1045 emit_insn (gen_prefetch (op0
, op1
, op2
));
1049 /* Don't do anything with direct references to volatile memory, but
1050 generate code to handle other side effects. */
1051 if (!MEM_P (op0
) && side_effects_p (op0
))
1055 /* Get a MEM rtx for expression EXP which is the address of an operand
1056 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1057 the maximum length of the block of memory that might be accessed or
1061 get_memory_rtx (tree exp
, tree len
)
1063 rtx addr
= expand_expr (exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1064 rtx mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1066 /* Get an expression we can use to find the attributes to assign to MEM.
1067 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1068 we can. First remove any nops. */
1069 while ((TREE_CODE (exp
) == NOP_EXPR
|| TREE_CODE (exp
) == CONVERT_EXPR
1070 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
1071 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1072 exp
= TREE_OPERAND (exp
, 0);
1074 if (TREE_CODE (exp
) == ADDR_EXPR
)
1075 exp
= TREE_OPERAND (exp
, 0);
1076 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1077 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1081 /* Honor attributes derived from exp, except for the alias set
1082 (as builtin stringops may alias with anything) and the size
1083 (as stringops may access multiple array elements). */
1086 set_mem_attributes (mem
, exp
, 0);
1088 /* Allow the string and memory builtins to overflow from one
1089 field into another, see http://gcc.gnu.org/PR23561.
1090 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1091 memory accessed by the string or memory builtin will fit
1092 within the field. */
1093 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1095 tree mem_expr
= MEM_EXPR (mem
);
1096 HOST_WIDE_INT offset
= -1, length
= -1;
1099 while (TREE_CODE (inner
) == ARRAY_REF
1100 || TREE_CODE (inner
) == NOP_EXPR
1101 || TREE_CODE (inner
) == CONVERT_EXPR
1102 || TREE_CODE (inner
) == NON_LVALUE_EXPR
1103 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1104 || TREE_CODE (inner
) == SAVE_EXPR
)
1105 inner
= TREE_OPERAND (inner
, 0);
1107 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1109 if (MEM_OFFSET (mem
)
1110 && GET_CODE (MEM_OFFSET (mem
)) == CONST_INT
)
1111 offset
= INTVAL (MEM_OFFSET (mem
));
1113 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1114 length
= tree_low_cst (len
, 0);
1116 while (TREE_CODE (inner
) == COMPONENT_REF
)
1118 tree field
= TREE_OPERAND (inner
, 1);
1119 gcc_assert (! DECL_BIT_FIELD (field
));
1120 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1121 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1124 && TYPE_SIZE_UNIT (TREE_TYPE (inner
))
1125 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner
)), 0))
1128 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner
)), 0);
1129 /* If we can prove the memory starting at XEXP (mem, 0)
1130 and ending at XEXP (mem, 0) + LENGTH will fit into
1131 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1134 && offset
+ length
<= size
)
1139 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1140 offset
+= tree_low_cst (DECL_FIELD_OFFSET (field
), 0)
1141 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1149 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1150 inner
= TREE_OPERAND (inner
, 0);
1153 if (mem_expr
== NULL
)
1155 if (mem_expr
!= MEM_EXPR (mem
))
1157 set_mem_expr (mem
, mem_expr
);
1158 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1161 set_mem_alias_set (mem
, 0);
1162 set_mem_size (mem
, NULL_RTX
);
1168 /* Built-in functions to perform an untyped call and return. */
1170 /* For each register that may be used for calling a function, this
1171 gives a mode used to copy the register's value. VOIDmode indicates
1172 the register is not used for calling a function. If the machine
1173 has register windows, this gives only the outbound registers.
1174 INCOMING_REGNO gives the corresponding inbound register. */
1175 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1177 /* For each register that may be used for returning values, this gives
1178 a mode used to copy the register's value. VOIDmode indicates the
1179 register is not used for returning values. If the machine has
1180 register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1184 /* For each register that may be used for calling a function, this
1185 gives the offset of that register into the block returned by
1186 __builtin_apply_args. 0 indicates that the register is not
1187 used for calling a function. */
1188 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
1190 /* Return the size required for the block returned by __builtin_apply_args,
1191 and initialize apply_args_mode. */
1194 apply_args_size (void)
1196 static int size
= -1;
1199 enum machine_mode mode
;
1201 /* The values computed by this function never change. */
1204 /* The first value is the incoming arg-pointer. */
1205 size
= GET_MODE_SIZE (Pmode
);
1207 /* The second value is the structure value address unless this is
1208 passed as an "invisible" first argument. */
1209 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1210 size
+= GET_MODE_SIZE (Pmode
);
1212 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1213 if (FUNCTION_ARG_REGNO_P (regno
))
1215 mode
= reg_raw_mode
[regno
];
1217 gcc_assert (mode
!= VOIDmode
);
1219 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1220 if (size
% align
!= 0)
1221 size
= CEIL (size
, align
) * align
;
1222 apply_args_reg_offset
[regno
] = size
;
1223 size
+= GET_MODE_SIZE (mode
);
1224 apply_args_mode
[regno
] = mode
;
1228 apply_args_mode
[regno
] = VOIDmode
;
1229 apply_args_reg_offset
[regno
] = 0;
1235 /* Return the size required for the block returned by __builtin_apply,
1236 and initialize apply_result_mode. */
1239 apply_result_size (void)
1241 static int size
= -1;
1243 enum machine_mode mode
;
1245 /* The values computed by this function never change. */
1250 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1251 if (FUNCTION_VALUE_REGNO_P (regno
))
1253 mode
= reg_raw_mode
[regno
];
1255 gcc_assert (mode
!= VOIDmode
);
1257 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1258 if (size
% align
!= 0)
1259 size
= CEIL (size
, align
) * align
;
1260 size
+= GET_MODE_SIZE (mode
);
1261 apply_result_mode
[regno
] = mode
;
1264 apply_result_mode
[regno
] = VOIDmode
;
1266 /* Allow targets that use untyped_call and untyped_return to override
1267 the size so that machine-specific information can be stored here. */
1268 #ifdef APPLY_RESULT_SIZE
1269 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep the layout in lock-step with apply_result_size: align,
	   then place the register's slot.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1306 /* Save the state required to perform an untyped call with the same
1307 arguments as were passed to the current function. */
1310 expand_builtin_apply_args_1 (void)
1313 int size
, align
, regno
;
1314 enum machine_mode mode
;
1315 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1317 /* Create a block where the arg-pointer, structure value address,
1318 and argument registers can be saved. */
1319 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1321 /* Walk past the arg-pointer and structure value address. */
1322 size
= GET_MODE_SIZE (Pmode
);
1323 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1324 size
+= GET_MODE_SIZE (Pmode
);
1326 /* Save each register used in calling a function to the block. */
1327 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1328 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1330 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1331 if (size
% align
!= 0)
1332 size
= CEIL (size
, align
) * align
;
1334 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1336 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1337 size
+= GET_MODE_SIZE (mode
);
1340 /* Save the arg pointer to the block. */
1341 tem
= copy_to_reg (virtual_incoming_args_rtx
);
1342 #ifdef STACK_GROWS_DOWNWARD
1343 /* We need the pointer as the caller actually passed them to us, not
1344 as we might have pretended they were passed. Make sure it's a valid
1345 operand, as emit_move_insn isn't expected to handle a PLUS. */
1347 = force_operand (plus_constant (tem
, current_function_pretend_args_size
),
1350 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1352 size
= GET_MODE_SIZE (Pmode
);
1354 /* Save the structure value address unless this is passed as an
1355 "invisible" first argument. */
1356 if (struct_incoming_value
)
1358 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1359 copy_to_reg (struct_incoming_value
));
1360 size
+= GET_MODE_SIZE (Pmode
);
1363 /* Return the address of the block. */
1364 return copy_addr_to_reg (XEXP (registers
, 0));
1367 /* __builtin_apply_args returns block of memory allocated on
1368 the stack into which is stored the arg pointer, structure
1369 value address, static chain, and all the registers that might
1370 possibly be used in performing a function call. The code is
1371 moved to the start of the function so the incoming values are
1375 expand_builtin_apply_args (void)
1377 /* Don't do __builtin_apply_args more than once in a function.
1378 Save the result of the first call and reuse it. */
1379 if (apply_args_value
!= 0)
1380 return apply_args_value
;
1382 /* When this function is called, it means that registers must be
1383 saved on entry to this function. So we migrate the
1384 call to the first insn of this function. */
1389 temp
= expand_builtin_apply_args_1 ();
1393 apply_args_value
= temp
;
1395 /* Put the insns after the NOTE that starts the function.
1396 If this is inside a start_sequence, make the outer-level insn
1397 chain current, so the code is placed at the start of the
1399 push_topmost_sequence ();
1400 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1401 pop_topmost_sequence ();
1406 /* Perform an untyped call and save the state required to perform an
1407 untyped return of whatever value was returned by the given function. */
1410 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1412 int size
, align
, regno
;
1413 enum machine_mode mode
;
1414 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1415 rtx old_stack_level
= 0;
1416 rtx call_fusage
= 0;
1417 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1419 arguments
= convert_memory_address (Pmode
, arguments
);
1421 /* Create a block where the return registers can be saved. */
1422 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1424 /* Fetch the arg pointer from the ARGUMENTS block. */
1425 incoming_args
= gen_reg_rtx (Pmode
);
1426 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1427 #ifndef STACK_GROWS_DOWNWARD
1428 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1429 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1432 /* Push a new argument block and copy the arguments. Do not allow
1433 the (potential) memcpy call below to interfere with our stack
1435 do_pending_stack_adjust ();
1438 /* Save the stack with nonlocal if available. */
1439 #ifdef HAVE_save_stack_nonlocal
1440 if (HAVE_save_stack_nonlocal
)
1441 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1444 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1446 /* Allocate a block of memory onto the stack and copy the memory
1447 arguments to the outgoing arguments address. */
1448 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1449 dest
= virtual_outgoing_args_rtx
;
1450 #ifndef STACK_GROWS_DOWNWARD
1451 if (GET_CODE (argsize
) == CONST_INT
)
1452 dest
= plus_constant (dest
, -INTVAL (argsize
));
1454 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1456 dest
= gen_rtx_MEM (BLKmode
, dest
);
1457 set_mem_align (dest
, PARM_BOUNDARY
);
1458 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1459 set_mem_align (src
, PARM_BOUNDARY
);
1460 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1462 /* Refer to the argument block. */
1464 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1465 set_mem_align (arguments
, PARM_BOUNDARY
);
1467 /* Walk past the arg-pointer and structure value address. */
1468 size
= GET_MODE_SIZE (Pmode
);
1470 size
+= GET_MODE_SIZE (Pmode
);
1472 /* Restore each of the registers previously saved. Make USE insns
1473 for each of these registers for use in making the call. */
1474 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1475 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1477 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1478 if (size
% align
!= 0)
1479 size
= CEIL (size
, align
) * align
;
1480 reg
= gen_rtx_REG (mode
, regno
);
1481 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1482 use_reg (&call_fusage
, reg
);
1483 size
+= GET_MODE_SIZE (mode
);
1486 /* Restore the structure value address unless this is passed as an
1487 "invisible" first argument. */
1488 size
= GET_MODE_SIZE (Pmode
);
1491 rtx value
= gen_reg_rtx (Pmode
);
1492 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1493 emit_move_insn (struct_value
, value
);
1494 if (REG_P (struct_value
))
1495 use_reg (&call_fusage
, struct_value
);
1496 size
+= GET_MODE_SIZE (Pmode
);
1499 /* All arguments and registers used for the call are set up by now! */
1500 function
= prepare_call_address (function
, NULL
, &call_fusage
, 0, 0);
1502 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1503 and we don't want to load it into a register as an optimization,
1504 because prepare_call_address already did it if it should be done. */
1505 if (GET_CODE (function
) != SYMBOL_REF
)
1506 function
= memory_address (FUNCTION_MODE
, function
);
1508 /* Generate the actual call instruction and save the return value. */
1509 #ifdef HAVE_untyped_call
1510 if (HAVE_untyped_call
)
1511 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1512 result
, result_vector (1, result
)));
1515 #ifdef HAVE_call_value
1516 if (HAVE_call_value
)
1520 /* Locate the unique return register. It is not possible to
1521 express a call that sets more than one return register using
1522 call_value; use untyped_call for that. In fact, untyped_call
1523 only needs to save the return registers in the given block. */
1524 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1525 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1527 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1529 valreg
= gen_rtx_REG (mode
, regno
);
1532 emit_call_insn (GEN_CALL_VALUE (valreg
,
1533 gen_rtx_MEM (FUNCTION_MODE
, function
),
1534 const0_rtx
, NULL_RTX
, const0_rtx
));
1536 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1542 /* Find the CALL insn we just emitted, and attach the register usage
1544 call_insn
= last_call_insn ();
1545 add_function_usage_to (call_insn
, call_fusage
);
1547 /* Restore the stack. */
1548 #ifdef HAVE_save_stack_nonlocal
1549 if (HAVE_save_stack_nonlocal
)
1550 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1553 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1557 /* Return the address of the result block. */
1558 result
= copy_addr_to_reg (XEXP (result
, 0));
1559 return convert_memory_address (ptr_mode
, result
);
1562 /* Perform an untyped return. */
1565 expand_builtin_return (rtx result
)
1567 int size
, align
, regno
;
1568 enum machine_mode mode
;
1570 rtx call_fusage
= 0;
1572 result
= convert_memory_address (Pmode
, result
);
1574 apply_result_size ();
1575 result
= gen_rtx_MEM (BLKmode
, result
);
1577 #ifdef HAVE_untyped_return
1578 if (HAVE_untyped_return
)
1580 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1586 /* Restore the return value and note that each value is used. */
1588 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1589 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1591 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1592 if (size
% align
!= 0)
1593 size
= CEIL (size
, align
) * align
;
1594 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1595 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1597 push_to_sequence (call_fusage
);
1598 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1599 call_fusage
= get_insns ();
1601 size
+= GET_MODE_SIZE (mode
);
1604 /* Put the USE insns before the return. */
1605 emit_insn (call_fusage
);
1607 /* Return whatever values was restored by jumping directly to the end
1609 expand_naked_return ();
1612 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1614 static enum type_class
1615 type_to_class (tree type
)
1617 switch (TREE_CODE (type
))
1619 case VOID_TYPE
: return void_type_class
;
1620 case INTEGER_TYPE
: return integer_type_class
;
1621 case ENUMERAL_TYPE
: return enumeral_type_class
;
1622 case BOOLEAN_TYPE
: return boolean_type_class
;
1623 case POINTER_TYPE
: return pointer_type_class
;
1624 case REFERENCE_TYPE
: return reference_type_class
;
1625 case OFFSET_TYPE
: return offset_type_class
;
1626 case REAL_TYPE
: return real_type_class
;
1627 case COMPLEX_TYPE
: return complex_type_class
;
1628 case FUNCTION_TYPE
: return function_type_class
;
1629 case METHOD_TYPE
: return method_type_class
;
1630 case RECORD_TYPE
: return record_type_class
;
1632 case QUAL_UNION_TYPE
: return union_type_class
;
1633 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1634 ? string_type_class
: array_type_class
);
1635 case LANG_TYPE
: return lang_type_class
;
1636 default: return no_type_class
;
1640 /* Expand a call EXP to __builtin_classify_type. */
1643 expand_builtin_classify_type (tree exp
)
1645 if (call_expr_nargs (exp
))
1646 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1647 return GEN_INT (no_type_class
);
1650 /* This helper macro, meant to be used in mathfn_built_in below,
1651 determines which among a set of three builtin math functions is
1652 appropriate for a given type mode. The `F' and `L' cases are
1653 automatically generated from the `double' case. */
1654 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1655 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1656 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1657 fcodel = BUILT_IN_MATHFN##L ; break;
1658 /* Similar to above, but appends _R after any F/L suffix. */
1659 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1660 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1661 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1662 fcodel = BUILT_IN_MATHFN##L_R ; break;
1664 /* Return mathematic function equivalent to FN but operating directly
1665 on TYPE, if available. If we can't do the conversion, return zero. */
1667 mathfn_built_in (tree type
, enum built_in_function fn
)
1669 enum built_in_function fcode
, fcodef
, fcodel
;
1673 CASE_MATHFN (BUILT_IN_ACOS
)
1674 CASE_MATHFN (BUILT_IN_ACOSH
)
1675 CASE_MATHFN (BUILT_IN_ASIN
)
1676 CASE_MATHFN (BUILT_IN_ASINH
)
1677 CASE_MATHFN (BUILT_IN_ATAN
)
1678 CASE_MATHFN (BUILT_IN_ATAN2
)
1679 CASE_MATHFN (BUILT_IN_ATANH
)
1680 CASE_MATHFN (BUILT_IN_CBRT
)
1681 CASE_MATHFN (BUILT_IN_CEIL
)
1682 CASE_MATHFN (BUILT_IN_CEXPI
)
1683 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1684 CASE_MATHFN (BUILT_IN_COS
)
1685 CASE_MATHFN (BUILT_IN_COSH
)
1686 CASE_MATHFN (BUILT_IN_DREM
)
1687 CASE_MATHFN (BUILT_IN_ERF
)
1688 CASE_MATHFN (BUILT_IN_ERFC
)
1689 CASE_MATHFN (BUILT_IN_EXP
)
1690 CASE_MATHFN (BUILT_IN_EXP10
)
1691 CASE_MATHFN (BUILT_IN_EXP2
)
1692 CASE_MATHFN (BUILT_IN_EXPM1
)
1693 CASE_MATHFN (BUILT_IN_FABS
)
1694 CASE_MATHFN (BUILT_IN_FDIM
)
1695 CASE_MATHFN (BUILT_IN_FLOOR
)
1696 CASE_MATHFN (BUILT_IN_FMA
)
1697 CASE_MATHFN (BUILT_IN_FMAX
)
1698 CASE_MATHFN (BUILT_IN_FMIN
)
1699 CASE_MATHFN (BUILT_IN_FMOD
)
1700 CASE_MATHFN (BUILT_IN_FREXP
)
1701 CASE_MATHFN (BUILT_IN_GAMMA
)
1702 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1703 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1704 CASE_MATHFN (BUILT_IN_HYPOT
)
1705 CASE_MATHFN (BUILT_IN_ILOGB
)
1706 CASE_MATHFN (BUILT_IN_INF
)
1707 CASE_MATHFN (BUILT_IN_ISINF
)
1708 CASE_MATHFN (BUILT_IN_J0
)
1709 CASE_MATHFN (BUILT_IN_J1
)
1710 CASE_MATHFN (BUILT_IN_JN
)
1711 CASE_MATHFN (BUILT_IN_LCEIL
)
1712 CASE_MATHFN (BUILT_IN_LDEXP
)
1713 CASE_MATHFN (BUILT_IN_LFLOOR
)
1714 CASE_MATHFN (BUILT_IN_LGAMMA
)
1715 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1716 CASE_MATHFN (BUILT_IN_LLCEIL
)
1717 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1718 CASE_MATHFN (BUILT_IN_LLRINT
)
1719 CASE_MATHFN (BUILT_IN_LLROUND
)
1720 CASE_MATHFN (BUILT_IN_LOG
)
1721 CASE_MATHFN (BUILT_IN_LOG10
)
1722 CASE_MATHFN (BUILT_IN_LOG1P
)
1723 CASE_MATHFN (BUILT_IN_LOG2
)
1724 CASE_MATHFN (BUILT_IN_LOGB
)
1725 CASE_MATHFN (BUILT_IN_LRINT
)
1726 CASE_MATHFN (BUILT_IN_LROUND
)
1727 CASE_MATHFN (BUILT_IN_MODF
)
1728 CASE_MATHFN (BUILT_IN_NAN
)
1729 CASE_MATHFN (BUILT_IN_NANS
)
1730 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1731 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1732 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1733 CASE_MATHFN (BUILT_IN_POW
)
1734 CASE_MATHFN (BUILT_IN_POWI
)
1735 CASE_MATHFN (BUILT_IN_POW10
)
1736 CASE_MATHFN (BUILT_IN_REMAINDER
)
1737 CASE_MATHFN (BUILT_IN_REMQUO
)
1738 CASE_MATHFN (BUILT_IN_RINT
)
1739 CASE_MATHFN (BUILT_IN_ROUND
)
1740 CASE_MATHFN (BUILT_IN_SCALB
)
1741 CASE_MATHFN (BUILT_IN_SCALBLN
)
1742 CASE_MATHFN (BUILT_IN_SCALBN
)
1743 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1744 CASE_MATHFN (BUILT_IN_SIN
)
1745 CASE_MATHFN (BUILT_IN_SINCOS
)
1746 CASE_MATHFN (BUILT_IN_SINH
)
1747 CASE_MATHFN (BUILT_IN_SQRT
)
1748 CASE_MATHFN (BUILT_IN_TAN
)
1749 CASE_MATHFN (BUILT_IN_TANH
)
1750 CASE_MATHFN (BUILT_IN_TGAMMA
)
1751 CASE_MATHFN (BUILT_IN_TRUNC
)
1752 CASE_MATHFN (BUILT_IN_Y0
)
1753 CASE_MATHFN (BUILT_IN_Y1
)
1754 CASE_MATHFN (BUILT_IN_YN
)
1760 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1761 return implicit_built_in_decls
[fcode
];
1762 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1763 return implicit_built_in_decls
[fcodef
];
1764 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1765 return implicit_built_in_decls
[fcodel
];
1770 /* If errno must be maintained, expand the RTL to check if the result,
1771 TARGET, of a built-in function call, EXP, is NaN, and if so set
1775 expand_errno_check (tree exp
, rtx target
)
1777 rtx lab
= gen_label_rtx ();
1779 /* Test the result; if it is NaN, set errno=EDOM because
1780 the argument was not in the domain. */
1781 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1785 /* If this built-in doesn't throw an exception, set errno directly. */
1786 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1788 #ifdef GEN_ERRNO_RTX
1789 rtx errno_rtx
= GEN_ERRNO_RTX
;
1792 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1794 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1800 /* We can't set errno=EDOM directly; let the library call do it.
1801 Pop the arguments right away in case the call gets deleted. */
1803 expand_call (exp
, target
, 0);
1808 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1809 Return NULL_RTX if a normal call should be emitted rather than expanding
1810 the function in-line. EXP is the expression that is a call to the builtin
1811 function; if convenient, the result should be placed in TARGET.
1812 SUBTARGET may be used as the target for computing one of EXP's operands. */
1815 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1817 optab builtin_optab
;
1818 rtx op0
, insns
, before_call
;
1819 tree fndecl
= get_callee_fndecl (exp
);
1820 enum machine_mode mode
;
1821 bool errno_set
= false;
1824 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1827 arg
= CALL_EXPR_ARG (exp
, 0);
1829 switch (DECL_FUNCTION_CODE (fndecl
))
1831 CASE_FLT_FN (BUILT_IN_SQRT
):
1832 errno_set
= ! tree_expr_nonnegative_p (arg
);
1833 builtin_optab
= sqrt_optab
;
1835 CASE_FLT_FN (BUILT_IN_EXP
):
1836 errno_set
= true; builtin_optab
= exp_optab
; break;
1837 CASE_FLT_FN (BUILT_IN_EXP10
):
1838 CASE_FLT_FN (BUILT_IN_POW10
):
1839 errno_set
= true; builtin_optab
= exp10_optab
; break;
1840 CASE_FLT_FN (BUILT_IN_EXP2
):
1841 errno_set
= true; builtin_optab
= exp2_optab
; break;
1842 CASE_FLT_FN (BUILT_IN_EXPM1
):
1843 errno_set
= true; builtin_optab
= expm1_optab
; break;
1844 CASE_FLT_FN (BUILT_IN_LOGB
):
1845 errno_set
= true; builtin_optab
= logb_optab
; break;
1846 CASE_FLT_FN (BUILT_IN_LOG
):
1847 errno_set
= true; builtin_optab
= log_optab
; break;
1848 CASE_FLT_FN (BUILT_IN_LOG10
):
1849 errno_set
= true; builtin_optab
= log10_optab
; break;
1850 CASE_FLT_FN (BUILT_IN_LOG2
):
1851 errno_set
= true; builtin_optab
= log2_optab
; break;
1852 CASE_FLT_FN (BUILT_IN_LOG1P
):
1853 errno_set
= true; builtin_optab
= log1p_optab
; break;
1854 CASE_FLT_FN (BUILT_IN_ASIN
):
1855 builtin_optab
= asin_optab
; break;
1856 CASE_FLT_FN (BUILT_IN_ACOS
):
1857 builtin_optab
= acos_optab
; break;
1858 CASE_FLT_FN (BUILT_IN_TAN
):
1859 builtin_optab
= tan_optab
; break;
1860 CASE_FLT_FN (BUILT_IN_ATAN
):
1861 builtin_optab
= atan_optab
; break;
1862 CASE_FLT_FN (BUILT_IN_FLOOR
):
1863 builtin_optab
= floor_optab
; break;
1864 CASE_FLT_FN (BUILT_IN_CEIL
):
1865 builtin_optab
= ceil_optab
; break;
1866 CASE_FLT_FN (BUILT_IN_TRUNC
):
1867 builtin_optab
= btrunc_optab
; break;
1868 CASE_FLT_FN (BUILT_IN_ROUND
):
1869 builtin_optab
= round_optab
; break;
1870 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
1871 builtin_optab
= nearbyint_optab
;
1872 if (flag_trapping_math
)
1874 /* Else fallthrough and expand as rint. */
1875 CASE_FLT_FN (BUILT_IN_RINT
):
1876 builtin_optab
= rint_optab
; break;
1881 /* Make a suitable register to place result in. */
1882 mode
= TYPE_MODE (TREE_TYPE (exp
));
1884 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1887 /* Before working hard, check whether the instruction is available. */
1888 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
1890 target
= gen_reg_rtx (mode
);
1892 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1893 need to expand the argument again. This way, we will not perform
1894 side-effects more the once. */
1895 narg
= builtin_save_expr (arg
);
1899 exp
= build_call_expr (fndecl
, 1, arg
);
1902 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
1906 /* Compute into TARGET.
1907 Set TARGET to wherever the result comes back. */
1908 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
1913 expand_errno_check (exp
, target
);
1915 /* Output the entire sequence. */
1916 insns
= get_insns ();
1922 /* If we were unable to expand via the builtin, stop the sequence
1923 (without outputting the insns) and call to the library function
1924 with the stabilized argument list. */
1928 before_call
= get_last_insn ();
1930 target
= expand_call (exp
, target
, target
== const0_rtx
);
1932 /* If this is a sqrt operation and we don't care about errno, try to
1933 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1934 This allows the semantics of the libcall to be visible to the RTL
1936 if (builtin_optab
== sqrt_optab
&& !errno_set
)
1938 /* Search backwards through the insns emitted by expand_call looking
1939 for the instruction with the REG_RETVAL note. */
1940 rtx last
= get_last_insn ();
1941 while (last
!= before_call
)
1943 if (find_reg_note (last
, REG_RETVAL
, NULL
))
1945 rtx note
= find_reg_note (last
, REG_EQUAL
, NULL
);
1946 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1947 two elements, i.e. symbol_ref(sqrt) and the operand. */
1949 && GET_CODE (note
) == EXPR_LIST
1950 && GET_CODE (XEXP (note
, 0)) == EXPR_LIST
1951 && XEXP (XEXP (note
, 0), 1) != NULL_RTX
1952 && XEXP (XEXP (XEXP (note
, 0), 1), 1) == NULL_RTX
)
1954 rtx operand
= XEXP (XEXP (XEXP (note
, 0), 1), 0);
1955 /* Check operand is a register with expected mode. */
1958 && GET_MODE (operand
) == mode
)
1960 /* Replace the REG_EQUAL note with a SQRT rtx. */
1961 rtx equiv
= gen_rtx_SQRT (mode
, operand
);
1962 set_unique_reg_note (last
, REG_EQUAL
, equiv
);
1967 last
= PREV_INSN (last
);
1974 /* Expand a call to the builtin binary math functions (pow and atan2).
1975 Return NULL_RTX if a normal call should be emitted rather than expanding the
1976 function in-line. EXP is the expression that is a call to the builtin
1977 function; if convenient, the result should be placed in TARGET.
1978 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): lossy listing -- embedded line numbers jump (1983,
   1991-1992, 2040-2041, 2049-2061, 2088-2092 absent), so braces,
   returns and the start_sequence/end_sequence pair are not visible.
   Comments below describe only what the visible text establishes.  */
/* Expand a two-argument floating-point math builtin (pow, atan2,
   scalb, ldexp, fmod, remainder) via its optab with expand_binop;
   on failure fall back to a library call through expand_call.  */
1982 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
1984 optab builtin_optab
;
1985 rtx op0
, op1
, insns
;
/* Second argument is REAL_TYPE except for scalbn/scalbln/ldexp,
   which switch it to INTEGER_TYPE below.  */
1986 int op1_type
= REAL_TYPE
;
1987 tree fndecl
= get_callee_fndecl (exp
);
1988 tree arg0
, arg1
, narg
;
1989 enum machine_mode mode
;
1990 bool errno_set
= true;
1993 switch (DECL_FUNCTION_CODE (fndecl
))
1995 CASE_FLT_FN (BUILT_IN_SCALBN
):
1996 CASE_FLT_FN (BUILT_IN_SCALBLN
):
1997 CASE_FLT_FN (BUILT_IN_LDEXP
):
1998 op1_type
= INTEGER_TYPE
;
2003 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2006 arg0
= CALL_EXPR_ARG (exp
, 0);
2007 arg1
= CALL_EXPR_ARG (exp
, 1);
/* Select the optab from the function code.  scalb/scalbn/scalbln
   are only expanded when the mode's radix is 2.  */
2009 switch (DECL_FUNCTION_CODE (fndecl
))
2011 CASE_FLT_FN (BUILT_IN_POW
):
2012 builtin_optab
= pow_optab
; break;
2013 CASE_FLT_FN (BUILT_IN_ATAN2
):
2014 builtin_optab
= atan2_optab
; break;
2015 CASE_FLT_FN (BUILT_IN_SCALB
):
2016 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2018 builtin_optab
= scalb_optab
; break;
2019 CASE_FLT_FN (BUILT_IN_SCALBN
):
2020 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2021 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2023 /* Fall through... */
2024 CASE_FLT_FN (BUILT_IN_LDEXP
):
2025 builtin_optab
= ldexp_optab
; break;
2026 CASE_FLT_FN (BUILT_IN_FMOD
):
2027 builtin_optab
= fmod_optab
; break;
2028 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2029 CASE_FLT_FN (BUILT_IN_DREM
):
2030 builtin_optab
= remainder_optab
; break;
2035 /* Make a suitable register to place result in. */
2036 mode
= TYPE_MODE (TREE_TYPE (exp
));
2038 /* Before working hard, check whether the instruction is available. */
2039 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2042 target
= gen_reg_rtx (mode
);
/* errno handling is skipped when errno math is off or the mode has
   no NaNs (the assignment to errno_set is in a dropped line;
   presumably errno_set = false here -- TODO confirm).  */
2044 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2047 /* Always stabilize the argument list. */
2048 narg
= builtin_save_expr (arg1
);
2054 narg
= builtin_save_expr (arg0
);
2062 exp
= build_call_expr (fndecl
, 2, arg0
, arg1
);
2064 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2065 op1
= expand_normal (arg1
);
2069 /* Compute into TARGET.
2070 Set TARGET to wherever the result comes back. */
2071 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2072 target
, 0, OPTAB_DIRECT
);
2074 /* If we were unable to expand via the builtin, stop the sequence
2075 (without outputting the insns) and call to the library function
2076 with the stabilized argument list. */
2080 return expand_call (exp
, target
, target
== const0_rtx
);
2084 expand_errno_check (exp
, target
);
2086 /* Output the entire sequence. */
2087 insns
= get_insns ();
2094 /* Expand a call to the builtin sin and cos math functions.
2095 Return NULL_RTX if a normal call should be emitted rather than expanding the
2096 function in-line. EXP is the expression that is a call to the builtin
2097 function; if convenient, the result should be placed in TARGET.
2098 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): lossy listing -- embedded line numbers jump (2105,
   2111-2112, 2120-2123, 2136-2139, etc. absent); braces, returns and
   sequence bookkeeping are not visible.  */
/* Expand sin/cos: prefer the combined sincos optab, fall back to the
   dedicated sin/cos optabs, and finally to a library call.  */
2102 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2104 optab builtin_optab
;
2106 tree fndecl
= get_callee_fndecl (exp
);
2107 enum machine_mode mode
;
2110 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2113 arg
= CALL_EXPR_ARG (exp
, 0);
2115 switch (DECL_FUNCTION_CODE (fndecl
))
2117 CASE_FLT_FN (BUILT_IN_SIN
):
2118 CASE_FLT_FN (BUILT_IN_COS
):
2119 builtin_optab
= sincos_optab
; break;
2124 /* Make a suitable register to place result in. */
2125 mode
= TYPE_MODE (TREE_TYPE (exp
));
2127 /* Check if sincos insn is available, otherwise fallback
2128 to sin or cos insn. */
2129 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2130 switch (DECL_FUNCTION_CODE (fndecl
))
2132 CASE_FLT_FN (BUILT_IN_SIN
):
2133 builtin_optab
= sin_optab
; break;
2134 CASE_FLT_FN (BUILT_IN_COS
):
2135 builtin_optab
= cos_optab
; break;
2140 /* Before working hard, check whether the instruction is available. */
2141 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2143 target
= gen_reg_rtx (mode
)
;
2145 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2146 need to expand the argument again. This way, we will not perform
2147 side-effects more than once. */
2148 narg
= save_expr (arg
);
2152 exp
= build_call_expr (fndecl
, 1, arg
);
2155 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2159 /* Compute into TARGET.
2160 Set TARGET to wherever the result comes back. */
/* sincos computes two values at once; the unused one is passed as 0
   (sin keeps the second result slot, cos the first).  */
2161 if (builtin_optab
== sincos_optab
)
2165 switch (DECL_FUNCTION_CODE (fndecl
))
2167 CASE_FLT_FN (BUILT_IN_SIN
):
2168 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2170 CASE_FLT_FN (BUILT_IN_COS
):
2171 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2176 gcc_assert (result
);
2180 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2185 /* Output the entire sequence. */
2186 insns
= get_insns ();
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call to the library function
2194 with the stabilized argument list. */
2198 target
= expand_call (exp
, target
, target
== const0_rtx
);
2203 /* Expand a call to one of the builtin math functions that operate on
2204 floating point argument and output an integer result (ilogb, isinf,
2206 Return 0 if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): lossy listing -- lines such as 2236-2240, 2253, 2255,
   2280-2282, 2293-2295 are absent; braces/returns/locals (r, buf,
   result) are declared in dropped lines.  */
/* Expand a builtin taking a float and returning an int (ilogb, isinf,
   isfinite/finite): use the optab when one exists, otherwise lower
   isinf/isfinite to isgreater/islessequal against the mode's maximum
   finite value, otherwise emit a library call.  */
2212 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2214 optab builtin_optab
= 0;
2215 enum insn_code icode
= CODE_FOR_nothing
;
2217 tree fndecl
= get_callee_fndecl (exp
);
2218 enum machine_mode mode
;
2219 bool errno_set
= false;
2222 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2225 arg
= CALL_EXPR_ARG (exp
, 0);
2227 switch (DECL_FUNCTION_CODE (fndecl
))
2229 CASE_FLT_FN (BUILT_IN_ILOGB
):
2230 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF
):
2232 builtin_optab
= isinf_optab
; break;
2233 case BUILT_IN_ISFINITE
:
2234 CASE_FLT_FN (BUILT_IN_FINITE
):
2235 /* These builtins have no optabs (yet). */
2241 /* There's no easy way to detect the case we need to set EDOM. */
2242 if (flag_errno_math
&& errno_set
)
2245 /* Optab mode depends on the mode of the input argument. */
2246 mode
= TYPE_MODE (TREE_TYPE (arg
));
2249 icode
= builtin_optab
->handlers
[(int) mode
].insn_code
;
2251 /* Before working hard, check whether the instruction is available. */
2252 if (icode
!= CODE_FOR_nothing
)
2254 /* Make a suitable register to place result in. */
2256 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2257 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2259 gcc_assert (insn_data
[icode
].operand
[0].predicate
2260 (target
, GET_MODE (target
)));
2262 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2263 need to expand the argument again. This way, we will not perform
2264 side-effects more than once. */
2265 narg
= builtin_save_expr (arg
);
2269 exp
= build_call_expr (fndecl
, 1, arg
);
2272 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2274 if (mode
!= GET_MODE (op0
))
2275 op0
= convert_to_mode (mode
, op0
, 0);
2277 /* Compute into TARGET.
2278 Set TARGET to wherever the result comes back. */
2279 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
2283 /* If there is no optab, try generic code. */
2284 switch (DECL_FUNCTION_CODE (fndecl
))
2288 CASE_FLT_FN (BUILT_IN_ISINF
):
2290 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2291 tree
const isgr_fn
= built_in_decls
[BUILT_IN_ISGREATER
];
2292 tree
const type
= TREE_TYPE (arg
);
2296 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2297 real_from_string (&r
, buf
);
2298 result
= build_call_expr (isgr_fn
, 2,
2299 fold_build1 (ABS_EXPR
, type
, arg
),
2300 build_real (type
, r
));
2301 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2303 CASE_FLT_FN (BUILT_IN_FINITE
):
2304 case BUILT_IN_ISFINITE
:
2306 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2307 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2308 tree
const type
= TREE_TYPE (arg
);
2312 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2313 real_from_string (&r
, buf
);
2314 result
= build_call_expr (isle_fn
, 2,
2315 fold_build1 (ABS_EXPR
, type
, arg
),
2316 build_real (type
, r
));
2317 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2323 target
= expand_call (exp
, target
, target
== const0_rtx
);
2328 /* Expand a call to the builtin sincos math function.
2329 Return NULL_RTX if a normal call should be emitted rather than expanding the
2330 function in-line. EXP is the expression that is a call to the builtin
/* NOTE(review): lossy listing -- lines 2335, 2339-2340, 2343-2344,
   2348, 2354-2355, etc. are absent (braces and returns).  */
/* Expand sincos(x, &s, &c): if the sincos optab exists for the
   argument's mode, compute both results into fresh registers and
   store them through the two pointer arguments; otherwise the caller
   emits the library call (return in a dropped line).  */
2334 expand_builtin_sincos (tree exp
)
2336 rtx op0
, op1
, op2
, target1
, target2
;
2337 enum machine_mode mode
;
2338 tree arg
, sinp
, cosp
;
2341 if (!validate_arglist (exp
, REAL_TYPE
,
2342 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2345 arg
= CALL_EXPR_ARG (exp
, 0);
2346 sinp
= CALL_EXPR_ARG (exp
, 1);
2347 cosp
= CALL_EXPR_ARG (exp
, 2);
2349 /* Make a suitable register to place result in. */
2350 mode
= TYPE_MODE (TREE_TYPE (arg
));
2352 /* Check if sincos insn is available, otherwise emit the call. */
2353 if (sincos_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2356 target1
= gen_reg_rtx (mode
);
2357 target2
= gen_reg_rtx (mode
);
2359 op0
= expand_normal (arg
);
/* op1/op2 are the MEMs the results are stored into.  */
2360 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2361 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2363 /* Compute into target1 and target2.
2364 Set TARGET to wherever the result comes back. */
2365 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2366 gcc_assert (result
);
2368 /* Move target1 and target2 to the memory locations indicated
2370 emit_move_insn (op1
, target1
);
2371 emit_move_insn (op2
, target2
);
2376 /* Expand a call to the internal cexpi builtin to the sincos math function.
2377 EXP is the expression that is a call to the builtin function; if convenient,
2378 the result should be placed in TARGET. SUBTARGET may be used as the target
2379 for computing one of EXP's operands. */
/* NOTE(review): lossy listing -- lines 2383, 2385, 2387-2388,
   2390-2391, 2412-2414, 2421-2423, 2454-2455, 2459, 2461, 2463-2464
   are absent (locals op0/op1/op2/op1a/op2a/top1/top2/fntype, the
   cexpf/cexp/cexpl name strings, braces and gcc_unreachable).  */
/* Expand __builtin_cexpi(x) three ways, in preference order:
   (1) the sincos optab; (2) a call to the sincos library function
   when TARGET_HAS_SINCOS, storing through two temporaries; (3) a
   call to cexp on the complex value (0, x), creating a cexp decl if
   the target lacks one.  */
2382 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2384 tree fndecl
= get_callee_fndecl (exp
);
2386 enum machine_mode mode
;
2389 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2392 arg
= CALL_EXPR_ARG (exp
, 0);
2393 type
= TREE_TYPE (arg
);
2394 mode
= TYPE_MODE (TREE_TYPE (arg
));
2396 /* Try expanding via a sincos optab, fall back to emitting a libcall
2397 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2398 is only generated from sincos, cexp or if we have either of them. */
2399 if (sincos_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2401 op1
= gen_reg_rtx (mode
);
2402 op2
= gen_reg_rtx (mode
);
2404 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2406 /* Compute into op1 and op2. */
2407 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2409 else if (TARGET_HAS_SINCOS
)
2411 tree call
, fn
= NULL_TREE
;
2415 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2416 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2417 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2418 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2419 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2420 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
/* Stack temporaries receive sin/cos; their addresses are passed to
   the sincos call below.  */
2424 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2425 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2426 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2427 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2428 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2429 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2431 /* Make sure not to fold the sincos call again. */
2432 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2433 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2434 call
, 3, arg
, top1
, top2
));
2438 tree call
, fn
= NULL_TREE
, narg
;
2439 tree ctype
= build_complex_type (type
);
2441 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2442 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2443 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2444 fn
= built_in_decls
[BUILT_IN_CEXP
];
2445 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2446 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2450 /* If we don't have a decl for cexp create one. This is the
2451 friendliest fallback if the user calls __builtin_cexpi
2452 without full target C99 function support. */
2453 if (fn
== NULL_TREE
)
2456 const char *name
= NULL
;
/* The name assignments ("cexpf"/"cexp"/"cexpl") are in dropped
   lines 2459/2461/2463 -- TODO confirm against the full source.  */
2458 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2460 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2462 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2465 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2466 fn
= build_fn_decl (name
, fntype
);
/* cexpi(x) == cexp(0 + x*i).  */
2469 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2470 build_real (type
, dconst0
), arg
);
2472 /* Make sure not to fold the cexp call again. */
2473 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2474 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2475 target
, VOIDmode
, EXPAND_NORMAL
);
2478 /* Now build the proper return type. */
2479 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2480 make_tree (TREE_TYPE (arg
), op2
),
2481 make_tree (TREE_TYPE (arg
), op1
)),
2482 target
, VOIDmode
, EXPAND_NORMAL
);
2485 /* Expand a call to one of the builtin rounding functions gcc defines
2486 as an extension (lfloor and lceil). As these are gcc extensions we
2487 do not need to worry about setting errno to EDOM.
2488 If expanding via optab fails, lower expression to (int)(floor(x)).
2489 EXP is the expression that is a call to the builtin function;
2490 if convenient, the result should be placed in TARGET. SUBTARGET may
2491 be used as the target for computing one of EXP's operands. */
/* NOTE(review): lossy listing -- lines 2495, 2502-2503, 2505-2506,
   2515-2516, 2521-2526, 2536-2538, 2551-2555, 2576-2601 (the name
   string assignments), 2615-2617 are absent.  */
/* Expand lceil/llceil/lfloor/llfloor: try the lceil/lfloor optab via
   expand_sfix_optab; on failure call the corresponding floor/ceil
   builtin (creating a decl by name for non-C99 targets) and truncate
   the FP result with expand_fix.  */
2494 expand_builtin_int_roundingfn (tree exp
, rtx target
, rtx subtarget
)
2496 convert_optab builtin_optab
;
2497 rtx op0
, insns
, tmp
;
2498 tree fndecl
= get_callee_fndecl (exp
);
2499 enum built_in_function fallback_fn
;
2500 tree fallback_fndecl
;
2501 enum machine_mode mode
;
2504 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2507 arg
= CALL_EXPR_ARG (exp
, 0);
2509 switch (DECL_FUNCTION_CODE (fndecl
))
2511 CASE_FLT_FN (BUILT_IN_LCEIL
):
2512 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2513 builtin_optab
= lceil_optab
;
2514 fallback_fn
= BUILT_IN_CEIL
;
2517 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2518 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2519 builtin_optab
= lfloor_optab
;
2520 fallback_fn
= BUILT_IN_FLOOR
;
2527 /* Make a suitable register to place result in. */
2528 mode
= TYPE_MODE (TREE_TYPE (exp
));
2530 target
= gen_reg_rtx (mode
);
2532 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2533 need to expand the argument again. This way, we will not perform
2534 side-effects more than once. */
2535 narg
= builtin_save_expr (arg
);
2539 exp
= build_call_expr (fndecl
, 1, arg
);
2542 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2546 /* Compute into TARGET. */
2547 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2549 /* Output the entire sequence. */
2550 insns
= get_insns ();
2556 /* If we were unable to expand via the builtin, stop the sequence
2557 (without outputting the insns). */
2560 /* Fall back to floating point rounding optab. */
2561 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2563 /* For non-C99 targets we may end up without a fallback fndecl here
2564 if the user called __builtin_lfloor directly. In this case emit
2565 a call to the floor/ceil variants nevertheless. This should result
2566 in the best user experience for not full C99 targets. */
2567 if (fallback_fndecl
== NULL_TREE
)
2570 const char *name
= NULL
;
/* Each case pair presumably sets NAME to the matching libm function
   ("ceil", "ceilf", ..., "floorl") in dropped lines -- TODO confirm.  */
2572 switch (DECL_FUNCTION_CODE (fndecl
))
2574 case BUILT_IN_LCEIL
:
2575 case BUILT_IN_LLCEIL
:
2578 case BUILT_IN_LCEILF
:
2579 case BUILT_IN_LLCEILF
:
2582 case BUILT_IN_LCEILL
:
2583 case BUILT_IN_LLCEILL
:
2586 case BUILT_IN_LFLOOR
:
2587 case BUILT_IN_LLFLOOR
:
2590 case BUILT_IN_LFLOORF
:
2591 case BUILT_IN_LLFLOORF
:
2594 case BUILT_IN_LFLOORL
:
2595 case BUILT_IN_LLFLOORL
:
2602 fntype
= build_function_type_list (TREE_TYPE (arg
),
2603 TREE_TYPE (arg
), NULL_TREE
);
2604 fallback_fndecl
= build_fn_decl (name
, fntype
);
2607 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2609 tmp
= expand_normal (exp
);
2611 /* Truncate the result of floating point optab to integer
2612 via expand_fix (). */
2613 target
= gen_reg_rtx (mode
);
2614 expand_fix (target
, tmp
, 0);
2619 /* Expand a call to one of the builtin math functions doing integer
2621 Return 0 if a normal call should be emitted rather than expanding the
2622 function in-line. EXP is the expression that is a call to the builtin
2623 function; if convenient, the result should be placed in TARGET.
2624 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): lossy listing -- lines 2628, 2630, 2632, 2634,
   2637-2638, 2640-2641, 2652-2655, 2658, 2665-2667, 2679-2683,
   2687-2688 are absent (locals op0/insns/narg/arg, braces, returns).  */
/* Expand lrint/llrint/lround/llround via the lrint/lround optab;
   errno handling is not attempted (bail out when flag_errno_math),
   and on optab failure fall back to a library call.  */
2627 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
, rtx subtarget
)
2629 convert_optab builtin_optab
;
2631 tree fndecl
= get_callee_fndecl (exp
);
2633 enum machine_mode mode
;
2635 /* There's no easy way to detect the case we need to set EDOM. */
2636 if (flag_errno_math
)
2639 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2642 arg
= CALL_EXPR_ARG (exp
, 0);
2644 switch (DECL_FUNCTION_CODE (fndecl
))
2646 CASE_FLT_FN (BUILT_IN_LRINT
):
2647 CASE_FLT_FN (BUILT_IN_LLRINT
):
2648 builtin_optab
= lrint_optab
; break;
2649 CASE_FLT_FN (BUILT_IN_LROUND
):
2650 CASE_FLT_FN (BUILT_IN_LLROUND
):
2651 builtin_optab
= lround_optab
; break;
2656 /* Make a suitable register to place result in. */
2657 mode
= TYPE_MODE (TREE_TYPE (exp
));
2659 target
= gen_reg_rtx (mode
);
2661 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2662 need to expand the argument again. This way, we will not perform
2663 side-effects more than once. */
2664 narg
= builtin_save_expr (arg
);
2668 exp
= build_call_expr (fndecl
, 1, arg
);
2671 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2675 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2677 /* Output the entire sequence. */
2678 insns
= get_insns ();
2684 /* If we were unable to expand via the builtin, stop the sequence
2685 (without outputting the insns) and call to the library function
2686 with the stabilized argument list. */
2689 target
= expand_call (exp
, target
, target
== const0_rtx
);
2694 /* To evaluate powi(x,n), the floating point value x raised to the
2695 constant integer exponent n, we use a hybrid algorithm that
2696 combines the "window method" with look-up tables. For an
2697 introduction to exponentiation algorithms and "addition chains",
2698 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2699 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2700 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2701 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2703 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2704 multiplications to inline before calling the system library's pow
2705 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2706 so this default never requires calling pow, powf or powl. */
2708 #ifndef POWI_MAX_MULTS
2709 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2712 /* The size of the "optimal power tree" lookup table. All
2713 exponents less than this value are simply looked up in the
2714 powi_table below. This threshold is also used to size the
2715 cache of pseudo registers that hold intermediate results. */
2716 #define POWI_TABLE_SIZE 256
2718 /* The size, in bits of the window, used in the "window method"
2719 exponentiation algorithm. This is equivalent to a radix of
2720 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2721 #define POWI_WINDOW_SIZE 3
2723 /* The following table is an efficient representation of an
2724 "optimal power tree". For each value, i, the corresponding
2725 value, j, in the table states that an optimal evaluation
2726 sequence for calculating pow(x,i) can be found by evaluating
2727 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2728 100 integers is given in Knuth's "Seminumerical algorithms". */
/* Lookup table for the "optimal power tree": entry i gives a split j
   such that pow(x,i) = pow(x,j) * pow(x,i-j) with a minimal
   multiplication count (see the comment block above).  Indexed by
   exponents 0..POWI_TABLE_SIZE-1.  */
2730 static const unsigned char powi_table
[POWI_TABLE_SIZE
] =
2732 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2733 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2734 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2735 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2736 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2737 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2738 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2739 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2740 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2741 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2742 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2743 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2744 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2745 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2746 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2747 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2748 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2749 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2750 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2751 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2752 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2753 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2754 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2755 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2756 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2757 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2758 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2759 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2760 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2761 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2762 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2763 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2767 /* Return the number of multiplications required to calculate
2768 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2769 subroutine of powi_cost. CACHE is an array indicating
2770 which exponents have already been calculated. */
/* NOTE(review): lossy listing -- lines 2774, 2777-2780 are absent
   (the cache-hit early return and cache[n] update).  */
/* Count the multiplications needed for powi(x,n), n < POWI_TABLE_SIZE,
   splitting n per powi_table and memoizing already-computed exponents
   in CACHE.  */
2773 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2775 /* If we've already calculated this exponent, then this evaluation
2776 doesn't require any additional multiplications. */
2781 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2782 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2785 /* Return the number of multiplications required to calculate
2786 powi(x,n) for an arbitrary x, given the exponent N. This
2787 function needs to be kept in sync with expand_powi below. */
/* NOTE(review): lossy listing -- lines 2791, 2795-2799, 2805-2808,
   2810-2812, 2817-2824 are absent (result declaration/initialization,
   the n == 0 early return, braces).  */
/* Estimate multiplications needed for powi(x,n) for arbitrary x:
   peel POWI_WINDOW_SIZE-bit digits off |n| and sum their table costs
   plus the squarings, mirroring expand_powi_1.  */
2790 powi_cost (HOST_WIDE_INT n
)
2792 bool cache
[POWI_TABLE_SIZE
];
2793 unsigned HOST_WIDE_INT digit
;
2794 unsigned HOST_WIDE_INT val
;
2800 /* Ignore the reciprocal when calculating the cost. */
2801 val
= (n
< 0) ? -n
: n
;
2803 /* Initialize the exponent cache. */
2804 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2809 while (val
>= POWI_TABLE_SIZE
)
2813 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2814 result
+= powi_lookup_cost (digit
, cache
)
2815 + POWI_WINDOW_SIZE
+ 1;
2816 val
>>= POWI_WINDOW_SIZE
;
2825 return result
+ powi_lookup_cost (val
, cache
);
2828 /* Recursive subroutine of expand_powi. This function takes the array,
2829 CACHE, of already calculated exponents and an exponent N and returns
2830 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* NOTE(review): lossy listing -- lines 2834, 2836-2838, 2840-2843,
   2845-2846, 2849-2851, 2856-2858, 2861-2863, 2867-2869 are absent
   (locals target/result/op0/op1, cache hit/store logic, the odd/even
   branch tests, return).  */
/* Recursively build RTL computing cache[1]**n in MODE, memoizing
   results for table-sized exponents in CACHE, and combining
   sub-results with expand_mult.  */
2833 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2835 unsigned HOST_WIDE_INT digit
;
/* Small exponents: split per powi_table.  */
2839 if (n
< POWI_TABLE_SIZE
)
2844 target
= gen_reg_rtx (mode
);
2847 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2848 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
/* Larger exponents: peel the low POWI_WINDOW_SIZE bits...  */
2852 target
= gen_reg_rtx (mode
);
2853 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2854 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2855 op1
= expand_powi_1 (mode
, digit
, cache
);
/* ...or square the half-exponent result.  */
2859 target
= gen_reg_rtx (mode
);
2860 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2864 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2865 if (result
!= target
)
2866 emit_move_insn (target
, result
);
2870 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2871 floating point operand in mode MODE, and N is the exponent. This
2872 function needs to be kept in sync with powi_cost above. */
/* NOTE(review): lossy listing -- lines 2876, 2879-2881, 2883, 2885,
   2887-2888, 2890, 2892, 2895-2897 are absent (result declaration,
   the n == 0 early return, cache[1] = x seeding, negative-n test,
   final return).  */
/* Emit RTL for powi(x,n): 1 for n == 0, otherwise expand |n| via
   expand_powi_1 and divide into CONST1_RTX when n was negative.  */
2875 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2877 unsigned HOST_WIDE_INT val
;
2878 rtx cache
[POWI_TABLE_SIZE
];
2882 return CONST1_RTX (mode
);
2884 val
= (n
< 0) ? -n
: n
;
2886 memset (cache
, 0, sizeof (cache
));
2889 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2891 /* If the original exponent was negative, reciprocate the result. */
2893 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2894 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2899 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2900 a normal call should be emitted rather than expanding the function
2901 in-line. EXP is the expression that is a call to the builtin
2902 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): lossy listing -- lines 2906-2908, 2911-2912, 2914,
   2916-2917, 2920, 2924, 2926, 2935, 2937, 2939-2940, 2943-2946,
   2948, 2953, 2959, 2961-2962, 2965-2966, 2973-2974, 2977-2981,
   2988, 2992, 3000, 3002-3003, 3009-3010, 3017-3018, 3021-3025 are
   absent (locals op/op2/narg0/fn/n, guard conditions, returns).  */
/* Expand pow(x,c) with constant c: integer exponents go through
   expand_powi; half-integer exponents use sqrt(x) * x**(n/2);
   third-of-integer exponents use cbrt(x)**(n%3) * x**(n/3) under
   unsafe-math constraints; anything else falls back to the pow
   optab/libcall via expand_builtin_mathfn_2.  */
2905 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2909 tree type
= TREE_TYPE (exp
);
2910 REAL_VALUE_TYPE cint
, c
, c2
;
2913 enum machine_mode mode
= TYPE_MODE (type
);
2915 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2918 arg0
= CALL_EXPR_ARG (exp
, 0);
2919 arg1
= CALL_EXPR_ARG (exp
, 1);
2921 if (TREE_CODE (arg1
) != REAL_CST
2922 || TREE_OVERFLOW (arg1
))
2923 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2925 /* Handle constant exponents. */
2927 /* For integer valued exponents we can expand to an optimal multiplication
2928 sequence using expand_powi. */
2929 c
= TREE_REAL_CST (arg1
);
2930 n
= real_to_integer (&c
);
2931 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2932 if (real_identical (&c
, &cint
)
2933 && ((n
>= -1 && n
<= 2)
2934 || (flag_unsafe_math_optimizations
2936 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2938 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2941 op
= force_reg (mode
, op
);
2942 op
= expand_powi (op
, mode
, n
);
2947 narg0
= builtin_save_expr (arg0
);
2949 /* If the exponent is not integer valued, check if it is half of an integer.
2950 In this case we can expand to sqrt (x) * x**(n/2). */
2951 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2952 if (fn
!= NULL_TREE
)
2954 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2955 n
= real_to_integer (&c2
);
2956 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2957 if (real_identical (&c2
, &cint
)
2958 && ((flag_unsafe_math_optimizations
2960 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2963 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2964 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2967 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2968 op2
= force_reg (mode
, op2
);
2969 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2970 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2971 0, OPTAB_LIB_WIDEN
);
2972 /* If the original exponent was negative, reciprocate the
2975 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2976 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2982 /* Try if the exponent is a third of an integer. In this case
2983 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2984 different from pow (x, 1./3.) due to rounding and behavior
2985 with negative x we need to constrain this transformation to
2986 unsafe math and positive x or finite math. */
2987 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
2989 && flag_unsafe_math_optimizations
2990 && (tree_expr_nonnegative_p (arg0
)
2991 || !HONOR_NANS (mode
)))
/* Round 3*c and divide back by 3; equality with c means c is an
   exact third of the integer n.  */
2993 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
2994 real_round (&c2
, mode
, &c2
);
2995 n
= real_to_integer (&c2
);
2996 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2997 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
2998 real_convert (&c2
, mode
, &c2
);
2999 if (real_identical (&c2
, &c
)
3001 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
3004 tree call_expr
= build_call_expr (fn
, 1,narg0
);
3005 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
3006 if (abs (n
) % 3 == 2)
3007 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
3008 0, OPTAB_LIB_WIDEN
);
3011 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3012 op2
= force_reg (mode
, op2
);
3013 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
3014 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3015 0, OPTAB_LIB_WIDEN
);
3016 /* If the original exponent was negative, reciprocate the
3019 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3020 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3026 /* Fall back to optab expansion. */
3027 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3030 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3031 a normal call should be emitted rather than expanding the function
3032 in-line. EXP is the expression that is a call to the builtin
3033 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): lossy listing -- lines 3037-3039, 3042, 3044-3045,
   3049, 3051, 3054, 3056, 3062, 3064, 3068-3070, 3072, 3075, 3078,
   3085, 3089-3092 are absent (locals arg0/arg1/op0/op1, the
   flag_unsafe_math_optimizations guard, returns).  */
/* Expand __builtin_powi(x, n): for small or cheap constant integer
   exponents emit inline multiplications via expand_powi; otherwise
   emit a libgcc __powi* libcall with the exponent converted to the
   int-sized mode.  */
3036 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
3040 enum machine_mode mode
;
3041 enum machine_mode mode2
;
3043 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3046 arg0
= CALL_EXPR_ARG (exp
, 0);
3047 arg1
= CALL_EXPR_ARG (exp
, 1);
3048 mode
= TYPE_MODE (TREE_TYPE (exp
));
3050 /* Handle constant power. */
3052 if (TREE_CODE (arg1
) == INTEGER_CST
3053 && !TREE_OVERFLOW (arg1
))
3055 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3057 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3058 Otherwise, check the number of multiplications required. */
/* The HIGH word test ensures the constant fits a HOST_WIDE_INT.  */
3059 if ((TREE_INT_CST_HIGH (arg1
) == 0
3060 || TREE_INT_CST_HIGH (arg1
) == -1)
3061 && ((n
>= -1 && n
<= 2)
3063 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3065 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3066 op0
= force_reg (mode
, op0
);
3067 return expand_powi (op0
, mode
, n
);
3071 /* Emit a libcall to libgcc. */
3073 /* Mode of the 2nd argument must match that of an int. */
3074 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3076 if (target
== NULL_RTX
)
3077 target
= gen_reg_rtx (mode
);
3079 op0
= expand_expr (arg0
, subtarget
, mode
, EXPAND_NORMAL
);
3080 if (GET_MODE (op0
) != mode
)
3081 op0
= convert_to_mode (mode
, op0
, 0);
3082 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3083 if (GET_MODE (op1
) != mode2
)
3084 op1
= convert_to_mode (mode2
, op1
, 0);
3086 target
= emit_library_call_value (powi_optab
->handlers
[(int) mode
].libfunc
,
3087 target
, LCT_CONST_MAKE_BLOCK
, mode
, 2,
3088 op0
, mode
, op1
, mode2
);
3093 /* Expand expression EXP which is a call to the strlen builtin. Return
3094 NULL_RTX if we failed the caller should emit a normal call, otherwise
3095 try to get the result in TARGET, if convenient. */
3098 expand_builtin_strlen (tree exp
, rtx target
,
3099 enum machine_mode target_mode
)
3101 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3107 tree src
= CALL_EXPR_ARG (exp
, 0);
3108 rtx result
, src_reg
, char_rtx
, before_strlen
;
3109 enum machine_mode insn_mode
= target_mode
, char_mode
;
3110 enum insn_code icode
= CODE_FOR_nothing
;
3113 /* If the length can be computed at compile-time, return it. */
3114 len
= c_strlen (src
, 0);
3116 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3118 /* If the length can be computed at compile-time and is constant
3119 integer, but there are side-effects in src, evaluate
3120 src for side-effects, then return len.
3121 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3122 can be optimized into: i++; x = 3; */
3123 len
= c_strlen (src
, 1);
3124 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3126 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3127 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3130 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3132 /* If SRC is not a pointer type, don't do this operation inline. */
3136 /* Bail out if we can't compute strlen in the right mode. */
3137 while (insn_mode
!= VOIDmode
)
3139 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
3140 if (icode
!= CODE_FOR_nothing
)
3143 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3145 if (insn_mode
== VOIDmode
)
3148 /* Make a place to write the result of the instruction. */
3152 && GET_MODE (result
) == insn_mode
3153 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3154 result
= gen_reg_rtx (insn_mode
);
3156 /* Make a place to hold the source address. We will not expand
3157 the actual source until we are sure that the expansion will
3158 not fail -- there are trees that cannot be expanded twice. */
3159 src_reg
= gen_reg_rtx (Pmode
);
3161 /* Mark the beginning of the strlen sequence so we can emit the
3162 source operand later. */
3163 before_strlen
= get_last_insn ();
3165 char_rtx
= const0_rtx
;
3166 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3167 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3169 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3171 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3172 char_rtx
, GEN_INT (align
));
3177 /* Now that we are assured of success, expand the source. */
3179 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3181 emit_move_insn (src_reg
, pat
);
3186 emit_insn_after (pat
, before_strlen
);
3188 emit_insn_before (pat
, get_insns ());
3190 /* Return the value in the proper mode for this function. */
3191 if (GET_MODE (result
) == target_mode
)
3193 else if (target
!= 0)
3194 convert_move (target
, result
, 0);
3196 target
= convert_to_mode (target_mode
, result
, 0);
3202 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3203 caller should emit a normal call, otherwise try to get the result
3204 in TARGET, if convenient (and in mode MODE if that's convenient). */
3207 expand_builtin_strstr (tree exp
, rtx target
, enum machine_mode mode
)
3209 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3211 tree type
= TREE_TYPE (exp
);
3212 tree result
= fold_builtin_strstr (CALL_EXPR_ARG (exp
, 0),
3213 CALL_EXPR_ARG (exp
, 1), type
);
3215 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3220 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3221 caller should emit a normal call, otherwise try to get the result
3222 in TARGET, if convenient (and in mode MODE if that's convenient). */
3225 expand_builtin_strchr (tree exp
, rtx target
, enum machine_mode mode
)
3227 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3229 tree type
= TREE_TYPE (exp
);
3230 tree result
= fold_builtin_strchr (CALL_EXPR_ARG (exp
, 0),
3231 CALL_EXPR_ARG (exp
, 1), type
);
3233 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3235 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3240 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3241 caller should emit a normal call, otherwise try to get the result
3242 in TARGET, if convenient (and in mode MODE if that's convenient). */
3245 expand_builtin_strrchr (tree exp
, rtx target
, enum machine_mode mode
)
3247 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3249 tree type
= TREE_TYPE (exp
);
3250 tree result
= fold_builtin_strrchr (CALL_EXPR_ARG (exp
, 0),
3251 CALL_EXPR_ARG (exp
, 1), type
);
3253 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3258 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3259 caller should emit a normal call, otherwise try to get the result
3260 in TARGET, if convenient (and in mode MODE if that's convenient). */
3263 expand_builtin_strpbrk (tree exp
, rtx target
, enum machine_mode mode
)
3265 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3267 tree type
= TREE_TYPE (exp
);
3268 tree result
= fold_builtin_strpbrk (CALL_EXPR_ARG (exp
, 0),
3269 CALL_EXPR_ARG (exp
, 1), type
);
3271 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3276 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3277 bytes from constant string DATA + OFFSET and return it as target
3281 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3282 enum machine_mode mode
)
3284 const char *str
= (const char *) data
;
3286 gcc_assert (offset
>= 0
3287 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3288 <= strlen (str
) + 1));
3290 return c_readstr (str
+ offset
, mode
);
3293 /* Expand a call EXP to the memcpy builtin.
3294 Return NULL_RTX if we failed, the caller should emit a normal call,
3295 otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). */
3299 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
3301 tree fndecl
= get_callee_fndecl (exp
);
3303 if (!validate_arglist (exp
,
3304 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3308 tree dest
= CALL_EXPR_ARG (exp
, 0);
3309 tree src
= CALL_EXPR_ARG (exp
, 1);
3310 tree len
= CALL_EXPR_ARG (exp
, 2);
3311 const char *src_str
;
3312 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3313 unsigned int dest_align
3314 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3315 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3316 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3317 TREE_TYPE (TREE_TYPE (fndecl
)),
3319 HOST_WIDE_INT expected_size
= -1;
3320 unsigned int expected_align
= 0;
3324 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3326 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3328 result
= TREE_OPERAND (result
, 1);
3330 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3333 /* If DEST is not a pointer type, call the normal function. */
3334 if (dest_align
== 0)
3337 /* If either SRC is not a pointer type, don't do this
3338 operation in-line. */
3342 stringop_block_profile (exp
, &expected_align
, &expected_size
);
3343 if (expected_align
< dest_align
)
3344 expected_align
= dest_align
;
3345 dest_mem
= get_memory_rtx (dest
, len
);
3346 set_mem_align (dest_mem
, dest_align
);
3347 len_rtx
= expand_normal (len
);
3348 src_str
= c_getstr (src
);
3350 /* If SRC is a string constant and block move would be done
3351 by pieces, we can avoid loading the string from memory
3352 and only stored the computed constants. */
3354 && GET_CODE (len_rtx
) == CONST_INT
3355 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3356 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3357 (void *) src_str
, dest_align
))
3359 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3360 builtin_memcpy_read_str
,
3361 (void *) src_str
, dest_align
, 0);
3362 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3363 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3367 src_mem
= get_memory_rtx (src
, len
);
3368 set_mem_align (src_mem
, src_align
);
3370 /* Copy word part most expediently. */
3371 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3372 CALL_EXPR_TAILCALL (exp
)
3373 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3374 expected_align
, expected_size
);
3378 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3379 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3385 /* Expand a call EXP to the mempcpy builtin.
3386 Return NULL_RTX if we failed; the caller should emit a normal call,
3387 otherwise try to get the result in TARGET, if convenient (and in
3388 mode MODE if that's convenient). If ENDP is 0 return the
3389 destination pointer, if ENDP is 1 return the end pointer ala
3390 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3394 expand_builtin_mempcpy(tree exp
, rtx target
, enum machine_mode mode
)
3396 if (!validate_arglist (exp
,
3397 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3401 tree dest
= CALL_EXPR_ARG (exp
, 0);
3402 tree src
= CALL_EXPR_ARG (exp
, 1);
3403 tree len
= CALL_EXPR_ARG (exp
, 2);
3404 return expand_builtin_mempcpy_args (dest
, src
, len
,
3406 target
, mode
, /*endp=*/ 1);
3410 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3411 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3412 so that this can also be called without constructing an actual CALL_EXPR.
3413 TYPE is the return type of the call. The other arguments and return value
3414 are the same as for expand_builtin_mempcpy. */
3417 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3418 rtx target
, enum machine_mode mode
, int endp
)
3420 /* If return value is ignored, transform mempcpy into memcpy. */
3421 if (target
== const0_rtx
)
3423 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3428 return expand_expr (build_call_expr (fn
, 3, dest
, src
, len
),
3429 target
, mode
, EXPAND_NORMAL
);
3433 const char *src_str
;
3434 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3435 unsigned int dest_align
3436 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3437 rtx dest_mem
, src_mem
, len_rtx
;
3438 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3442 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3444 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3446 result
= TREE_OPERAND (result
, 1);
3448 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3451 /* If either SRC or DEST is not a pointer type, don't do this
3452 operation in-line. */
3453 if (dest_align
== 0 || src_align
== 0)
3456 /* If LEN is not constant, call the normal function. */
3457 if (! host_integerp (len
, 1))
3460 len_rtx
= expand_normal (len
);
3461 src_str
= c_getstr (src
);
3463 /* If SRC is a string constant and block move would be done
3464 by pieces, we can avoid loading the string from memory
3465 and only stored the computed constants. */
3467 && GET_CODE (len_rtx
) == CONST_INT
3468 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3469 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3470 (void *) src_str
, dest_align
))
3472 dest_mem
= get_memory_rtx (dest
, len
);
3473 set_mem_align (dest_mem
, dest_align
);
3474 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3475 builtin_memcpy_read_str
,
3476 (void *) src_str
, dest_align
, endp
);
3477 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3478 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3482 if (GET_CODE (len_rtx
) == CONST_INT
3483 && can_move_by_pieces (INTVAL (len_rtx
),
3484 MIN (dest_align
, src_align
)))
3486 dest_mem
= get_memory_rtx (dest
, len
);
3487 set_mem_align (dest_mem
, dest_align
);
3488 src_mem
= get_memory_rtx (src
, len
);
3489 set_mem_align (src_mem
, src_align
);
3490 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3491 MIN (dest_align
, src_align
), endp
);
3492 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3493 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3501 /* Expand expression EXP, which is a call to the memmove builtin. Return
3502 NULL_RTX if we failed; the caller should emit a normal call. */
3505 expand_builtin_memmove (tree exp
, rtx target
, enum machine_mode mode
, int ignore
)
3507 if (!validate_arglist (exp
,
3508 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3512 tree dest
= CALL_EXPR_ARG (exp
, 0);
3513 tree src
= CALL_EXPR_ARG (exp
, 1);
3514 tree len
= CALL_EXPR_ARG (exp
, 2);
3515 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3516 target
, mode
, ignore
);
3520 /* Helper function to do the actual work for expand_builtin_memmove. The
3521 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3522 so that this can also be called without constructing an actual CALL_EXPR.
3523 TYPE is the return type of the call. The other arguments and return value
3524 are the same as for expand_builtin_memmove. */
3527 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3528 tree type
, rtx target
, enum machine_mode mode
,
3531 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3535 STRIP_TYPE_NOPS (result
);
3536 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3538 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3540 result
= TREE_OPERAND (result
, 1);
3542 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3545 /* Otherwise, call the normal function. */
3549 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3550 NULL_RTX if we failed the caller should emit a normal call. */
3553 expand_builtin_bcopy (tree exp
, int ignore
)
3555 tree type
= TREE_TYPE (exp
);
3556 tree src
, dest
, size
;
3558 if (!validate_arglist (exp
,
3559 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3562 src
= CALL_EXPR_ARG (exp
, 0);
3563 dest
= CALL_EXPR_ARG (exp
, 1);
3564 size
= CALL_EXPR_ARG (exp
, 2);
3566 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3567 This is done this way so that if it isn't expanded inline, we fall
3568 back to calling bcopy instead of memmove. */
3569 return expand_builtin_memmove_args (dest
, src
,
3570 fold_convert (sizetype
, size
),
3571 type
, const0_rtx
, VOIDmode
,
3576 # define HAVE_movstr 0
3577 # define CODE_FOR_movstr CODE_FOR_nothing
3580 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3581 we failed, the caller should emit a normal call, otherwise try to
3582 get the result in TARGET, if convenient. If ENDP is 0 return the
3583 destination pointer, if ENDP is 1 return the end pointer ala
3584 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3588 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3594 const struct insn_data
* data
;
3599 dest_mem
= get_memory_rtx (dest
, NULL
);
3600 src_mem
= get_memory_rtx (src
, NULL
);
3603 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3604 dest_mem
= replace_equiv_address (dest_mem
, target
);
3605 end
= gen_reg_rtx (Pmode
);
3609 if (target
== 0 || target
== const0_rtx
)
3611 end
= gen_reg_rtx (Pmode
);
3619 data
= insn_data
+ CODE_FOR_movstr
;
3621 if (data
->operand
[0].mode
!= VOIDmode
)
3622 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3624 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3630 /* movstr is supposed to set end to the address of the NUL
3631 terminator. If the caller requested a mempcpy-like return value,
3633 if (endp
== 1 && target
!= const0_rtx
)
3635 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3636 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3642 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3643 NULL_RTX if we failed the caller should emit a normal call, otherwise
3644 try to get the result in TARGET, if convenient (and in mode MODE if that's
3648 expand_builtin_strcpy (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
3650 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3652 tree dest
= CALL_EXPR_ARG (exp
, 0);
3653 tree src
= CALL_EXPR_ARG (exp
, 1);
3654 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3659 /* Helper function to do the actual work for expand_builtin_strcpy. The
3660 arguments to the builtin_strcpy call DEST and SRC are broken out
3661 so that this can also be called without constructing an actual CALL_EXPR.
3662 The other arguments and return value are the same as for
3663 expand_builtin_strcpy. */
3666 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3667 rtx target
, enum machine_mode mode
)
3669 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3671 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3672 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3676 /* Expand a call EXP to the stpcpy builtin.
3677 Return NULL_RTX if we failed the caller should emit a normal call,
3678 otherwise try to get the result in TARGET, if convenient (and in
3679 mode MODE if that's convenient). */
3682 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3686 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3689 dst
= CALL_EXPR_ARG (exp
, 0);
3690 src
= CALL_EXPR_ARG (exp
, 1);
3692 /* If return value is ignored, transform stpcpy into strcpy. */
3693 if (target
== const0_rtx
)
3695 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3699 return expand_expr (build_call_expr (fn
, 2, dst
, src
),
3700 target
, mode
, EXPAND_NORMAL
);
3707 /* Ensure we get an actual string whose length can be evaluated at
3708 compile-time, not an expression containing a string. This is
3709 because the latter will potentially produce pessimized code
3710 when used to produce the return value. */
3711 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3712 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3714 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3715 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3716 target
, mode
, /*endp=*/2);
3721 if (TREE_CODE (len
) == INTEGER_CST
)
3723 rtx len_rtx
= expand_normal (len
);
3725 if (GET_CODE (len_rtx
) == CONST_INT
)
3727 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3728 dst
, src
, target
, mode
);
3734 if (mode
!= VOIDmode
)
3735 target
= gen_reg_rtx (mode
);
3737 target
= gen_reg_rtx (GET_MODE (ret
));
3739 if (GET_MODE (target
) != GET_MODE (ret
))
3740 ret
= gen_lowpart (GET_MODE (target
), ret
);
3742 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3743 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3751 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3755 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3756 bytes from constant string DATA + OFFSET and return it as target
3760 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3761 enum machine_mode mode
)
3763 const char *str
= (const char *) data
;
3765 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3768 return c_readstr (str
+ offset
, mode
);
3771 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3772 NULL_RTX if we failed the caller should emit a normal call. */
3775 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3777 tree fndecl
= get_callee_fndecl (exp
);
3779 if (validate_arglist (exp
,
3780 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3782 tree dest
= CALL_EXPR_ARG (exp
, 0);
3783 tree src
= CALL_EXPR_ARG (exp
, 1);
3784 tree len
= CALL_EXPR_ARG (exp
, 2);
3785 tree slen
= c_strlen (src
, 1);
3786 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3790 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3792 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3794 result
= TREE_OPERAND (result
, 1);
3796 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3799 /* We must be passed a constant len and src parameter. */
3800 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3803 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3805 /* We're required to pad with trailing zeros if the requested
3806 len is greater than strlen(s2)+1. In that case try to
3807 use store_by_pieces, if it fails, punt. */
3808 if (tree_int_cst_lt (slen
, len
))
3810 unsigned int dest_align
3811 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3812 const char *p
= c_getstr (src
);
3815 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3816 || !can_store_by_pieces (tree_low_cst (len
, 1),
3817 builtin_strncpy_read_str
,
3818 (void *) p
, dest_align
))
3821 dest_mem
= get_memory_rtx (dest
, len
);
3822 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3823 builtin_strncpy_read_str
,
3824 (void *) p
, dest_align
, 0);
3825 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3826 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3833 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3834 bytes from constant string DATA + OFFSET and return it as target
3838 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3839 enum machine_mode mode
)
3841 const char *c
= (const char *) data
;
3842 char *p
= alloca (GET_MODE_SIZE (mode
));
3844 memset (p
, *c
, GET_MODE_SIZE (mode
));
3846 return c_readstr (p
, mode
);
3849 /* Callback routine for store_by_pieces. Return the RTL of a register
3850 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3851 char value given in the RTL register data. For example, if mode is
3852 4 bytes wide, return the RTL for 0x01010101*data. */
3855 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3856 enum machine_mode mode
)
3862 size
= GET_MODE_SIZE (mode
);
3867 memset (p
, 1, size
);
3868 coeff
= c_readstr (p
, mode
);
3870 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3871 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3872 return force_reg (mode
, target
);
3875 /* Expand expression EXP, which is a call to the memset builtin. Return
3876 NULL_RTX if we failed the caller should emit a normal call, otherwise
3877 try to get the result in TARGET, if convenient (and in mode MODE if that's
3881 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3883 if (!validate_arglist (exp
,
3884 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3888 tree dest
= CALL_EXPR_ARG (exp
, 0);
3889 tree val
= CALL_EXPR_ARG (exp
, 1);
3890 tree len
= CALL_EXPR_ARG (exp
, 2);
3891 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3895 /* Helper function to do the actual work for expand_builtin_memset. The
3896 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3897 so that this can also be called without constructing an actual CALL_EXPR.
3898 The other arguments and return value are the same as for
3899 expand_builtin_memset. */
3902 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3903 rtx target
, enum machine_mode mode
, tree orig_exp
)
3906 enum built_in_function fcode
;
3908 unsigned int dest_align
;
3909 rtx dest_mem
, dest_addr
, len_rtx
;
3910 HOST_WIDE_INT expected_size
= -1;
3911 unsigned int expected_align
= 0;
3913 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3915 /* If DEST is not a pointer type, don't do this operation in-line. */
3916 if (dest_align
== 0)
3919 stringop_block_profile (orig_exp
, &expected_align
, &expected_size
);
3920 if (expected_align
< dest_align
)
3921 expected_align
= dest_align
;
3923 /* If the LEN parameter is zero, return DEST. */
3924 if (integer_zerop (len
))
3926 /* Evaluate and ignore VAL in case it has side-effects. */
3927 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3928 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3931 /* Stabilize the arguments in case we fail. */
3932 dest
= builtin_save_expr (dest
);
3933 val
= builtin_save_expr (val
);
3934 len
= builtin_save_expr (len
);
3936 len_rtx
= expand_normal (len
);
3937 dest_mem
= get_memory_rtx (dest
, len
);
3939 if (TREE_CODE (val
) != INTEGER_CST
)
3943 val_rtx
= expand_normal (val
);
3944 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3947 /* Assume that we can memset by pieces if we can store
3948 * the coefficients by pieces (in the required modes).
3949 * We can't pass builtin_memset_gen_str as that emits RTL. */
3951 if (host_integerp (len
, 1)
3952 && !(optimize_size
&& tree_low_cst (len
, 1) > 1)
3953 && can_store_by_pieces (tree_low_cst (len
, 1),
3954 builtin_memset_read_str
, &c
, dest_align
))
3956 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3958 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3959 builtin_memset_gen_str
, val_rtx
, dest_align
, 0);
3961 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3962 dest_align
, expected_align
,
3966 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3967 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3971 if (target_char_cast (val
, &c
))
3976 if (host_integerp (len
, 1)
3977 && !(optimize_size
&& tree_low_cst (len
, 1) > 1)
3978 && can_store_by_pieces (tree_low_cst (len
, 1),
3979 builtin_memset_read_str
, &c
, dest_align
))
3980 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3981 builtin_memset_read_str
, &c
, dest_align
, 0);
3982 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
3983 dest_align
, expected_align
,
3987 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3988 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3992 set_mem_align (dest_mem
, dest_align
);
3993 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3994 CALL_EXPR_TAILCALL (orig_exp
)
3995 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3996 expected_align
, expected_size
);
4000 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4001 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4007 fndecl
= get_callee_fndecl (orig_exp
);
4008 fcode
= DECL_FUNCTION_CODE (fndecl
);
4009 if (fcode
== BUILT_IN_MEMSET
)
4010 fn
= build_call_expr (fndecl
, 3, dest
, val
, len
);
4011 else if (fcode
== BUILT_IN_BZERO
)
4012 fn
= build_call_expr (fndecl
, 2, dest
, len
);
4015 if (TREE_CODE (fn
) == CALL_EXPR
)
4016 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4017 return expand_call (fn
, target
, target
== const0_rtx
);
4020 /* Expand expression EXP, which is a call to the bzero builtin. Return
4021 NULL_RTX if we failed the caller should emit a normal call. */
4024 expand_builtin_bzero (tree exp
)
4028 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4031 dest
= CALL_EXPR_ARG (exp
, 0);
4032 size
= CALL_EXPR_ARG (exp
, 1);
4034 /* New argument list transforming bzero(ptr x, int y) to
4035 memset(ptr x, int 0, size_t y). This is done this way
4036 so that if it isn't expanded inline, we fallback to
4037 calling bzero instead of memset. */
4039 return expand_builtin_memset_args (dest
, integer_zero_node
,
4040 fold_convert (sizetype
, size
),
4041 const0_rtx
, VOIDmode
, exp
);
4044 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4045 caller should emit a normal call, otherwise try to get the result
4046 in TARGET, if convenient (and in mode MODE if that's convenient). */
4049 expand_builtin_memchr (tree exp
, rtx target
, enum machine_mode mode
)
4051 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
,
4052 INTEGER_TYPE
, VOID_TYPE
))
4054 tree type
= TREE_TYPE (exp
);
4055 tree result
= fold_builtin_memchr (CALL_EXPR_ARG (exp
, 0),
4056 CALL_EXPR_ARG (exp
, 1),
4057 CALL_EXPR_ARG (exp
, 2), type
);
4059 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4064 /* Expand expression EXP, which is a call to the memcmp built-in function.
4065 Return NULL_RTX if we failed and the
4066 caller should emit a normal call, otherwise try to get the result in
4067 TARGET, if convenient (and in mode MODE, if that's convenient). */
4070 expand_builtin_memcmp (tree exp
, rtx target
, enum machine_mode mode
)
4072 if (!validate_arglist (exp
,
4073 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4077 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
4078 CALL_EXPR_ARG (exp
, 1),
4079 CALL_EXPR_ARG (exp
, 2));
4081 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4084 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4086 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4089 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4090 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4091 tree len
= CALL_EXPR_ARG (exp
, 2);
4094 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4096 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4097 enum machine_mode insn_mode
;
4099 #ifdef HAVE_cmpmemsi
4101 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4104 #ifdef HAVE_cmpstrnsi
4106 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4111 /* If we don't have POINTER_TYPE, call the function. */
4112 if (arg1_align
== 0 || arg2_align
== 0)
4115 /* Make a place to write the result of the instruction. */
4118 && REG_P (result
) && GET_MODE (result
) == insn_mode
4119 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4120 result
= gen_reg_rtx (insn_mode
);
4122 arg1_rtx
= get_memory_rtx (arg1
, len
);
4123 arg2_rtx
= get_memory_rtx (arg2
, len
);
4124 arg3_rtx
= expand_normal (len
);
4126 /* Set MEM_SIZE as appropriate. */
4127 if (GET_CODE (arg3_rtx
) == CONST_INT
)
4129 set_mem_size (arg1_rtx
, arg3_rtx
);
4130 set_mem_size (arg2_rtx
, arg3_rtx
);
4133 #ifdef HAVE_cmpmemsi
4135 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4136 GEN_INT (MIN (arg1_align
, arg2_align
)));
4139 #ifdef HAVE_cmpstrnsi
4141 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4142 GEN_INT (MIN (arg1_align
, arg2_align
)));
4150 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
4151 TYPE_MODE (integer_type_node
), 3,
4152 XEXP (arg1_rtx
, 0), Pmode
,
4153 XEXP (arg2_rtx
, 0), Pmode
,
4154 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4155 TYPE_UNSIGNED (sizetype
)),
4156 TYPE_MODE (sizetype
));
4158 /* Return the value in the proper mode for this function. */
4159 mode
= TYPE_MODE (TREE_TYPE (exp
));
4160 if (GET_MODE (result
) == mode
)
4162 else if (target
!= 0)
4164 convert_move (target
, result
, 0);
4168 return convert_to_mode (mode
, result
, 0);
4175 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4176 if we failed the caller should emit a normal call, otherwise try to get
4177 the result in TARGET, if convenient. */
4180 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
4182 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4186 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4187 CALL_EXPR_ARG (exp
, 1));
4189 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4192 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4193 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4194 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4196 rtx arg1_rtx
, arg2_rtx
;
4197 rtx result
, insn
= NULL_RTX
;
4199 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4200 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4203 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4205 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4207 /* If we don't have POINTER_TYPE, call the function. */
4208 if (arg1_align
== 0 || arg2_align
== 0)
4211 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4212 arg1
= builtin_save_expr (arg1
);
4213 arg2
= builtin_save_expr (arg2
);
4215 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4216 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4218 #ifdef HAVE_cmpstrsi
4219 /* Try to call cmpstrsi. */
4222 enum machine_mode insn_mode
4223 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4225 /* Make a place to write the result of the instruction. */
4228 && REG_P (result
) && GET_MODE (result
) == insn_mode
4229 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4230 result
= gen_reg_rtx (insn_mode
);
4232 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4233 GEN_INT (MIN (arg1_align
, arg2_align
)));
4236 #ifdef HAVE_cmpstrnsi
4237 /* Try to determine at least one length and call cmpstrnsi. */
4238 if (!insn
&& HAVE_cmpstrnsi
)
4243 enum machine_mode insn_mode
4244 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4245 tree len1
= c_strlen (arg1
, 1);
4246 tree len2
= c_strlen (arg2
, 1);
4249 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4251 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4253 /* If we don't have a constant length for the first, use the length
4254 of the second, if we know it. We don't require a constant for
4255 this case; some cost analysis could be done if both are available
4256 but neither is constant. For now, assume they're equally cheap,
4257 unless one has side effects. If both strings have constant lengths,
4264 else if (TREE_SIDE_EFFECTS (len1
))
4266 else if (TREE_SIDE_EFFECTS (len2
))
4268 else if (TREE_CODE (len1
) != INTEGER_CST
)
4270 else if (TREE_CODE (len2
) != INTEGER_CST
)
4272 else if (tree_int_cst_lt (len1
, len2
))
4277 /* If both arguments have side effects, we cannot optimize. */
4278 if (!len
|| TREE_SIDE_EFFECTS (len
))
4281 arg3_rtx
= expand_normal (len
);
4283 /* Make a place to write the result of the instruction. */
4286 && REG_P (result
) && GET_MODE (result
) == insn_mode
4287 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4288 result
= gen_reg_rtx (insn_mode
);
4290 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4291 GEN_INT (MIN (arg1_align
, arg2_align
)));
4299 /* Return the value in the proper mode for this function. */
4300 mode
= TYPE_MODE (TREE_TYPE (exp
));
4301 if (GET_MODE (result
) == mode
)
4304 return convert_to_mode (mode
, result
, 0);
4305 convert_move (target
, result
, 0);
4309 /* Expand the library call ourselves using a stabilized argument
4310 list to avoid re-evaluating the function's arguments twice. */
4311 #ifdef HAVE_cmpstrnsi
4314 fndecl
= get_callee_fndecl (exp
);
4315 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4316 if (TREE_CODE (fn
) == CALL_EXPR
)
4317 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4318 return expand_call (fn
, target
, target
== const0_rtx
);
4324 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4325 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4326 the result in TARGET, if convenient. */
4329 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
4331 if (!validate_arglist (exp
,
4332 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4336 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4337 CALL_EXPR_ARG (exp
, 1),
4338 CALL_EXPR_ARG (exp
, 2));
4340 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4343 /* If c_strlen can determine an expression for one of the string
4344 lengths, and it doesn't have side effects, then emit cmpstrnsi
4345 using length MIN(strlen(string)+1, arg3). */
4346 #ifdef HAVE_cmpstrnsi
4349 tree len
, len1
, len2
;
4350 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4353 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4354 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4355 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4358 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4360 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4361 enum machine_mode insn_mode
4362 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4364 len1
= c_strlen (arg1
, 1);
4365 len2
= c_strlen (arg2
, 1);
4368 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4370 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4372 /* If we don't have a constant length for the first, use the length
4373 of the second, if we know it. We don't require a constant for
4374 this case; some cost analysis could be done if both are available
4375 but neither is constant. For now, assume they're equally cheap,
4376 unless one has side effects. If both strings have constant lengths,
4383 else if (TREE_SIDE_EFFECTS (len1
))
4385 else if (TREE_SIDE_EFFECTS (len2
))
4387 else if (TREE_CODE (len1
) != INTEGER_CST
)
4389 else if (TREE_CODE (len2
) != INTEGER_CST
)
4391 else if (tree_int_cst_lt (len1
, len2
))
4396 /* If both arguments have side effects, we cannot optimize. */
4397 if (!len
|| TREE_SIDE_EFFECTS (len
))
4400 /* The actual new length parameter is MIN(len,arg3). */
4401 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4402 fold_convert (TREE_TYPE (len
), arg3
));
4404 /* If we don't have POINTER_TYPE, call the function. */
4405 if (arg1_align
== 0 || arg2_align
== 0)
4408 /* Make a place to write the result of the instruction. */
4411 && REG_P (result
) && GET_MODE (result
) == insn_mode
4412 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4413 result
= gen_reg_rtx (insn_mode
);
4415 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4416 arg1
= builtin_save_expr (arg1
);
4417 arg2
= builtin_save_expr (arg2
);
4418 len
= builtin_save_expr (len
);
4420 arg1_rtx
= get_memory_rtx (arg1
, len
);
4421 arg2_rtx
= get_memory_rtx (arg2
, len
);
4422 arg3_rtx
= expand_normal (len
);
4423 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4424 GEN_INT (MIN (arg1_align
, arg2_align
)));
4429 /* Return the value in the proper mode for this function. */
4430 mode
= TYPE_MODE (TREE_TYPE (exp
));
4431 if (GET_MODE (result
) == mode
)
4434 return convert_to_mode (mode
, result
, 0);
4435 convert_move (target
, result
, 0);
4439 /* Expand the library call ourselves using a stabilized argument
4440 list to avoid re-evaluating the function's arguments twice. */
4441 fndecl
= get_callee_fndecl (exp
);
4442 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4443 if (TREE_CODE (fn
) == CALL_EXPR
)
4444 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4445 return expand_call (fn
, target
, target
== const0_rtx
);
4451 /* Expand expression EXP, which is a call to the strcat builtin.
4452 Return NULL_RTX if we failed the caller should emit a normal call,
4453 otherwise try to get the result in TARGET, if convenient. */
4456 expand_builtin_strcat (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
4458 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4462 tree dst
= CALL_EXPR_ARG (exp
, 0);
4463 tree src
= CALL_EXPR_ARG (exp
, 1);
4464 const char *p
= c_getstr (src
);
4466 /* If the string length is zero, return the dst parameter. */
4467 if (p
&& *p
== '\0')
4468 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4472 /* See if we can store by pieces into (dst + strlen(dst)). */
4473 tree newsrc
, newdst
,
4474 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4477 /* Stabilize the argument list. */
4478 newsrc
= builtin_save_expr (src
);
4479 dst
= builtin_save_expr (dst
);
4483 /* Create strlen (dst). */
4484 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4485 /* Create (dst p+ strlen (dst)). */
4487 newdst
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4488 newdst
= builtin_save_expr (newdst
);
4490 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4492 end_sequence (); /* Stop sequence. */
4496 /* Output the entire sequence. */
4497 insns
= get_insns ();
4501 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4508 /* Expand expression EXP, which is a call to the strncat builtin.
4509 Return NULL_RTX if we failed the caller should emit a normal call,
4510 otherwise try to get the result in TARGET, if convenient. */
4513 expand_builtin_strncat (tree exp
, rtx target
, enum machine_mode mode
)
4515 if (validate_arglist (exp
,
4516 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4518 tree result
= fold_builtin_strncat (CALL_EXPR_ARG (exp
, 0),
4519 CALL_EXPR_ARG (exp
, 1),
4520 CALL_EXPR_ARG (exp
, 2));
4522 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4527 /* Expand expression EXP, which is a call to the strspn builtin.
4528 Return NULL_RTX if we failed the caller should emit a normal call,
4529 otherwise try to get the result in TARGET, if convenient. */
4532 expand_builtin_strspn (tree exp
, rtx target
, enum machine_mode mode
)
4534 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4536 tree result
= fold_builtin_strspn (CALL_EXPR_ARG (exp
, 0),
4537 CALL_EXPR_ARG (exp
, 1));
4539 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4544 /* Expand expression EXP, which is a call to the strcspn builtin.
4545 Return NULL_RTX if we failed the caller should emit a normal call,
4546 otherwise try to get the result in TARGET, if convenient. */
4549 expand_builtin_strcspn (tree exp
, rtx target
, enum machine_mode mode
)
4551 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4553 tree result
= fold_builtin_strcspn (CALL_EXPR_ARG (exp
, 0),
4554 CALL_EXPR_ARG (exp
, 1));
4556 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4561 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4562 if that's convenient. */
4565 expand_builtin_saveregs (void)
4569 /* Don't do __builtin_saveregs more than once in a function.
4570 Save the result of the first call and reuse it. */
4571 if (saveregs_value
!= 0)
4572 return saveregs_value
;
4574 /* When this function is called, it means that registers must be
4575 saved on entry to this function. So we migrate the call to the
4576 first insn of this function. */
4580 /* Do whatever the machine needs done in this case. */
4581 val
= targetm
.calls
.expand_builtin_saveregs ();
4586 saveregs_value
= val
;
4588 /* Put the insns after the NOTE that starts the function. If this
4589 is inside a start_sequence, make the outer-level insn chain current, so
4590 the code is placed at the start of the function. */
4591 push_topmost_sequence ();
4592 emit_insn_after (seq
, entry_of_function ());
4593 pop_topmost_sequence ();
4598 /* __builtin_args_info (N) returns word N of the arg space info
4599 for the current function. The number and meanings of words
4600 is controlled by the definition of CUMULATIVE_ARGS. */
4603 expand_builtin_args_info (tree exp
)
4605 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4606 int *word_ptr
= (int *) ¤t_function_args_info
;
4608 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4610 if (call_expr_nargs (exp
) != 0)
4612 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4613 error ("argument of %<__builtin_args_info%> must be constant");
4616 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4618 if (wordnum
< 0 || wordnum
>= nwords
)
4619 error ("argument of %<__builtin_args_info%> out of range");
4621 return GEN_INT (word_ptr
[wordnum
]);
4625 error ("missing argument in %<__builtin_args_info%>");
4630 /* Expand a call to __builtin_next_arg. */
4633 expand_builtin_next_arg (void)
4635 /* Checking arguments is already done in fold_builtin_next_arg
4636 that must be called before this function. */
4637 return expand_binop (ptr_mode
, add_optab
,
4638 current_function_internal_arg_pointer
,
4639 current_function_arg_offset_rtx
,
4640 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4643 /* Make it easier for the backends by protecting the valist argument
4644 from multiple evaluations. */
4647 stabilize_va_list (tree valist
, int needs_lvalue
)
4649 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4651 if (TREE_SIDE_EFFECTS (valist
))
4652 valist
= save_expr (valist
);
4654 /* For this case, the backends will be expecting a pointer to
4655 TREE_TYPE (va_list_type_node), but it's possible we've
4656 actually been given an array (an actual va_list_type_node).
4658 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4660 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4661 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4670 if (! TREE_SIDE_EFFECTS (valist
))
4673 pt
= build_pointer_type (va_list_type_node
);
4674 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4675 TREE_SIDE_EFFECTS (valist
) = 1;
4678 if (TREE_SIDE_EFFECTS (valist
))
4679 valist
= save_expr (valist
);
4680 valist
= build_fold_indirect_ref (valist
);
4686 /* The "standard" definition of va_list is void*. */
4689 std_build_builtin_va_list (void)
4691 return ptr_type_node
;
4694 /* The "standard" implementation of va_start: just assign `nextarg' to
4698 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4700 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4701 convert_move (va_r
, nextarg
, 0);
4704 /* Expand EXP, a call to __builtin_va_start. */
4707 expand_builtin_va_start (tree exp
)
4712 if (call_expr_nargs (exp
) < 2)
4714 error ("too few arguments to function %<va_start%>");
4718 if (fold_builtin_next_arg (exp
, true))
4721 nextarg
= expand_builtin_next_arg ();
4722 valist
= stabilize_va_list (CALL_EXPR_ARG (exp
, 0), 1);
4724 #ifdef EXPAND_BUILTIN_VA_START
4725 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
4727 std_expand_builtin_va_start (valist
, nextarg
);
4733 /* The "standard" implementation of va_arg: read the value from the
4734 current (padded) address and increment by the (padded) size. */
4737 std_gimplify_va_arg_expr (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
4739 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4740 unsigned HOST_WIDE_INT align
, boundary
;
4743 #ifdef ARGS_GROW_DOWNWARD
4744 /* All of the alignment and movement below is for args-grow-up machines.
4745 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4746 implement their own specialized gimplify_va_arg_expr routines. */
4750 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4752 type
= build_pointer_type (type
);
4754 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4755 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
) / BITS_PER_UNIT
;
4757 /* Hoist the valist value into a temporary for the moment. */
4758 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4760 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4761 requires greater alignment, we must perform dynamic alignment. */
4762 if (boundary
> align
4763 && !integer_zerop (TYPE_SIZE (type
)))
4765 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4766 fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
),
4767 valist_tmp
, size_int (boundary
- 1)));
4768 gimplify_and_add (t
, pre_p
);
4770 t
= fold_convert (sizetype
, valist_tmp
);
4771 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4772 fold_convert (TREE_TYPE (valist
),
4773 fold_build2 (BIT_AND_EXPR
, sizetype
, t
,
4774 size_int (-boundary
))));
4775 gimplify_and_add (t
, pre_p
);
4780 /* If the actual alignment is less than the alignment of the type,
4781 adjust the type accordingly so that we don't assume strict alignment
4782 when deferencing the pointer. */
4783 boundary
*= BITS_PER_UNIT
;
4784 if (boundary
< TYPE_ALIGN (type
))
4786 type
= build_variant_type_copy (type
);
4787 TYPE_ALIGN (type
) = boundary
;
4790 /* Compute the rounded size of the type. */
4791 type_size
= size_in_bytes (type
);
4792 rounded_size
= round_up (type_size
, align
);
4794 /* Reduce rounded_size so it's sharable with the postqueue. */
4795 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4799 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4801 /* Small args are padded downward. */
4802 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4803 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4804 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4805 addr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4808 /* Compute new value for AP. */
4809 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, rounded_size
);
4810 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4811 gimplify_and_add (t
, pre_p
);
4813 addr
= fold_convert (build_pointer_type (type
), addr
);
4816 addr
= build_va_arg_indirect_ref (addr
);
4818 return build_va_arg_indirect_ref (addr
);
4821 /* Build an indirect-ref expression over the given TREE, which represents a
4822 piece of a va_arg() expansion. */
4824 build_va_arg_indirect_ref (tree addr
)
4826 addr
= build_fold_indirect_ref (addr
);
4828 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4834 /* Return a dummy expression of type TYPE in order to keep going after an
4838 dummy_object (tree type
)
4840 tree t
= build_int_cst (build_pointer_type (type
), 0);
4841 return build1 (INDIRECT_REF
, type
, t
);
4844 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4845 builtin function, but a very special sort of operator. */
4847 enum gimplify_status
4848 gimplify_va_arg_expr (tree
*expr_p
, tree
*pre_p
, tree
*post_p
)
4850 tree promoted_type
, want_va_type
, have_va_type
;
4851 tree valist
= TREE_OPERAND (*expr_p
, 0);
4852 tree type
= TREE_TYPE (*expr_p
);
4855 /* Verify that valist is of the proper type. */
4856 want_va_type
= va_list_type_node
;
4857 have_va_type
= TREE_TYPE (valist
);
4859 if (have_va_type
== error_mark_node
)
4862 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
4864 /* If va_list is an array type, the argument may have decayed
4865 to a pointer type, e.g. by being passed to another function.
4866 In that case, unwrap both types so that we can compare the
4867 underlying records. */
4868 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
4869 || POINTER_TYPE_P (have_va_type
))
4871 want_va_type
= TREE_TYPE (want_va_type
);
4872 have_va_type
= TREE_TYPE (have_va_type
);
4876 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
4878 error ("first argument to %<va_arg%> not of type %<va_list%>");
4882 /* Generate a diagnostic for requesting data of a type that cannot
4883 be passed through `...' due to type promotion at the call site. */
4884 else if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4887 static bool gave_help
;
4889 /* Unfortunately, this is merely undefined, rather than a constraint
4890 violation, so we cannot make this an error. If this call is never
4891 executed, the program is still strictly conforming. */
4892 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4893 type
, promoted_type
);
4897 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4898 promoted_type
, type
);
4901 /* We can, however, treat "undefined" any way we please.
4902 Call abort to encourage the user to fix the program. */
4903 inform ("if this code is reached, the program will abort");
4904 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4905 append_to_statement_list (t
, pre_p
);
4907 /* This is dead code, but go ahead and finish so that the
4908 mode of the result comes out right. */
4909 *expr_p
= dummy_object (type
);
4914 /* Make it easier for the backends by protecting the valist argument
4915 from multiple evaluations. */
4916 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4918 /* For this case, the backends will be expecting a pointer to
4919 TREE_TYPE (va_list_type_node), but it's possible we've
4920 actually been given an array (an actual va_list_type_node).
4922 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4924 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4925 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4927 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4930 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4932 if (!targetm
.gimplify_va_arg_expr
)
4933 /* FIXME:Once most targets are converted we should merely
4934 assert this is non-null. */
4937 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4942 /* Expand EXP, a call to __builtin_va_end. */
4945 expand_builtin_va_end (tree exp
)
4947 tree valist
= CALL_EXPR_ARG (exp
, 0);
4949 /* Evaluate for side effects, if needed. I hate macros that don't
4951 if (TREE_SIDE_EFFECTS (valist
))
4952 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4957 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4958 builtin rather than just as an assignment in stdarg.h because of the
4959 nastiness of array-type va_list types. */
4962 expand_builtin_va_copy (tree exp
)
4966 dst
= CALL_EXPR_ARG (exp
, 0);
4967 src
= CALL_EXPR_ARG (exp
, 1);
4969 dst
= stabilize_va_list (dst
, 1);
4970 src
= stabilize_va_list (src
, 0);
4972 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4974 t
= build2 (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4975 TREE_SIDE_EFFECTS (t
) = 1;
4976 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4980 rtx dstb
, srcb
, size
;
4982 /* Evaluate to pointers. */
4983 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4984 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4985 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4986 VOIDmode
, EXPAND_NORMAL
);
4988 dstb
= convert_memory_address (Pmode
, dstb
);
4989 srcb
= convert_memory_address (Pmode
, srcb
);
4991 /* "Dereference" to BLKmode memories. */
4992 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4993 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4994 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4995 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4996 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4997 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
5000 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
5006 /* Expand a call to one of the builtin functions __builtin_frame_address or
5007 __builtin_return_address. */
5010 expand_builtin_frame_address (tree fndecl
, tree exp
)
5012 /* The argument must be a nonnegative integer constant.
5013 It counts the number of frames to scan up the stack.
5014 The value is the return address saved in that frame. */
5015 if (call_expr_nargs (exp
) == 0)
5016 /* Warning about missing arg was already issued. */
5018 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
5020 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5021 error ("invalid argument to %<__builtin_frame_address%>");
5023 error ("invalid argument to %<__builtin_return_address%>");
5029 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
5030 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
5032 /* Some ports cannot access arbitrary stack frames. */
5035 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5036 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5038 warning (0, "unsupported argument to %<__builtin_return_address%>");
5042 /* For __builtin_frame_address, return what we've got. */
5043 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5047 && ! CONSTANT_P (tem
))
5048 tem
= copy_to_mode_reg (Pmode
, tem
);
5053 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5054 we failed and the caller should emit a normal call, otherwise try to get
5055 the result in TARGET, if convenient. */
5058 expand_builtin_alloca (tree exp
, rtx target
)
5063 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5064 should always expand to function calls. These can be intercepted
5069 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5072 /* Compute the argument. */
5073 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5075 /* Allocate the desired space. */
5076 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5077 result
= convert_memory_address (ptr_mode
, result
);
5082 /* Expand a call to a bswap builtin with argument ARG0. MODE
5083 is the mode to expand with. */
5086 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5088 enum machine_mode mode
;
5092 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5095 arg
= CALL_EXPR_ARG (exp
, 0);
5096 mode
= TYPE_MODE (TREE_TYPE (arg
));
5097 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5099 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5101 gcc_assert (target
);
5103 return convert_to_mode (mode
, target
, 0);
5106 /* Expand a call to a unary builtin in EXP.
5107 Return NULL_RTX if a normal call should be emitted rather than expanding the
5108 function in-line. If convenient, the result should be placed in TARGET.
5109 SUBTARGET may be used as the target for computing one of EXP's operands. */
5112 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5113 rtx subtarget
, optab op_optab
)
5117 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5120 /* Compute the argument. */
5121 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
5122 VOIDmode
, EXPAND_NORMAL
);
5123 /* Compute op, into TARGET if possible.
5124 Set TARGET to wherever the result comes back. */
5125 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5126 op_optab
, op0
, target
, 1);
5127 gcc_assert (target
);
5129 return convert_to_mode (target_mode
, target
, 0);
5132 /* If the string passed to fputs is a constant and is one character
5133 long, we attempt to transform this call into __builtin_fputc(). */
5136 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5138 /* Verify the arguments in the original call. */
5139 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5141 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5142 CALL_EXPR_ARG (exp
, 1),
5143 (target
== const0_rtx
),
5144 unlocked
, NULL_TREE
);
5146 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
5151 /* Expand a call to __builtin_expect. We just return our argument
5152 as the builtin_expect semantic should've been already executed by
5153 tree branch prediction pass. */
5156 expand_builtin_expect (tree exp
, rtx target
)
5160 if (call_expr_nargs (exp
) < 2)
5162 arg
= CALL_EXPR_ARG (exp
, 0);
5163 c
= CALL_EXPR_ARG (exp
, 1);
5165 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5166 /* When guessing was done, the hints should be already stripped away. */
5167 gcc_assert (!flag_guess_branch_prob
5168 || optimize
== 0 || errorcount
|| sorrycount
);
5173 expand_builtin_trap (void)
5177 emit_insn (gen_trap ());
5180 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
5184 /* Expand EXP, a call to fabs, fabsf or fabsl.
5185 Return NULL_RTX if a normal call should be emitted rather than expanding
5186 the function inline. If convenient, the result should be placed
5187 in TARGET. SUBTARGET may be used as the target for computing
5191 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5193 enum machine_mode mode
;
5197 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5200 arg
= CALL_EXPR_ARG (exp
, 0);
5201 mode
= TYPE_MODE (TREE_TYPE (arg
));
5202 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5203 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5206 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5207 Return NULL is a normal call should be emitted rather than expanding the
5208 function inline. If convenient, the result should be placed in TARGET.
5209 SUBTARGET may be used as the target for computing the operand. */
5212 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5217 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5220 arg
= CALL_EXPR_ARG (exp
, 0);
5221 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5223 arg
= CALL_EXPR_ARG (exp
, 1);
5224 op1
= expand_normal (arg
);
5226 return expand_copysign (op0
, op1
, target
);
5229 /* Create a new constant string literal and return a char* pointer to it.
5230 The STRING_CST value is the LEN characters at STR. */
5232 build_string_literal (int len
, const char *str
)
5234 tree t
, elem
, index
, type
;
5236 t
= build_string (len
, str
);
5237 elem
= build_type_variant (char_type_node
, 1, 0);
5238 index
= build_index_type (build_int_cst (NULL_TREE
, len
- 1));
5239 type
= build_array_type (elem
, index
);
5240 TREE_TYPE (t
) = type
;
5241 TREE_CONSTANT (t
) = 1;
5242 TREE_INVARIANT (t
) = 1;
5243 TREE_READONLY (t
) = 1;
5244 TREE_STATIC (t
) = 1;
5246 type
= build_pointer_type (type
);
5247 t
= build1 (ADDR_EXPR
, type
, t
);
5249 type
= build_pointer_type (elem
);
5250 t
= build1 (NOP_EXPR
, type
, t
);
5254 /* Expand EXP, a call to printf or printf_unlocked.
5255 Return NULL_RTX if a normal call should be emitted rather than transforming
5256 the function inline. If convenient, the result should be placed in
5257 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5260 expand_builtin_printf (tree exp
, rtx target
, enum machine_mode mode
,
5263 /* If we're using an unlocked function, assume the other unlocked
5264 functions exist explicitly. */
5265 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5266 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5267 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5268 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5269 const char *fmt_str
;
5272 int nargs
= call_expr_nargs (exp
);
5274 /* If the return value is used, don't do the transformation. */
5275 if (target
!= const0_rtx
)
5278 /* Verify the required arguments in the original call. */
5281 fmt
= CALL_EXPR_ARG (exp
, 0);
5282 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5285 /* Check whether the format is a literal string constant. */
5286 fmt_str
= c_getstr (fmt
);
5287 if (fmt_str
== NULL
)
5290 if (!init_target_chars ())
5293 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5294 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5297 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5300 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5302 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5303 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5306 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5309 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5313 /* We can't handle anything else with % args or %% ... yet. */
5314 if (strchr (fmt_str
, target_percent
))
5320 /* If the format specifier was "", printf does nothing. */
5321 if (fmt_str
[0] == '\0')
5323 /* If the format specifier has length of 1, call putchar. */
5324 if (fmt_str
[1] == '\0')
5326 /* Given printf("c"), (where c is any one character,)
5327 convert "c"[0] to an int and pass that to the replacement
5329 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5331 fn
= build_call_expr (fn_putchar
, 1, arg
);
5335 /* If the format specifier was "string\n", call puts("string"). */
5336 size_t len
= strlen (fmt_str
);
5337 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5339 /* Create a NUL-terminated string that's one char shorter
5340 than the original, stripping off the trailing '\n'. */
5341 char *newstr
= alloca (len
);
5342 memcpy (newstr
, fmt_str
, len
- 1);
5343 newstr
[len
- 1] = 0;
5344 arg
= build_string_literal (len
, newstr
);
5346 fn
= build_call_expr (fn_puts
, 1, arg
);
5349 /* We'd like to arrange to call fputs(string,stdout) here,
5350 but we need stdout and don't have a way to get it yet. */
5357 if (TREE_CODE (fn
) == CALL_EXPR
)
5358 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5359 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5362 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5363 Return NULL_RTX if a normal call should be emitted rather than transforming
5364 the function inline. If convenient, the result should be placed in
5365 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5368 expand_builtin_fprintf (tree exp
, rtx target
, enum machine_mode mode
,
5371 /* If we're using an unlocked function, assume the other unlocked
5372 functions exist explicitly. */
5373 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5374 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5375 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5376 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5377 const char *fmt_str
;
5380 int nargs
= call_expr_nargs (exp
);
5382 /* If the return value is used, don't do the transformation. */
5383 if (target
!= const0_rtx
)
5386 /* Verify the required arguments in the original call. */
5389 fp
= CALL_EXPR_ARG (exp
, 0);
5390 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5392 fmt
= CALL_EXPR_ARG (exp
, 1);
5393 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5396 /* Check whether the format is a literal string constant. */
5397 fmt_str
= c_getstr (fmt
);
5398 if (fmt_str
== NULL
)
5401 if (!init_target_chars ())
5404 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5405 if (strcmp (fmt_str
, target_percent_s
) == 0)
5408 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5410 arg
= CALL_EXPR_ARG (exp
, 2);
5412 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5414 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5415 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5418 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5420 arg
= CALL_EXPR_ARG (exp
, 2);
5422 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5426 /* We can't handle anything else with % args or %% ... yet. */
5427 if (strchr (fmt_str
, target_percent
))
5433 /* If the format specifier was "", fprintf does nothing. */
5434 if (fmt_str
[0] == '\0')
5436 /* Evaluate and ignore FILE* argument for side-effects. */
5437 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5441 /* When "string" doesn't contain %, replace all cases of
5442 fprintf(stream,string) with fputs(string,stream). The fputs
5443 builtin will take care of special cases like length == 1. */
5445 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
5450 if (TREE_CODE (fn
) == CALL_EXPR
)
5451 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5452 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5455 /* Expand a call EXP to sprintf. Return NULL_RTX if
5456 a normal call should be emitted rather than expanding the function
5457 inline. If convenient, the result should be placed in TARGET with
5461 expand_builtin_sprintf (tree exp
, rtx target
, enum machine_mode mode
)
5464 const char *fmt_str
;
5465 int nargs
= call_expr_nargs (exp
);
5467 /* Verify the required arguments in the original call. */
5470 dest
= CALL_EXPR_ARG (exp
, 0);
5471 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
5473 fmt
= CALL_EXPR_ARG (exp
, 0);
5474 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5477 /* Check whether the format is a literal string constant. */
5478 fmt_str
= c_getstr (fmt
);
5479 if (fmt_str
== NULL
)
5482 if (!init_target_chars ())
5485 /* If the format doesn't contain % args or %%, use strcpy. */
5486 if (strchr (fmt_str
, target_percent
) == 0)
5488 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5491 if ((nargs
> 2) || ! fn
)
5493 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5494 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5495 if (target
== const0_rtx
)
5497 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5498 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5500 /* If the format is "%s", use strcpy if the result isn't used. */
5501 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5504 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5510 arg
= CALL_EXPR_ARG (exp
, 2);
5511 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
5514 if (target
!= const0_rtx
)
5516 len
= c_strlen (arg
, 1);
5517 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5523 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5524 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5526 if (target
== const0_rtx
)
5528 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
5534 /* Expand a call to either the entry or exit function profiler. */
5537 expand_builtin_profile_func (bool exitp
)
5541 this = DECL_RTL (current_function_decl
);
5542 gcc_assert (MEM_P (this));
5543 this = XEXP (this, 0);
5546 which
= profile_function_exit_libfunc
;
5548 which
= profile_function_entry_libfunc
;
5550 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
5551 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5558 /* Expand a call to __builtin___clear_cache. */
5561 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
5563 #ifndef HAVE_clear_cache
5564 #ifdef CLEAR_INSN_CACHE
5565 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5566 does something. Just do the default expansion to a call to
5570 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5571 does nothing. There is no need to call it. Do nothing. */
5573 #endif /* CLEAR_INSN_CACHE */
5575 /* We have a "clear_cache" insn, and it will handle everything. */
5577 rtx begin_rtx
, end_rtx
;
5578 enum insn_code icode
;
5580 /* We must not expand to a library call. If we did, any
5581 fallback library function in libgcc that might contain a call to
5582 __builtin___clear_cache() would recurse infinitely. */
5583 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5585 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5589 if (HAVE_clear_cache
)
5591 icode
= CODE_FOR_clear_cache
;
5593 begin
= CALL_EXPR_ARG (exp
, 0);
5594 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5595 begin_rtx
= convert_memory_address (Pmode
, begin_rtx
);
5596 if (!insn_data
[icode
].operand
[0].predicate (begin_rtx
, Pmode
))
5597 begin_rtx
= copy_to_mode_reg (Pmode
, begin_rtx
);
5599 end
= CALL_EXPR_ARG (exp
, 1);
5600 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5601 end_rtx
= convert_memory_address (Pmode
, end_rtx
);
5602 if (!insn_data
[icode
].operand
[1].predicate (end_rtx
, Pmode
))
5603 end_rtx
= copy_to_mode_reg (Pmode
, end_rtx
);
5605 emit_insn (gen_clear_cache (begin_rtx
, end_rtx
));
5608 #endif /* HAVE_clear_cache */
5611 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5614 round_trampoline_addr (rtx tramp
)
5616 rtx temp
, addend
, mask
;
5618 /* If we don't need too much alignment, we'll have been guaranteed
5619 proper alignment by get_trampoline_type. */
5620 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5623 /* Round address up to desired boundary. */
5624 temp
= gen_reg_rtx (Pmode
);
5625 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5626 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5628 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5629 temp
, 0, OPTAB_LIB_WIDEN
);
5630 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5631 temp
, 0, OPTAB_LIB_WIDEN
);
5637 expand_builtin_init_trampoline (tree exp
)
5639 tree t_tramp
, t_func
, t_chain
;
5640 rtx r_tramp
, r_func
, r_chain
;
5641 #ifdef TRAMPOLINE_TEMPLATE
5645 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5646 POINTER_TYPE
, VOID_TYPE
))
5649 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5650 t_func
= CALL_EXPR_ARG (exp
, 1);
5651 t_chain
= CALL_EXPR_ARG (exp
, 2);
5653 r_tramp
= expand_normal (t_tramp
);
5654 r_func
= expand_normal (t_func
);
5655 r_chain
= expand_normal (t_chain
);
5657 /* Generate insns to initialize the trampoline. */
5658 r_tramp
= round_trampoline_addr (r_tramp
);
5659 #ifdef TRAMPOLINE_TEMPLATE
5660 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5661 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5662 emit_block_move (blktramp
, assemble_trampoline_template (),
5663 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5665 trampolines_created
= 1;
5666 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
5672 expand_builtin_adjust_trampoline (tree exp
)
5676 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5679 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5680 tramp
= round_trampoline_addr (tramp
);
5681 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5682 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
5688 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5689 function. The function first checks whether the back end provides
5690 an insn to implement signbit for the respective mode. If not, it
5691 checks whether the floating point format of the value is such that
5692 the sign bit can be extracted. If that is not the case, the
5693 function returns NULL_RTX to indicate that a normal call should be
5694 emitted rather than expanding the function in-line. EXP is the
5695 expression that is a call to the builtin function; if convenient,
5696 the result should be placed in TARGET. */
5698 expand_builtin_signbit (tree exp
, rtx target
)
5700 const struct real_format
*fmt
;
5701 enum machine_mode fmode
, imode
, rmode
;
5702 HOST_WIDE_INT hi
, lo
;
5705 enum insn_code signbit_insn_code
;
5708 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5711 arg
= CALL_EXPR_ARG (exp
, 0);
5712 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5713 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5714 fmt
= REAL_MODE_FORMAT (fmode
);
5716 arg
= builtin_save_expr (arg
);
5718 /* Expand the argument yielding a RTX expression. */
5719 temp
= expand_normal (arg
);
5721 /* Check if the back end provides an insn that handles signbit for the
5723 signbit_insn_code
= signbit_optab
[(int) fmode
];
5724 if (signbit_insn_code
!= CODE_FOR_nothing
)
5726 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5727 emit_unop_insn (signbit_insn_code
, target
, temp
, UNKNOWN
);
5731 /* For floating point formats without a sign bit, implement signbit
5733 bitpos
= fmt
->signbit_ro
;
5736 /* But we can't do this if the format supports signed zero. */
5737 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5740 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5741 build_real (TREE_TYPE (arg
), dconst0
));
5742 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5745 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5747 imode
= int_mode_for_mode (fmode
);
5748 if (imode
== BLKmode
)
5750 temp
= gen_lowpart (imode
, temp
);
5755 /* Handle targets with different FP word orders. */
5756 if (FLOAT_WORDS_BIG_ENDIAN
)
5757 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5759 word
= bitpos
/ BITS_PER_WORD
;
5760 temp
= operand_subword_force (temp
, word
, fmode
);
5761 bitpos
= bitpos
% BITS_PER_WORD
;
5764 /* Force the intermediate word_mode (or narrower) result into a
5765 register. This avoids attempting to create paradoxical SUBREGs
5766 of floating point modes below. */
5767 temp
= force_reg (imode
, temp
);
5769 /* If the bitpos is within the "result mode" lowpart, the operation
5770 can be implement with a single bitwise AND. Otherwise, we need
5771 a right shift and an AND. */
5773 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5775 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5778 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5782 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5787 temp
= gen_lowpart (rmode
, temp
);
5788 temp
= expand_binop (rmode
, and_optab
, temp
,
5789 immed_double_const (lo
, hi
, rmode
),
5790 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5794 /* Perform a logical right shift to place the signbit in the least
5795 significant bit, then truncate the result to the desired mode
5796 and mask just this bit. */
5797 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5798 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5799 temp
= gen_lowpart (rmode
, temp
);
5800 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5801 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5807 /* Expand fork or exec calls. TARGET is the desired target of the
5808 call. EXP is the call. FN is the
5809 identificator of the actual function. IGNORE is nonzero if the
5810 value is to be ignored. */
5813 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5818 /* If we are not profiling, just call the function. */
5819 if (!profile_arc_flag
)
5822 /* Otherwise call the wrapper. This should be equivalent for the rest of
5823 compiler, so the code does not diverge, and the wrapper may run the
5824 code necessary for keeping the profiling sane. */
5826 switch (DECL_FUNCTION_CODE (fn
))
5829 id
= get_identifier ("__gcov_fork");
5832 case BUILT_IN_EXECL
:
5833 id
= get_identifier ("__gcov_execl");
5836 case BUILT_IN_EXECV
:
5837 id
= get_identifier ("__gcov_execv");
5840 case BUILT_IN_EXECLP
:
5841 id
= get_identifier ("__gcov_execlp");
5844 case BUILT_IN_EXECLE
:
5845 id
= get_identifier ("__gcov_execle");
5848 case BUILT_IN_EXECVP
:
5849 id
= get_identifier ("__gcov_execvp");
5852 case BUILT_IN_EXECVE
:
5853 id
= get_identifier ("__gcov_execve");
5860 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5861 DECL_EXTERNAL (decl
) = 1;
5862 TREE_PUBLIC (decl
) = 1;
5863 DECL_ARTIFICIAL (decl
) = 1;
5864 TREE_NOTHROW (decl
) = 1;
5865 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5866 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5867 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5868 return expand_call (call
, target
, ignore
);
5873 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5874 the pointer in these functions is void*, the tree optimizers may remove
5875 casts. The mode computed in expand_builtin isn't reliable either, due
5876 to __sync_bool_compare_and_swap.
5878 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5879 group of builtins. This gives us log2 of the mode size. */
5881 static inline enum machine_mode
5882 get_builtin_sync_mode (int fcode_diff
)
5884 /* The size is not negotiable, so ask not to get BLKmode in return
5885 if the target indicates that a smaller size would be better. */
5886 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5889 /* Expand the memory expression LOC and return the appropriate memory operand
5890 for the builtin_sync operations. */
5893 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5897 addr
= expand_expr (loc
, NULL_RTX
, Pmode
, EXPAND_SUM
);
5899 /* Note that we explicitly do not want any alias information for this
5900 memory, so that we kill all other live memories. Otherwise we don't
5901 satisfy the full barrier semantics of the intrinsic. */
5902 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5904 set_mem_align (mem
, get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
));
5905 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5906 MEM_VOLATILE_P (mem
) = 1;
5911 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5912 EXP is the CALL_EXPR. CODE is the rtx code
5913 that corresponds to the arithmetic or logical operation from the name;
5914 an exception here is that NOT actually means NAND. TARGET is an optional
5915 place for us to store the results; AFTER is true if this is the
5916 fetch_and_xxx form. IGNORE is true if we don't actually care about
5917 the result of the operation at all. */
5920 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5921 enum rtx_code code
, bool after
,
5922 rtx target
, bool ignore
)
5925 enum machine_mode old_mode
;
5927 /* Expand the operands. */
5928 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5930 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5931 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5932 of CONST_INTs, where we know the old_mode only from the call argument. */
5933 old_mode
= GET_MODE (val
);
5934 if (old_mode
== VOIDmode
)
5935 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5936 val
= convert_modes (mode
, old_mode
, val
, 1);
5939 return expand_sync_operation (mem
, val
, code
);
5941 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
5944 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5945 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5946 true if this is the boolean form. TARGET is a place for us to store the
5947 results; this is NOT optional if IS_BOOL is true. */
5950 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5951 bool is_bool
, rtx target
)
5953 rtx old_val
, new_val
, mem
;
5954 enum machine_mode old_mode
;
5956 /* Expand the operands. */
5957 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5960 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
5961 mode
, EXPAND_NORMAL
);
5962 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5963 of CONST_INTs, where we know the old_mode only from the call argument. */
5964 old_mode
= GET_MODE (old_val
);
5965 if (old_mode
== VOIDmode
)
5966 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5967 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
5969 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
5970 mode
, EXPAND_NORMAL
);
5971 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5972 of CONST_INTs, where we know the old_mode only from the call argument. */
5973 old_mode
= GET_MODE (new_val
);
5974 if (old_mode
== VOIDmode
)
5975 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
5976 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
5979 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5981 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
5984 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5985 general form is actually an atomic exchange, and some targets only
5986 support a reduced form with the second argument being a constant 1.
5987 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5991 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
5995 enum machine_mode old_mode
;
5997 /* Expand the operands. */
5998 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5999 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
6000 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6001 of CONST_INTs, where we know the old_mode only from the call argument. */
6002 old_mode
= GET_MODE (val
);
6003 if (old_mode
== VOIDmode
)
6004 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6005 val
= convert_modes (mode
, old_mode
, val
, 1);
6007 return expand_sync_lock_test_and_set (mem
, val
, target
);
6010 /* Expand the __sync_synchronize intrinsic. */
6013 expand_builtin_synchronize (void)
6017 #ifdef HAVE_memory_barrier
6018 if (HAVE_memory_barrier
)
6020 emit_insn (gen_memory_barrier ());
6025 /* If no explicit memory barrier instruction is available, create an
6026 empty asm stmt with a memory clobber. */
6027 x
= build4 (ASM_EXPR
, void_type_node
, build_string (0, ""), NULL
, NULL
,
6028 tree_cons (NULL
, build_string (6, "memory"), NULL
));
6029 ASM_VOLATILE_P (x
) = 1;
6030 expand_asm_expr (x
);
6033 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6036 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
6038 enum insn_code icode
;
6040 rtx val
= const0_rtx
;
6042 /* Expand the operands. */
6043 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6045 /* If there is an explicit operation in the md file, use it. */
6046 icode
= sync_lock_release
[mode
];
6047 if (icode
!= CODE_FOR_nothing
)
6049 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
6050 val
= force_reg (mode
, val
);
6052 insn
= GEN_FCN (icode
) (mem
, val
);
6060 /* Otherwise we can implement this operation by emitting a barrier
6061 followed by a store of zero. */
6062 expand_builtin_synchronize ();
6063 emit_move_insn (mem
, val
);
6066 /* Expand an expression EXP that calls a built-in function,
6067 with result going to TARGET if that's convenient
6068 (and in mode MODE if that's convenient).
6069 SUBTARGET may be used as the target for computing one of EXP's operands.
6070 IGNORE is nonzero if the value is to be ignored. */
6073 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
6076 tree fndecl
= get_callee_fndecl (exp
);
6077 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6078 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6080 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6081 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6083 /* When not optimizing, generate calls to library functions for a certain
6086 && !called_as_built_in (fndecl
)
6087 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
6088 && fcode
!= BUILT_IN_ALLOCA
)
6089 return expand_call (exp
, target
, ignore
);
6091 /* The built-in function expanders test for target == const0_rtx
6092 to determine whether the function's result will be ignored. */
6094 target
= const0_rtx
;
6096 /* If the result of a pure or const built-in function is ignored, and
6097 none of its arguments are volatile, we can avoid expanding the
6098 built-in call and just evaluate the arguments for side-effects. */
6099 if (target
== const0_rtx
6100 && (DECL_IS_PURE (fndecl
) || TREE_READONLY (fndecl
)))
6102 bool volatilep
= false;
6104 call_expr_arg_iterator iter
;
6106 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6107 if (TREE_THIS_VOLATILE (arg
))
6115 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6116 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6123 CASE_FLT_FN (BUILT_IN_FABS
):
6124 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6129 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6130 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6135 /* Just do a normal library call if we were unable to fold
6137 CASE_FLT_FN (BUILT_IN_CABS
):
6140 CASE_FLT_FN (BUILT_IN_EXP
):
6141 CASE_FLT_FN (BUILT_IN_EXP10
):
6142 CASE_FLT_FN (BUILT_IN_POW10
):
6143 CASE_FLT_FN (BUILT_IN_EXP2
):
6144 CASE_FLT_FN (BUILT_IN_EXPM1
):
6145 CASE_FLT_FN (BUILT_IN_LOGB
):
6146 CASE_FLT_FN (BUILT_IN_LOG
):
6147 CASE_FLT_FN (BUILT_IN_LOG10
):
6148 CASE_FLT_FN (BUILT_IN_LOG2
):
6149 CASE_FLT_FN (BUILT_IN_LOG1P
):
6150 CASE_FLT_FN (BUILT_IN_TAN
):
6151 CASE_FLT_FN (BUILT_IN_ASIN
):
6152 CASE_FLT_FN (BUILT_IN_ACOS
):
6153 CASE_FLT_FN (BUILT_IN_ATAN
):
6154 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6155 because of possible accuracy problems. */
6156 if (! flag_unsafe_math_optimizations
)
6158 CASE_FLT_FN (BUILT_IN_SQRT
):
6159 CASE_FLT_FN (BUILT_IN_FLOOR
):
6160 CASE_FLT_FN (BUILT_IN_CEIL
):
6161 CASE_FLT_FN (BUILT_IN_TRUNC
):
6162 CASE_FLT_FN (BUILT_IN_ROUND
):
6163 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6164 CASE_FLT_FN (BUILT_IN_RINT
):
6165 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6170 CASE_FLT_FN (BUILT_IN_ILOGB
):
6171 if (! flag_unsafe_math_optimizations
)
6173 CASE_FLT_FN (BUILT_IN_ISINF
):
6174 CASE_FLT_FN (BUILT_IN_FINITE
):
6175 case BUILT_IN_ISFINITE
:
6176 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
6181 CASE_FLT_FN (BUILT_IN_LCEIL
):
6182 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6183 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6184 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6185 target
= expand_builtin_int_roundingfn (exp
, target
, subtarget
);
6190 CASE_FLT_FN (BUILT_IN_LRINT
):
6191 CASE_FLT_FN (BUILT_IN_LLRINT
):
6192 CASE_FLT_FN (BUILT_IN_LROUND
):
6193 CASE_FLT_FN (BUILT_IN_LLROUND
):
6194 target
= expand_builtin_int_roundingfn_2 (exp
, target
, subtarget
);
6199 CASE_FLT_FN (BUILT_IN_POW
):
6200 target
= expand_builtin_pow (exp
, target
, subtarget
);
6205 CASE_FLT_FN (BUILT_IN_POWI
):
6206 target
= expand_builtin_powi (exp
, target
, subtarget
);
6211 CASE_FLT_FN (BUILT_IN_ATAN2
):
6212 CASE_FLT_FN (BUILT_IN_LDEXP
):
6213 CASE_FLT_FN (BUILT_IN_SCALB
):
6214 CASE_FLT_FN (BUILT_IN_SCALBN
):
6215 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6216 if (! flag_unsafe_math_optimizations
)
6219 CASE_FLT_FN (BUILT_IN_FMOD
):
6220 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6221 CASE_FLT_FN (BUILT_IN_DREM
):
6222 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6227 CASE_FLT_FN (BUILT_IN_CEXPI
):
6228 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
6229 gcc_assert (target
);
6232 CASE_FLT_FN (BUILT_IN_SIN
):
6233 CASE_FLT_FN (BUILT_IN_COS
):
6234 if (! flag_unsafe_math_optimizations
)
6236 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6241 CASE_FLT_FN (BUILT_IN_SINCOS
):
6242 if (! flag_unsafe_math_optimizations
)
6244 target
= expand_builtin_sincos (exp
);
6249 case BUILT_IN_APPLY_ARGS
:
6250 return expand_builtin_apply_args ();
6252 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6253 FUNCTION with a copy of the parameters described by
6254 ARGUMENTS, and ARGSIZE. It returns a block of memory
6255 allocated on the stack into which is stored all the registers
6256 that might possibly be used for returning the result of a
6257 function. ARGUMENTS is the value returned by
6258 __builtin_apply_args. ARGSIZE is the number of bytes of
6259 arguments that must be copied. ??? How should this value be
6260 computed? We'll also need a safe worst case value for varargs
6262 case BUILT_IN_APPLY
:
6263 if (!validate_arglist (exp
, POINTER_TYPE
,
6264 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6265 && !validate_arglist (exp
, REFERENCE_TYPE
,
6266 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6272 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6273 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6274 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6276 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6279 /* __builtin_return (RESULT) causes the function to return the
6280 value described by RESULT. RESULT is address of the block of
6281 memory returned by __builtin_apply. */
6282 case BUILT_IN_RETURN
:
6283 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6284 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6287 case BUILT_IN_SAVEREGS
:
6288 return expand_builtin_saveregs ();
6290 case BUILT_IN_ARGS_INFO
:
6291 return expand_builtin_args_info (exp
);
6293 /* Return the address of the first anonymous stack arg. */
6294 case BUILT_IN_NEXT_ARG
:
6295 if (fold_builtin_next_arg (exp
, false))
6297 return expand_builtin_next_arg ();
6299 case BUILT_IN_CLEAR_CACHE
:
6300 target
= expand_builtin___clear_cache (exp
);
6305 case BUILT_IN_CLASSIFY_TYPE
:
6306 return expand_builtin_classify_type (exp
);
6308 case BUILT_IN_CONSTANT_P
:
6311 case BUILT_IN_FRAME_ADDRESS
:
6312 case BUILT_IN_RETURN_ADDRESS
:
6313 return expand_builtin_frame_address (fndecl
, exp
);
6315 /* Returns the address of the area where the structure is returned.
6317 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6318 if (call_expr_nargs (exp
) != 0
6319 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6320 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6323 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6325 case BUILT_IN_ALLOCA
:
6326 target
= expand_builtin_alloca (exp
, target
);
6331 case BUILT_IN_STACK_SAVE
:
6332 return expand_stack_save ();
6334 case BUILT_IN_STACK_RESTORE
:
6335 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6338 case BUILT_IN_BSWAP32
:
6339 case BUILT_IN_BSWAP64
:
6340 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6346 CASE_INT_FN (BUILT_IN_FFS
):
6347 case BUILT_IN_FFSIMAX
:
6348 target
= expand_builtin_unop (target_mode
, exp
, target
,
6349 subtarget
, ffs_optab
);
6354 CASE_INT_FN (BUILT_IN_CLZ
):
6355 case BUILT_IN_CLZIMAX
:
6356 target
= expand_builtin_unop (target_mode
, exp
, target
,
6357 subtarget
, clz_optab
);
6362 CASE_INT_FN (BUILT_IN_CTZ
):
6363 case BUILT_IN_CTZIMAX
:
6364 target
= expand_builtin_unop (target_mode
, exp
, target
,
6365 subtarget
, ctz_optab
);
6370 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6371 case BUILT_IN_POPCOUNTIMAX
:
6372 target
= expand_builtin_unop (target_mode
, exp
, target
,
6373 subtarget
, popcount_optab
);
6378 CASE_INT_FN (BUILT_IN_PARITY
):
6379 case BUILT_IN_PARITYIMAX
:
6380 target
= expand_builtin_unop (target_mode
, exp
, target
,
6381 subtarget
, parity_optab
);
6386 case BUILT_IN_STRLEN
:
6387 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6392 case BUILT_IN_STRCPY
:
6393 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6398 case BUILT_IN_STRNCPY
:
6399 target
= expand_builtin_strncpy (exp
, target
, mode
);
6404 case BUILT_IN_STPCPY
:
6405 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6410 case BUILT_IN_STRCAT
:
6411 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6416 case BUILT_IN_STRNCAT
:
6417 target
= expand_builtin_strncat (exp
, target
, mode
);
6422 case BUILT_IN_STRSPN
:
6423 target
= expand_builtin_strspn (exp
, target
, mode
);
6428 case BUILT_IN_STRCSPN
:
6429 target
= expand_builtin_strcspn (exp
, target
, mode
);
6434 case BUILT_IN_STRSTR
:
6435 target
= expand_builtin_strstr (exp
, target
, mode
);
6440 case BUILT_IN_STRPBRK
:
6441 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6446 case BUILT_IN_INDEX
:
6447 case BUILT_IN_STRCHR
:
6448 target
= expand_builtin_strchr (exp
, target
, mode
);
6453 case BUILT_IN_RINDEX
:
6454 case BUILT_IN_STRRCHR
:
6455 target
= expand_builtin_strrchr (exp
, target
, mode
);
6460 case BUILT_IN_MEMCPY
:
6461 target
= expand_builtin_memcpy (exp
, target
, mode
);
6466 case BUILT_IN_MEMPCPY
:
6467 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6472 case BUILT_IN_MEMMOVE
:
6473 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6478 case BUILT_IN_BCOPY
:
6479 target
= expand_builtin_bcopy (exp
, ignore
);
6484 case BUILT_IN_MEMSET
:
6485 target
= expand_builtin_memset (exp
, target
, mode
);
6490 case BUILT_IN_BZERO
:
6491 target
= expand_builtin_bzero (exp
);
6496 case BUILT_IN_STRCMP
:
6497 target
= expand_builtin_strcmp (exp
, target
, mode
);
6502 case BUILT_IN_STRNCMP
:
6503 target
= expand_builtin_strncmp (exp
, target
, mode
);
6508 case BUILT_IN_MEMCHR
:
6509 target
= expand_builtin_memchr (exp
, target
, mode
);
6515 case BUILT_IN_MEMCMP
:
6516 target
= expand_builtin_memcmp (exp
, target
, mode
);
6521 case BUILT_IN_SETJMP
:
6522 /* This should have been lowered to the builtins below. */
6525 case BUILT_IN_SETJMP_SETUP
:
6526 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6527 and the receiver label. */
6528 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6530 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6531 VOIDmode
, EXPAND_NORMAL
);
6532 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6533 rtx label_r
= label_rtx (label
);
6535 /* This is copied from the handling of non-local gotos. */
6536 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6537 nonlocal_goto_handler_labels
6538 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6539 nonlocal_goto_handler_labels
);
6540 /* ??? Do not let expand_label treat us as such since we would
6541 not want to be both on the list of non-local labels and on
6542 the list of forced labels. */
6543 FORCED_LABEL (label
) = 0;
6548 case BUILT_IN_SETJMP_DISPATCHER
:
6549 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6550 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6552 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6553 rtx label_r
= label_rtx (label
);
6555 /* Remove the dispatcher label from the list of non-local labels
6556 since the receiver labels have been added to it above. */
6557 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6562 case BUILT_IN_SETJMP_RECEIVER
:
6563 /* __builtin_setjmp_receiver is passed the receiver label. */
6564 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6566 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6567 rtx label_r
= label_rtx (label
);
6569 expand_builtin_setjmp_receiver (label_r
);
6574 /* __builtin_longjmp is passed a pointer to an array of five words.
6575 It's similar to the C library longjmp function but works with
6576 __builtin_setjmp above. */
6577 case BUILT_IN_LONGJMP
:
6578 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6580 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6581 VOIDmode
, EXPAND_NORMAL
);
6582 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6584 if (value
!= const1_rtx
)
6586 error ("%<__builtin_longjmp%> second argument must be 1");
6590 expand_builtin_longjmp (buf_addr
, value
);
6595 case BUILT_IN_NONLOCAL_GOTO
:
6596 target
= expand_builtin_nonlocal_goto (exp
);
6601 /* This updates the setjmp buffer that is its argument with the value
6602 of the current stack pointer. */
6603 case BUILT_IN_UPDATE_SETJMP_BUF
:
6604 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6607 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6609 expand_builtin_update_setjmp_buf (buf_addr
);
6615 expand_builtin_trap ();
6618 case BUILT_IN_PRINTF
:
6619 target
= expand_builtin_printf (exp
, target
, mode
, false);
6624 case BUILT_IN_PRINTF_UNLOCKED
:
6625 target
= expand_builtin_printf (exp
, target
, mode
, true);
6630 case BUILT_IN_FPUTS
:
6631 target
= expand_builtin_fputs (exp
, target
, false);
6635 case BUILT_IN_FPUTS_UNLOCKED
:
6636 target
= expand_builtin_fputs (exp
, target
, true);
6641 case BUILT_IN_FPRINTF
:
6642 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6647 case BUILT_IN_FPRINTF_UNLOCKED
:
6648 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6653 case BUILT_IN_SPRINTF
:
6654 target
= expand_builtin_sprintf (exp
, target
, mode
);
6659 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6660 case BUILT_IN_SIGNBITD32
:
6661 case BUILT_IN_SIGNBITD64
:
6662 case BUILT_IN_SIGNBITD128
:
6663 target
= expand_builtin_signbit (exp
, target
);
6668 /* Various hooks for the DWARF 2 __throw routine. */
6669 case BUILT_IN_UNWIND_INIT
:
6670 expand_builtin_unwind_init ();
6672 case BUILT_IN_DWARF_CFA
:
6673 return virtual_cfa_rtx
;
6674 #ifdef DWARF2_UNWIND_INFO
6675 case BUILT_IN_DWARF_SP_COLUMN
:
6676 return expand_builtin_dwarf_sp_column ();
6677 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6678 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6681 case BUILT_IN_FROB_RETURN_ADDR
:
6682 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6683 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6684 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6685 case BUILT_IN_EH_RETURN
:
6686 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6687 CALL_EXPR_ARG (exp
, 1));
6689 #ifdef EH_RETURN_DATA_REGNO
6690 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6691 return expand_builtin_eh_return_data_regno (exp
);
6693 case BUILT_IN_EXTEND_POINTER
:
6694 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6696 case BUILT_IN_VA_START
:
6697 case BUILT_IN_STDARG_START
:
6698 return expand_builtin_va_start (exp
);
6699 case BUILT_IN_VA_END
:
6700 return expand_builtin_va_end (exp
);
6701 case BUILT_IN_VA_COPY
:
6702 return expand_builtin_va_copy (exp
);
6703 case BUILT_IN_EXPECT
:
6704 return expand_builtin_expect (exp
, target
);
6705 case BUILT_IN_PREFETCH
:
6706 expand_builtin_prefetch (exp
);
6709 case BUILT_IN_PROFILE_FUNC_ENTER
:
6710 return expand_builtin_profile_func (false);
6711 case BUILT_IN_PROFILE_FUNC_EXIT
:
6712 return expand_builtin_profile_func (true);
6714 case BUILT_IN_INIT_TRAMPOLINE
:
6715 return expand_builtin_init_trampoline (exp
);
6716 case BUILT_IN_ADJUST_TRAMPOLINE
:
6717 return expand_builtin_adjust_trampoline (exp
);
6720 case BUILT_IN_EXECL
:
6721 case BUILT_IN_EXECV
:
6722 case BUILT_IN_EXECLP
:
6723 case BUILT_IN_EXECLE
:
6724 case BUILT_IN_EXECVP
:
6725 case BUILT_IN_EXECVE
:
6726 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6731 case BUILT_IN_FETCH_AND_ADD_1
:
6732 case BUILT_IN_FETCH_AND_ADD_2
:
6733 case BUILT_IN_FETCH_AND_ADD_4
:
6734 case BUILT_IN_FETCH_AND_ADD_8
:
6735 case BUILT_IN_FETCH_AND_ADD_16
:
6736 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6737 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6738 false, target
, ignore
);
6743 case BUILT_IN_FETCH_AND_SUB_1
:
6744 case BUILT_IN_FETCH_AND_SUB_2
:
6745 case BUILT_IN_FETCH_AND_SUB_4
:
6746 case BUILT_IN_FETCH_AND_SUB_8
:
6747 case BUILT_IN_FETCH_AND_SUB_16
:
6748 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6749 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6750 false, target
, ignore
);
6755 case BUILT_IN_FETCH_AND_OR_1
:
6756 case BUILT_IN_FETCH_AND_OR_2
:
6757 case BUILT_IN_FETCH_AND_OR_4
:
6758 case BUILT_IN_FETCH_AND_OR_8
:
6759 case BUILT_IN_FETCH_AND_OR_16
:
6760 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6761 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6762 false, target
, ignore
);
6767 case BUILT_IN_FETCH_AND_AND_1
:
6768 case BUILT_IN_FETCH_AND_AND_2
:
6769 case BUILT_IN_FETCH_AND_AND_4
:
6770 case BUILT_IN_FETCH_AND_AND_8
:
6771 case BUILT_IN_FETCH_AND_AND_16
:
6772 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6773 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6774 false, target
, ignore
);
6779 case BUILT_IN_FETCH_AND_XOR_1
:
6780 case BUILT_IN_FETCH_AND_XOR_2
:
6781 case BUILT_IN_FETCH_AND_XOR_4
:
6782 case BUILT_IN_FETCH_AND_XOR_8
:
6783 case BUILT_IN_FETCH_AND_XOR_16
:
6784 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6785 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6786 false, target
, ignore
);
6791 case BUILT_IN_FETCH_AND_NAND_1
:
6792 case BUILT_IN_FETCH_AND_NAND_2
:
6793 case BUILT_IN_FETCH_AND_NAND_4
:
6794 case BUILT_IN_FETCH_AND_NAND_8
:
6795 case BUILT_IN_FETCH_AND_NAND_16
:
6796 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6797 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6798 false, target
, ignore
);
6803 case BUILT_IN_ADD_AND_FETCH_1
:
6804 case BUILT_IN_ADD_AND_FETCH_2
:
6805 case BUILT_IN_ADD_AND_FETCH_4
:
6806 case BUILT_IN_ADD_AND_FETCH_8
:
6807 case BUILT_IN_ADD_AND_FETCH_16
:
6808 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6809 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6810 true, target
, ignore
);
6815 case BUILT_IN_SUB_AND_FETCH_1
:
6816 case BUILT_IN_SUB_AND_FETCH_2
:
6817 case BUILT_IN_SUB_AND_FETCH_4
:
6818 case BUILT_IN_SUB_AND_FETCH_8
:
6819 case BUILT_IN_SUB_AND_FETCH_16
:
6820 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6821 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6822 true, target
, ignore
);
6827 case BUILT_IN_OR_AND_FETCH_1
:
6828 case BUILT_IN_OR_AND_FETCH_2
:
6829 case BUILT_IN_OR_AND_FETCH_4
:
6830 case BUILT_IN_OR_AND_FETCH_8
:
6831 case BUILT_IN_OR_AND_FETCH_16
:
6832 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6833 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6834 true, target
, ignore
);
6839 case BUILT_IN_AND_AND_FETCH_1
:
6840 case BUILT_IN_AND_AND_FETCH_2
:
6841 case BUILT_IN_AND_AND_FETCH_4
:
6842 case BUILT_IN_AND_AND_FETCH_8
:
6843 case BUILT_IN_AND_AND_FETCH_16
:
6844 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6845 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6846 true, target
, ignore
);
6851 case BUILT_IN_XOR_AND_FETCH_1
:
6852 case BUILT_IN_XOR_AND_FETCH_2
:
6853 case BUILT_IN_XOR_AND_FETCH_4
:
6854 case BUILT_IN_XOR_AND_FETCH_8
:
6855 case BUILT_IN_XOR_AND_FETCH_16
:
6856 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6857 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6858 true, target
, ignore
);
6863 case BUILT_IN_NAND_AND_FETCH_1
:
6864 case BUILT_IN_NAND_AND_FETCH_2
:
6865 case BUILT_IN_NAND_AND_FETCH_4
:
6866 case BUILT_IN_NAND_AND_FETCH_8
:
6867 case BUILT_IN_NAND_AND_FETCH_16
:
6868 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6869 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6870 true, target
, ignore
);
6875 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6876 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6877 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6878 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6879 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6880 if (mode
== VOIDmode
)
6881 mode
= TYPE_MODE (boolean_type_node
);
6882 if (!target
|| !register_operand (target
, mode
))
6883 target
= gen_reg_rtx (mode
);
6885 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6886 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6891 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6892 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6893 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6894 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6895 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6896 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6897 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6902 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6903 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6904 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6905 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6906 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6907 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6908 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6913 case BUILT_IN_LOCK_RELEASE_1
:
6914 case BUILT_IN_LOCK_RELEASE_2
:
6915 case BUILT_IN_LOCK_RELEASE_4
:
6916 case BUILT_IN_LOCK_RELEASE_8
:
6917 case BUILT_IN_LOCK_RELEASE_16
:
6918 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6919 expand_builtin_lock_release (mode
, exp
);
6922 case BUILT_IN_SYNCHRONIZE
:
6923 expand_builtin_synchronize ();
6926 case BUILT_IN_OBJECT_SIZE
:
6927 return expand_builtin_object_size (exp
);
6929 case BUILT_IN_MEMCPY_CHK
:
6930 case BUILT_IN_MEMPCPY_CHK
:
6931 case BUILT_IN_MEMMOVE_CHK
:
6932 case BUILT_IN_MEMSET_CHK
:
6933 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6938 case BUILT_IN_STRCPY_CHK
:
6939 case BUILT_IN_STPCPY_CHK
:
6940 case BUILT_IN_STRNCPY_CHK
:
6941 case BUILT_IN_STRCAT_CHK
:
6942 case BUILT_IN_STRNCAT_CHK
:
6943 case BUILT_IN_SNPRINTF_CHK
:
6944 case BUILT_IN_VSNPRINTF_CHK
:
6945 maybe_emit_chk_warning (exp
, fcode
);
6948 case BUILT_IN_SPRINTF_CHK
:
6949 case BUILT_IN_VSPRINTF_CHK
:
6950 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6953 default: /* just do library call, if unknown builtin */
6957 /* The switch statement above can drop through to cause the function
6958 to be called normally. */
6959 return expand_call (exp
, target
, ignore
);
6962 /* Determine whether a tree node represents a call to a built-in
6963 function. If the tree T is a call to a built-in function with
6964 the right number of arguments of the appropriate types, return
6965 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6966 Otherwise the return value is END_BUILTINS. */
6968 enum built_in_function
6969 builtin_mathfn_code (tree t
)
6971 tree fndecl
, arg
, parmlist
;
6972 tree argtype
, parmtype
;
6973 call_expr_arg_iterator iter
;
6975 if (TREE_CODE (t
) != CALL_EXPR
6976 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6977 return END_BUILTINS
;
6979 fndecl
= get_callee_fndecl (t
);
6980 if (fndecl
== NULL_TREE
6981 || TREE_CODE (fndecl
) != FUNCTION_DECL
6982 || ! DECL_BUILT_IN (fndecl
)
6983 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6984 return END_BUILTINS
;
6986 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6987 init_call_expr_arg_iterator (t
, &iter
);
6988 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6990 /* If a function doesn't take a variable number of arguments,
6991 the last element in the list will have type `void'. */
6992 parmtype
= TREE_VALUE (parmlist
);
6993 if (VOID_TYPE_P (parmtype
))
6995 if (more_call_expr_args_p (&iter
))
6996 return END_BUILTINS
;
6997 return DECL_FUNCTION_CODE (fndecl
);
7000 if (! more_call_expr_args_p (&iter
))
7001 return END_BUILTINS
;
7003 arg
= next_call_expr_arg (&iter
);
7004 argtype
= TREE_TYPE (arg
);
7006 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7008 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7009 return END_BUILTINS
;
7011 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7013 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7014 return END_BUILTINS
;
7016 else if (POINTER_TYPE_P (parmtype
))
7018 if (! POINTER_TYPE_P (argtype
))
7019 return END_BUILTINS
;
7021 else if (INTEGRAL_TYPE_P (parmtype
))
7023 if (! INTEGRAL_TYPE_P (argtype
))
7024 return END_BUILTINS
;
7027 return END_BUILTINS
;
7030 /* Variable-length argument list. */
7031 return DECL_FUNCTION_CODE (fndecl
);
7034 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7035 evaluate to a constant. */
/* NOTE(review): this region is extraction-damaged.  The embedded
   original line numbers jump (7035->7038, 7042->7045, 7049->7050,
   7069->7071, and the text stops at 7072), so the return-type line,
   all braces, at least one condition of the final test (original line
   7070), and the closing return of this function were lost.  Recover
   the full text from the upstream file before modifying it.  */
7038 fold_builtin_constant_p (tree arg
)
7040 /* We return 1 for a numeric type that's known to be a constant
7041 value at compile-time or for an aggregate type that's a
7042 literal constant. */
7045 /* If we know this is a constant, emit the constant of one. */
7046 if (CONSTANT_CLASS_P (arg
)
7047 || (TREE_CODE (arg
) == CONSTRUCTOR
7048 && TREE_CONSTANT (arg
)))
7049 return integer_one_node
;
/* Taking the address of a string literal (directly or via a
   zero-index ARRAY_REF into one) also folds to 1.  */
7050 if (TREE_CODE (arg
) == ADDR_EXPR
)
7052 tree op
= TREE_OPERAND (arg
, 0);
7053 if (TREE_CODE (op
) == STRING_CST
7054 || (TREE_CODE (op
) == ARRAY_REF
7055 && integer_zerop (TREE_OPERAND (op
, 1))
7056 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7057 return integer_one_node
;
7060 /* If this expression has side effects, show we don't know it to be a
7061 constant. Likewise if it's a pointer or aggregate type since in
7062 those case we only want literals, since those are only optimized
7063 when generating RTL, not later.
7064 And finally, if we are compiling an initializer, not code, we
7065 need to return a definite result now; there's not going to be any
7066 more optimization done. */
/* NOTE(review): one disjunct (original line 7070) between the
   POINTER_TYPE_P test and folding_initializer is missing here.  */
7067 if (TREE_SIDE_EFFECTS (arg
)
7068 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7069 || POINTER_TYPE_P (TREE_TYPE (arg
))
7071 || folding_initializer
)
7072 return integer_zero_node
;
7077 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7078 comparison against the argument will fold to a constant. In practice,
7079 this means a true constant or the address of a non-weak symbol. */
/* NOTE(review): extraction-damaged.  Gaps in the embedded numbering
   (7079->7082, 7087->7090, 7093->7097, 7101->7105, and the text ends
   at 7105) show that the declaration of `inner', the opening of the
   do-loop whose `while' test survives below, and every return
   statement of this function were dropped.  Recover from upstream
   before editing.  */
7082 fold_builtin_expect (tree arg
)
7086 /* If the argument isn't invariant, then there's nothing we can do. */
7087 if (!TREE_INVARIANT (arg
))
7090 /* If we're looking at an address of a weak decl, then do not fold. */
/* The loop below strips COMPONENT_REF/ARRAY_REF wrappers to find the
   underlying declaration whose weakness is tested.  */
7093 if (TREE_CODE (inner
) == ADDR_EXPR
)
7097 inner
= TREE_OPERAND (inner
, 0);
7099 while (TREE_CODE (inner
) == COMPONENT_REF
7100 || TREE_CODE (inner
) == ARRAY_REF
);
7101 if (DECL_P (inner
) && DECL_WEAK (inner
))
7105 /* Otherwise, ARG already has the proper type for the return value. */
7109 /* Fold a call to __builtin_classify_type with argument ARG. */
7112 fold_builtin_classify_type (tree arg
)
7115 return build_int_cst (NULL_TREE
, no_type_class
);
7117 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
7120 /* Fold a call to __builtin_strlen with argument ARG. */
7123 fold_builtin_strlen (tree arg
)
7125 if (!validate_arg (arg
, POINTER_TYPE
))
7129 tree len
= c_strlen (arg
, 0);
7133 /* Convert from the internal "sizetype" type to "size_t". */
7135 len
= fold_convert (size_type_node
, len
);
7143 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7146 fold_builtin_inf (tree type
, int warn
)
7148 REAL_VALUE_TYPE real
;
7150 /* __builtin_inff is intended to be usable to define INFINITY on all
7151 targets. If an infinity is not available, INFINITY expands "to a
7152 positive constant of type float that overflows at translation
7153 time", footnote "In this case, using INFINITY will violate the
7154 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7155 Thus we pedwarn to ensure this constraint violation is
7157 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7158 pedwarn ("target format does not support infinity");
7161 return build_real (type
, real
);
7164 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7167 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7169 REAL_VALUE_TYPE real
;
7172 if (!validate_arg (arg
, POINTER_TYPE
))
7174 str
= c_getstr (arg
);
7178 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7181 return build_real (type
, real
);
7184 /* Return true if the floating point expression T has an integer value.
7185 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* NOTE(review): extraction-damaged.  Gaps in the embedded numbering
   (7185->7188, 7190->7197, 7198->7203, 7203->7210, 7215->7218,
   7218->7222, 7226->7231, 7238->7241, and the text ends at 7244)
   show that most of this switch's case labels, its braces, the
   `break'/default arms, and the closing `return false;' were lost.
   Several `return' statements below are therefore orphaned from the
   case labels that selected them.  Recover from upstream before
   editing.  */
7188 integer_valued_real_p (tree t
)
7190 switch (TREE_CODE (t
))
7197 case NON_LVALUE_EXPR
:
7198 return integer_valued_real_p (TREE_OPERAND (t
, 0));
/* NOTE(review): the case label(s) for this arm (original 7199-7202)
   are missing; operand 1 suggests an assignment-like node.  */
7203 return integer_valued_real_p (GENERIC_TREE_OPERAND (t
, 1));
/* NOTE(review): labels missing -- a binary arithmetic arm: integral
   iff both operands are.  */
7210 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7211 && integer_valued_real_p (TREE_OPERAND (t
, 1));
/* NOTE(review): labels missing -- operands 1 and 2 suggest a
   conditional expression: integral iff both selected values are.  */
7214 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7215 && integer_valued_real_p (TREE_OPERAND (t
, 2));
/* NOTE(review): label missing -- a real constant arm, decided by
   real_isinteger on the constant's value.  */
7218 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
/* NOTE(review): label missing -- a conversion arm: from an integer
   type the result is always integral; from a real type, recurse.  */
7222 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7223 if (TREE_CODE (type
) == INTEGER_TYPE
)
7225 if (TREE_CODE (type
) == REAL_TYPE
)
7226 return integer_valued_real_p (TREE_OPERAND (t
, 0));
/* Calls to integer-rounding math builtins always yield integral
   values; fmin/fmax do iff both of their arguments do.  */
7231 switch (builtin_mathfn_code (t
))
7233 CASE_FLT_FN (BUILT_IN_CEIL
):
7234 CASE_FLT_FN (BUILT_IN_FLOOR
):
7235 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7236 CASE_FLT_FN (BUILT_IN_RINT
):
7237 CASE_FLT_FN (BUILT_IN_ROUND
):
7238 CASE_FLT_FN (BUILT_IN_TRUNC
):
7241 CASE_FLT_FN (BUILT_IN_FMIN
):
7242 CASE_FLT_FN (BUILT_IN_FMAX
):
7243 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7244 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7257 /* FNDECL is assumed to be a builtin where truncation can be propagated
7258 across (for instance floor((double)f) == (double)floorf (f).
7259 Do the transformation for a call with argument ARG. */
7262 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7264 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7266 if (!validate_arg (arg
, REAL_TYPE
))
7269 /* Integer rounding functions are idempotent. */
7270 if (fcode
== builtin_mathfn_code (arg
))
7273 /* If argument is already integer valued, and we don't need to worry
7274 about setting errno, there's no need to perform rounding. */
7275 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7280 tree arg0
= strip_float_extensions (arg
);
7281 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7282 tree newtype
= TREE_TYPE (arg0
);
7285 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7286 && (decl
= mathfn_built_in (newtype
, fcode
)))
7287 return fold_convert (ftype
,
7288 build_call_expr (decl
, 1,
7289 fold_convert (newtype
, arg0
)));
7294 /* FNDECL is assumed to be builtin which can narrow the FP type of
7295 the argument, for instance lround((double)f) -> lroundf (f).
7296 Do the transformation for a call with argument ARG. */
7299 fold_fixed_mathfn (tree fndecl
, tree arg
)
7301 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7303 if (!validate_arg (arg
, REAL_TYPE
))
7306 /* If argument is already integer valued, and we don't need to worry
7307 about setting errno, there's no need to perform rounding. */
7308 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7309 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7313 tree ftype
= TREE_TYPE (arg
);
7314 tree arg0
= strip_float_extensions (arg
);
7315 tree newtype
= TREE_TYPE (arg0
);
7318 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7319 && (decl
= mathfn_built_in (newtype
, fcode
)))
7320 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7323 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7324 sizeof (long long) == sizeof (long). */
7325 if (TYPE_PRECISION (long_long_integer_type_node
)
7326 == TYPE_PRECISION (long_integer_type_node
))
7328 tree newfn
= NULL_TREE
;
7331 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7332 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7335 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7336 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7339 CASE_FLT_FN (BUILT_IN_LLROUND
):
7340 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7343 CASE_FLT_FN (BUILT_IN_LLRINT
):
7344 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7353 tree newcall
= build_call_expr(newfn
, 1, arg
);
7354 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7361 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7362 return type. Return NULL_TREE if no simplification can be made. */
7365 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7369 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7370 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7373 /* Calculate the result when the argument is a constant. */
7374 if (TREE_CODE (arg
) == COMPLEX_CST
7375 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7379 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7381 tree real
= TREE_OPERAND (arg
, 0);
7382 tree imag
= TREE_OPERAND (arg
, 1);
7384 /* If either part is zero, cabs is fabs of the other. */
7385 if (real_zerop (real
))
7386 return fold_build1 (ABS_EXPR
, type
, imag
);
7387 if (real_zerop (imag
))
7388 return fold_build1 (ABS_EXPR
, type
, real
);
7390 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7391 if (flag_unsafe_math_optimizations
7392 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7394 const REAL_VALUE_TYPE sqrt2_trunc
7395 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
7397 return fold_build2 (MULT_EXPR
, type
,
7398 fold_build1 (ABS_EXPR
, type
, real
),
7399 build_real (type
, sqrt2_trunc
));
7403 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7404 if (TREE_CODE (arg
) == NEGATE_EXPR
7405 || TREE_CODE (arg
) == CONJ_EXPR
)
7406 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7408 /* Don't do this when optimizing for size. */
7409 if (flag_unsafe_math_optimizations
7410 && optimize
&& !optimize_size
)
7412 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7414 if (sqrtfn
!= NULL_TREE
)
7416 tree rpart
, ipart
, result
;
7418 arg
= builtin_save_expr (arg
);
7420 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7421 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7423 rpart
= builtin_save_expr (rpart
);
7424 ipart
= builtin_save_expr (ipart
);
7426 result
= fold_build2 (PLUS_EXPR
, type
,
7427 fold_build2 (MULT_EXPR
, type
,
7429 fold_build2 (MULT_EXPR
, type
,
7432 return build_call_expr (sqrtfn
, 1, result
);
7439 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7440 Return NULL_TREE if no simplification can be made. */
7443 fold_builtin_sqrt (tree arg
, tree type
)
7446 enum built_in_function fcode
;
7449 if (!validate_arg (arg
, REAL_TYPE
))
7452 /* Calculate the result when the argument is a constant. */
7453 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7456 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7457 fcode
= builtin_mathfn_code (arg
);
7458 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7460 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7461 arg
= fold_build2 (MULT_EXPR
, type
,
7462 CALL_EXPR_ARG (arg
, 0),
7463 build_real (type
, dconsthalf
));
7464 return build_call_expr (expfn
, 1, arg
);
7467 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7468 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7470 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7474 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7476 /* The inner root was either sqrt or cbrt. */
7477 REAL_VALUE_TYPE dconstroot
=
7478 BUILTIN_SQRT_P (fcode
) ? dconsthalf
: dconstthird
;
7480 /* Adjust for the outer root. */
7481 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7482 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7483 tree_root
= build_real (type
, dconstroot
);
7484 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7488 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7489 if (flag_unsafe_math_optimizations
7490 && (fcode
== BUILT_IN_POW
7491 || fcode
== BUILT_IN_POWF
7492 || fcode
== BUILT_IN_POWL
))
7494 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7495 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7496 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7498 if (!tree_expr_nonnegative_p (arg0
))
7499 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7500 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7501 build_real (type
, dconsthalf
));
7502 return build_call_expr (powfn
, 2, arg0
, narg1
);
7508 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7509 Return NULL_TREE if no simplification can be made. */
7512 fold_builtin_cbrt (tree arg
, tree type
)
7514 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7517 if (!validate_arg (arg
, REAL_TYPE
))
7520 /* Calculate the result when the argument is a constant. */
7521 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7524 if (flag_unsafe_math_optimizations
)
7526 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7527 if (BUILTIN_EXPONENT_P (fcode
))
7529 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7530 const REAL_VALUE_TYPE third_trunc
=
7531 real_value_truncate (TYPE_MODE (type
), dconstthird
);
7532 arg
= fold_build2 (MULT_EXPR
, type
,
7533 CALL_EXPR_ARG (arg
, 0),
7534 build_real (type
, third_trunc
));
7535 return build_call_expr (expfn
, 1, arg
);
7538 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7539 if (BUILTIN_SQRT_P (fcode
))
7541 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7545 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7547 REAL_VALUE_TYPE dconstroot
= dconstthird
;
7549 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7550 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7551 tree_root
= build_real (type
, dconstroot
);
7552 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7556 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7557 if (BUILTIN_CBRT_P (fcode
))
7559 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7560 if (tree_expr_nonnegative_p (arg0
))
7562 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7567 REAL_VALUE_TYPE dconstroot
;
7569 real_arithmetic (&dconstroot
, MULT_EXPR
, &dconstthird
, &dconstthird
);
7570 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7571 tree_root
= build_real (type
, dconstroot
);
7572 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7577 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7578 if (fcode
== BUILT_IN_POW
7579 || fcode
== BUILT_IN_POWF
7580 || fcode
== BUILT_IN_POWL
)
7582 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7583 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7584 if (tree_expr_nonnegative_p (arg00
))
7586 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7587 const REAL_VALUE_TYPE dconstroot
7588 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
7589 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7590 build_real (type
, dconstroot
));
7591 return build_call_expr (powfn
, 2, arg00
, narg01
);
7598 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7599 TYPE is the type of the return value. Return NULL_TREE if no
7600 simplification can be made. */
7603 fold_builtin_cos (tree arg
, tree type
, tree fndecl
)
7607 if (!validate_arg (arg
, REAL_TYPE
))
7610 /* Calculate the result when the argument is a constant. */
7611 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7614 /* Optimize cos(-x) into cos (x). */
7615 if ((narg
= fold_strip_sign_ops (arg
)))
7616 return build_call_expr (fndecl
, 1, narg
);
7621 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7622 Return NULL_TREE if no simplification can be made. */
7625 fold_builtin_cosh (tree arg
, tree type
, tree fndecl
)
7627 if (validate_arg (arg
, REAL_TYPE
))
7631 /* Calculate the result when the argument is a constant. */
7632 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7635 /* Optimize cosh(-x) into cosh (x). */
7636 if ((narg
= fold_strip_sign_ops (arg
)))
7637 return build_call_expr (fndecl
, 1, narg
);
7643 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7644 Return NULL_TREE if no simplification can be made. */
7647 fold_builtin_tan (tree arg
, tree type
)
7649 enum built_in_function fcode
;
7652 if (!validate_arg (arg
, REAL_TYPE
))
7655 /* Calculate the result when the argument is a constant. */
7656 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7659 /* Optimize tan(atan(x)) = x. */
7660 fcode
= builtin_mathfn_code (arg
);
7661 if (flag_unsafe_math_optimizations
7662 && (fcode
== BUILT_IN_ATAN
7663 || fcode
== BUILT_IN_ATANF
7664 || fcode
== BUILT_IN_ATANL
))
7665 return CALL_EXPR_ARG (arg
, 0);
7670 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7671 NULL_TREE if no simplification can be made. */
7674 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7679 if (!validate_arg (arg0
, REAL_TYPE
)
7680 || !validate_arg (arg1
, POINTER_TYPE
)
7681 || !validate_arg (arg2
, POINTER_TYPE
))
7684 type
= TREE_TYPE (arg0
);
7686 /* Calculate the result when the argument is a constant. */
7687 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7690 /* Canonicalize sincos to cexpi. */
7691 if (!TARGET_C99_FUNCTIONS
)
7693 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7697 call
= build_call_expr (fn
, 1, arg0
);
7698 call
= builtin_save_expr (call
);
7700 return build2 (COMPOUND_EXPR
, type
,
7701 build2 (MODIFY_EXPR
, void_type_node
,
7702 build_fold_indirect_ref (arg1
),
7703 build1 (IMAGPART_EXPR
, type
, call
)),
7704 build2 (MODIFY_EXPR
, void_type_node
,
7705 build_fold_indirect_ref (arg2
),
7706 build1 (REALPART_EXPR
, type
, call
)));
7709 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7710 NULL_TREE if no simplification can be made. */
7713 fold_builtin_cexp (tree arg0
, tree type
)
7716 tree realp
, imagp
, ifn
;
7718 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7721 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7723 /* In case we can figure out the real part of arg0 and it is constant zero
7725 if (!TARGET_C99_FUNCTIONS
)
7727 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7731 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7732 && real_zerop (realp
))
7734 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7735 return build_call_expr (ifn
, 1, narg
);
7738 /* In case we can easily decompose real and imaginary parts split cexp
7739 to exp (r) * cexpi (i). */
7740 if (flag_unsafe_math_optimizations
7743 tree rfn
, rcall
, icall
;
7745 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7749 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7753 icall
= build_call_expr (ifn
, 1, imagp
);
7754 icall
= builtin_save_expr (icall
);
7755 rcall
= build_call_expr (rfn
, 1, realp
);
7756 rcall
= builtin_save_expr (rcall
);
7757 return build2 (COMPLEX_EXPR
, type
,
7758 build2 (MULT_EXPR
, rtype
,
7760 build1 (REALPART_EXPR
, rtype
, icall
)),
7761 build2 (MULT_EXPR
, rtype
,
7763 build1 (IMAGPART_EXPR
, rtype
, icall
)));
7769 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7770 Return NULL_TREE if no simplification can be made. */
7773 fold_builtin_trunc (tree fndecl
, tree arg
)
7775 if (!validate_arg (arg
, REAL_TYPE
))
7778 /* Optimize trunc of constant value. */
7779 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7781 REAL_VALUE_TYPE r
, x
;
7782 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7784 x
= TREE_REAL_CST (arg
);
7785 real_trunc (&r
, TYPE_MODE (type
), &x
);
7786 return build_real (type
, r
);
7789 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7792 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7793 Return NULL_TREE if no simplification can be made. */
7796 fold_builtin_floor (tree fndecl
, tree arg
)
7798 if (!validate_arg (arg
, REAL_TYPE
))
7801 /* Optimize floor of constant value. */
7802 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7806 x
= TREE_REAL_CST (arg
);
7807 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7809 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7812 real_floor (&r
, TYPE_MODE (type
), &x
);
7813 return build_real (type
, r
);
7817 /* Fold floor (x) where x is nonnegative to trunc (x). */
7818 if (tree_expr_nonnegative_p (arg
))
7820 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7822 return build_call_expr (truncfn
, 1, arg
);
7825 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7828 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7829 Return NULL_TREE if no simplification can be made. */
7832 fold_builtin_ceil (tree fndecl
, tree arg
)
7834 if (!validate_arg (arg
, REAL_TYPE
))
7837 /* Optimize ceil of constant value. */
7838 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7842 x
= TREE_REAL_CST (arg
);
7843 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7845 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7848 real_ceil (&r
, TYPE_MODE (type
), &x
);
7849 return build_real (type
, r
);
7853 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7856 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7860 fold_builtin_round (tree fndecl
, tree arg
)
7862 if (!validate_arg (arg
, REAL_TYPE
))
7865 /* Optimize round of constant value. */
7866 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7870 x
= TREE_REAL_CST (arg
);
7871 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7873 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7876 real_round (&r
, TYPE_MODE (type
), &x
);
7877 return build_real (type
, r
);
7881 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7884 /* Fold function call to builtin lround, lroundf or lroundl (or the
7885 corresponding long long versions) and other rounding functions. ARG
7886 is the argument to the call. Return NULL_TREE if no simplification
7890 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
7892 if (!validate_arg (arg
, REAL_TYPE
))
7895 /* Optimize lround of constant value. */
7896 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7898 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7900 if (real_isfinite (&x
))
7902 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7903 tree ftype
= TREE_TYPE (arg
);
7904 unsigned HOST_WIDE_INT lo2
;
7905 HOST_WIDE_INT hi
, lo
;
7908 switch (DECL_FUNCTION_CODE (fndecl
))
7910 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7911 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7912 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7915 CASE_FLT_FN (BUILT_IN_LCEIL
):
7916 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7917 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7920 CASE_FLT_FN (BUILT_IN_LROUND
):
7921 CASE_FLT_FN (BUILT_IN_LLROUND
):
7922 real_round (&r
, TYPE_MODE (ftype
), &x
);
7929 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
7930 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
7931 return build_int_cst_wide (itype
, lo2
, hi
);
7935 switch (DECL_FUNCTION_CODE (fndecl
))
7937 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7938 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7939 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7940 if (tree_expr_nonnegative_p (arg
))
7941 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
7947 return fold_fixed_mathfn (fndecl
, arg
);
7950 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7951 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7952 the argument to the call. Return NULL_TREE if no simplification can
7956 fold_builtin_bitop (tree fndecl
, tree arg
)
7958 if (!validate_arg (arg
, INTEGER_TYPE
))
7961 /* Optimize for constant argument. */
7962 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7964 HOST_WIDE_INT hi
, width
, result
;
7965 unsigned HOST_WIDE_INT lo
;
7968 type
= TREE_TYPE (arg
);
7969 width
= TYPE_PRECISION (type
);
7970 lo
= TREE_INT_CST_LOW (arg
);
7972 /* Clear all the bits that are beyond the type's precision. */
7973 if (width
> HOST_BITS_PER_WIDE_INT
)
7975 hi
= TREE_INT_CST_HIGH (arg
);
7976 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
7977 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
7982 if (width
< HOST_BITS_PER_WIDE_INT
)
7983 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
7986 switch (DECL_FUNCTION_CODE (fndecl
))
7988 CASE_INT_FN (BUILT_IN_FFS
):
7990 result
= exact_log2 (lo
& -lo
) + 1;
7992 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
7997 CASE_INT_FN (BUILT_IN_CLZ
):
7999 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8001 result
= width
- floor_log2 (lo
) - 1;
8002 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8006 CASE_INT_FN (BUILT_IN_CTZ
):
8008 result
= exact_log2 (lo
& -lo
);
8010 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
8011 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8015 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8018 result
++, lo
&= lo
- 1;
8020 result
++, hi
&= hi
- 1;
8023 CASE_INT_FN (BUILT_IN_PARITY
):
8026 result
++, lo
&= lo
- 1;
8028 result
++, hi
&= hi
- 1;
8036 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8042 /* Fold function call to builtin_bswap and the long and long long
8043 variants. Return NULL_TREE if no simplification can be made. */
8045 fold_builtin_bswap (tree fndecl
, tree arg
)
8047 if (! validate_arg (arg
, INTEGER_TYPE
))
8050 /* Optimize constant value. */
8051 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8053 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8054 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8057 type
= TREE_TYPE (arg
);
8058 width
= TYPE_PRECISION (type
);
8059 lo
= TREE_INT_CST_LOW (arg
);
8060 hi
= TREE_INT_CST_HIGH (arg
);
8062 switch (DECL_FUNCTION_CODE (fndecl
))
8064 case BUILT_IN_BSWAP32
:
8065 case BUILT_IN_BSWAP64
:
8069 for (s
= 0; s
< width
; s
+= 8)
8071 int d
= width
- s
- 8;
8072 unsigned HOST_WIDE_INT byte
;
8074 if (s
< HOST_BITS_PER_WIDE_INT
)
8075 byte
= (lo
>> s
) & 0xff;
8077 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8079 if (d
< HOST_BITS_PER_WIDE_INT
)
8082 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8092 if (width
< HOST_BITS_PER_WIDE_INT
)
8093 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
8095 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
8101 /* Return true if EXPR is the real constant contained in VALUE. */
8104 real_dconstp (tree expr
, const REAL_VALUE_TYPE
*value
)
8108 return ((TREE_CODE (expr
) == REAL_CST
8109 && !TREE_OVERFLOW (expr
)
8110 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), *value
))
8111 || (TREE_CODE (expr
) == COMPLEX_CST
8112 && real_dconstp (TREE_REALPART (expr
), value
)
8113 && real_zerop (TREE_IMAGPART (expr
))));
8116 /* A subroutine of fold_builtin to fold the various logarithmic
8117 functions. Return NULL_TREE if no simplification can me made.
8118 FUNC is the corresponding MPFR logarithm function. */
8121 fold_builtin_logarithm (tree fndecl
, tree arg
,
8122 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8124 if (validate_arg (arg
, REAL_TYPE
))
8126 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8128 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8130 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8131 instead we'll look for 'e' truncated to MODE. So only do
8132 this if flag_unsafe_math_optimizations is set. */
8133 if (flag_unsafe_math_optimizations
&& func
== mpfr_log
)
8135 const REAL_VALUE_TYPE e_truncated
=
8136 real_value_truncate (TYPE_MODE (type
), dconste
);
8137 if (real_dconstp (arg
, &e_truncated
))
8138 return build_real (type
, dconst1
);
8141 /* Calculate the result when the argument is a constant. */
8142 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8145 /* Special case, optimize logN(expN(x)) = x. */
8146 if (flag_unsafe_math_optimizations
8147 && ((func
== mpfr_log
8148 && (fcode
== BUILT_IN_EXP
8149 || fcode
== BUILT_IN_EXPF
8150 || fcode
== BUILT_IN_EXPL
))
8151 || (func
== mpfr_log2
8152 && (fcode
== BUILT_IN_EXP2
8153 || fcode
== BUILT_IN_EXP2F
8154 || fcode
== BUILT_IN_EXP2L
))
8155 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8156 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8158 /* Optimize logN(func()) for various exponential functions. We
8159 want to determine the value "x" and the power "exponent" in
8160 order to transform logN(x**exponent) into exponent*logN(x). */
8161 if (flag_unsafe_math_optimizations
)
8163 tree exponent
= 0, x
= 0;
8167 CASE_FLT_FN (BUILT_IN_EXP
):
8168 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8169 x
= build_real (type
,
8170 real_value_truncate (TYPE_MODE (type
), dconste
));
8171 exponent
= CALL_EXPR_ARG (arg
, 0);
8173 CASE_FLT_FN (BUILT_IN_EXP2
):
8174 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8175 x
= build_real (type
, dconst2
);
8176 exponent
= CALL_EXPR_ARG (arg
, 0);
8178 CASE_FLT_FN (BUILT_IN_EXP10
):
8179 CASE_FLT_FN (BUILT_IN_POW10
):
8180 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8181 x
= build_real (type
, dconst10
);
8182 exponent
= CALL_EXPR_ARG (arg
, 0);
8184 CASE_FLT_FN (BUILT_IN_SQRT
):
8185 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8186 x
= CALL_EXPR_ARG (arg
, 0);
8187 exponent
= build_real (type
, dconsthalf
);
8189 CASE_FLT_FN (BUILT_IN_CBRT
):
8190 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8191 x
= CALL_EXPR_ARG (arg
, 0);
8192 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8195 CASE_FLT_FN (BUILT_IN_POW
):
8196 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8197 x
= CALL_EXPR_ARG (arg
, 0);
8198 exponent
= CALL_EXPR_ARG (arg
, 1);
8204 /* Now perform the optimization. */
8207 tree logfn
= build_call_expr (fndecl
, 1, x
);
8208 return fold_build2 (MULT_EXPR
, type
, exponent
, logfn
);
8216 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8217 NULL_TREE if no simplification can be made. */
8220 fold_builtin_hypot (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8222 tree res
, narg0
, narg1
;
8224 if (!validate_arg (arg0
, REAL_TYPE
)
8225 || !validate_arg (arg1
, REAL_TYPE
))
8228 /* Calculate the result when the argument is a constant. */
8229 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8232 /* If either argument to hypot has a negate or abs, strip that off.
8233 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8234 narg0
= fold_strip_sign_ops (arg0
);
8235 narg1
= fold_strip_sign_ops (arg1
);
8238 return build_call_expr (fndecl
, 2, narg0
? narg0
: arg0
,
8239 narg1
? narg1
: arg1
);
8242 /* If either argument is zero, hypot is fabs of the other. */
8243 if (real_zerop (arg0
))
8244 return fold_build1 (ABS_EXPR
, type
, arg1
);
8245 else if (real_zerop (arg1
))
8246 return fold_build1 (ABS_EXPR
, type
, arg0
);
8248 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8249 if (flag_unsafe_math_optimizations
8250 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8252 const REAL_VALUE_TYPE sqrt2_trunc
8253 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
8254 return fold_build2 (MULT_EXPR
, type
,
8255 fold_build1 (ABS_EXPR
, type
, arg0
),
8256 build_real (type
, sqrt2_trunc
));
8263 /* Fold a builtin function call to pow, powf, or powl. Return
8264 NULL_TREE if no simplification can be made. */
8266 fold_builtin_pow (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8270 if (!validate_arg (arg0
, REAL_TYPE
)
8271 || !validate_arg (arg1
, REAL_TYPE
))
8274 /* Calculate the result when the argument is a constant. */
8275 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8278 /* Optimize pow(1.0,y) = 1.0. */
8279 if (real_onep (arg0
))
8280 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8282 if (TREE_CODE (arg1
) == REAL_CST
8283 && !TREE_OVERFLOW (arg1
))
8285 REAL_VALUE_TYPE cint
;
8289 c
= TREE_REAL_CST (arg1
);
8291 /* Optimize pow(x,0.0) = 1.0. */
8292 if (REAL_VALUES_EQUAL (c
, dconst0
))
8293 return omit_one_operand (type
, build_real (type
, dconst1
),
8296 /* Optimize pow(x,1.0) = x. */
8297 if (REAL_VALUES_EQUAL (c
, dconst1
))
8300 /* Optimize pow(x,-1.0) = 1.0/x. */
8301 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8302 return fold_build2 (RDIV_EXPR
, type
,
8303 build_real (type
, dconst1
), arg0
);
8305 /* Optimize pow(x,0.5) = sqrt(x). */
8306 if (flag_unsafe_math_optimizations
8307 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8309 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8311 if (sqrtfn
!= NULL_TREE
)
8312 return build_call_expr (sqrtfn
, 1, arg0
);
8315 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8316 if (flag_unsafe_math_optimizations
)
8318 const REAL_VALUE_TYPE dconstroot
8319 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8321 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8323 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8324 if (cbrtfn
!= NULL_TREE
)
8325 return build_call_expr (cbrtfn
, 1, arg0
);
8329 /* Check for an integer exponent. */
8330 n
= real_to_integer (&c
);
8331 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8332 if (real_identical (&c
, &cint
))
8334 /* Attempt to evaluate pow at compile-time. */
8335 if (TREE_CODE (arg0
) == REAL_CST
8336 && !TREE_OVERFLOW (arg0
))
8341 x
= TREE_REAL_CST (arg0
);
8342 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8343 if (flag_unsafe_math_optimizations
|| !inexact
)
8344 return build_real (type
, x
);
8347 /* Strip sign ops from even integer powers. */
8348 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8350 tree narg0
= fold_strip_sign_ops (arg0
);
8352 return build_call_expr (fndecl
, 2, narg0
, arg1
);
8357 if (flag_unsafe_math_optimizations
)
8359 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8361 /* Optimize pow(expN(x),y) = expN(x*y). */
8362 if (BUILTIN_EXPONENT_P (fcode
))
8364 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8365 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8366 arg
= fold_build2 (MULT_EXPR
, type
, arg
, arg1
);
8367 return build_call_expr (expfn
, 1, arg
);
8370 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8371 if (BUILTIN_SQRT_P (fcode
))
8373 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8374 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8375 build_real (type
, dconsthalf
));
8376 return build_call_expr (fndecl
, 2, narg0
, narg1
);
8379 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8380 if (BUILTIN_CBRT_P (fcode
))
8382 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8383 if (tree_expr_nonnegative_p (arg
))
8385 const REAL_VALUE_TYPE dconstroot
8386 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8387 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8388 build_real (type
, dconstroot
));
8389 return build_call_expr (fndecl
, 2, arg
, narg1
);
8393 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8394 if (fcode
== BUILT_IN_POW
8395 || fcode
== BUILT_IN_POWF
8396 || fcode
== BUILT_IN_POWL
)
8398 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8399 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8400 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg01
, arg1
);
8401 return build_call_expr (fndecl
, 2, arg00
, narg1
);
8408 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8409 Return NULL_TREE if no simplification can be made. */
8411 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED
,
8412 tree arg0
, tree arg1
, tree type
)
8414 if (!validate_arg (arg0
, REAL_TYPE
)
8415 || !validate_arg (arg1
, INTEGER_TYPE
))
8418 /* Optimize pow(1.0,y) = 1.0. */
8419 if (real_onep (arg0
))
8420 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8422 if (host_integerp (arg1
, 0))
8424 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8426 /* Evaluate powi at compile-time. */
8427 if (TREE_CODE (arg0
) == REAL_CST
8428 && !TREE_OVERFLOW (arg0
))
8431 x
= TREE_REAL_CST (arg0
);
8432 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8433 return build_real (type
, x
);
8436 /* Optimize pow(x,0) = 1.0. */
8438 return omit_one_operand (type
, build_real (type
, dconst1
),
8441 /* Optimize pow(x,1) = x. */
8445 /* Optimize pow(x,-1) = 1.0/x. */
8447 return fold_build2 (RDIV_EXPR
, type
,
8448 build_real (type
, dconst1
), arg0
);
8454 /* A subroutine of fold_builtin to fold the various exponent
8455 functions. Return NULL_TREE if no simplification can be made.
8456 FUNC is the corresponding MPFR exponent function. */
8459 fold_builtin_exponent (tree fndecl
, tree arg
,
8460 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8462 if (validate_arg (arg
, REAL_TYPE
))
8464 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8467 /* Calculate the result when the argument is a constant. */
8468 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8471 /* Optimize expN(logN(x)) = x. */
8472 if (flag_unsafe_math_optimizations
)
8474 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8476 if ((func
== mpfr_exp
8477 && (fcode
== BUILT_IN_LOG
8478 || fcode
== BUILT_IN_LOGF
8479 || fcode
== BUILT_IN_LOGL
))
8480 || (func
== mpfr_exp2
8481 && (fcode
== BUILT_IN_LOG2
8482 || fcode
== BUILT_IN_LOG2F
8483 || fcode
== BUILT_IN_LOG2L
))
8484 || (func
== mpfr_exp10
8485 && (fcode
== BUILT_IN_LOG10
8486 || fcode
== BUILT_IN_LOG10F
8487 || fcode
== BUILT_IN_LOG10L
)))
8488 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8495 /* Return true if VAR is a VAR_DECL or a component thereof. */
8498 var_decl_component_p (tree var
)
8501 while (handled_component_p (inner
))
8502 inner
= TREE_OPERAND (inner
, 0);
8503 return SSA_VAR_P (inner
);
8506 /* Fold function call to builtin memset. Return
8507 NULL_TREE if no simplification can be made. */
8510 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8513 unsigned HOST_WIDE_INT length
, cval
;
8515 if (! validate_arg (dest
, POINTER_TYPE
)
8516 || ! validate_arg (c
, INTEGER_TYPE
)
8517 || ! validate_arg (len
, INTEGER_TYPE
))
8520 if (! host_integerp (len
, 1))
8523 /* If the LEN parameter is zero, return DEST. */
8524 if (integer_zerop (len
))
8525 return omit_one_operand (type
, dest
, c
);
8527 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8532 if (TREE_CODE (var
) != ADDR_EXPR
)
8535 var
= TREE_OPERAND (var
, 0);
8536 if (TREE_THIS_VOLATILE (var
))
8539 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8540 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8543 if (! var_decl_component_p (var
))
8546 length
= tree_low_cst (len
, 1);
8547 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8548 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8552 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8555 if (integer_zerop (c
))
8559 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8562 cval
= tree_low_cst (c
, 1);
8566 cval
|= (cval
<< 31) << 1;
8569 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8570 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8574 return omit_one_operand (type
, dest
, ret
);
8577 /* Fold function call to builtin memset. Return
8578 NULL_TREE if no simplification can be made. */
8581 fold_builtin_bzero (tree dest
, tree size
, bool ignore
)
8583 if (! validate_arg (dest
, POINTER_TYPE
)
8584 || ! validate_arg (size
, INTEGER_TYPE
))
8590 /* New argument list transforming bzero(ptr x, int y) to
8591 memset(ptr x, int 0, size_t y). This is done this way
8592 so that if it isn't expanded inline, we fallback to
8593 calling bzero instead of memset. */
8595 return fold_builtin_memset (dest
, integer_zero_node
,
8596 fold_convert (sizetype
, size
),
8597 void_type_node
, ignore
);
8600 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8601 NULL_TREE if no simplification can be made.
8602 If ENDP is 0, return DEST (like memcpy).
8603 If ENDP is 1, return DEST+LEN (like mempcpy).
8604 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8605 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8609 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8611 tree destvar
, srcvar
, expr
;
8613 if (! validate_arg (dest
, POINTER_TYPE
)
8614 || ! validate_arg (src
, POINTER_TYPE
)
8615 || ! validate_arg (len
, INTEGER_TYPE
))
8618 /* If the LEN parameter is zero, return DEST. */
8619 if (integer_zerop (len
))
8620 return omit_one_operand (type
, dest
, src
);
8622 /* If SRC and DEST are the same (and not volatile), return
8623 DEST{,+LEN,+LEN-1}. */
8624 if (operand_equal_p (src
, dest
, 0))
8628 tree srctype
, desttype
;
8631 int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8632 int dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8634 /* Both DEST and SRC must be pointer types.
8635 ??? This is what old code did. Is the testing for pointer types
8638 If either SRC is readonly or length is 1, we can use memcpy. */
8639 if (dest_align
&& src_align
8640 && (readonly_data_expr (src
)
8641 || (host_integerp (len
, 1)
8642 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8643 tree_low_cst (len
, 1)))))
8645 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8648 return build_call_expr (fn
, 3, dest
, src
, len
);
8653 if (!host_integerp (len
, 0))
8656 This logic lose for arguments like (type *)malloc (sizeof (type)),
8657 since we strip the casts of up to VOID return value from malloc.
8658 Perhaps we ought to inherit type from non-VOID argument here? */
8661 srctype
= TREE_TYPE (TREE_TYPE (src
));
8662 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8663 if (!srctype
|| !desttype
8664 || !TYPE_SIZE_UNIT (srctype
)
8665 || !TYPE_SIZE_UNIT (desttype
)
8666 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8667 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
8668 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8669 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8672 if (get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
)
8673 < (int) TYPE_ALIGN (desttype
)
8674 || (get_pointer_alignment (src
, BIGGEST_ALIGNMENT
)
8675 < (int) TYPE_ALIGN (srctype
)))
8679 dest
= builtin_save_expr (dest
);
8681 srcvar
= build_fold_indirect_ref (src
);
8682 if (TREE_THIS_VOLATILE (srcvar
))
8684 if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
8686 /* With memcpy, it is possible to bypass aliasing rules, so without
8687 this check i. e. execute/20060930-2.c would be misoptimized, because
8688 it use conflicting alias set to hold argument for the memcpy call.
8689 This check is probably unnecesary with -fno-strict-aliasing.
8690 Similarly for destvar. See also PR29286. */
8691 if (!var_decl_component_p (srcvar
)
8692 /* Accept: memcpy (*char_var, "test", 1); that simplify
8694 || is_gimple_min_invariant (srcvar
)
8695 || readonly_data_expr (src
))
8698 destvar
= build_fold_indirect_ref (dest
);
8699 if (TREE_THIS_VOLATILE (destvar
))
8701 if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8703 if (!var_decl_component_p (destvar
))
8706 if (srctype
== desttype
8707 || (gimple_in_ssa_p (cfun
)
8708 && useless_type_conversion_p (desttype
, srctype
)))
8710 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8711 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8712 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8713 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8714 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8716 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8717 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8723 if (endp
== 0 || endp
== 3)
8724 return omit_one_operand (type
, dest
, expr
);
8730 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
8733 dest
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8734 dest
= fold_convert (type
, dest
);
8736 dest
= omit_one_operand (type
, dest
, expr
);
8740 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8741 If LEN is not NULL, it represents the length of the string to be
8742 copied. Return NULL_TREE if no simplification can be made. */
8745 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
8749 if (!validate_arg (dest
, POINTER_TYPE
)
8750 || !validate_arg (src
, POINTER_TYPE
))
8753 /* If SRC and DEST are the same (and not volatile), return DEST. */
8754 if (operand_equal_p (src
, dest
, 0))
8755 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8760 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8766 len
= c_strlen (src
, 1);
8767 if (! len
|| TREE_SIDE_EFFECTS (len
))
8771 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
8772 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8773 build_call_expr (fn
, 3, dest
, src
, len
));
8776 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8777 If SLEN is not NULL, it represents the length of the source string.
8778 Return NULL_TREE if no simplification can be made. */
8781 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
8785 if (!validate_arg (dest
, POINTER_TYPE
)
8786 || !validate_arg (src
, POINTER_TYPE
)
8787 || !validate_arg (len
, INTEGER_TYPE
))
8790 /* If the LEN parameter is zero, return DEST. */
8791 if (integer_zerop (len
))
8792 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8794 /* We can't compare slen with len as constants below if len is not a
8796 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8800 slen
= c_strlen (src
, 1);
8802 /* Now, we must be passed a constant src ptr parameter. */
8803 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8806 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
8808 /* We do not support simplification of this case, though we do
8809 support it when expanding trees into RTL. */
8810 /* FIXME: generate a call to __builtin_memset. */
8811 if (tree_int_cst_lt (slen
, len
))
8814 /* OK transform into builtin memcpy. */
8815 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8818 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8819 build_call_expr (fn
, 3, dest
, src
, len
));
8822 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8823 arguments to the call, and TYPE is its return type.
8824 Return NULL_TREE if no simplification can be made. */
8827 fold_builtin_memchr (tree arg1
, tree arg2
, tree len
, tree type
)
8829 if (!validate_arg (arg1
, POINTER_TYPE
)
8830 || !validate_arg (arg2
, INTEGER_TYPE
)
8831 || !validate_arg (len
, INTEGER_TYPE
))
8837 if (TREE_CODE (arg2
) != INTEGER_CST
8838 || !host_integerp (len
, 1))
8841 p1
= c_getstr (arg1
);
8842 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8848 if (target_char_cast (arg2
, &c
))
8851 r
= memchr (p1
, c
, tree_low_cst (len
, 1));
8854 return build_int_cst (TREE_TYPE (arg1
), 0);
8856 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
8858 return fold_convert (type
, tem
);
8864 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8865 Return NULL_TREE if no simplification can be made. */
8868 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
8870 const char *p1
, *p2
;
8872 if (!validate_arg (arg1
, POINTER_TYPE
)
8873 || !validate_arg (arg2
, POINTER_TYPE
)
8874 || !validate_arg (len
, INTEGER_TYPE
))
8877 /* If the LEN parameter is zero, return zero. */
8878 if (integer_zerop (len
))
8879 return omit_two_operands (integer_type_node
, integer_zero_node
,
8882 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8883 if (operand_equal_p (arg1
, arg2
, 0))
8884 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8886 p1
= c_getstr (arg1
);
8887 p2
= c_getstr (arg2
);
8889 /* If all arguments are constant, and the value of len is not greater
8890 than the lengths of arg1 and arg2, evaluate at compile-time. */
8891 if (host_integerp (len
, 1) && p1
&& p2
8892 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8893 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8895 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8898 return integer_one_node
;
8900 return integer_minus_one_node
;
8902 return integer_zero_node
;
8905 /* If len parameter is one, return an expression corresponding to
8906 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8907 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8909 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8910 tree cst_uchar_ptr_node
8911 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8913 tree ind1
= fold_convert (integer_type_node
,
8914 build1 (INDIRECT_REF
, cst_uchar_node
,
8915 fold_convert (cst_uchar_ptr_node
,
8917 tree ind2
= fold_convert (integer_type_node
,
8918 build1 (INDIRECT_REF
, cst_uchar_node
,
8919 fold_convert (cst_uchar_ptr_node
,
8921 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8927 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8928 Return NULL_TREE if no simplification can be made. */
8931 fold_builtin_strcmp (tree arg1
, tree arg2
)
8933 const char *p1
, *p2
;
8935 if (!validate_arg (arg1
, POINTER_TYPE
)
8936 || !validate_arg (arg2
, POINTER_TYPE
))
8939 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8940 if (operand_equal_p (arg1
, arg2
, 0))
8941 return integer_zero_node
;
8943 p1
= c_getstr (arg1
);
8944 p2
= c_getstr (arg2
);
8948 const int i
= strcmp (p1
, p2
);
8950 return integer_minus_one_node
;
8952 return integer_one_node
;
8954 return integer_zero_node
;
8957 /* If the second arg is "", return *(const unsigned char*)arg1. */
8958 if (p2
&& *p2
== '\0')
8960 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8961 tree cst_uchar_ptr_node
8962 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8964 return fold_convert (integer_type_node
,
8965 build1 (INDIRECT_REF
, cst_uchar_node
,
8966 fold_convert (cst_uchar_ptr_node
,
8970 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8971 if (p1
&& *p1
== '\0')
8973 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8974 tree cst_uchar_ptr_node
8975 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8977 tree temp
= fold_convert (integer_type_node
,
8978 build1 (INDIRECT_REF
, cst_uchar_node
,
8979 fold_convert (cst_uchar_ptr_node
,
8981 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
8987 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8988 Return NULL_TREE if no simplification can be made. */
8991 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
8993 const char *p1
, *p2
;
8995 if (!validate_arg (arg1
, POINTER_TYPE
)
8996 || !validate_arg (arg2
, POINTER_TYPE
)
8997 || !validate_arg (len
, INTEGER_TYPE
))
9000 /* If the LEN parameter is zero, return zero. */
9001 if (integer_zerop (len
))
9002 return omit_two_operands (integer_type_node
, integer_zero_node
,
9005 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9006 if (operand_equal_p (arg1
, arg2
, 0))
9007 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
9009 p1
= c_getstr (arg1
);
9010 p2
= c_getstr (arg2
);
9012 if (host_integerp (len
, 1) && p1
&& p2
)
9014 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9016 return integer_one_node
;
9018 return integer_minus_one_node
;
9020 return integer_zero_node
;
9023 /* If the second arg is "", and the length is greater than zero,
9024 return *(const unsigned char*)arg1. */
9025 if (p2
&& *p2
== '\0'
9026 && TREE_CODE (len
) == INTEGER_CST
9027 && tree_int_cst_sgn (len
) == 1)
9029 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9030 tree cst_uchar_ptr_node
9031 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9033 return fold_convert (integer_type_node
,
9034 build1 (INDIRECT_REF
, cst_uchar_node
,
9035 fold_convert (cst_uchar_ptr_node
,
9039 /* If the first arg is "", and the length is greater than zero,
9040 return -*(const unsigned char*)arg2. */
9041 if (p1
&& *p1
== '\0'
9042 && TREE_CODE (len
) == INTEGER_CST
9043 && tree_int_cst_sgn (len
) == 1)
9045 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9046 tree cst_uchar_ptr_node
9047 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9049 tree temp
= fold_convert (integer_type_node
,
9050 build1 (INDIRECT_REF
, cst_uchar_node
,
9051 fold_convert (cst_uchar_ptr_node
,
9053 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9056 /* If len parameter is one, return an expression corresponding to
9057 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9058 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9060 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9061 tree cst_uchar_ptr_node
9062 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9064 tree ind1
= fold_convert (integer_type_node
,
9065 build1 (INDIRECT_REF
, cst_uchar_node
,
9066 fold_convert (cst_uchar_ptr_node
,
9068 tree ind2
= fold_convert (integer_type_node
,
9069 build1 (INDIRECT_REF
, cst_uchar_node
,
9070 fold_convert (cst_uchar_ptr_node
,
9072 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9078 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9079 ARG. Return NULL_TREE if no simplification can be made. */
9082 fold_builtin_signbit (tree arg
, tree type
)
9086 if (!validate_arg (arg
, REAL_TYPE
))
9089 /* If ARG is a compile-time constant, determine the result. */
9090 if (TREE_CODE (arg
) == REAL_CST
9091 && !TREE_OVERFLOW (arg
))
9095 c
= TREE_REAL_CST (arg
);
9096 temp
= REAL_VALUE_NEGATIVE (c
) ? integer_one_node
: integer_zero_node
;
9097 return fold_convert (type
, temp
);
9100 /* If ARG is non-negative, the result is always zero. */
9101 if (tree_expr_nonnegative_p (arg
))
9102 return omit_one_operand (type
, integer_zero_node
, arg
);
9104 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9106 return fold_build2 (LT_EXPR
, type
, arg
,
9107 build_real (TREE_TYPE (arg
), dconst0
));
9112 /* Fold function call to builtin copysign, copysignf or copysignl with
9113 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9117 fold_builtin_copysign (tree fndecl
, tree arg1
, tree arg2
, tree type
)
9121 if (!validate_arg (arg1
, REAL_TYPE
)
9122 || !validate_arg (arg2
, REAL_TYPE
))
9125 /* copysign(X,X) is X. */
9126 if (operand_equal_p (arg1
, arg2
, 0))
9127 return fold_convert (type
, arg1
);
9129 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9130 if (TREE_CODE (arg1
) == REAL_CST
9131 && TREE_CODE (arg2
) == REAL_CST
9132 && !TREE_OVERFLOW (arg1
)
9133 && !TREE_OVERFLOW (arg2
))
9135 REAL_VALUE_TYPE c1
, c2
;
9137 c1
= TREE_REAL_CST (arg1
);
9138 c2
= TREE_REAL_CST (arg2
);
9139 /* c1.sign := c2.sign. */
9140 real_copysign (&c1
, &c2
);
9141 return build_real (type
, c1
);
9144 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9145 Remember to evaluate Y for side-effects. */
9146 if (tree_expr_nonnegative_p (arg2
))
9147 return omit_one_operand (type
,
9148 fold_build1 (ABS_EXPR
, type
, arg1
),
9151 /* Strip sign changing operations for the first argument. */
9152 tem
= fold_strip_sign_ops (arg1
);
9154 return build_call_expr (fndecl
, 2, tem
, arg2
);
9159 /* Fold a call to builtin isascii with argument ARG. */
9162 fold_builtin_isascii (tree arg
)
9164 if (!validate_arg (arg
, INTEGER_TYPE
))
9168 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9169 arg
= build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9170 build_int_cst (NULL_TREE
,
9171 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9172 return fold_build2 (EQ_EXPR
, integer_type_node
,
9173 arg
, integer_zero_node
);
9177 /* Fold a call to builtin toascii with argument ARG. */
9180 fold_builtin_toascii (tree arg
)
9182 if (!validate_arg (arg
, INTEGER_TYPE
))
9185 /* Transform toascii(c) -> (c & 0x7f). */
9186 return fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9187 build_int_cst (NULL_TREE
, 0x7f));
9190 /* Fold a call to builtin isdigit with argument ARG. */
9193 fold_builtin_isdigit (tree arg
)
9195 if (!validate_arg (arg
, INTEGER_TYPE
))
9199 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9200 /* According to the C standard, isdigit is unaffected by locale.
9201 However, it definitely is affected by the target character set. */
9202 unsigned HOST_WIDE_INT target_digit0
9203 = lang_hooks
.to_target_charset ('0');
9205 if (target_digit0
== 0)
9208 arg
= fold_convert (unsigned_type_node
, arg
);
9209 arg
= build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9210 build_int_cst (unsigned_type_node
, target_digit0
));
9211 return fold_build2 (LE_EXPR
, integer_type_node
, arg
,
9212 build_int_cst (unsigned_type_node
, 9));
9216 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9219 fold_builtin_fabs (tree arg
, tree type
)
9221 if (!validate_arg (arg
, REAL_TYPE
))
9224 arg
= fold_convert (type
, arg
);
9225 if (TREE_CODE (arg
) == REAL_CST
)
9226 return fold_abs_const (arg
, type
);
9227 return fold_build1 (ABS_EXPR
, type
, arg
);
9230 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9233 fold_builtin_abs (tree arg
, tree type
)
9235 if (!validate_arg (arg
, INTEGER_TYPE
))
9238 arg
= fold_convert (type
, arg
);
9239 if (TREE_CODE (arg
) == INTEGER_CST
)
9240 return fold_abs_const (arg
, type
);
9241 return fold_build1 (ABS_EXPR
, type
, arg
);
9244 /* Fold a call to builtin fmin or fmax. */
9247 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
9249 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9251 /* Calculate the result when the argument is a constant. */
9252 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9257 /* If either argument is NaN, return the other one. Avoid the
9258 transformation if we get (and honor) a signalling NaN. Using
9259 omit_one_operand() ensures we create a non-lvalue. */
9260 if (TREE_CODE (arg0
) == REAL_CST
9261 && real_isnan (&TREE_REAL_CST (arg0
))
9262 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9263 || ! TREE_REAL_CST (arg0
).signalling
))
9264 return omit_one_operand (type
, arg1
, arg0
);
9265 if (TREE_CODE (arg1
) == REAL_CST
9266 && real_isnan (&TREE_REAL_CST (arg1
))
9267 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9268 || ! TREE_REAL_CST (arg1
).signalling
))
9269 return omit_one_operand (type
, arg0
, arg1
);
9271 /* Transform fmin/fmax(x,x) -> x. */
9272 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9273 return omit_one_operand (type
, arg0
, arg1
);
9275 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9276 functions to return the numeric arg if the other one is NaN.
9277 These tree codes don't honor that, so only transform if
9278 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9279 handled, so we don't have to worry about it either. */
9280 if (flag_finite_math_only
)
9281 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
9282 fold_convert (type
, arg0
),
9283 fold_convert (type
, arg1
));
9288 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9291 fold_builtin_carg (tree arg
, tree type
)
9293 if (validate_arg (arg
, COMPLEX_TYPE
))
9295 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9299 tree new_arg
= builtin_save_expr (arg
);
9300 tree r_arg
= fold_build1 (REALPART_EXPR
, type
, new_arg
);
9301 tree i_arg
= fold_build1 (IMAGPART_EXPR
, type
, new_arg
);
9302 return build_call_expr (atan2_fn
, 2, i_arg
, r_arg
);
9309 /* Fold a call to builtin logb/ilogb. */
9312 fold_builtin_logb (tree arg
, tree rettype
)
9314 if (! validate_arg (arg
, REAL_TYPE
))
9319 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9321 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9327 /* If arg is Inf or NaN and we're logb, return it. */
9328 if (TREE_CODE (rettype
) == REAL_TYPE
)
9329 return fold_convert (rettype
, arg
);
9330 /* Fall through... */
9332 /* Zero may set errno and/or raise an exception for logb, also
9333 for ilogb we don't know FP_ILOGB0. */
9336 /* For normal numbers, proceed iff radix == 2. In GCC,
9337 normalized significands are in the range [0.5, 1.0). We
9338 want the exponent as if they were [1.0, 2.0) so get the
9339 exponent and subtract 1. */
9340 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9341 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9342 REAL_EXP (value
)-1));
9350 /* Fold a call to builtin significand, if radix == 2. */
9353 fold_builtin_significand (tree arg
, tree rettype
)
9355 if (! validate_arg (arg
, REAL_TYPE
))
9360 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9362 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9369 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9370 return fold_convert (rettype
, arg
);
9372 /* For normal numbers, proceed iff radix == 2. */
9373 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9375 REAL_VALUE_TYPE result
= *value
;
9376 /* In GCC, normalized significands are in the range [0.5,
9377 1.0). We want them to be [1.0, 2.0) so set the
9379 SET_REAL_EXP (&result
, 1);
9380 return build_real (rettype
, result
);
9389 /* Fold a call to builtin frexp, we can assume the base is 2. */
9392 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9394 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9399 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9402 arg1
= build_fold_indirect_ref (arg1
);
9404 /* Proceed if a valid pointer type was passed in. */
9405 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9407 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9413 /* For +-0, return (*exp = 0, +-0). */
9414 exp
= integer_zero_node
;
9419 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9420 return omit_one_operand (rettype
, arg0
, arg1
);
9423 /* Since the frexp function always expects base 2, and in
9424 GCC normalized significands are already in the range
9425 [0.5, 1.0), we have exactly what frexp wants. */
9426 REAL_VALUE_TYPE frac_rvt
= *value
;
9427 SET_REAL_EXP (&frac_rvt
, 0);
9428 frac
= build_real (rettype
, frac_rvt
);
9429 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9436 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9437 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9438 TREE_SIDE_EFFECTS (arg1
) = 1;
9439 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9445 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9446 then we can assume the base is two. If it's false, then we have to
9447 check the mode of the TYPE parameter in certain cases. */
9450 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9452 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9457 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9458 if (real_zerop (arg0
) || integer_zerop (arg1
)
9459 || (TREE_CODE (arg0
) == REAL_CST
9460 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9461 return omit_one_operand (type
, arg0
, arg1
);
9463 /* If both arguments are constant, then try to evaluate it. */
9464 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9465 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9466 && host_integerp (arg1
, 0))
9468 /* Bound the maximum adjustment to twice the range of the
9469 mode's valid exponents. Use abs to ensure the range is
9470 positive as a sanity check. */
9471 const long max_exp_adj
= 2 *
9472 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9473 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9475 /* Get the user-requested adjustment. */
9476 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9478 /* The requested adjustment must be inside this range. This
9479 is a preliminary cap to avoid things like overflow, we
9480 may still fail to compute the result for other reasons. */
9481 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9483 REAL_VALUE_TYPE initial_result
;
9485 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9487 /* Ensure we didn't overflow. */
9488 if (! real_isinf (&initial_result
))
9490 const REAL_VALUE_TYPE trunc_result
9491 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9493 /* Only proceed if the target mode can hold the
9495 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9496 return build_real (type
, trunc_result
);
9505 /* Fold a call to builtin modf. */
9508 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9510 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9515 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9518 arg1
= build_fold_indirect_ref (arg1
);
9520 /* Proceed if a valid pointer type was passed in. */
9521 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9523 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9524 REAL_VALUE_TYPE trunc
, frac
;
9530 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9531 trunc
= frac
= *value
;
9534 /* For +-Inf, return (*arg1 = arg0, +-0). */
9536 frac
.sign
= value
->sign
;
9540 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9541 real_trunc (&trunc
, VOIDmode
, value
);
9542 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9543 /* If the original number was negative and already
9544 integral, then the fractional part is -0.0. */
9545 if (value
->sign
&& frac
.cl
== rvc_zero
)
9546 frac
.sign
= value
->sign
;
9550 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9551 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9552 build_real (rettype
, trunc
));
9553 TREE_SIDE_EFFECTS (arg1
) = 1;
9554 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9555 build_real (rettype
, frac
));
9561 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9562 ARG is the argument for the call. */
9565 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9567 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9570 if (!validate_arg (arg
, REAL_TYPE
))
9572 error ("non-floating-point argument to function %qs",
9573 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9574 return error_mark_node
;
9577 switch (builtin_index
)
9579 case BUILT_IN_ISINF
:
9580 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9581 return omit_one_operand (type
, integer_zero_node
, arg
);
9583 if (TREE_CODE (arg
) == REAL_CST
)
9585 r
= TREE_REAL_CST (arg
);
9586 if (real_isinf (&r
))
9587 return real_compare (GT_EXPR
, &r
, &dconst0
)
9588 ? integer_one_node
: integer_minus_one_node
;
9590 return integer_zero_node
;
9595 case BUILT_IN_ISFINITE
:
9596 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9597 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9598 return omit_one_operand (type
, integer_one_node
, arg
);
9600 if (TREE_CODE (arg
) == REAL_CST
)
9602 r
= TREE_REAL_CST (arg
);
9603 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9608 case BUILT_IN_ISNAN
:
9609 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9610 return omit_one_operand (type
, integer_zero_node
, arg
);
9612 if (TREE_CODE (arg
) == REAL_CST
)
9614 r
= TREE_REAL_CST (arg
);
9615 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9618 arg
= builtin_save_expr (arg
);
9619 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9626 /* Fold a call to an unordered comparison function such as
9627 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9628 being called and ARG0 and ARG1 are the arguments for the call.
9629 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9630 the opposite of the desired result. UNORDERED_CODE is used
9631 for modes that can hold NaNs and ORDERED_CODE is used for
9635 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
9636 enum tree_code unordered_code
,
9637 enum tree_code ordered_code
)
9639 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9640 enum tree_code code
;
9642 enum tree_code code0
, code1
;
9643 tree cmp_type
= NULL_TREE
;
9645 type0
= TREE_TYPE (arg0
);
9646 type1
= TREE_TYPE (arg1
);
9648 code0
= TREE_CODE (type0
);
9649 code1
= TREE_CODE (type1
);
9651 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9652 /* Choose the wider of two real types. */
9653 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9655 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9657 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9661 error ("non-floating-point argument to function %qs",
9662 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9663 return error_mark_node
;
9666 arg0
= fold_convert (cmp_type
, arg0
);
9667 arg1
= fold_convert (cmp_type
, arg1
);
9669 if (unordered_code
== UNORDERED_EXPR
)
9671 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9672 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
9673 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
9676 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9678 return fold_build1 (TRUTH_NOT_EXPR
, type
,
9679 fold_build2 (code
, type
, arg0
, arg1
));
9682 /* Fold a call to built-in function FNDECL with 0 arguments.
9683 IGNORE is true if the result of the function call is ignored. This
9684 function returns NULL_TREE if no simplification was possible. */
9687 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9689 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9690 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9693 CASE_FLT_FN (BUILT_IN_INF
):
9694 case BUILT_IN_INFD32
:
9695 case BUILT_IN_INFD64
:
9696 case BUILT_IN_INFD128
:
9697 return fold_builtin_inf (type
, true);
9699 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9700 return fold_builtin_inf (type
, false);
9702 case BUILT_IN_CLASSIFY_TYPE
:
9703 return fold_builtin_classify_type (NULL_TREE
);
9711 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9712 IGNORE is true if the result of the function call is ignored. This
9713 function returns NULL_TREE if no simplification was possible. */
9716 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
9718 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9719 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9723 case BUILT_IN_CONSTANT_P
:
9725 tree val
= fold_builtin_constant_p (arg0
);
9727 /* Gimplification will pull the CALL_EXPR for the builtin out of
9728 an if condition. When not optimizing, we'll not CSE it back.
9729 To avoid link error types of regressions, return false now. */
9730 if (!val
&& !optimize
)
9731 val
= integer_zero_node
;
9736 case BUILT_IN_CLASSIFY_TYPE
:
9737 return fold_builtin_classify_type (arg0
);
9739 case BUILT_IN_STRLEN
:
9740 return fold_builtin_strlen (arg0
);
9742 CASE_FLT_FN (BUILT_IN_FABS
):
9743 return fold_builtin_fabs (arg0
, type
);
9747 case BUILT_IN_LLABS
:
9748 case BUILT_IN_IMAXABS
:
9749 return fold_builtin_abs (arg0
, type
);
9751 CASE_FLT_FN (BUILT_IN_CONJ
):
9752 if (validate_arg (arg0
, COMPLEX_TYPE
))
9753 return fold_build1 (CONJ_EXPR
, type
, arg0
);
9756 CASE_FLT_FN (BUILT_IN_CREAL
):
9757 if (validate_arg (arg0
, COMPLEX_TYPE
))
9758 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));;
9761 CASE_FLT_FN (BUILT_IN_CIMAG
):
9762 if (validate_arg (arg0
, COMPLEX_TYPE
))
9763 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
9766 CASE_FLT_FN (BUILT_IN_CCOS
):
9767 CASE_FLT_FN (BUILT_IN_CCOSH
):
9768 /* These functions are "even", i.e. f(x) == f(-x). */
9769 if (validate_arg (arg0
, COMPLEX_TYPE
))
9771 tree narg
= fold_strip_sign_ops (arg0
);
9773 return build_call_expr (fndecl
, 1, narg
);
9777 CASE_FLT_FN (BUILT_IN_CABS
):
9778 return fold_builtin_cabs (arg0
, type
, fndecl
);
9780 CASE_FLT_FN (BUILT_IN_CARG
):
9781 return fold_builtin_carg (arg0
, type
);
9783 CASE_FLT_FN (BUILT_IN_SQRT
):
9784 return fold_builtin_sqrt (arg0
, type
);
9786 CASE_FLT_FN (BUILT_IN_CBRT
):
9787 return fold_builtin_cbrt (arg0
, type
);
9789 CASE_FLT_FN (BUILT_IN_ASIN
):
9790 if (validate_arg (arg0
, REAL_TYPE
))
9791 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9792 &dconstm1
, &dconst1
, true);
9795 CASE_FLT_FN (BUILT_IN_ACOS
):
9796 if (validate_arg (arg0
, REAL_TYPE
))
9797 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9798 &dconstm1
, &dconst1
, true);
9801 CASE_FLT_FN (BUILT_IN_ATAN
):
9802 if (validate_arg (arg0
, REAL_TYPE
))
9803 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9806 CASE_FLT_FN (BUILT_IN_ASINH
):
9807 if (validate_arg (arg0
, REAL_TYPE
))
9808 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9811 CASE_FLT_FN (BUILT_IN_ACOSH
):
9812 if (validate_arg (arg0
, REAL_TYPE
))
9813 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9814 &dconst1
, NULL
, true);
9817 CASE_FLT_FN (BUILT_IN_ATANH
):
9818 if (validate_arg (arg0
, REAL_TYPE
))
9819 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9820 &dconstm1
, &dconst1
, false);
9823 CASE_FLT_FN (BUILT_IN_SIN
):
9824 if (validate_arg (arg0
, REAL_TYPE
))
9825 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9828 CASE_FLT_FN (BUILT_IN_COS
):
9829 return fold_builtin_cos (arg0
, type
, fndecl
);
9832 CASE_FLT_FN (BUILT_IN_TAN
):
9833 return fold_builtin_tan (arg0
, type
);
9835 CASE_FLT_FN (BUILT_IN_CEXP
):
9836 return fold_builtin_cexp (arg0
, type
);
9838 CASE_FLT_FN (BUILT_IN_CEXPI
):
9839 if (validate_arg (arg0
, REAL_TYPE
))
9840 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9843 CASE_FLT_FN (BUILT_IN_SINH
):
9844 if (validate_arg (arg0
, REAL_TYPE
))
9845 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9848 CASE_FLT_FN (BUILT_IN_COSH
):
9849 return fold_builtin_cosh (arg0
, type
, fndecl
);
9851 CASE_FLT_FN (BUILT_IN_TANH
):
9852 if (validate_arg (arg0
, REAL_TYPE
))
9853 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9856 CASE_FLT_FN (BUILT_IN_ERF
):
9857 if (validate_arg (arg0
, REAL_TYPE
))
9858 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9861 CASE_FLT_FN (BUILT_IN_ERFC
):
9862 if (validate_arg (arg0
, REAL_TYPE
))
9863 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9866 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9867 if (validate_arg (arg0
, REAL_TYPE
))
9868 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9871 CASE_FLT_FN (BUILT_IN_EXP
):
9872 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
9874 CASE_FLT_FN (BUILT_IN_EXP2
):
9875 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
9877 CASE_FLT_FN (BUILT_IN_EXP10
):
9878 CASE_FLT_FN (BUILT_IN_POW10
):
9879 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
9881 CASE_FLT_FN (BUILT_IN_EXPM1
):
9882 if (validate_arg (arg0
, REAL_TYPE
))
9883 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9886 CASE_FLT_FN (BUILT_IN_LOG
):
9887 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
9889 CASE_FLT_FN (BUILT_IN_LOG2
):
9890 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
9892 CASE_FLT_FN (BUILT_IN_LOG10
):
9893 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
9895 CASE_FLT_FN (BUILT_IN_LOG1P
):
9896 if (validate_arg (arg0
, REAL_TYPE
))
9897 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9898 &dconstm1
, NULL
, false);
9901 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9902 CASE_FLT_FN (BUILT_IN_J0
):
9903 if (validate_arg (arg0
, REAL_TYPE
))
9904 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
9908 CASE_FLT_FN (BUILT_IN_J1
):
9909 if (validate_arg (arg0
, REAL_TYPE
))
9910 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
9914 CASE_FLT_FN (BUILT_IN_Y0
):
9915 if (validate_arg (arg0
, REAL_TYPE
))
9916 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
9917 &dconst0
, NULL
, false);
9920 CASE_FLT_FN (BUILT_IN_Y1
):
9921 if (validate_arg (arg0
, REAL_TYPE
))
9922 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
9923 &dconst0
, NULL
, false);
9927 CASE_FLT_FN (BUILT_IN_NAN
):
9928 case BUILT_IN_NAND32
:
9929 case BUILT_IN_NAND64
:
9930 case BUILT_IN_NAND128
:
9931 return fold_builtin_nan (arg0
, type
, true);
9933 CASE_FLT_FN (BUILT_IN_NANS
):
9934 return fold_builtin_nan (arg0
, type
, false);
9936 CASE_FLT_FN (BUILT_IN_FLOOR
):
9937 return fold_builtin_floor (fndecl
, arg0
);
9939 CASE_FLT_FN (BUILT_IN_CEIL
):
9940 return fold_builtin_ceil (fndecl
, arg0
);
9942 CASE_FLT_FN (BUILT_IN_TRUNC
):
9943 return fold_builtin_trunc (fndecl
, arg0
);
9945 CASE_FLT_FN (BUILT_IN_ROUND
):
9946 return fold_builtin_round (fndecl
, arg0
);
9948 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9949 CASE_FLT_FN (BUILT_IN_RINT
):
9950 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
9952 CASE_FLT_FN (BUILT_IN_LCEIL
):
9953 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9954 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9955 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9956 CASE_FLT_FN (BUILT_IN_LROUND
):
9957 CASE_FLT_FN (BUILT_IN_LLROUND
):
9958 return fold_builtin_int_roundingfn (fndecl
, arg0
);
9960 CASE_FLT_FN (BUILT_IN_LRINT
):
9961 CASE_FLT_FN (BUILT_IN_LLRINT
):
9962 return fold_fixed_mathfn (fndecl
, arg0
);
9964 case BUILT_IN_BSWAP32
:
9965 case BUILT_IN_BSWAP64
:
9966 return fold_builtin_bswap (fndecl
, arg0
);
9968 CASE_INT_FN (BUILT_IN_FFS
):
9969 CASE_INT_FN (BUILT_IN_CLZ
):
9970 CASE_INT_FN (BUILT_IN_CTZ
):
9971 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9972 CASE_INT_FN (BUILT_IN_PARITY
):
9973 return fold_builtin_bitop (fndecl
, arg0
);
9975 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9976 return fold_builtin_signbit (arg0
, type
);
9978 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
9979 return fold_builtin_significand (arg0
, type
);
9981 CASE_FLT_FN (BUILT_IN_ILOGB
):
9982 CASE_FLT_FN (BUILT_IN_LOGB
):
9983 return fold_builtin_logb (arg0
, type
);
9985 case BUILT_IN_ISASCII
:
9986 return fold_builtin_isascii (arg0
);
9988 case BUILT_IN_TOASCII
:
9989 return fold_builtin_toascii (arg0
);
9991 case BUILT_IN_ISDIGIT
:
9992 return fold_builtin_isdigit (arg0
);
9994 CASE_FLT_FN (BUILT_IN_FINITE
):
9995 case BUILT_IN_FINITED32
:
9996 case BUILT_IN_FINITED64
:
9997 case BUILT_IN_FINITED128
:
9998 case BUILT_IN_ISFINITE
:
9999 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISFINITE
);
10001 CASE_FLT_FN (BUILT_IN_ISINF
):
10002 case BUILT_IN_ISINFD32
:
10003 case BUILT_IN_ISINFD64
:
10004 case BUILT_IN_ISINFD128
:
10005 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
10007 CASE_FLT_FN (BUILT_IN_ISNAN
):
10008 case BUILT_IN_ISNAND32
:
10009 case BUILT_IN_ISNAND64
:
10010 case BUILT_IN_ISNAND128
:
10011 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
10013 case BUILT_IN_PRINTF
:
10014 case BUILT_IN_PRINTF_UNLOCKED
:
10015 case BUILT_IN_VPRINTF
:
10016 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10026 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10027 IGNORE is true if the result of the function call is ignored. This
10028 function returns NULL_TREE if no simplification was possible. */
10031 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10033 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10034 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10038 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10039 CASE_FLT_FN (BUILT_IN_JN
):
10040 if (validate_arg (arg0
, INTEGER_TYPE
)
10041 && validate_arg (arg1
, REAL_TYPE
))
10042 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10045 CASE_FLT_FN (BUILT_IN_YN
):
10046 if (validate_arg (arg0
, INTEGER_TYPE
)
10047 && validate_arg (arg1
, REAL_TYPE
))
10048 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10052 CASE_FLT_FN (BUILT_IN_DREM
):
10053 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10054 if (validate_arg (arg0
, REAL_TYPE
)
10055 && validate_arg(arg1
, REAL_TYPE
))
10056 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10059 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10060 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10061 if (validate_arg (arg0
, REAL_TYPE
)
10062 && validate_arg(arg1
, POINTER_TYPE
))
10063 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10067 CASE_FLT_FN (BUILT_IN_ATAN2
):
10068 if (validate_arg (arg0
, REAL_TYPE
)
10069 && validate_arg(arg1
, REAL_TYPE
))
10070 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10073 CASE_FLT_FN (BUILT_IN_FDIM
):
10074 if (validate_arg (arg0
, REAL_TYPE
)
10075 && validate_arg(arg1
, REAL_TYPE
))
10076 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10079 CASE_FLT_FN (BUILT_IN_HYPOT
):
10080 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
10082 CASE_FLT_FN (BUILT_IN_LDEXP
):
10083 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
10084 CASE_FLT_FN (BUILT_IN_SCALBN
):
10085 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10086 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
10088 CASE_FLT_FN (BUILT_IN_FREXP
):
10089 return fold_builtin_frexp (arg0
, arg1
, type
);
10091 CASE_FLT_FN (BUILT_IN_MODF
):
10092 return fold_builtin_modf (arg0
, arg1
, type
);
10094 case BUILT_IN_BZERO
:
10095 return fold_builtin_bzero (arg0
, arg1
, ignore
);
10097 case BUILT_IN_FPUTS
:
10098 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
10100 case BUILT_IN_FPUTS_UNLOCKED
:
10101 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
10103 case BUILT_IN_STRSTR
:
10104 return fold_builtin_strstr (arg0
, arg1
, type
);
10106 case BUILT_IN_STRCAT
:
10107 return fold_builtin_strcat (arg0
, arg1
);
10109 case BUILT_IN_STRSPN
:
10110 return fold_builtin_strspn (arg0
, arg1
);
10112 case BUILT_IN_STRCSPN
:
10113 return fold_builtin_strcspn (arg0
, arg1
);
10115 case BUILT_IN_STRCHR
:
10116 case BUILT_IN_INDEX
:
10117 return fold_builtin_strchr (arg0
, arg1
, type
);
10119 case BUILT_IN_STRRCHR
:
10120 case BUILT_IN_RINDEX
:
10121 return fold_builtin_strrchr (arg0
, arg1
, type
);
10123 case BUILT_IN_STRCPY
:
10124 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
10126 case BUILT_IN_STRCMP
:
10127 return fold_builtin_strcmp (arg0
, arg1
);
10129 case BUILT_IN_STRPBRK
:
10130 return fold_builtin_strpbrk (arg0
, arg1
, type
);
10132 case BUILT_IN_EXPECT
:
10133 return fold_builtin_expect (arg0
);
10135 CASE_FLT_FN (BUILT_IN_POW
):
10136 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
10138 CASE_FLT_FN (BUILT_IN_POWI
):
10139 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
10141 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10142 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
10144 CASE_FLT_FN (BUILT_IN_FMIN
):
10145 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
10147 CASE_FLT_FN (BUILT_IN_FMAX
):
10148 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
10150 case BUILT_IN_ISGREATER
:
10151 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10152 case BUILT_IN_ISGREATEREQUAL
:
10153 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10154 case BUILT_IN_ISLESS
:
10155 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10156 case BUILT_IN_ISLESSEQUAL
:
10157 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10158 case BUILT_IN_ISLESSGREATER
:
10159 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10160 case BUILT_IN_ISUNORDERED
:
10161 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
10164 /* We do the folding for va_start in the expander. */
10165 case BUILT_IN_VA_START
:
10168 case BUILT_IN_SPRINTF
:
10169 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
10171 case BUILT_IN_OBJECT_SIZE
:
10172 return fold_builtin_object_size (arg0
, arg1
);
10174 case BUILT_IN_PRINTF
:
10175 case BUILT_IN_PRINTF_UNLOCKED
:
10176 case BUILT_IN_VPRINTF
:
10177 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
10179 case BUILT_IN_PRINTF_CHK
:
10180 case BUILT_IN_VPRINTF_CHK
:
10181 if (!validate_arg (arg0
, INTEGER_TYPE
)
10182 || TREE_SIDE_EFFECTS (arg0
))
10185 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
10188 case BUILT_IN_FPRINTF
:
10189 case BUILT_IN_FPRINTF_UNLOCKED
:
10190 case BUILT_IN_VFPRINTF
:
10191 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
10200 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10201 and ARG2. IGNORE is true if the result of the function call is ignored.
10202 This function returns NULL_TREE if no simplification was possible. */
10205 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10207 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10208 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10212 CASE_FLT_FN (BUILT_IN_SINCOS
):
10213 return fold_builtin_sincos (arg0
, arg1
, arg2
);
10215 CASE_FLT_FN (BUILT_IN_FMA
):
10216 if (validate_arg (arg0
, REAL_TYPE
)
10217 && validate_arg(arg1
, REAL_TYPE
)
10218 && validate_arg(arg2
, REAL_TYPE
))
10219 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
10222 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10223 CASE_FLT_FN (BUILT_IN_REMQUO
):
10224 if (validate_arg (arg0
, REAL_TYPE
)
10225 && validate_arg(arg1
, REAL_TYPE
)
10226 && validate_arg(arg2
, POINTER_TYPE
))
10227 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10231 case BUILT_IN_MEMSET
:
10232 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
10234 case BUILT_IN_BCOPY
:
10235 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
10237 case BUILT_IN_MEMCPY
:
10238 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
10240 case BUILT_IN_MEMPCPY
:
10241 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
10243 case BUILT_IN_MEMMOVE
:
10244 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
10246 case BUILT_IN_STRNCAT
:
10247 return fold_builtin_strncat (arg0
, arg1
, arg2
);
10249 case BUILT_IN_STRNCPY
:
10250 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10252 case BUILT_IN_STRNCMP
:
10253 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
10255 case BUILT_IN_MEMCHR
:
10256 return fold_builtin_memchr (arg0
, arg1
, arg2
, type
);
10258 case BUILT_IN_BCMP
:
10259 case BUILT_IN_MEMCMP
:
10260 return fold_builtin_memcmp (arg0
, arg1
, arg2
);;
10262 case BUILT_IN_SPRINTF
:
10263 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
10265 case BUILT_IN_STRCPY_CHK
:
10266 case BUILT_IN_STPCPY_CHK
:
10267 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10270 case BUILT_IN_STRCAT_CHK
:
10271 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
10273 case BUILT_IN_PRINTF_CHK
:
10274 case BUILT_IN_VPRINTF_CHK
:
10275 if (!validate_arg (arg0
, INTEGER_TYPE
)
10276 || TREE_SIDE_EFFECTS (arg0
))
10279 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
10282 case BUILT_IN_FPRINTF
:
10283 case BUILT_IN_FPRINTF_UNLOCKED
:
10284 case BUILT_IN_VFPRINTF
:
10285 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
10287 case BUILT_IN_FPRINTF_CHK
:
10288 case BUILT_IN_VFPRINTF_CHK
:
10289 if (!validate_arg (arg1
, INTEGER_TYPE
)
10290 || TREE_SIDE_EFFECTS (arg1
))
10293 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
10302 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10303 ARG2, and ARG3. IGNORE is true if the result of the function call is
10304 ignored. This function returns NULL_TREE if no simplification was
10308 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
10311 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10315 case BUILT_IN_MEMCPY_CHK
:
10316 case BUILT_IN_MEMPCPY_CHK
:
10317 case BUILT_IN_MEMMOVE_CHK
:
10318 case BUILT_IN_MEMSET_CHK
:
10319 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
10321 DECL_FUNCTION_CODE (fndecl
));
10323 case BUILT_IN_STRNCPY_CHK
:
10324 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10326 case BUILT_IN_STRNCAT_CHK
:
10327 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
10329 case BUILT_IN_FPRINTF_CHK
:
10330 case BUILT_IN_VFPRINTF_CHK
:
10331 if (!validate_arg (arg1
, INTEGER_TYPE
)
10332 || TREE_SIDE_EFFECTS (arg1
))
10335 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
10345 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10346 arguments, where NARGS <= 4. IGNORE is true if the result of the
10347 function call is ignored. This function returns NULL_TREE if no
10348 simplification was possible. Note that this only folds builtins with
10349 fixed argument patterns. Foldings that do varargs-to-varargs
10350 transformations, or that match calls with more than 4 arguments,
10351 need to be handled with fold_builtin_varargs instead. */
10353 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10356 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10358 tree ret
= NULL_TREE
;
10362 ret
= fold_builtin_0 (fndecl
, ignore
);
10365 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10368 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10371 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10374 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10382 ret
= build1 (NOP_EXPR
, GENERIC_TREE_TYPE (ret
), ret
);
10383 TREE_NO_WARNING (ret
) = 1;
10389 /* Builtins with folding operations that operate on "..." arguments
10390 need special handling; we need to store the arguments in a convenient
10391 data structure before attempting any folding. Fortunately there are
10392 only a few builtins that fall into this category. FNDECL is the
10393 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10394 result of the function call is ignored. */
10397 fold_builtin_varargs (tree fndecl
, tree exp
, bool ignore ATTRIBUTE_UNUSED
)
10399 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10400 tree ret
= NULL_TREE
;
10404 case BUILT_IN_SPRINTF_CHK
:
10405 case BUILT_IN_VSPRINTF_CHK
:
10406 ret
= fold_builtin_sprintf_chk (exp
, fcode
);
10409 case BUILT_IN_SNPRINTF_CHK
:
10410 case BUILT_IN_VSNPRINTF_CHK
:
10411 ret
= fold_builtin_snprintf_chk (exp
, NULL_TREE
, fcode
);
10418 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10419 TREE_NO_WARNING (ret
) = 1;
10425 /* A wrapper function for builtin folding that prevents warnings for
10426 "statement without effect" and the like, caused by removing the
10427 call node earlier than the warning is generated. */
10430 fold_call_expr (tree exp
, bool ignore
)
10432 tree ret
= NULL_TREE
;
10433 tree fndecl
= get_callee_fndecl (exp
);
10435 && TREE_CODE (fndecl
) == FUNCTION_DECL
10436 && DECL_BUILT_IN (fndecl
))
10438 /* FIXME: Don't use a list in this interface. */
10439 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10440 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10443 int nargs
= call_expr_nargs (exp
);
10444 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10446 tree
*args
= CALL_EXPR_ARGP (exp
);
10447 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10450 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10453 /* Propagate location information from original call to
10454 expansion of builtin. Otherwise things like
10455 maybe_emit_chk_warning, that operate on the expansion
10456 of a builtin, will use the wrong location information. */
10457 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10459 tree realret
= ret
;
10460 if (TREE_CODE (ret
) == NOP_EXPR
)
10461 realret
= TREE_OPERAND (ret
, 0);
10462 if (CAN_HAVE_LOCATION_P (realret
)
10463 && !EXPR_HAS_LOCATION (realret
))
10464 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
10473 /* Conveniently construct a function call expression. FNDECL names the
10474 function to be called and ARGLIST is a TREE_LIST of arguments. */
10477 build_function_call_expr (tree fndecl
, tree arglist
)
10479 tree fntype
= TREE_TYPE (fndecl
);
10480 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10481 int n
= list_length (arglist
);
10482 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10485 for (i
= 0; i
< n
; i
++, arglist
= TREE_CHAIN (arglist
))
10486 argarray
[i
] = TREE_VALUE (arglist
);
10487 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10490 /* Conveniently construct a function call expression. FNDECL names the
10491 function to be called, N is the number of arguments, and the "..."
10492 parameters are the argument expressions. */
10495 build_call_expr (tree fndecl
, int n
, ...)
10498 tree fntype
= TREE_TYPE (fndecl
);
10499 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10500 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10504 for (i
= 0; i
< n
; i
++)
10505 argarray
[i
] = va_arg (ap
, tree
);
10507 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10510 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10511 N arguments are passed in the array ARGARRAY. */
10514 fold_builtin_call_array (tree type
,
10519 tree ret
= NULL_TREE
;
10523 if (TREE_CODE (fn
) == ADDR_EXPR
)
10525 tree fndecl
= TREE_OPERAND (fn
, 0);
10526 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10527 && DECL_BUILT_IN (fndecl
))
10529 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10531 tree arglist
= NULL_TREE
;
10532 for (i
= n
- 1; i
>= 0; i
--)
10533 arglist
= tree_cons (NULL_TREE
, argarray
[i
], arglist
);
10534 ret
= targetm
.fold_builtin (fndecl
, arglist
, false);
10538 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10540 /* First try the transformations that don't require consing up
10542 ret
= fold_builtin_n (fndecl
, argarray
, n
, false);
10547 /* If we got this far, we need to build an exp. */
10548 exp
= build_call_array (type
, fn
, n
, argarray
);
10549 ret
= fold_builtin_varargs (fndecl
, exp
, false);
10550 return ret
? ret
: exp
;
10554 return build_call_array (type
, fn
, n
, argarray
);
10557 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10558 along with N new arguments specified as the "..." parameters. SKIP
10559 is the number of arguments in EXP to be omitted. This function is used
10560 to do varargs-to-varargs transformations. */
10563 rewrite_call_expr (tree exp
, int skip
, tree fndecl
, int n
, ...)
10565 int oldnargs
= call_expr_nargs (exp
);
10566 int nargs
= oldnargs
- skip
+ n
;
10567 tree fntype
= TREE_TYPE (fndecl
);
10568 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10576 buffer
= alloca (nargs
* sizeof (tree
));
10578 for (i
= 0; i
< n
; i
++)
10579 buffer
[i
] = va_arg (ap
, tree
);
10581 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10582 buffer
[i
] = CALL_EXPR_ARG (exp
, j
);
10585 buffer
= CALL_EXPR_ARGP (exp
) + skip
;
10587 return fold (build_call_array (TREE_TYPE (exp
), fn
, nargs
, buffer
));
10590 /* Validate a single argument ARG against a tree code CODE representing
10594 validate_arg (tree arg
, enum tree_code code
)
10598 else if (code
== POINTER_TYPE
)
10599 return POINTER_TYPE_P (TREE_TYPE (arg
));
10600 return code
== TREE_CODE (TREE_TYPE (arg
));
10603 /* This function validates the types of a function call argument list
10604 against a specified list of tree_codes. If the last specifier is a 0,
10605 that represents an ellipses, otherwise the last specifier must be a
10609 validate_arglist (tree callexpr
, ...)
10611 enum tree_code code
;
10614 call_expr_arg_iterator iter
;
10617 va_start (ap
, callexpr
);
10618 init_call_expr_arg_iterator (callexpr
, &iter
);
10622 code
= va_arg (ap
, enum tree_code
);
10626 /* This signifies an ellipses, any further arguments are all ok. */
10630 /* This signifies an endlink, if no arguments remain, return
10631 true, otherwise return false. */
10632 res
= !more_call_expr_args_p (&iter
);
10635 /* If no parameters remain or the parameter's code does not
10636 match the specified code, return false. Otherwise continue
10637 checking any remaining arguments. */
10638 arg
= next_call_expr_arg (&iter
);
10639 if (!validate_arg (arg
, code
))
10646 /* We need gotos here since we can only have one VA_CLOSE in a
10654 /* Default target-specific builtin expander that does nothing. */
10657 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10658 rtx target ATTRIBUTE_UNUSED
,
10659 rtx subtarget ATTRIBUTE_UNUSED
,
10660 enum machine_mode mode ATTRIBUTE_UNUSED
,
10661 int ignore ATTRIBUTE_UNUSED
)
10666 /* Returns true is EXP represents data that would potentially reside
10667 in a readonly section. */
10670 readonly_data_expr (tree exp
)
10674 if (TREE_CODE (exp
) != ADDR_EXPR
)
10677 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10681 /* Make sure we call decl_readonly_section only for trees it
10682 can handle (since it returns true for everything it doesn't
10684 if (TREE_CODE (exp
) == STRING_CST
10685 || TREE_CODE (exp
) == CONSTRUCTOR
10686 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
10687 return decl_readonly_section (exp
, 0);
10692 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10693 to the call, and TYPE is its return type.
10695 Return NULL_TREE if no simplification was possible, otherwise return the
10696 simplified form of the call as a tree.
10698 The simplified form may be a constant or other expression which
10699 computes the same value, but in a more efficient manner (including
10700 calls to other builtin functions).
10702 The call may contain arguments which need to be evaluated, but
10703 which are not useful to determine the result of the call. In
10704 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10705 COMPOUND_EXPR will be an argument which must be evaluated.
10706 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10707 COMPOUND_EXPR in the chain will contain the tree for the simplified
10708 form of the builtin function call. */
10711 fold_builtin_strstr (tree s1
, tree s2
, tree type
)
10713 if (!validate_arg (s1
, POINTER_TYPE
)
10714 || !validate_arg (s2
, POINTER_TYPE
))
10719 const char *p1
, *p2
;
10721 p2
= c_getstr (s2
);
10725 p1
= c_getstr (s1
);
10728 const char *r
= strstr (p1
, p2
);
10732 return build_int_cst (TREE_TYPE (s1
), 0);
10734 /* Return an offset into the constant string argument. */
10735 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
10736 s1
, size_int (r
- p1
));
10737 return fold_convert (type
, tem
);
10740 /* The argument is const char *, and the result is char *, so we need
10741 a type conversion here to avoid a warning. */
10743 return fold_convert (type
, s1
);
10748 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10752 /* New argument list transforming strstr(s1, s2) to
10753 strchr(s1, s2[0]). */
10754 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
10758 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10759 the call, and TYPE is its return type.
10761 Return NULL_TREE if no simplification was possible, otherwise return the
10762 simplified form of the call as a tree.
10764 The simplified form may be a constant or other expression which
10765 computes the same value, but in a more efficient manner (including
10766 calls to other builtin functions).
10768 The call may contain arguments which need to be evaluated, but
10769 which are not useful to determine the result of the call. In
10770 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10771 COMPOUND_EXPR will be an argument which must be evaluated.
10772 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10773 COMPOUND_EXPR in the chain will contain the tree for the simplified
10774 form of the builtin function call. */
10777 fold_builtin_strchr (tree s1
, tree s2
, tree type
)
10779 if (!validate_arg (s1
, POINTER_TYPE
)
10780 || !validate_arg (s2
, INTEGER_TYPE
))
10786 if (TREE_CODE (s2
) != INTEGER_CST
)
10789 p1
= c_getstr (s1
);
10796 if (target_char_cast (s2
, &c
))
10799 r
= strchr (p1
, c
);
10802 return build_int_cst (TREE_TYPE (s1
), 0);
10804 /* Return an offset into the constant string argument. */
10805 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
10806 s1
, size_int (r
- p1
));
10807 return fold_convert (type
, tem
);
10813 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10814 the call, and TYPE is its return type.
10816 Return NULL_TREE if no simplification was possible, otherwise return the
10817 simplified form of the call as a tree.
10819 The simplified form may be a constant or other expression which
10820 computes the same value, but in a more efficient manner (including
10821 calls to other builtin functions).
10823 The call may contain arguments which need to be evaluated, but
10824 which are not useful to determine the result of the call. In
10825 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10826 COMPOUND_EXPR will be an argument which must be evaluated.
10827 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10828 COMPOUND_EXPR in the chain will contain the tree for the simplified
10829 form of the builtin function call. */
10832 fold_builtin_strrchr (tree s1
, tree s2
, tree type
)
10834 if (!validate_arg (s1
, POINTER_TYPE
)
10835 || !validate_arg (s2
, INTEGER_TYPE
))
10842 if (TREE_CODE (s2
) != INTEGER_CST
)
10845 p1
= c_getstr (s1
);
10852 if (target_char_cast (s2
, &c
))
10855 r
= strrchr (p1
, c
);
10858 return build_int_cst (TREE_TYPE (s1
), 0);
10860 /* Return an offset into the constant string argument. */
10861 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
10862 s1
, size_int (r
- p1
));
10863 return fold_convert (type
, tem
);
10866 if (! integer_zerop (s2
))
10869 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10873 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10874 return build_call_expr (fn
, 2, s1
, s2
);
10878 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10879 to the call, and TYPE is its return type.
10881 Return NULL_TREE if no simplification was possible, otherwise return the
10882 simplified form of the call as a tree.
10884 The simplified form may be a constant or other expression which
10885 computes the same value, but in a more efficient manner (including
10886 calls to other builtin functions).
10888 The call may contain arguments which need to be evaluated, but
10889 which are not useful to determine the result of the call. In
10890 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10891 COMPOUND_EXPR will be an argument which must be evaluated.
10892 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10893 COMPOUND_EXPR in the chain will contain the tree for the simplified
10894 form of the builtin function call. */
10897 fold_builtin_strpbrk (tree s1
, tree s2
, tree type
)
10899 if (!validate_arg (s1
, POINTER_TYPE
)
10900 || !validate_arg (s2
, POINTER_TYPE
))
10905 const char *p1
, *p2
;
10907 p2
= c_getstr (s2
);
10911 p1
= c_getstr (s1
);
10914 const char *r
= strpbrk (p1
, p2
);
10918 return build_int_cst (TREE_TYPE (s1
), 0);
10920 /* Return an offset into the constant string argument. */
10921 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
10922 s1
, size_int (r
- p1
));
10923 return fold_convert (type
, tem
);
10927 /* strpbrk(x, "") == NULL.
10928 Evaluate and ignore s1 in case it had side-effects. */
10929 return omit_one_operand (TREE_TYPE (s1
), integer_zero_node
, s1
);
10932 return NULL_TREE
; /* Really call strpbrk. */
10934 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10938 /* New argument list transforming strpbrk(s1, s2) to
10939 strchr(s1, s2[0]). */
10940 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
10944 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10947 Return NULL_TREE if no simplification was possible, otherwise return the
10948 simplified form of the call as a tree.
10950 The simplified form may be a constant or other expression which
10951 computes the same value, but in a more efficient manner (including
10952 calls to other builtin functions).
10954 The call may contain arguments which need to be evaluated, but
10955 which are not useful to determine the result of the call. In
10956 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10957 COMPOUND_EXPR will be an argument which must be evaluated.
10958 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10959 COMPOUND_EXPR in the chain will contain the tree for the simplified
10960 form of the builtin function call. */
10963 fold_builtin_strcat (tree dst
, tree src
)
10965 if (!validate_arg (dst
, POINTER_TYPE
)
10966 || !validate_arg (src
, POINTER_TYPE
))
10970 const char *p
= c_getstr (src
);
10972 /* If the string length is zero, return the dst parameter. */
10973 if (p
&& *p
== '\0')
10980 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10981 arguments to the call.
10983 Return NULL_TREE if no simplification was possible, otherwise return the
10984 simplified form of the call as a tree.
10986 The simplified form may be a constant or other expression which
10987 computes the same value, but in a more efficient manner (including
10988 calls to other builtin functions).
10990 The call may contain arguments which need to be evaluated, but
10991 which are not useful to determine the result of the call. In
10992 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10993 COMPOUND_EXPR will be an argument which must be evaluated.
10994 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10995 COMPOUND_EXPR in the chain will contain the tree for the simplified
10996 form of the builtin function call. */
10999 fold_builtin_strncat (tree dst
, tree src
, tree len
)
11001 if (!validate_arg (dst
, POINTER_TYPE
)
11002 || !validate_arg (src
, POINTER_TYPE
)
11003 || !validate_arg (len
, INTEGER_TYPE
))
11007 const char *p
= c_getstr (src
);
11009 /* If the requested length is zero, or the src parameter string
11010 length is zero, return the dst parameter. */
11011 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11012 return omit_two_operands (TREE_TYPE (dst
), dst
, src
, len
);
11014 /* If the requested len is greater than or equal to the string
11015 length, call strcat. */
11016 if (TREE_CODE (len
) == INTEGER_CST
&& p
11017 && compare_tree_int (len
, strlen (p
)) >= 0)
11019 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
11021 /* If the replacement _DECL isn't initialized, don't do the
11026 return build_call_expr (fn
, 2, dst
, src
);
11032 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11035 Return NULL_TREE if no simplification was possible, otherwise return the
11036 simplified form of the call as a tree.
11038 The simplified form may be a constant or other expression which
11039 computes the same value, but in a more efficient manner (including
11040 calls to other builtin functions).
11042 The call may contain arguments which need to be evaluated, but
11043 which are not useful to determine the result of the call. In
11044 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11045 COMPOUND_EXPR will be an argument which must be evaluated.
11046 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11047 COMPOUND_EXPR in the chain will contain the tree for the simplified
11048 form of the builtin function call. */
11051 fold_builtin_strspn (tree s1
, tree s2
)
11053 if (!validate_arg (s1
, POINTER_TYPE
)
11054 || !validate_arg (s2
, POINTER_TYPE
))
11058 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11060 /* If both arguments are constants, evaluate at compile-time. */
11063 const size_t r
= strspn (p1
, p2
);
11064 return size_int (r
);
11067 /* If either argument is "", return NULL_TREE. */
11068 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11069 /* Evaluate and ignore both arguments in case either one has
11071 return omit_two_operands (integer_type_node
, integer_zero_node
,
11077 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11080 Return NULL_TREE if no simplification was possible, otherwise return the
11081 simplified form of the call as a tree.
11083 The simplified form may be a constant or other expression which
11084 computes the same value, but in a more efficient manner (including
11085 calls to other builtin functions).
11087 The call may contain arguments which need to be evaluated, but
11088 which are not useful to determine the result of the call. In
11089 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11090 COMPOUND_EXPR will be an argument which must be evaluated.
11091 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11092 COMPOUND_EXPR in the chain will contain the tree for the simplified
11093 form of the builtin function call. */
11096 fold_builtin_strcspn (tree s1
, tree s2
)
11098 if (!validate_arg (s1
, POINTER_TYPE
)
11099 || !validate_arg (s2
, POINTER_TYPE
))
11103 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11105 /* If both arguments are constants, evaluate at compile-time. */
11108 const size_t r
= strcspn (p1
, p2
);
11109 return size_int (r
);
11112 /* If the first argument is "", return NULL_TREE. */
11113 if (p1
&& *p1
== '\0')
11115 /* Evaluate and ignore argument s2 in case it has
11117 return omit_one_operand (integer_type_node
,
11118 integer_zero_node
, s2
);
11121 /* If the second argument is "", return __builtin_strlen(s1). */
11122 if (p2
&& *p2
== '\0')
11124 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11126 /* If the replacement _DECL isn't initialized, don't do the
11131 return build_call_expr (fn
, 1, s1
);
11137 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11138 to the call. IGNORE is true if the value returned
11139 by the builtin will be ignored. UNLOCKED is true is true if this
11140 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11141 the known length of the string. Return NULL_TREE if no simplification
/* Fold fputs (ARG0, ARG1): when the result is ignored and ARG0's length
   is a known constant, drop the call for a zero-length string, call
   fputc for a one-character string, or call fwrite otherwise.  UNLOCKED
   selects the *_unlocked builtin variants.
   NOTE(review): lines are missing from this extract (braces, the
   IGNORE/len guards, NULL_TREE returns); not compilable as shown.  */
11145 fold_builtin_fputs (tree arg0
, tree arg1
, bool ignore
, bool unlocked
, tree len
)
11147 /* If we're using an unlocked function, assume the other unlocked
11148 functions exist explicitly. */
11149 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11150 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11151 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11152 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11154 /* If the return value is used, don't do the transformation. */
11158 /* Verify the arguments in the original call. */
11159 if (!validate_arg (arg0
, POINTER_TYPE
)
11160 || !validate_arg (arg1
, POINTER_TYPE
))
11164 len
= c_strlen (arg0
, 0);
11166 /* Get the length of the string passed to fputs. If the length
11167 can't be determined, punt. */
11169 || TREE_CODE (len
) != INTEGER_CST
)
11172 switch (compare_tree_int (len
, 1))
11174 case -1: /* length is 0, delete the call entirely.  */
11175 return omit_one_operand (integer_type_node
, integer_zero_node
, arg1
);;
11177 case 0: /* length is 1, call fputc. */
11179 const char *p
= c_getstr (arg0
);
11184 return build_call_expr (fn_fputc
, 2,
11185 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11191 case 1: /* length is greater than 1, call fwrite. */
11193 /* If optimizing for size keep fputs. */
11196 /* New argument list transforming fputs(string, stream) to
11197 fwrite(string, 1, len, stream). */
11199 return build_call_expr (fn_fwrite
, 4, arg0
, size_one_node
, len
, arg1
);
11204 gcc_unreachable ();
11209 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11210 produced. False otherwise. This is done so that we don't output the error
11211 or warning twice or three times. */
/* Check a va_start / __builtin_next_arg call EXP for argument errors
   (fixed-arg function, wrong arg count, second argument not the last
   named parameter) and destructively canonicalize the checked argument
   to 0 so later passes do not re-warn.  Returns true when an error was
   emitted, false otherwise.
   NOTE(review): this extract is missing source lines from the original
   file (braces, `return true/false;` statements); not compilable as shown.  */
11213 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11215 tree fntype
= TREE_TYPE (current_function_decl
);
11216 int nargs
= call_expr_nargs (exp
);
11219 if (TYPE_ARG_TYPES (fntype
) == 0
11220 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
11221 == void_type_node
))
11223 error ("%<va_start%> used in function with fixed args");
11229 if (va_start_p
&& (nargs
!= 2))
11231 error ("wrong number of arguments to function %<va_start%>");
11234 arg
= CALL_EXPR_ARG (exp
, 1);
11236 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11237 when we checked the arguments and if needed issued a warning. */
11242 /* Evidently an out of date version of <stdarg.h>; can't validate
11243 va_start's second argument, but can still work as intended. */
11244 warning (0, "%<__builtin_next_arg%> called without an argument");
11247 else if (nargs
> 1)
11249 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11252 arg
= CALL_EXPR_ARG (exp
, 0);
11255 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11256 or __builtin_next_arg (0) the first time we see it, after checking
11257 the arguments and if needed issuing a warning. */
11258 if (!integer_zerop (arg
))
11260 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11262 /* Strip off all nops for the sake of the comparison. This
11263 is not quite the same as STRIP_NOPS. It does more.
11264 We must also strip off INDIRECT_EXPR for C++ reference
   parameters.  */
11266 while (TREE_CODE (arg
) == NOP_EXPR
11267 || TREE_CODE (arg
) == CONVERT_EXPR
11268 || TREE_CODE (arg
) == NON_LVALUE_EXPR
11269 || TREE_CODE (arg
) == INDIRECT_REF
)
11270 arg
= TREE_OPERAND (arg
, 0);
11271 if (arg
!= last_parm
)
11273 /* FIXME: Sometimes with the tree optimizers we can get the
11274 not the last argument even though the user used the last
11275 argument. We just warn and set the arg to be the last
11276 argument so that we will get wrong-code because of
   it.  */
11278 warning (0, "second parameter of %<va_start%> not last named argument");
11280 /* We want to verify the second parameter just once before the tree
11281 optimizers are run and then avoid keeping it in the tree,
11282 as otherwise we could warn even for correct code like:
11283 void foo (int i, ...)
11284 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11286 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11288 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11294 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11295 ORIG may be null if this is a 2-argument call. We don't attempt to
11296 simplify calls with more than 3 arguments.
11298 Return NULL_TREE if no simplification was possible, otherwise return the
11299 simplified form of the call as a tree. If IGNORED is true, it means that
11300 the caller does not use the returned value of the function. */
/* Fold sprintf (DEST, FMT[, ORIG]): when FMT is a literal with no '%',
   rewrite to strcpy (DEST, FMT) with known return value strlen (FMT);
   when FMT is "%s" and ORIG has a constant length (or IGNORED is set),
   rewrite to strcpy (DEST, ORIG).  The result, when used, is chained to
   the strcpy call with a COMPOUND_EXPR.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns, the IGNORED guards); not compilable
   as shown.  */
11303 fold_builtin_sprintf (tree dest
, tree fmt
, tree orig
, int ignored
)
11306 const char *fmt_str
= NULL
;
11308 /* Verify the required arguments in the original call. We deal with two
11309 types of sprintf() calls: 'sprintf (str, fmt)' and
11310 'sprintf (dest, "%s", orig)'. */
11311 if (!validate_arg (dest
, POINTER_TYPE
)
11312 || !validate_arg (fmt
, POINTER_TYPE
))
11314 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11317 /* Check whether the format is a literal string constant. */
11318 fmt_str
= c_getstr (fmt
);
11319 if (fmt_str
== NULL
)
11323 retval
= NULL_TREE
;
11325 if (!init_target_chars ())
11328 /* If the format doesn't contain % args or %%, use strcpy. */
11329 if (strchr (fmt_str
, target_percent
) == NULL
)
11331 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11336 /* Don't optimize sprintf (buf, "abc", ptr++). */
11340 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11341 'format' is known to contain no % formats. */
11342 call
= build_call_expr (fn
, 2, dest
, fmt
);
11344 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11347 /* If the format is "%s", use strcpy if the result isn't used. */
11348 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11351 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11356 /* Don't crash on sprintf (str1, "%s"). */
11360 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11363 retval
= c_strlen (orig
, 1);
11364 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11367 call
= build_call_expr (fn
, 2, dest
, orig
);
11370 if (call
&& retval
)
11372 retval
= fold_convert
11373 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11375 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11381 /* Expand a call EXP to __builtin_object_size. */
/* Expand a __builtin_object_size call EXP at RTL-expansion time.  By
   this point the size could not be folded, so the conservative answer
   is returned: -1 for size types 0 and 1, 0 for types 2 and 3.  Invalid
   calls (wrong arglist, or a last argument that is not an integer
   constant in [0,3]) are diagnosed and expanded to a trap.
   NOTE(review): this extract is missing source lines from the original
   file (braces, the &locus/fndecl warning arguments, returns); not
   compilable as shown.  */
11384 expand_builtin_object_size (tree exp
)
11387 int object_size_type
;
11388 tree fndecl
= get_callee_fndecl (exp
);
11389 location_t locus
= EXPR_LOCATION (exp
);
11391 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11393 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11395 expand_builtin_trap ();
11399 ost
= CALL_EXPR_ARG (exp
, 1);
11402 if (TREE_CODE (ost
) != INTEGER_CST
11403 || tree_int_cst_sgn (ost
) < 0
11404 || compare_tree_int (ost
, 3) > 0)
11406 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11408 expand_builtin_trap ();
11412 object_size_type
= tree_low_cst (ost
, 0);
11414 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11417 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11418 FCODE is the BUILT_IN_* to use.
11419 Return NULL_RTX if we failed; the caller should emit a normal call,
11420 otherwise try to get the result in TARGET, if convenient (and in
11421 mode MODE if that's convenient). */
/* Expand a __mem{cpy,pcpy,move,set}_chk call EXP (FCODE selects which).
   When LEN is constant or SIZE is unknown (-1), warn on provable
   overflow, otherwise drop the object-size check and expand as the
   plain mem* builtin; also handles same-src/dest and readonly-source
   __memmove_chk special cases.  Returns NULL_RTX (via the dropped
   return paths) to make the caller emit a normal library call.
   NOTE(review): this extract is missing source lines from the original
   file (braces, `break;`s, NULL_RTX returns); not compilable as shown.  */
11424 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11425 enum built_in_function fcode
)
11427 tree dest
, src
, len
, size
;
11429 if (!validate_arglist (exp
,
11431 fcode
== BUILT_IN_MEMSET_CHK
11432 ? INTEGER_TYPE
: POINTER_TYPE
,
11433 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11436 dest
= CALL_EXPR_ARG (exp
, 0);
11437 src
= CALL_EXPR_ARG (exp
, 1);
11438 len
= CALL_EXPR_ARG (exp
, 2);
11439 size
= CALL_EXPR_ARG (exp
, 3);
11441 if (! host_integerp (size
, 1))
11444 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11448 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11450 location_t locus
= EXPR_LOCATION (exp
);
11451 warning (0, "%Hcall to %D will always overflow destination buffer",
11452 &locus
, get_callee_fndecl (exp
));
11457 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11458 mem{cpy,pcpy,move,set} is available. */
11461 case BUILT_IN_MEMCPY_CHK
:
11462 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11464 case BUILT_IN_MEMPCPY_CHK
:
11465 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11467 case BUILT_IN_MEMMOVE_CHK
:
11468 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11470 case BUILT_IN_MEMSET_CHK
:
11471 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11480 fn
= build_call_expr (fn
, 3, dest
, src
, len
);
11481 if (TREE_CODE (fn
) == CALL_EXPR
)
11482 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11483 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11485 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11489 unsigned int dest_align
11490 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11492 /* If DEST is not a pointer type, call the normal function. */
11493 if (dest_align
== 0)
11496 /* If SRC and DEST are the same (and not volatile), do nothing. */
11497 if (operand_equal_p (src
, dest
, 0))
11501 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11503 /* Evaluate and ignore LEN in case it has side-effects. */
11504 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11505 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11508 expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11509 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11512 /* __memmove_chk special case. */
11513 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11515 unsigned int src_align
11516 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11518 if (src_align
== 0)
11521 /* If src is categorized for a readonly section we can use
11522 normal __memcpy_chk. */
11523 if (readonly_data_expr (src
))
11525 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11528 fn
= build_call_expr (fn
, 4, dest
, src
, len
, size
);
11529 if (TREE_CODE (fn
) == CALL_EXPR
)
11530 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11531 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11538 /* Emit warning if a buffer overflow is detected at compile time. */
/* Emit a "will always overflow" / "might overflow" warning for a
   checked string builtin call EXP when the compile-time-known length
   exceeds the object size.  FCODE selects which argument positions hold
   the length and the size.
   NOTE(review): this extract is missing source lines from the original
   file (switch/brace structure, `break;`s, early returns); not
   compilable as shown.  */
11541 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11549 case BUILT_IN_STRCPY_CHK
:
11550 case BUILT_IN_STPCPY_CHK
:
11551 /* For __strcat_chk the warning will be emitted only if overflowing
11552 by at least strlen (dest) + 1 bytes. */
11553 case BUILT_IN_STRCAT_CHK
:
11554 len
= CALL_EXPR_ARG (exp
, 1);
11555 size
= CALL_EXPR_ARG (exp
, 2);
11558 case BUILT_IN_STRNCAT_CHK
:
11559 case BUILT_IN_STRNCPY_CHK
:
11560 len
= CALL_EXPR_ARG (exp
, 2);
11561 size
= CALL_EXPR_ARG (exp
, 3);
11563 case BUILT_IN_SNPRINTF_CHK
:
11564 case BUILT_IN_VSNPRINTF_CHK
:
11565 len
= CALL_EXPR_ARG (exp
, 1);
11566 size
= CALL_EXPR_ARG (exp
, 3);
11569 gcc_unreachable ();
11575 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11580 len
= c_strlen (len
, 1);
11581 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11584 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11586 tree src
= CALL_EXPR_ARG (exp
, 1);
11587 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11589 src
= c_strlen (src
, 1);
11590 if (! src
|| ! host_integerp (src
, 1))
11592 locus
= EXPR_LOCATION (exp
);
11593 warning (0, "%Hcall to %D might overflow destination buffer",
11594 &locus
, get_callee_fndecl (exp
));
11597 else if (tree_int_cst_lt (src
, size
))
11600 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
11603 locus
= EXPR_LOCATION (exp
);
11604 warning (0, "%Hcall to %D will always overflow destination buffer",
11605 &locus
, get_callee_fndecl (exp
));
11608 /* Emit warning if a buffer overflow is detected at compile time
11609 in __sprintf_chk/__vsprintf_chk calls. */
/* Emit a "will always overflow" warning for a __sprintf_chk /
   __vsprintf_chk call EXP when the output length is known at compile
   time (format without '%', or "%s" with a literal argument) and is not
   smaller than the object size.
   NOTE(review): this extract is missing source lines from the original
   file (braces, early returns, the nargs check); not compilable as
   shown.  */
11612 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11614 tree dest
, size
, len
, fmt
, flag
;
11615 const char *fmt_str
;
11616 int nargs
= call_expr_nargs (exp
);
11618 /* Verify the required arguments in the original call. */
11622 dest
= CALL_EXPR_ARG (exp
, 0);
11623 flag
= CALL_EXPR_ARG (exp
, 1);
11624 size
= CALL_EXPR_ARG (exp
, 2);
11625 fmt
= CALL_EXPR_ARG (exp
, 3);
11627 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11630 /* Check whether the format is a literal string constant. */
11631 fmt_str
= c_getstr (fmt
);
11632 if (fmt_str
== NULL
)
11635 if (!init_target_chars ())
11638 /* If the format doesn't contain % args or %%, we know its size. */
11639 if (strchr (fmt_str
, target_percent
) == 0)
11640 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11641 /* If the format is "%s" and first ... argument is a string literal,
   we know its size too.  */
11643 else if (fcode
== BUILT_IN_SPRINTF_CHK
11644 && strcmp (fmt_str
, target_percent_s
) == 0)
11650 arg
= CALL_EXPR_ARG (exp
, 4);
11651 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11654 len
= c_strlen (arg
, 1);
11655 if (!len
|| ! host_integerp (len
, 1))
11661 if (! tree_int_cst_lt (len
, size
))
11663 location_t locus
= EXPR_LOCATION (exp
);
11664 warning (0, "%Hcall to %D will always overflow destination buffer",
11665 &locus
, get_callee_fndecl (exp
));
11669 /* Fold a call to __builtin_object_size with arguments PTR and OST,
/* Fold __builtin_object_size (PTR, OST): returns the conservative
   constant (-1 for size types 0/1, 0 for types 2/3) when PTR has
   side-effects, the computed object size for an ADDR_EXPR or a
   resolvable SSA_NAME, and (via the dropped paths) NULL_TREE to delay
   folding otherwise.  Oversized results that do not fit the size type
   are discarded.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns, the `? -1 : 0)` continuation of the
   SSA_NAME comparison); not compilable as shown.  */
11673 fold_builtin_object_size (tree ptr
, tree ost
)
11675 tree ret
= NULL_TREE
;
11676 int object_size_type
;
11678 if (!validate_arg (ptr
, POINTER_TYPE
)
11679 || !validate_arg (ost
, INTEGER_TYPE
))
11684 if (TREE_CODE (ost
) != INTEGER_CST
11685 || tree_int_cst_sgn (ost
) < 0
11686 || compare_tree_int (ost
, 3) > 0)
11689 object_size_type
= tree_low_cst (ost
, 0);
11691 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11692 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11693 and (size_t) 0 for types 2 and 3. */
11694 if (TREE_SIDE_EFFECTS (ptr
))
11695 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11697 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11698 ret
= build_int_cstu (size_type_node
,
11699 compute_builtin_object_size (ptr
, object_size_type
));
11701 else if (TREE_CODE (ptr
) == SSA_NAME
)
11703 unsigned HOST_WIDE_INT bytes
;
11705 /* If object size is not known yet, delay folding until
11706 later. Maybe subsequent passes will help determining
   it.  */
11708 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11709 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2
11711 ret
= build_int_cstu (size_type_node
, bytes
);
11716 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (ret
);
11717 HOST_WIDE_INT high
= TREE_INT_CST_HIGH (ret
);
11718 if (fit_double_type (low
, high
, &low
, &high
, TREE_TYPE (ret
)))
11725 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11726 DEST, SRC, LEN, and SIZE are the arguments to the call.
11727 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11728 code of the builtin. If MAXLEN is not NULL, it is maximum length
11729 passed as third argument. */
/* Fold a __mem{cpy,pcpy,move,set}_chk call (FCODE selects which) into
   the plain mem* builtin when the object-size check can be discharged
   at compile time: same src/dest, SIZE unknown (-1), or
   LEN (resp. MAXLEN) provably <= SIZE.  Returns NULL_TREE (via the
   dropped paths) when no simplification applies.
   NOTE(review): this extract is missing source lines from the original
   file (braces, `break;`s, NULL_TREE returns); not compilable as
   shown.  */
11732 fold_builtin_memory_chk (tree fndecl
,
11733 tree dest
, tree src
, tree len
, tree size
,
11734 tree maxlen
, bool ignore
,
11735 enum built_in_function fcode
)
11739 if (!validate_arg (dest
, POINTER_TYPE
)
11740 || !validate_arg (src
,
11741 (fcode
== BUILT_IN_MEMSET_CHK
11742 ? INTEGER_TYPE
: POINTER_TYPE
))
11743 || !validate_arg (len
, INTEGER_TYPE
)
11744 || !validate_arg (size
, INTEGER_TYPE
))
11747 /* If SRC and DEST are the same (and not volatile), return DEST
11748 (resp. DEST+LEN for __mempcpy_chk). */
11749 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
11751 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11752 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11755 tree temp
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11756 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
11760 if (! host_integerp (size
, 1))
11763 if (! integer_all_onesp (size
))
11765 if (! host_integerp (len
, 1))
11767 /* If LEN is not constant, try MAXLEN too.
11768 For MAXLEN only allow optimizing into non-_ocs function
11769 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11770 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11772 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
11774 /* (void) __mempcpy_chk () can be optimized into
11775 (void) __memcpy_chk (). */
11776 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11780 return build_call_expr (fn
, 4, dest
, src
, len
, size
);
11788 if (tree_int_cst_lt (size
, maxlen
))
11793 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11794 mem{cpy,pcpy,move,set} is available. */
11797 case BUILT_IN_MEMCPY_CHK
:
11798 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11800 case BUILT_IN_MEMPCPY_CHK
:
11801 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11803 case BUILT_IN_MEMMOVE_CHK
:
11804 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11806 case BUILT_IN_MEMSET_CHK
:
11807 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11816 return build_call_expr (fn
, 3, dest
, src
, len
);
11819 /* Fold a call to the __st[rp]cpy_chk builtin.
11820 DEST, SRC, and SIZE are the arguments to the call.
11821 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11822 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11823 strings passed as second argument. */
/* Fold a __strcpy_chk / __stpcpy_chk call into strcpy/stpcpy (or into
   __memcpy_chk / __strcpy_chk in the intermediate cases) when the
   object-size check can be discharged: same src/dest for strcpy_chk,
   SIZE unknown (-1), or strlen (SRC) (resp. MAXLEN) provably < SIZE.
   Returns NULL_TREE (via the dropped paths) when no simplification
   applies.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns, the IGNORE guard for stpcpy_chk);
   not compilable as shown.  */
11826 fold_builtin_stxcpy_chk (tree fndecl
, tree dest
, tree src
, tree size
,
11827 tree maxlen
, bool ignore
,
11828 enum built_in_function fcode
)
11832 if (!validate_arg (dest
, POINTER_TYPE
)
11833 || !validate_arg (src
, POINTER_TYPE
)
11834 || !validate_arg (size
, INTEGER_TYPE
))
11837 /* If SRC and DEST are the same (and not volatile), return DEST. */
11838 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
11839 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
11841 if (! host_integerp (size
, 1))
11844 if (! integer_all_onesp (size
))
11846 len
= c_strlen (src
, 1);
11847 if (! len
|| ! host_integerp (len
, 1))
11849 /* If LEN is not constant, try MAXLEN too.
11850 For MAXLEN only allow optimizing into non-_ocs function
11851 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11852 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11854 if (fcode
== BUILT_IN_STPCPY_CHK
)
11859 /* If return value of __stpcpy_chk is ignored,
11860 optimize into __strcpy_chk. */
11861 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
11865 return build_call_expr (fn
, 3, dest
, src
, size
);
11868 if (! len
|| TREE_SIDE_EFFECTS (len
))
11871 /* If c_strlen returned something, but not a constant,
11872 transform __strcpy_chk into __memcpy_chk. */
11873 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11877 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
11878 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
11879 build_call_expr (fn
, 4,
11880 dest
, src
, len
, size
));
11886 if (! tree_int_cst_lt (maxlen
, size
))
11890 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11891 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
11892 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
11896 return build_call_expr (fn
, 2, dest
, src
);
11899 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11900 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11901 length passed as third argument. */
/* Fold a __strncpy_chk call into plain strncpy when the object-size
   check can be discharged: SIZE unknown (-1), or LEN (resp. MAXLEN)
   provably <= SIZE.  Returns NULL_TREE (via the dropped paths)
   otherwise.
   NOTE(review): this extract is missing source lines from the original
   file (the trailing `tree maxlen)` parameter line, braces, NULL_TREE
   returns); not compilable as shown.  */
11904 fold_builtin_strncpy_chk (tree dest
, tree src
, tree len
, tree size
,
11909 if (!validate_arg (dest
, POINTER_TYPE
)
11910 || !validate_arg (src
, POINTER_TYPE
)
11911 || !validate_arg (len
, INTEGER_TYPE
)
11912 || !validate_arg (size
, INTEGER_TYPE
))
11915 if (! host_integerp (size
, 1))
11918 if (! integer_all_onesp (size
))
11920 if (! host_integerp (len
, 1))
11922 /* If LEN is not constant, try MAXLEN too.
11923 For MAXLEN only allow optimizing into non-_ocs function
11924 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11925 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11931 if (tree_int_cst_lt (size
, maxlen
))
11935 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11936 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
11940 return build_call_expr (fn
, 3, dest
, src
, len
);
11943 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11944 are the arguments to the call. */
/* Fold a __strcat_chk call: return DEST when SRC is the literal "",
   and rewrite to plain strcat when SIZE is the unknown object size
   (-1).  Returns NULL_TREE (via the dropped paths) otherwise.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns); not compilable as shown.  */
11947 fold_builtin_strcat_chk (tree fndecl
, tree dest
, tree src
, tree size
)
11952 if (!validate_arg (dest
, POINTER_TYPE
)
11953 || !validate_arg (src
, POINTER_TYPE
)
11954 || !validate_arg (size
, INTEGER_TYPE
))
11957 p
= c_getstr (src
);
11958 /* If the SRC parameter is "", return DEST. */
11959 if (p
&& *p
== '\0')
11960 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
11962 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
11965 /* If __builtin_strcat_chk is used, assume strcat is available. */
11966 fn
= built_in_decls
[BUILT_IN_STRCAT
];
11970 return build_call_expr (fn
, 2, dest
, src
);
11973 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
/* Fold a __strncat_chk call: return DEST for "" SRC or zero LEN,
   rewrite to __strcat_chk when LEN >= strlen (SRC), and to plain
   strncat when SIZE is the unknown object size (-1).  Returns
   NULL_TREE (via the dropped paths) otherwise.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns); not compilable as shown.  */
11977 fold_builtin_strncat_chk (tree fndecl
,
11978 tree dest
, tree src
, tree len
, tree size
)
11983 if (!validate_arg (dest
, POINTER_TYPE
)
11984 || !validate_arg (src
, POINTER_TYPE
)
/* NOTE(review): SIZE is validated twice below while LEN is never
   validated -- the second check presumably should be
   validate_arg (len, INTEGER_TYPE).  Confirm against upstream before
   changing.  */
11985 || !validate_arg (size
, INTEGER_TYPE
)
11986 || !validate_arg (size
, INTEGER_TYPE
))
11989 p
= c_getstr (src
);
11990 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11991 if (p
&& *p
== '\0')
11992 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11993 else if (integer_zerop (len
))
11994 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
11996 if (! host_integerp (size
, 1))
11999 if (! integer_all_onesp (size
))
12001 tree src_len
= c_strlen (src
, 1);
12003 && host_integerp (src_len
, 1)
12004 && host_integerp (len
, 1)
12005 && ! tree_int_cst_lt (len
, src_len
))
12007 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12008 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
12012 return build_call_expr (fn
, 3, dest
, src
, size
);
12017 /* If __builtin_strncat_chk is used, assume strncat is available. */
12018 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
12022 return build_call_expr (fn
, 3, dest
, src
, len
);
12025 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12026 a normal call should be emitted rather than expanding the function
12027 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
/* Fold a __sprintf_chk / __vsprintf_chk call EXP into plain
   {,v}sprintf when the object-size check can be discharged: SIZE
   unknown (-1), or the output length is known (format without '%', or
   "%s" with a literal argument) and < SIZE.  The FLAG argument must be
   0 unless the format is known '%'-free or "%s".  Returns NULL_TREE
   (via the dropped paths) otherwise.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns, the nargs check); not compilable as
   shown.  */
12030 fold_builtin_sprintf_chk (tree exp
, enum built_in_function fcode
)
12032 tree dest
, size
, len
, fn
, fmt
, flag
;
12033 const char *fmt_str
;
12034 int nargs
= call_expr_nargs (exp
);
12036 /* Verify the required arguments in the original call. */
12039 dest
= CALL_EXPR_ARG (exp
, 0);
12040 if (!validate_arg (dest
, POINTER_TYPE
))
12042 flag
= CALL_EXPR_ARG (exp
, 1);
12043 if (!validate_arg (flag
, INTEGER_TYPE
))
12045 size
= CALL_EXPR_ARG (exp
, 2);
12046 if (!validate_arg (size
, INTEGER_TYPE
))
12048 fmt
= CALL_EXPR_ARG (exp
, 3);
12049 if (!validate_arg (fmt
, POINTER_TYPE
))
12052 if (! host_integerp (size
, 1))
12057 if (!init_target_chars ())
12060 /* Check whether the format is a literal string constant. */
12061 fmt_str
= c_getstr (fmt
);
12062 if (fmt_str
!= NULL
)
12064 /* If the format doesn't contain % args or %%, we know the size. */
12065 if (strchr (fmt_str
, target_percent
) == 0)
12067 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
12068 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12070 /* If the format is "%s" and first ... argument is a string literal,
12071 we know the size too. */
12072 else if (fcode
== BUILT_IN_SPRINTF_CHK
12073 && strcmp (fmt_str
, target_percent_s
) == 0)
12079 arg
= CALL_EXPR_ARG (exp
, 4);
12080 if (validate_arg (arg
, POINTER_TYPE
))
12082 len
= c_strlen (arg
, 1);
12083 if (! len
|| ! host_integerp (len
, 1))
12090 if (! integer_all_onesp (size
))
12092 if (! len
|| ! tree_int_cst_lt (len
, size
))
12096 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12097 or if format doesn't contain % chars or is "%s". */
12098 if (! integer_zerop (flag
))
12100 if (fmt_str
== NULL
)
12102 if (strchr (fmt_str
, target_percent
) != NULL
12103 && strcmp (fmt_str
, target_percent_s
))
12107 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12108 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
12109 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
12113 return rewrite_call_expr (exp
, 4, fn
, 2, dest
, fmt
);
12116 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12117 a normal call should be emitted rather than expanding the function
12118 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12119 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12120 passed as second argument. */
/* Fold a __snprintf_chk / __vsnprintf_chk call EXP into plain
   {,v}snprintf when the object-size check can be discharged: SIZE
   unknown (-1), or LEN (resp. MAXLEN) provably <= SIZE.  The FLAG
   argument must be 0 unless the format is known '%'-free or "%s".
   Returns NULL_TREE (via the dropped paths) otherwise.
   NOTE(review): this extract is missing source lines from the original
   file (braces, NULL_TREE returns); not compilable as shown.  */
12123 fold_builtin_snprintf_chk (tree exp
, tree maxlen
,
12124 enum built_in_function fcode
)
12126 tree dest
, size
, len
, fn
, fmt
, flag
;
12127 const char *fmt_str
;
12129 /* Verify the required arguments in the original call. */
12130 if (call_expr_nargs (exp
) < 5)
12132 dest
= CALL_EXPR_ARG (exp
, 0);
12133 if (!validate_arg (dest
, POINTER_TYPE
))
12135 len
= CALL_EXPR_ARG (exp
, 1);
12136 if (!validate_arg (len
, INTEGER_TYPE
))
12138 flag
= CALL_EXPR_ARG (exp
, 2);
12139 if (!validate_arg (flag
, INTEGER_TYPE
))
12141 size
= CALL_EXPR_ARG (exp
, 3);
12142 if (!validate_arg (size
, INTEGER_TYPE
))
12144 fmt
= CALL_EXPR_ARG (exp
, 4);
12145 if (!validate_arg (fmt
, POINTER_TYPE
))
12148 if (! host_integerp (size
, 1))
12151 if (! integer_all_onesp (size
))
12153 if (! host_integerp (len
, 1))
12155 /* If LEN is not constant, try MAXLEN too.
12156 For MAXLEN only allow optimizing into non-_ocs function
12157 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12158 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12164 if (tree_int_cst_lt (size
, maxlen
))
12168 if (!init_target_chars ())
12171 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12172 or if format doesn't contain % chars or is "%s". */
12173 if (! integer_zerop (flag
))
12175 fmt_str
= c_getstr (fmt
);
12176 if (fmt_str
== NULL
)
12178 if (strchr (fmt_str
, target_percent
) != NULL
12179 && strcmp (fmt_str
, target_percent_s
))
12183 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
   available.  */
12185 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12186 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12190 return rewrite_call_expr (exp
, 5, fn
, 3, dest
, len
, fmt
);
12193 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12194 FMT and ARG are the arguments to the call; we don't fold cases with
12195 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12197 Return NULL_TREE if no simplification was possible, otherwise return the
12198 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12199 code of the function to be simplified. */
/* Fold a {,v}printf{,_unlocked} / __{,v}printf_chk call (result must be
   ignored): printf ("") folds to 0, a one-character literal becomes
   putchar, a "...\n" literal becomes puts of the shortened string,
   "%s\n" becomes puts (ARG), and "%c" becomes putchar (ARG).  The
   va_list (vprintf) variants only take the "%s" path.
   NOTE(review): this extract is missing source lines from the original
   file (braces, the IGNORE guard, NULL_TREE returns, parts of
   conditions); not compilable as shown.  */
12202 fold_builtin_printf (tree fndecl
, tree fmt
, tree arg
, bool ignore
,
12203 enum built_in_function fcode
)
12205 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12206 const char *fmt_str
= NULL
;
12208 /* If the return value is used, don't do the transformation. */
12212 /* Verify the required arguments in the original call. */
12213 if (!validate_arg (fmt
, POINTER_TYPE
))
12216 /* Check whether the format is a literal string constant. */
12217 fmt_str
= c_getstr (fmt
);
12218 if (fmt_str
== NULL
)
12221 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12223 /* If we're using an unlocked function, assume the other
12224 unlocked functions exist explicitly. */
12225 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12226 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12230 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12231 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12234 if (!init_target_chars ())
12237 if (strcmp (fmt_str
, target_percent_s
) == 0
12238 || strchr (fmt_str
, target_percent
) == NULL
)
12242 if (strcmp (fmt_str
, target_percent_s
) == 0)
12244 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12247 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12250 str
= c_getstr (arg
);
12256 /* The format specifier doesn't contain any '%' characters. */
12257 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12263 /* If the string was "", printf does nothing. */
12264 if (str
[0] == '\0')
12265 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12267 /* If the string has length of 1, call putchar. */
12268 if (str
[1] == '\0')
12270 /* Given printf("c"), (where c is any one character,)
12271 convert "c"[0] to an int and pass that to the replacement
   function.  */
12273 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12275 call
= build_call_expr (fn_putchar
, 1, newarg
);
12279 /* If the string was "string\n", call puts("string"). */
12280 size_t len
= strlen (str
);
12281 if ((unsigned char)str
[len
- 1] == target_newline
)
12283 /* Create a NUL-terminated string that's one char shorter
12284 than the original, stripping off the trailing '\n'. */
12285 char *newstr
= alloca (len
);
12286 memcpy (newstr
, str
, len
- 1);
12287 newstr
[len
- 1] = 0;
12289 newarg
= build_string_literal (len
, newstr
);
12291 call
= build_call_expr (fn_puts
, 1, newarg
);
12294 /* We'd like to arrange to call fputs(string,stdout) here,
12295 but we need stdout and don't have a way to get it yet. */
12300 /* The other optimizations can be done only on the non-va_list variants. */
12301 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12304 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12305 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12307 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12310 call
= build_call_expr (fn_puts
, 1, arg
);
12313 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12314 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12316 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12319 call
= build_call_expr (fn_putchar
, 1, arg
);
12325 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12328 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12329 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12330 more than 3 arguments, and ARG may be null in the 2-argument case.
12332 Return NULL_TREE if no simplification was possible, otherwise return the
12333 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12334 code of the function to be simplified. */
/* Fold a {,v}fprintf{,_unlocked} / __{,v}fprintf_chk call (result must
   be ignored): a '%'-free literal format becomes fputs (FMT, FP) (or
   folds to 0 when FMT is "" and FP has no side-effects), "%s" becomes
   fputs (ARG, FP), and "%c" becomes fputc (ARG, FP).  The va_list
   (vfprintf) variants are not transformed.
   NOTE(review): this extract is missing source lines from the original
   file (braces, the IGNORE guard, NULL_TREE returns, parts of
   conditions); not compilable as shown.  */
12337 fold_builtin_fprintf (tree fndecl
, tree fp
, tree fmt
, tree arg
, bool ignore
,
12338 enum built_in_function fcode
)
12340 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12341 const char *fmt_str
= NULL
;
12343 /* If the return value is used, don't do the transformation. */
12347 /* Verify the required arguments in the original call. */
12348 if (!validate_arg (fp
, POINTER_TYPE
))
12350 if (!validate_arg (fmt
, POINTER_TYPE
))
12353 /* Check whether the format is a literal string constant. */
12354 fmt_str
= c_getstr (fmt
);
12355 if (fmt_str
== NULL
)
12358 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12360 /* If we're using an unlocked function, assume the other
12361 unlocked functions exist explicitly. */
12362 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12363 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12367 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12368 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12371 if (!init_target_chars ())
12374 /* If the format doesn't contain % args or %%, use strcpy. */
12375 if (strchr (fmt_str
, target_percent
) == NULL
)
12377 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12381 /* If the format specifier was "", fprintf does nothing. */
12382 if (fmt_str
[0] == '\0')
12384 /* If FP has side-effects, just wait until gimplification is
   done.  */
12386 if (TREE_SIDE_EFFECTS (fp
))
12389 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12392 /* When "string" doesn't contain %, replace all cases of
12393 fprintf (fp, string) with fputs (string, fp). The fputs
12394 builtin will take care of special cases like length == 1. */
12396 call
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
12399 /* The other optimizations can be done only on the non-va_list variants. */
12400 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12403 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12404 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12406 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12409 call
= build_call_expr (fn_fputs
, 2, arg
, fp
);
12412 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12413 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12415 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12418 call
= build_call_expr (fn_fputc
, 2, arg
, fp
);
12423 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12426 /* Initialize format string characters in the target charset. */
12429 init_target_chars (void)
12434 target_newline
= lang_hooks
.to_target_charset ('\n');
12435 target_percent
= lang_hooks
.to_target_charset ('%');
12436 target_c
= lang_hooks
.to_target_charset ('c');
12437 target_s
= lang_hooks
.to_target_charset ('s');
12438 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12442 target_percent_c
[0] = target_percent
;
12443 target_percent_c
[1] = target_c
;
12444 target_percent_c
[2] = '\0';
12446 target_percent_s
[0] = target_percent
;
12447 target_percent_s
[1] = target_s
;
12448 target_percent_s
[2] = '\0';
12450 target_percent_s_newline
[0] = target_percent
;
12451 target_percent_s_newline
[1] = target_s
;
12452 target_percent_s_newline
[2] = target_newline
;
12453 target_percent_s_newline
[3] = '\0';
12460 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12461 and no overflow/underflow occurred. INEXACT is true if M was not
12462 exactly calculated. TYPE is the tree type for the result. This
12463 function assumes that you cleared the MPFR flags and then
12464 calculated M to see if anything subsequently set a flag prior to
12465 entering this function. Return NULL_TREE if any checks fail. */
12468 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12470 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12471 overflow/underflow occurred. If -frounding-math, proceed iff the
12472 result of calling FUNC was exact. */
12473 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12474 && (!flag_rounding_math
|| !inexact
))
12476 REAL_VALUE_TYPE rr
;
12478 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
12479 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12480 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12481 but the mpft_t is not, then we underflowed in the
12483 if (real_isfinite (&rr
)
12484 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12486 REAL_VALUE_TYPE rmode
;
12488 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12489 /* Proceed iff the specified mode can hold the value. */
12490 if (real_identical (&rmode
, &rr
))
12491 return build_real (type
, rmode
);
12497 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12498 FUNC on it and return the resulting value as a tree with type TYPE.
12499 If MIN and/or MAX are not NULL, then the supplied ARG must be
12500 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12501 acceptable values, otherwise they are not. The mpfr precision is
12502 set to the precision of TYPE. We assume that function FUNC returns
12503 zero if the result could be calculated exactly within the requested
12507 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
12508 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
12511 tree result
= NULL_TREE
;
12515 /* To proceed, MPFR must exactly represent the target floating point
12516 format, which only happens when the target base equals two. */
12517 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12518 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
12520 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12522 if (real_isfinite (ra
)
12523 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
12524 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
12526 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12530 mpfr_init2 (m
, prec
);
12531 mpfr_from_real (m
, ra
, GMP_RNDN
);
12532 mpfr_clear_flags ();
12533 inexact
= func (m
, m
, GMP_RNDN
);
12534 result
= do_mpfr_ckconv (m
, type
, inexact
);
12542 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12543 FUNC on it and return the resulting value as a tree with type TYPE.
12544 The mpfr precision is set to the precision of TYPE. We assume that
12545 function FUNC returns zero if the result could be calculated
12546 exactly within the requested precision. */
12549 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
12550 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12552 tree result
= NULL_TREE
;
12557 /* To proceed, MPFR must exactly represent the target floating point
12558 format, which only happens when the target base equals two. */
12559 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12560 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12561 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12563 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12564 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12566 if (real_isfinite (ra1
) && real_isfinite (ra2
))
12568 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12572 mpfr_inits2 (prec
, m1
, m2
, NULL
);
12573 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12574 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12575 mpfr_clear_flags ();
12576 inexact
= func (m1
, m1
, m2
, GMP_RNDN
);
12577 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12578 mpfr_clears (m1
, m2
, NULL
);
12585 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12586 FUNC on it and return the resulting value as a tree with type TYPE.
12587 The mpfr precision is set to the precision of TYPE. We assume that
12588 function FUNC returns zero if the result could be calculated
12589 exactly within the requested precision. */
12592 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
12593 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12595 tree result
= NULL_TREE
;
12601 /* To proceed, MPFR must exactly represent the target floating point
12602 format, which only happens when the target base equals two. */
12603 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12604 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12605 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
12606 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
12608 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12609 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12610 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
12612 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
12614 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12618 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
12619 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12620 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12621 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
12622 mpfr_clear_flags ();
12623 inexact
= func (m1
, m1
, m2
, m3
, GMP_RNDN
);
12624 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12625 mpfr_clears (m1
, m2
, m3
, NULL
);
12632 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12633 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12634 If ARG_SINP and ARG_COSP are NULL then the result is returned
12635 as a complex value.
12636 The type is taken from the type of ARG and is used for setting the
12637 precision of the calculation and results. */
12640 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
12642 tree
const type
= TREE_TYPE (arg
);
12643 tree result
= NULL_TREE
;
12647 /* To proceed, MPFR must exactly represent the target floating point
12648 format, which only happens when the target base equals two. */
12649 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12650 && TREE_CODE (arg
) == REAL_CST
12651 && !TREE_OVERFLOW (arg
))
12653 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12655 if (real_isfinite (ra
))
12657 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12658 tree result_s
, result_c
;
12662 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
12663 mpfr_from_real (m
, ra
, GMP_RNDN
);
12664 mpfr_clear_flags ();
12665 inexact
= mpfr_sin_cos (ms
, mc
, m
, GMP_RNDN
);
12666 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
12667 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
12668 mpfr_clears (m
, ms
, mc
, NULL
);
12669 if (result_s
&& result_c
)
12671 /* If we are to return in a complex value do so. */
12672 if (!arg_sinp
&& !arg_cosp
)
12673 return build_complex (build_complex_type (type
),
12674 result_c
, result_s
);
12676 /* Dereference the sin/cos pointer arguments. */
12677 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
12678 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
12679 /* Proceed if valid pointer type were passed in. */
12680 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
12681 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
12683 /* Set the values. */
12684 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
12686 TREE_SIDE_EFFECTS (result_s
) = 1;
12687 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
12689 TREE_SIDE_EFFECTS (result_c
) = 1;
12690 /* Combine the assignments into a compound expr. */
12691 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12692 result_s
, result_c
));
12700 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12701 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12702 two-argument mpfr order N Bessel function FUNC on them and return
12703 the resulting value as a tree with type TYPE. The mpfr precision
12704 is set to the precision of TYPE. We assume that function FUNC
12705 returns zero if the result could be calculated exactly within the
12706 requested precision. */
12708 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
12709 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
12710 const REAL_VALUE_TYPE
*min
, bool inclusive
)
12712 tree result
= NULL_TREE
;
12717 /* To proceed, MPFR must exactly represent the target floating point
12718 format, which only happens when the target base equals two. */
12719 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12720 && host_integerp (arg1
, 0)
12721 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12723 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
12724 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
12727 && real_isfinite (ra
)
12728 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
12730 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12734 mpfr_init2 (m
, prec
);
12735 mpfr_from_real (m
, ra
, GMP_RNDN
);
12736 mpfr_clear_flags ();
12737 inexact
= func (m
, n
, m
, GMP_RNDN
);
12738 result
= do_mpfr_ckconv (m
, type
, inexact
);
12746 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12747 the pointer *(ARG_QUO) and return the result. The type is taken
12748 from the type of ARG0 and is used for setting the precision of the
12749 calculation and results. */
12752 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
12754 tree
const type
= TREE_TYPE (arg0
);
12755 tree result
= NULL_TREE
;
12760 /* To proceed, MPFR must exactly represent the target floating point
12761 format, which only happens when the target base equals two. */
12762 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12763 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
12764 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
12766 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
12767 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
12769 if (real_isfinite (ra0
) && real_isfinite (ra1
))
12771 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12776 mpfr_inits2 (prec
, m0
, m1
, NULL
);
12777 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
12778 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12779 mpfr_clear_flags ();
12780 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, GMP_RNDN
);
12781 /* Remquo is independent of the rounding mode, so pass
12782 inexact=0 to do_mpfr_ckconv(). */
12783 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
12784 mpfr_clears (m0
, m1
, NULL
);
12787 /* MPFR calculates quo in the host's long so it may
12788 return more bits in quo than the target int can hold
12789 if sizeof(host long) > sizeof(target int). This can
12790 happen even for native compilers in LP64 mode. In
12791 these cases, modulo the quo value with the largest
12792 number that the target int can hold while leaving one
12793 bit for the sign. */
12794 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
12795 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
12797 /* Dereference the quo pointer argument. */
12798 arg_quo
= build_fold_indirect_ref (arg_quo
);
12799 /* Proceed iff a valid pointer type was passed in. */
12800 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
12802 /* Set the value. */
12803 tree result_quo
= fold_build2 (MODIFY_EXPR
,
12804 TREE_TYPE (arg_quo
), arg_quo
,
12805 build_int_cst (NULL
, integer_quo
));
12806 TREE_SIDE_EFFECTS (result_quo
) = 1;
12807 /* Combine the quo assignment with the rem. */
12808 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12809 result_quo
, result_rem
));
12817 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12818 resulting value as a tree with type TYPE. The mpfr precision is
12819 set to the precision of TYPE. We assume that this mpfr function
12820 returns zero if the result could be calculated exactly within the
12821 requested precision. In addition, the integer pointer represented
12822 by ARG_SG will be dereferenced and set to the appropriate signgam
12826 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
12828 tree result
= NULL_TREE
;
12832 /* To proceed, MPFR must exactly represent the target floating point
12833 format, which only happens when the target base equals two. Also
12834 verify ARG is a constant and that ARG_SG is an int pointer. */
12835 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12836 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
12837 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
12838 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
12840 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
12842 /* In addition to NaN and Inf, the argument cannot be zero or a
12843 negative integer. */
12844 if (real_isfinite (ra
)
12845 && ra
->cl
!= rvc_zero
12846 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
12848 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12853 mpfr_init2 (m
, prec
);
12854 mpfr_from_real (m
, ra
, GMP_RNDN
);
12855 mpfr_clear_flags ();
12856 inexact
= mpfr_lgamma (m
, &sg
, m
, GMP_RNDN
);
12857 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
12863 /* Dereference the arg_sg pointer argument. */
12864 arg_sg
= build_fold_indirect_ref (arg_sg
);
12865 /* Assign the signgam value into *arg_sg. */
12866 result_sg
= fold_build2 (MODIFY_EXPR
,
12867 TREE_TYPE (arg_sg
), arg_sg
,
12868 build_int_cst (NULL
, sg
));
12869 TREE_SIDE_EFFECTS (result_sg
) = 1;
12870 /* Combine the signgam assignment with the lgamma result. */
12871 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12872 result_sg
, result_lg
));