1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
41 #include "typeclass.h"
46 #include "langhooks.h"
/* Nonzero iff NODE's source-level name begins with "__builtin_", i.e. the
   user invoked the builtin by its explicit builtin name rather than the
   plain library name.  */
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
51 /* Register mappings for target machines without register windows. */
/* NOTE(review): the matching #endif for each #ifndef below is missing from
   this extraction (original numbering skips 54, 57-58, 61-62); restore them
   when reconstructing the file.  */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names
[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* Stringified name of every builtin: DEF_BUILTIN expands each entry of
   builtins.def to its stringified enum name ("#X,"), so including
   builtins.def here supplies the array initializer.  */
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names
[(int) END_BUILTINS
] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls
[(int) END_BUILTINS
];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls
[(int) END_BUILTINS
];
82 /* Trigonometric and mathematical constants used in builtin folding. */
/* dconstpi/dconste are filled in lazily by init_builtin_dconsts; the flag
   records whether that initialization has run yet.  */
83 static bool builtin_dconsts_init
= 0;
84 static REAL_VALUE_TYPE dconstpi
;
85 static REAL_VALUE_TYPE dconste
;
/* Forward declarations for this file's static helpers.
   NOTE(review): this extraction drops occasional lines here too (the
   embedded numbering skips e.g. 97, 158); verify the list against the
   original file before rebuilding.  */
87 static int get_pointer_alignment (tree
, unsigned int);
88 static tree
c_strlen (tree
, int);
89 static const char *c_getstr (tree
);
90 static rtx
c_readstr (const char *, enum machine_mode
);
91 static int target_char_cast (tree
, char *);
92 static rtx
get_memory_rtx (tree
);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx
result_vector (int, rtx
);
98 static rtx
expand_builtin_setjmp (tree
, rtx
);
99 static void expand_builtin_prefetch (tree
);
100 static rtx
expand_builtin_apply_args (void);
101 static rtx
expand_builtin_apply_args_1 (void);
102 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
103 static void expand_builtin_return (rtx
);
104 static enum type_class
type_to_class (tree
);
105 static rtx
expand_builtin_classify_type (tree
);
106 static void expand_errno_check (tree
, rtx
);
107 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
108 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
109 static rtx
expand_builtin_constant_p (tree
, enum machine_mode
);
110 static rtx
expand_builtin_args_info (tree
);
111 static rtx
expand_builtin_next_arg (tree
);
112 static rtx
expand_builtin_va_start (tree
);
113 static rtx
expand_builtin_va_end (tree
);
114 static rtx
expand_builtin_va_copy (tree
);
115 static rtx
expand_builtin_memcmp (tree
, tree
, rtx
, enum machine_mode
);
116 static rtx
expand_builtin_strcmp (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
118 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
119 static rtx
expand_builtin_strcat (tree
, rtx
, enum machine_mode
);
120 static rtx
expand_builtin_strncat (tree
, rtx
, enum machine_mode
);
121 static rtx
expand_builtin_strspn (tree
, rtx
, enum machine_mode
);
122 static rtx
expand_builtin_strcspn (tree
, rtx
, enum machine_mode
);
123 static rtx
expand_builtin_memcpy (tree
, rtx
, enum machine_mode
);
124 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
, int);
125 static rtx
expand_builtin_memmove (tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_bcopy (tree
);
127 static rtx
expand_builtin_strcpy (tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
129 static rtx
builtin_strncpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
130 static rtx
expand_builtin_strncpy (tree
, rtx
, enum machine_mode
);
131 static rtx
builtin_memset_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
132 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
133 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
134 static rtx
expand_builtin_bzero (tree
);
135 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
136 static rtx
expand_builtin_strstr (tree
, rtx
, enum machine_mode
);
137 static rtx
expand_builtin_strpbrk (tree
, rtx
, enum machine_mode
);
138 static rtx
expand_builtin_strchr (tree
, rtx
, enum machine_mode
);
139 static rtx
expand_builtin_strrchr (tree
, rtx
, enum machine_mode
);
140 static rtx
expand_builtin_alloca (tree
, rtx
);
141 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
142 static rtx
expand_builtin_frame_address (tree
, tree
);
143 static rtx
expand_builtin_fputs (tree
, int, int);
144 static rtx
expand_builtin_sprintf (tree
, rtx
, enum machine_mode
);
145 static tree
stabilize_va_list (tree
, int);
146 static rtx
expand_builtin_expect (tree
, rtx
);
147 static tree
fold_builtin_constant_p (tree
);
148 static tree
fold_builtin_classify_type (tree
);
149 static tree
fold_builtin_inf (tree
, int);
150 static tree
fold_builtin_nan (tree
, tree
, int);
151 static int validate_arglist (tree
, ...);
152 static tree
fold_trunc_transparent_mathfn (tree
);
153 static bool readonly_data_expr (tree
);
154 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
155 static rtx
expand_builtin_cabs (tree
, rtx
);
156 static void init_builtin_dconsts (void);
157 static tree
fold_builtin_cabs (tree
, tree
, tree
);
159 /* Initialize mathematical constants for constant folding builtins.
160 These constants need to be given to at least 160 bits precision. */
/* Fills dconstpi and dconste from decimal literals and marks the cached
   constants as initialized.
   NOTE(review): the function's return type line, braces and blank lines
   (original 161-162, 164, 169, 171) were dropped by this extraction; body
   kept verbatim.  */
163 init_builtin_dconsts (void)
165 real_from_string (&dconstpi
,
166 "3.1415926535897932384626433832795028841971693993751058209749445923078");
167 real_from_string (&dconste
,
168 "2.7182818284590452353602874713526624977572470936999595749669676277241");
170 builtin_dconsts_init
= true;
173 /* Return the alignment in bits of EXP, a pointer valued expression.
174 But don't return more than MAX_ALIGN no matter what.
175 The alignment returned is, by default, the alignment of the thing that
176 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
178 Otherwise, look at the expression to see if we can do better, i.e., if the
179 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): gaps in the embedded numbering (e.g. 187-188, 191-193,
   195-197, 201-202, 205-207, 210, 212-213, 216-218, 220-222, 232) mark
   dropped lines -- returns, braces, case labels and loop bodies.  The
   surviving code lines below are kept verbatim.  */
182 get_pointer_alignment (tree exp
, unsigned int max_align
)
184 unsigned int align
, inner
;
186 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
189 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
190 align
= MIN (align
, max_align
);
194 switch (TREE_CODE (exp
))
198 case NON_LVALUE_EXPR
:
199 exp
= TREE_OPERAND (exp
, 0);
200 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
203 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
204 align
= MIN (inner
, max_align
);
208 /* If sum of pointer + int, restrict our maximum alignment to that
209 imposed by the integer. If not, we can't do any better than
211 if (! host_integerp (TREE_OPERAND (exp
, 1), 1))
214 while (((tree_low_cst (TREE_OPERAND (exp
, 1), 1))
215 & (max_align
/ BITS_PER_UNIT
- 1))
219 exp
= TREE_OPERAND (exp
, 0);
223 /* See what we are pointing at and look at its alignment. */
224 exp
= TREE_OPERAND (exp
, 0);
225 if (TREE_CODE (exp
) == FUNCTION_DECL
)
226 align
= FUNCTION_BOUNDARY
;
227 else if (DECL_P (exp
))
228 align
= DECL_ALIGN (exp
);
229 #ifdef CONSTANT_ALIGNMENT
230 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
231 align
= CONSTANT_ALIGNMENT (exp
, align
);
233 return MIN (align
, max_align
);
241 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
242 way, because it could contain a zero byte in the middle.
243 TREE_STRING_LENGTH is the size of the character array, not the string.
245 ONLY_VALUE should be non-zero if the result is not going to be emitted
246 into the instruction stream and zero if it is going to be expanded.
247 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is non-zero, constant 3
248 is returned, otherwise NULL, since
249 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
250 evaluate the side-effects.
252 The value returned is of type `ssizetype'.
254 Unfortunately, string_constant can't access the values of const char
255 arrays with initializers, so neither can we do so here. */
/* NOTE(review): lossy extraction -- the return-type line, several local
   declarations (len1/len2/offset_node/ptr/max/i), braces and returns were
   dropped (numbering gaps, e.g. 256-257, 259-260, 262-265).  Code lines
   kept verbatim.  */
258 c_strlen (tree src
, int only_value
)
261 HOST_WIDE_INT offset
;
266 if (TREE_CODE (src
) == COND_EXPR
267 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
271 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
272 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
273 if (tree_int_cst_equal (len1
, len2
))
277 if (TREE_CODE (src
) == COMPOUND_EXPR
278 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
279 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
281 src
= string_constant (src
, &offset_node
);
285 max
= TREE_STRING_LENGTH (src
) - 1;
286 ptr
= TREE_STRING_POINTER (src
);
288 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
290 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
291 compute the offset to the following null if we don't know where to
292 start searching for it. */
295 for (i
= 0; i
< max
; i
++)
299 /* We don't know the starting offset, but we do know that the string
300 has no internal zero bytes. We can assume that the offset falls
301 within the bounds of the string; otherwise, the programmer deserves
302 what he gets. Subtract the offset from the length of the string,
303 and return that. This would perhaps not be valid if we were dealing
304 with named arrays in addition to literal string constants. */
306 return size_diffop (size_int (max
), offset_node
);
309 /* We have a known offset into the string. Start searching there for
310 a null character if we can represent it as a single HOST_WIDE_INT. */
311 if (offset_node
== 0)
313 else if (! host_integerp (offset_node
, 0))
316 offset
= tree_low_cst (offset_node
, 0);
318 /* If the offset is known to be out of bounds, warn, and call strlen at
320 if (offset
< 0 || offset
> max
)
322 warning ("offset outside bounds of constant string");
326 /* Use strlen to search for the first zero byte. Since any strings
327 constructed with build_string will have nulls appended, we win even
328 if we get handed something like (char[4])"abcd".
330 Since OFFSET is our starting index into the string, no further
331 calculation is needed. */
332 return ssize_int (strlen (ptr
+ offset
));
335 /* Return a char pointer for a C string if it is a string constant
336 or sum of string constant and integer constant. */
/* NOTE(review): the c_getstr signature, opening brace and the offset_node
   declaration (original lines 337-346) are missing from this extraction;
   only the body fragment survives, kept verbatim.  */
343 src
= string_constant (src
, &offset_node
);
347 if (offset_node
== 0)
348 return TREE_STRING_POINTER (src
);
349 else if (!host_integerp (offset_node
, 1)
350 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
353 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
356 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
357 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* Assembles bytes of STR into the two-HOST_WIDE_INT accumulator c[],
   honoring target byte/word endianness, then builds the constant.
   NOTE(review): the declarations of c[], i, j, ch and several braces were
   dropped by this extraction (numbering gaps 358-359, 361-365, 367-370,
   372-373, 379, 381-382, 385).  Code lines kept verbatim.  */
360 c_readstr (const char *str
, enum machine_mode mode
)
366 if (GET_MODE_CLASS (mode
) != MODE_INT
)
371 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
374 if (WORDS_BIG_ENDIAN
)
375 j
= GET_MODE_SIZE (mode
) - i
- 1;
376 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
377 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
378 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
380 if (j
> 2 * HOST_BITS_PER_WIDE_INT
)
383 ch
= (unsigned char) str
[i
];
384 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
386 return immed_double_const (c
[0], c
[1], mode
);
389 /* Cast a target constant CST to target CHAR and if that value fits into
390 host char type, return zero and put that value into variable pointed by
/* NOTE(review): the tail of the comment ("P.") and the function's
   return-type line were dropped (gaps 391-393).  hostval appears to be
   read below without a visible assignment -- the dropped lines 405-406
   presumably set it from val; confirm against the original file.  */
394 target_char_cast (tree cst
, char *p
)
396 unsigned HOST_WIDE_INT val
, hostval
;
398 if (!host_integerp (cst
, 1)
399 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
402 val
= tree_low_cst (cst
, 1);
403 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
404 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
407 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
408 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
417 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
418 times to get the address of either a higher stack frame, or a return
419 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): the parameter list is cut short (a third parameter after
   "int count," is on a dropped line), and braces/returns/#endif lines are
   missing throughout (numbering gaps 420-421, 423-426, 431, 433-434,
   441-443, 446, 451, 456-457, 460-461, 463, 466, 471+).  Code lines kept
   verbatim.  */
422 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
,
427 /* Some machines need special handling before we can access
428 arbitrary frames. For example, on the sparc, we must first flush
429 all register windows to the stack. */
430 #ifdef SETUP_FRAME_ADDRESSES
432 SETUP_FRAME_ADDRESSES ();
435 /* On the sparc, the return address is not in the frame, it is in a
436 register. There is no way to access it off of the current frame
437 pointer, but it can be accessed off the previous frame pointer by
438 reading the value from the register window save area. */
439 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
440 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
444 /* Scan back COUNT frames to the specified frame. */
445 for (i
= 0; i
< count
; i
++)
447 /* Assume the dynamic chain pointer is in the word that the
448 frame address points to, unless otherwise specified. */
449 #ifdef DYNAMIC_CHAIN_ADDRESS
450 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
452 tem
= memory_address (Pmode
, tem
);
453 tem
= gen_rtx_MEM (Pmode
, tem
);
454 set_mem_alias_set (tem
, get_frame_alias_set ());
455 tem
= copy_to_reg (tem
);
458 /* For __builtin_frame_address, return what we've got. */
459 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
462 /* For __builtin_return_address, Get the return address from that
464 #ifdef RETURN_ADDR_RTX
465 tem
= RETURN_ADDR_RTX (count
, tem
);
467 tem
= memory_address (Pmode
,
468 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
469 tem
= gen_rtx_MEM (Pmode
, tem
);
470 set_mem_alias_set (tem
, get_frame_alias_set ());
475 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; expand_builtin_setjmp_setup and
   expand_builtin_longjmp create it on first use via new_alias_set ().  */
476 static HOST_WIDE_INT setjmp_alias_set
= -1;
478 /* Construct the leading half of a __builtin_setjmp call. Control will
479 return to RECEIVER_LABEL. This is used directly by sjlj exception
/* Stores the frame pointer, the receiver label address and the saved
   stack pointer into the setjmp buffer at BUF_ADDR (slots 0, 1 and 2
   respectively, each Pmode-sized apart).
   NOTE(review): comment tail, return type, braces and declarations of
   mem/stack_save are on dropped lines (gaps 480-482, 484, 486-488, 491,
   495-496, 498-500, 504, 507-508, 512, 515, 518, 524, 529-530, 534,
   538+).  Code lines kept verbatim.  */
483 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
485 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
489 if (setjmp_alias_set
== -1)
490 setjmp_alias_set
= new_alias_set ();
492 #ifdef POINTERS_EXTEND_UNSIGNED
493 if (GET_MODE (buf_addr
) != Pmode
)
494 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
497 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
501 /* We store the frame pointer and the address of receiver_label in
502 the buffer and use the rest of it for the stack save area, which
503 is machine-dependent. */
505 #ifndef BUILTIN_SETJMP_FRAME_VALUE
506 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
509 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
510 set_mem_alias_set (mem
, setjmp_alias_set
);
511 emit_move_insn (mem
, BUILTIN_SETJMP_FRAME_VALUE
);
513 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
514 set_mem_alias_set (mem
, setjmp_alias_set
);
516 emit_move_insn (validize_mem (mem
),
517 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
519 stack_save
= gen_rtx_MEM (sa_mode
,
520 plus_constant (buf_addr
,
521 2 * GET_MODE_SIZE (Pmode
)));
522 set_mem_alias_set (stack_save
, setjmp_alias_set
);
523 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
525 /* If there is further processing to do, do it. */
526 #ifdef HAVE_builtin_setjmp_setup
527 if (HAVE_builtin_setjmp_setup
)
528 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
531 /* Tell optimize_save_area_alloca that extra work is going to
532 need to go on during alloca. */
533 current_function_calls_setjmp
= 1;
535 /* Set this so all the registers get saved in our frame; we need to be
536 able to copy the saved values for any registers from frames we unwind. */
537 current_function_has_nonlocal_label
= 1;
540 /* Construct the trailing part of a __builtin_setjmp call.
541 This is used directly by sjlj exception handling code. */
/* Emits the landing-pad code: marks FP used and the static chain
   clobbered, restores the (virtual) frame and argument pointers, runs any
   target-specific receiver patterns, and ends with an ASM_INPUT blockage.
   NOTE(review): return type, braces, declarations (e.g. loop index i) and
   #endif lines were dropped by this extraction (gaps 542-543, 545, 549,
   553, 559, 561, 564, 566, 568, 572-573, 575-576, 581-584, 588-589,
   593-596, 601).  Code lines kept verbatim.  */
544 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
546 /* Clobber the FP when we get here, so we have to make sure it's
547 marked as used by this function. */
548 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
550 /* Mark the static chain as clobbered here so life information
551 doesn't get messed up for it. */
552 emit_insn (gen_rtx_CLOBBER (VOIDmode
, static_chain_rtx
));
554 /* Now put in the code to restore the frame pointer, and argument
555 pointer, if needed. The code below is from expand_end_bindings
556 in stmt.c; see detailed documentation there. */
557 #ifdef HAVE_nonlocal_goto
558 if (! HAVE_nonlocal_goto
)
560 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
562 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
563 if (fixed_regs
[ARG_POINTER_REGNUM
])
565 #ifdef ELIMINABLE_REGS
567 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
569 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
570 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
571 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
574 if (i
== ARRAY_SIZE (elim_regs
))
577 /* Now restore our arg pointer from the address at which it
578 was saved in our stack frame. */
579 emit_move_insn (virtual_incoming_args_rtx
,
580 copy_to_reg (get_arg_pointer_save_area (cfun
)));
585 #ifdef HAVE_builtin_setjmp_receiver
586 if (HAVE_builtin_setjmp_receiver
)
587 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
590 #ifdef HAVE_nonlocal_goto_receiver
591 if (HAVE_nonlocal_goto_receiver
)
592 emit_insn (gen_nonlocal_goto_receiver ());
597 /* @@@ This is a kludge. Not all machine descriptions define a blockage
598 insn, but we must not allow the code we just generated to be reordered
599 by scheduling. Specifically, the update of the frame pointer must
600 happen immediately, not later. So emit an ASM_INPUT to act as blockage
602 emit_insn (gen_rtx_ASM_INPUT (VOIDmode
, ""));
605 /* __builtin_setjmp is passed a pointer to an array of five words (not
606 all will be used on all machines). It operates similarly to the C
607 library function of the same name, but is more efficient. Much of
608 the code below (and for longjmp) is copied from the handling of
611 NOTE: This is intended for use by GNAT and the exception handling
612 scheme in the compiler and will only work in the method used by
/* Expands __builtin_setjmp: validates the arglist, builds the buffer
   setup and receiver halves, and arranges for TARGET to hold 0 on the
   direct path and 1 on the longjmp path.
   NOTE(review): comment tail, return type, braces and the trailing return
   were dropped (gaps 609-610, 613-615, 617, 619, 621-622, 626, 628, 631,
   633, 637, 639, 641, 645, 649, 653+).  Code lines kept verbatim.  */
616 expand_builtin_setjmp (tree arglist
, rtx target
)
618 rtx buf_addr
, next_lab
, cont_lab
;
620 if (!validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
623 if (target
== 0 || GET_CODE (target
) != REG
624 || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
625 target
= gen_reg_rtx (TYPE_MODE (integer_type_node
));
627 buf_addr
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
629 next_lab
= gen_label_rtx ();
630 cont_lab
= gen_label_rtx ();
632 expand_builtin_setjmp_setup (buf_addr
, next_lab
);
634 /* Set TARGET to zero and branch to the continue label. */
635 emit_move_insn (target
, const0_rtx
);
636 emit_jump_insn (gen_jump (cont_lab
));
638 emit_label (next_lab
);
640 expand_builtin_setjmp_receiver (next_lab
);
642 /* Set TARGET to one. */
643 emit_move_insn (target
, const1_rtx
);
644 emit_label (cont_lab
);
646 /* Tell flow about the strange goings on. Putting `next_lab' on
647 `nonlocal_goto_handler_labels' to indicates that function
648 calls may traverse the arc back to this label. */
650 current_function_has_nonlocal_label
= 1;
651 nonlocal_goto_handler_labels
652 = gen_rtx_EXPR_LIST (VOIDmode
, next_lab
, nonlocal_goto_handler_labels
);
657 /* __builtin_longjmp is passed a pointer to an array of five words (not
658 all will be used on all machines). It operates similarly to the C
659 library function of the same name, but is more efficient. Much of
660 the code below is copied from the handling of non-local gotos.
662 NOTE: This is intended for use by GNAT and the exception handling
663 scheme in the compiler and will only work in the method used by
/* Reads FP, label and saved SP back out of the setjmp buffer (the same
   three slots expand_builtin_setjmp_setup wrote) and jumps, either via a
   target builtin_longjmp/nonlocal_goto pattern or by hand; then walks
   backwards over the emitted insns to tag the jump as REG_NON_LOCAL_GOTO.
   NOTE(review): comment tail, return type, braces, aborts and the end of
   the alloc_EXPR_LIST call were dropped (gaps 664-666, 668, 671, 674,
   678-679, 681, 688-689, 691, 696-698, 702, 708, 717-719, 721, 724,
   728-730, 737-739, 741, 743-745, 747+).  Code lines kept verbatim.  */
667 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
669 rtx fp
, lab
, stack
, insn
, last
;
670 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
672 if (setjmp_alias_set
== -1)
673 setjmp_alias_set
= new_alias_set ();
675 #ifdef POINTERS_EXTEND_UNSIGNED
676 if (GET_MODE (buf_addr
) != Pmode
)
677 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
680 buf_addr
= force_reg (Pmode
, buf_addr
);
682 /* We used to store value in static_chain_rtx, but that fails if pointers
683 are smaller than integers. We instead require that the user must pass
684 a second argument of 1, because that is what builtin_setjmp will
685 return. This also makes EH slightly more efficient, since we are no
686 longer copying around a value that we don't care about. */
687 if (value
!= const1_rtx
)
690 current_function_calls_longjmp
= 1;
692 last
= get_last_insn ();
693 #ifdef HAVE_builtin_longjmp
694 if (HAVE_builtin_longjmp
)
695 emit_insn (gen_builtin_longjmp (buf_addr
));
699 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
700 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
701 GET_MODE_SIZE (Pmode
)));
703 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
704 2 * GET_MODE_SIZE (Pmode
)));
705 set_mem_alias_set (fp
, setjmp_alias_set
);
706 set_mem_alias_set (lab
, setjmp_alias_set
);
707 set_mem_alias_set (stack
, setjmp_alias_set
);
709 /* Pick up FP, label, and SP from the block and jump. This code is
710 from expand_goto in stmt.c; see there for detailed comments. */
711 #if HAVE_nonlocal_goto
712 if (HAVE_nonlocal_goto
)
713 /* We have to pass a value to the nonlocal_goto pattern that will
714 get copied into the static_chain pointer, but it does not matter
715 what that value is, because builtin_setjmp does not use it. */
716 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
720 lab
= copy_to_reg (lab
);
722 emit_move_insn (hard_frame_pointer_rtx
, fp
);
723 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
725 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
726 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
727 emit_indirect_jump (lab
);
731 /* Search backwards and mark the jump insn as a non-local goto.
732 Note that this precludes the use of __builtin_longjmp to a
733 __builtin_setjmp target in the same function. However, we've
734 already cautioned the user that these functions are for
735 internal exception handling use only. */
736 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
740 if (GET_CODE (insn
) == JUMP_INSN
)
742 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
, const0_rtx
,
746 else if (GET_CODE (insn
) == CALL_INSN
)
751 /* Expand a call to __builtin_prefetch. For a target that does not support
752 data prefetch, evaluate the memory address argument in case it has side
/* Defaults: arg1 (read/write) = 0, arg2 (locality) = 3.  Bad constant
   values are diagnosed and replaced with zero.  When the target has a
   prefetch pattern, emits it; otherwise only evaluates the address
   operand for its side effects.
   NOTE(review): comment tail, return type, braces, the zero-replacement
   assignments after the warnings, and the #ifdef HAVE_prefetch guard
   around the insn emission were dropped (gaps 753-755, 757, 759-760,
   762-763, 767, 769, 773, 775-777, 780-781, 784, 787, 790, 794, 796-798,
   801, 804, 808, 810-815, 817, 820, 824, 826, 828-830, 835+).  Code
   lines kept verbatim.  */
756 expand_builtin_prefetch (tree arglist
)
758 tree arg0
, arg1
, arg2
;
761 if (!validate_arglist (arglist
, POINTER_TYPE
, 0))
764 arg0
= TREE_VALUE (arglist
);
765 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
766 zero (read) and argument 2 (locality) defaults to 3 (high degree of
768 if (TREE_CHAIN (arglist
))
770 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
771 if (TREE_CHAIN (TREE_CHAIN (arglist
)))
772 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
774 arg2
= build_int_2 (3, 0);
778 arg1
= integer_zero_node
;
779 arg2
= build_int_2 (3, 0);
782 /* Argument 0 is an address. */
783 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
785 /* Argument 1 (read/write flag) must be a compile-time constant int. */
786 if (TREE_CODE (arg1
) != INTEGER_CST
)
788 error ("second arg to `__builtin_prefetch' must be a constant");
789 arg1
= integer_zero_node
;
791 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
792 /* Argument 1 must be either zero or one. */
793 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
795 warning ("invalid second arg to __builtin_prefetch; using zero");
799 /* Argument 2 (locality) must be a compile-time constant int. */
800 if (TREE_CODE (arg2
) != INTEGER_CST
)
802 error ("third arg to `__builtin_prefetch' must be a constant");
803 arg2
= integer_zero_node
;
805 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
806 /* Argument 2 must be 0, 1, 2, or 3. */
807 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
809 warning ("invalid third arg to __builtin_prefetch; using zero");
816 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
818 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
819 || (GET_MODE(op0
) != Pmode
))
821 #ifdef POINTERS_EXTEND_UNSIGNED
822 if (GET_MODE(op0
) != Pmode
)
823 op0
= convert_memory_address (Pmode
, op0
);
825 op0
= force_reg (Pmode
, op0
);
827 emit_insn (gen_prefetch (op0
, op1
, op2
));
831 op0
= protect_from_queue (op0
, 0);
832 /* Don't do anything with direct references to volatile memory, but
833 generate code to handle other side effects. */
834 if (GET_CODE (op0
) != MEM
&& side_effects_p (op0
))
838 /* Get a MEM rtx for expression EXP which is the address of an operand
839 to be used in a string instruction (cmpstrsi, movstrsi, ..). */
/* NOTE(review): return type, braces, the declaration of mem and the final
   return were dropped (gaps 840-841, 843, 845-846, 850-851, 853, 861,
   863, 866, 868, 872+).  Code lines kept verbatim.  */
842 get_memory_rtx (tree exp
)
844 rtx addr
= expand_expr (exp
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
847 #ifdef POINTERS_EXTEND_UNSIGNED
848 if (GET_MODE (addr
) != Pmode
)
849 addr
= convert_memory_address (Pmode
, addr
);
852 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
854 /* Get an expression we can use to find the attributes to assign to MEM.
855 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
856 we can. First remove any nops. */
857 while ((TREE_CODE (exp
) == NOP_EXPR
|| TREE_CODE (exp
) == CONVERT_EXPR
858 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
859 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
860 exp
= TREE_OPERAND (exp
, 0);
862 if (TREE_CODE (exp
) == ADDR_EXPR
)
864 exp
= TREE_OPERAND (exp
, 0);
865 set_mem_attributes (mem
, exp
, 0);
867 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
869 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
870 /* memcpy, memset and other builtin stringops can alias with anything. */
871 set_mem_alias_set (mem
, 0);
877 /* Built-in functions to perform an untyped call and return. */
879 /* For each register that may be used for calling a function, this
880 gives a mode used to copy the register's value. VOIDmode indicates
881 the register is not used for calling a function. If the machine
882 has register windows, this gives only the outbound registers.
883 INCOMING_REGNO gives the corresponding inbound register. */
884 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
886 /* For each register that may be used for returning values, this gives
887 a mode used to copy the register's value. VOIDmode indicates the
888 register is not used for returning values. If the machine has
889 register windows, this gives only the outbound registers.
890 INCOMING_REGNO gives the corresponding inbound register. */
891 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
893 /* For each register that may be used for calling a function, this
894 gives the offset of that register into the block returned by
895 __builtin_apply_args. 0 indicates that the register is not
896 used for calling a function. */
897 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
899 /* Return the offset of register REGNO into the block returned by
900 __builtin_apply_args. This is not declared static, since it is
901 needed in objc-act.c. */
/* NOTE(review): the return type line and braces were dropped (gaps
   902-903, 905-907, 912, 914+); the table lookup below presumably relies
   on apply_args_size having filled apply_args_reg_offset -- the dropped
   lines likely contained that call; confirm against the original.  */
904 apply_args_register_offset (int regno
)
908 /* Arguments are always put in outgoing registers (in the argument
909 block) if such make sense. */
910 #ifdef OUTGOING_REGNO
911 regno
= OUTGOING_REGNO (regno
);
913 return apply_args_reg_offset
[regno
];
916 /* Return the size required for the block returned by __builtin_apply_args,
917 and initialize apply_args_mode. */
/* Computes (once; SIZE is a cached static) the total byte size of the
   argument block: incoming arg-pointer, optional structure-value address,
   then every FUNCTION_ARG_REGNO_P register, each aligned to its mode.
   The mode search tries integer, float, vector-float then vector-int
   classes in turn.
   NOTE(review): braces, the early-return for the cached case, loop upper
   bounds, "best_mode = mode" bodies, "mode = best_mode" and abort lines
   were dropped (gaps 918-919, 921, 923-924, 926, 928-929, 932, 937, 940,
   944, 946, 950-951, 954, 958-959, 962, 966-967, 970, 974-976, 978-979,
   986-988, 991-995).  Code lines kept verbatim.  */
920 apply_args_size (void)
922 static int size
= -1;
925 enum machine_mode mode
;
927 /* The values computed by this function never change. */
930 /* The first value is the incoming arg-pointer. */
931 size
= GET_MODE_SIZE (Pmode
);
933 /* The second value is the structure value address unless this is
934 passed as an "invisible" first argument. */
935 if (struct_value_rtx
)
936 size
+= GET_MODE_SIZE (Pmode
);
938 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
939 if (FUNCTION_ARG_REGNO_P (regno
))
941 /* Search for the proper mode for copying this register's
942 value. I'm not sure this is right, but it works so far. */
943 enum machine_mode best_mode
= VOIDmode
;
945 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
947 mode
= GET_MODE_WIDER_MODE (mode
))
948 if (HARD_REGNO_MODE_OK (regno
, mode
)
949 && HARD_REGNO_NREGS (regno
, mode
) == 1)
952 if (best_mode
== VOIDmode
)
953 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
955 mode
= GET_MODE_WIDER_MODE (mode
))
956 if (HARD_REGNO_MODE_OK (regno
, mode
)
957 && have_insn_for (SET
, mode
))
960 if (best_mode
== VOIDmode
)
961 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT
);
963 mode
= GET_MODE_WIDER_MODE (mode
))
964 if (HARD_REGNO_MODE_OK (regno
, mode
)
965 && have_insn_for (SET
, mode
))
968 if (best_mode
== VOIDmode
)
969 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT
);
971 mode
= GET_MODE_WIDER_MODE (mode
))
972 if (HARD_REGNO_MODE_OK (regno
, mode
)
973 && have_insn_for (SET
, mode
))
977 if (mode
== VOIDmode
)
980 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
981 if (size
% align
!= 0)
982 size
= CEIL (size
, align
) * align
;
983 apply_args_reg_offset
[regno
] = size
;
984 size
+= GET_MODE_SIZE (mode
);
985 apply_args_mode
[regno
] = mode
;
989 apply_args_mode
[regno
] = VOIDmode
;
990 apply_args_reg_offset
[regno
] = 0;
996 /* Return the size required for the block returned by __builtin_apply,
997 and initialize apply_result_mode. */
/* Mirror of apply_args_size for FUNCTION_VALUE_REGNO_P registers; SIZE is
   cached in a static, and APPLY_RESULT_SIZE may override the total.
   NOTE(review): braces, the cached-case early return, the initial
   "size = 0", loop upper bounds, "best_mode = mode" bodies,
   "mode = best_mode", abort and the final return were dropped (gaps
   998-999, 1001, 1003, 1005, 1007-1010, 1013, 1017, 1019, 1022-1023,
   1026, 1030-1031, 1034, 1038-1039, 1042, 1046-1048, 1050-1051,
   1057-1058, 1060, 1065+).  Code lines kept verbatim.  */
1000 apply_result_size (void)
1002 static int size
= -1;
1004 enum machine_mode mode
;
1006 /* The values computed by this function never change. */
1011 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1012 if (FUNCTION_VALUE_REGNO_P (regno
))
1014 /* Search for the proper mode for copying this register's
1015 value. I'm not sure this is right, but it works so far. */
1016 enum machine_mode best_mode
= VOIDmode
;
1018 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1020 mode
= GET_MODE_WIDER_MODE (mode
))
1021 if (HARD_REGNO_MODE_OK (regno
, mode
))
1024 if (best_mode
== VOIDmode
)
1025 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
1027 mode
= GET_MODE_WIDER_MODE (mode
))
1028 if (HARD_REGNO_MODE_OK (regno
, mode
)
1029 && have_insn_for (SET
, mode
))
1032 if (best_mode
== VOIDmode
)
1033 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT
);
1035 mode
= GET_MODE_WIDER_MODE (mode
))
1036 if (HARD_REGNO_MODE_OK (regno
, mode
)
1037 && have_insn_for (SET
, mode
))
1040 if (best_mode
== VOIDmode
)
1041 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT
);
1043 mode
= GET_MODE_WIDER_MODE (mode
))
1044 if (HARD_REGNO_MODE_OK (regno
, mode
)
1045 && have_insn_for (SET
, mode
))
1049 if (mode
== VOIDmode
)
1052 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1053 if (size
% align
!= 0)
1054 size
= CEIL (size
, align
) * align
;
1055 size
+= GET_MODE_SIZE (mode
);
1056 apply_result_mode
[regno
] = mode
;
1059 apply_result_mode
[regno
] = VOIDmode
;
1061 /* Allow targets that use untyped_call and untyped_return to override
1062 the size so that machine-specific information can be stored here. */
1063 #ifdef APPLY_RESULT_SIZE
1064 size
= APPLY_RESULT_SIZE
;
1070 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1071 /* Create a vector describing the result block RESULT. If SAVEP is true,
1072 the result block is used to save the values; otherwise it is used to
1073 restore the values. */
1076 result_vector (int savep
, rtx result
)
1078 int regno
, size
, align
, nelts
;
1079 enum machine_mode mode
;
1081 rtx
*savevec
= (rtx
*) alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
1084 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1085 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1087 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1088 if (size
% align
!= 0)
1089 size
= CEIL (size
, align
) * align
;
1090 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1091 mem
= adjust_address (result
, mode
, size
);
1092 savevec
[nelts
++] = (savep
1093 ? gen_rtx_SET (VOIDmode
, mem
, reg
)
1094 : gen_rtx_SET (VOIDmode
, reg
, mem
));
1095 size
+= GET_MODE_SIZE (mode
);
1097 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1099 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1101 /* Save the state required to perform an untyped call with the same
1102 arguments as were passed to the current function. */
1105 expand_builtin_apply_args_1 (void)
1108 int size
, align
, regno
;
1109 enum machine_mode mode
;
1111 /* Create a block where the arg-pointer, structure value address,
1112 and argument registers can be saved. */
1113 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1115 /* Walk past the arg-pointer and structure value address. */
1116 size
= GET_MODE_SIZE (Pmode
);
1117 if (struct_value_rtx
)
1118 size
+= GET_MODE_SIZE (Pmode
);
1120 /* Save each register used in calling a function to the block. */
1121 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1122 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1126 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1127 if (size
% align
!= 0)
1128 size
= CEIL (size
, align
) * align
;
1130 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1132 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1133 size
+= GET_MODE_SIZE (mode
);
1136 /* Save the arg pointer to the block. */
1137 emit_move_insn (adjust_address (registers
, Pmode
, 0),
1138 copy_to_reg (virtual_incoming_args_rtx
));
1139 size
= GET_MODE_SIZE (Pmode
);
1141 /* Save the structure value address unless this is passed as an
1142 "invisible" first argument. */
1143 if (struct_value_incoming_rtx
)
1145 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1146 copy_to_reg (struct_value_incoming_rtx
));
1147 size
+= GET_MODE_SIZE (Pmode
);
1150 /* Return the address of the block. */
1151 return copy_addr_to_reg (XEXP (registers
, 0));
1154 /* __builtin_apply_args returns block of memory allocated on
1155 the stack into which is stored the arg pointer, structure
1156 value address, static chain, and all the registers that might
1157 possibly be used in performing a function call. The code is
1158 moved to the start of the function so the incoming values are
1162 expand_builtin_apply_args (void)
1164 /* Don't do __builtin_apply_args more than once in a function.
1165 Save the result of the first call and reuse it. */
1166 if (apply_args_value
!= 0)
1167 return apply_args_value
;
1169 /* When this function is called, it means that registers must be
1170 saved on entry to this function. So we migrate the
1171 call to the first insn of this function. */
1176 temp
= expand_builtin_apply_args_1 ();
1180 apply_args_value
= temp
;
1182 /* Put the insns after the NOTE that starts the function.
1183 If this is inside a start_sequence, make the outer-level insn
1184 chain current, so the code is placed at the start of the
1186 push_topmost_sequence ();
1187 emit_insn_before (seq
, NEXT_INSN (get_insns ()));
1188 pop_topmost_sequence ();
1193 /* Perform an untyped call and save the state required to perform an
1194 untyped return of whatever value was returned by the given function. */
1197 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1199 int size
, align
, regno
;
1200 enum machine_mode mode
;
1201 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1202 rtx old_stack_level
= 0;
1203 rtx call_fusage
= 0;
1205 #ifdef POINTERS_EXTEND_UNSIGNED
1206 if (GET_MODE (arguments
) != Pmode
)
1207 arguments
= convert_memory_address (Pmode
, arguments
);
1210 /* Create a block where the return registers can be saved. */
1211 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1213 /* Fetch the arg pointer from the ARGUMENTS block. */
1214 incoming_args
= gen_reg_rtx (Pmode
);
1215 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1216 #ifndef STACK_GROWS_DOWNWARD
1217 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1218 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1221 /* Perform postincrements before actually calling the function. */
1224 /* Push a new argument block and copy the arguments. Do not allow
1225 the (potential) memcpy call below to interfere with our stack
1227 do_pending_stack_adjust ();
1230 /* Save the stack with nonlocal if available */
1231 #ifdef HAVE_save_stack_nonlocal
1232 if (HAVE_save_stack_nonlocal
)
1233 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1236 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1238 /* Push a block of memory onto the stack to store the memory arguments.
1239 Save the address in a register, and copy the memory arguments. ??? I
1240 haven't figured out how the calling convention macros effect this,
1241 but it's likely that the source and/or destination addresses in
1242 the block copy will need updating in machine specific ways. */
1243 dest
= allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1244 dest
= gen_rtx_MEM (BLKmode
, dest
);
1245 set_mem_align (dest
, PARM_BOUNDARY
);
1246 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1247 set_mem_align (src
, PARM_BOUNDARY
);
1248 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1250 /* Refer to the argument block. */
1252 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1253 set_mem_align (arguments
, PARM_BOUNDARY
);
1255 /* Walk past the arg-pointer and structure value address. */
1256 size
= GET_MODE_SIZE (Pmode
);
1257 if (struct_value_rtx
)
1258 size
+= GET_MODE_SIZE (Pmode
);
1260 /* Restore each of the registers previously saved. Make USE insns
1261 for each of these registers for use in making the call. */
1262 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1263 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1265 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1266 if (size
% align
!= 0)
1267 size
= CEIL (size
, align
) * align
;
1268 reg
= gen_rtx_REG (mode
, regno
);
1269 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1270 use_reg (&call_fusage
, reg
);
1271 size
+= GET_MODE_SIZE (mode
);
1274 /* Restore the structure value address unless this is passed as an
1275 "invisible" first argument. */
1276 size
= GET_MODE_SIZE (Pmode
);
1277 if (struct_value_rtx
)
1279 rtx value
= gen_reg_rtx (Pmode
);
1280 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1281 emit_move_insn (struct_value_rtx
, value
);
1282 if (GET_CODE (struct_value_rtx
) == REG
)
1283 use_reg (&call_fusage
, struct_value_rtx
);
1284 size
+= GET_MODE_SIZE (Pmode
);
1287 /* All arguments and registers used for the call are set up by now! */
1288 function
= prepare_call_address (function
, NULL_TREE
, &call_fusage
, 0, 0);
1290 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1291 and we don't want to load it into a register as an optimization,
1292 because prepare_call_address already did it if it should be done. */
1293 if (GET_CODE (function
) != SYMBOL_REF
)
1294 function
= memory_address (FUNCTION_MODE
, function
);
1296 /* Generate the actual call instruction and save the return value. */
1297 #ifdef HAVE_untyped_call
1298 if (HAVE_untyped_call
)
1299 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1300 result
, result_vector (1, result
)));
1303 #ifdef HAVE_call_value
1304 if (HAVE_call_value
)
1308 /* Locate the unique return register. It is not possible to
1309 express a call that sets more than one return register using
1310 call_value; use untyped_call for that. In fact, untyped_call
1311 only needs to save the return registers in the given block. */
1312 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1313 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1316 abort (); /* HAVE_untyped_call required. */
1317 valreg
= gen_rtx_REG (mode
, regno
);
1320 emit_call_insn (GEN_CALL_VALUE (valreg
,
1321 gen_rtx_MEM (FUNCTION_MODE
, function
),
1322 const0_rtx
, NULL_RTX
, const0_rtx
));
1324 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1330 /* Find the CALL insn we just emitted, and attach the register usage
1332 call_insn
= last_call_insn ();
1333 add_function_usage_to (call_insn
, call_fusage
);
1335 /* Restore the stack. */
1336 #ifdef HAVE_save_stack_nonlocal
1337 if (HAVE_save_stack_nonlocal
)
1338 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1341 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1345 /* Return the address of the result block. */
1346 return copy_addr_to_reg (XEXP (result
, 0));
1349 /* Perform an untyped return. */
1352 expand_builtin_return (rtx result
)
1354 int size
, align
, regno
;
1355 enum machine_mode mode
;
1357 rtx call_fusage
= 0;
1359 #ifdef POINTERS_EXTEND_UNSIGNED
1360 if (GET_MODE (result
) != Pmode
)
1361 result
= convert_memory_address (Pmode
, result
);
1364 apply_result_size ();
1365 result
= gen_rtx_MEM (BLKmode
, result
);
1367 #ifdef HAVE_untyped_return
1368 if (HAVE_untyped_return
)
1370 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1376 /* Restore the return value and note that each value is used. */
1378 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1379 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1381 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1382 if (size
% align
!= 0)
1383 size
= CEIL (size
, align
) * align
;
1384 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1385 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1387 push_to_sequence (call_fusage
);
1388 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1389 call_fusage
= get_insns ();
1391 size
+= GET_MODE_SIZE (mode
);
1394 /* Put the USE insns before the return. */
1395 emit_insn (call_fusage
);
1397 /* Return whatever values was restored by jumping directly to the end
1399 expand_null_return ();
1402 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1404 static enum type_class
1405 type_to_class (tree type
)
1407 switch (TREE_CODE (type
))
1409 case VOID_TYPE
: return void_type_class
;
1410 case INTEGER_TYPE
: return integer_type_class
;
1411 case CHAR_TYPE
: return char_type_class
;
1412 case ENUMERAL_TYPE
: return enumeral_type_class
;
1413 case BOOLEAN_TYPE
: return boolean_type_class
;
1414 case POINTER_TYPE
: return pointer_type_class
;
1415 case REFERENCE_TYPE
: return reference_type_class
;
1416 case OFFSET_TYPE
: return offset_type_class
;
1417 case REAL_TYPE
: return real_type_class
;
1418 case COMPLEX_TYPE
: return complex_type_class
;
1419 case FUNCTION_TYPE
: return function_type_class
;
1420 case METHOD_TYPE
: return method_type_class
;
1421 case RECORD_TYPE
: return record_type_class
;
1423 case QUAL_UNION_TYPE
: return union_type_class
;
1424 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1425 ? string_type_class
: array_type_class
);
1426 case SET_TYPE
: return set_type_class
;
1427 case FILE_TYPE
: return file_type_class
;
1428 case LANG_TYPE
: return lang_type_class
;
1429 default: return no_type_class
;
1433 /* Expand a call to __builtin_classify_type with arguments found in
1437 expand_builtin_classify_type (tree arglist
)
1440 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist
))));
1441 return GEN_INT (no_type_class
);
1444 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1447 expand_builtin_constant_p (tree arglist
, enum machine_mode target_mode
)
1453 arglist
= TREE_VALUE (arglist
);
1455 /* We have taken care of the easy cases during constant folding. This
1456 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1457 get a chance to see if it can deduce whether ARGLIST is constant. */
1459 current_function_calls_constant_p
= 1;
1461 tmp
= expand_expr (arglist
, NULL_RTX
, VOIDmode
, 0);
1462 tmp
= gen_rtx_CONSTANT_P_RTX (target_mode
, tmp
);
1466 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1469 mathfn_built_in (tree type
, enum built_in_function fn
)
1471 enum built_in_function fcode
= NOT_BUILT_IN
;
1472 if (TYPE_MODE (type
) == TYPE_MODE (double_type_node
))
1476 case BUILT_IN_SQRTF
:
1477 case BUILT_IN_SQRTL
:
1478 fcode
= BUILT_IN_SQRT
;
1483 fcode
= BUILT_IN_SIN
;
1488 fcode
= BUILT_IN_COS
;
1493 fcode
= BUILT_IN_EXP
;
1498 fcode
= BUILT_IN_LOG
;
1503 fcode
= BUILT_IN_TAN
;
1506 case BUILT_IN_ATANF
:
1507 case BUILT_IN_ATANL
:
1508 fcode
= BUILT_IN_ATAN
;
1510 case BUILT_IN_FLOOR
:
1511 case BUILT_IN_FLOORF
:
1512 case BUILT_IN_FLOORL
:
1513 fcode
= BUILT_IN_FLOOR
;
1516 case BUILT_IN_CEILF
:
1517 case BUILT_IN_CEILL
:
1518 fcode
= BUILT_IN_CEIL
;
1520 case BUILT_IN_TRUNC
:
1521 case BUILT_IN_TRUNCF
:
1522 case BUILT_IN_TRUNCL
:
1523 fcode
= BUILT_IN_TRUNC
;
1525 case BUILT_IN_ROUND
:
1526 case BUILT_IN_ROUNDF
:
1527 case BUILT_IN_ROUNDL
:
1528 fcode
= BUILT_IN_ROUND
;
1530 case BUILT_IN_NEARBYINT
:
1531 case BUILT_IN_NEARBYINTF
:
1532 case BUILT_IN_NEARBYINTL
:
1533 fcode
= BUILT_IN_NEARBYINT
;
1538 else if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
))
1542 case BUILT_IN_SQRTF
:
1543 case BUILT_IN_SQRTL
:
1544 fcode
= BUILT_IN_SQRTF
;
1549 fcode
= BUILT_IN_SINF
;
1554 fcode
= BUILT_IN_COSF
;
1559 fcode
= BUILT_IN_EXPF
;
1564 fcode
= BUILT_IN_LOGF
;
1569 fcode
= BUILT_IN_TANF
;
1572 case BUILT_IN_ATANF
:
1573 case BUILT_IN_ATANL
:
1574 fcode
= BUILT_IN_ATANF
;
1576 case BUILT_IN_FLOOR
:
1577 case BUILT_IN_FLOORF
:
1578 case BUILT_IN_FLOORL
:
1579 fcode
= BUILT_IN_FLOORF
;
1582 case BUILT_IN_CEILF
:
1583 case BUILT_IN_CEILL
:
1584 fcode
= BUILT_IN_CEILF
;
1586 case BUILT_IN_TRUNC
:
1587 case BUILT_IN_TRUNCF
:
1588 case BUILT_IN_TRUNCL
:
1589 fcode
= BUILT_IN_TRUNCF
;
1591 case BUILT_IN_ROUND
:
1592 case BUILT_IN_ROUNDF
:
1593 case BUILT_IN_ROUNDL
:
1594 fcode
= BUILT_IN_ROUNDF
;
1596 case BUILT_IN_NEARBYINT
:
1597 case BUILT_IN_NEARBYINTF
:
1598 case BUILT_IN_NEARBYINTL
:
1599 fcode
= BUILT_IN_NEARBYINTF
;
1604 else if (TYPE_MODE (type
) == TYPE_MODE (long_double_type_node
))
1608 case BUILT_IN_SQRTF
:
1609 case BUILT_IN_SQRTL
:
1610 fcode
= BUILT_IN_SQRTL
;
1615 fcode
= BUILT_IN_SINL
;
1620 fcode
= BUILT_IN_COSL
;
1625 fcode
= BUILT_IN_EXPL
;
1630 fcode
= BUILT_IN_LOGL
;
1635 fcode
= BUILT_IN_TANL
;
1638 case BUILT_IN_ATANF
:
1639 case BUILT_IN_ATANL
:
1640 fcode
= BUILT_IN_ATANL
;
1642 case BUILT_IN_FLOOR
:
1643 case BUILT_IN_FLOORF
:
1644 case BUILT_IN_FLOORL
:
1645 fcode
= BUILT_IN_FLOORL
;
1648 case BUILT_IN_CEILF
:
1649 case BUILT_IN_CEILL
:
1650 fcode
= BUILT_IN_CEILL
;
1652 case BUILT_IN_TRUNC
:
1653 case BUILT_IN_TRUNCF
:
1654 case BUILT_IN_TRUNCL
:
1655 fcode
= BUILT_IN_TRUNCL
;
1657 case BUILT_IN_ROUND
:
1658 case BUILT_IN_ROUNDF
:
1659 case BUILT_IN_ROUNDL
:
1660 fcode
= BUILT_IN_ROUNDL
;
1662 case BUILT_IN_NEARBYINT
:
1663 case BUILT_IN_NEARBYINTF
:
1664 case BUILT_IN_NEARBYINTL
:
1665 fcode
= BUILT_IN_NEARBYINTL
;
1670 return implicit_built_in_decls
[fcode
];
1673 /* If errno must be maintained, expand the RTL to check if the result,
1674 TARGET, of a built-in function call, EXP, is NaN, and if so set
1678 expand_errno_check (tree exp
, rtx target
)
1680 rtx lab
= gen_label_rtx ();
1682 /* Test the result; if it is NaN, set errno=EDOM because
1683 the argument was not in the domain. */
1684 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1688 /* If this built-in doesn't throw an exception, set errno directly. */
1689 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
1691 #ifdef GEN_ERRNO_RTX
1692 rtx errno_rtx
= GEN_ERRNO_RTX
;
1695 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1697 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1703 /* We can't set errno=EDOM directly; let the library call do it.
1704 Pop the arguments right away in case the call gets deleted. */
1706 expand_call (exp
, target
, 0);
1712 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1713 Return 0 if a normal call should be emitted rather than expanding the
1714 function in-line. EXP is the expression that is a call to the builtin
1715 function; if convenient, the result should be placed in TARGET.
1716 SUBTARGET may be used as the target for computing one of EXP's operands. */
1719 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1721 optab builtin_optab
;
1723 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
1724 tree arglist
= TREE_OPERAND (exp
, 1);
1725 enum machine_mode mode
;
1726 bool errno_set
= false;
1729 if (!validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
1732 arg
= TREE_VALUE (arglist
);
1734 switch (DECL_FUNCTION_CODE (fndecl
))
1739 builtin_optab
= sin_optab
; break;
1743 builtin_optab
= cos_optab
; break;
1745 case BUILT_IN_SQRTF
:
1746 case BUILT_IN_SQRTL
:
1747 errno_set
= ! tree_expr_nonnegative_p (arg
);
1748 builtin_optab
= sqrt_optab
;
1753 errno_set
= true; builtin_optab
= exp_optab
; break;
1757 errno_set
= true; builtin_optab
= log_optab
; break;
1761 builtin_optab
= tan_optab
; break;
1763 case BUILT_IN_ATANF
:
1764 case BUILT_IN_ATANL
:
1765 builtin_optab
= atan_optab
; break;
1766 case BUILT_IN_FLOOR
:
1767 case BUILT_IN_FLOORF
:
1768 case BUILT_IN_FLOORL
:
1769 builtin_optab
= floor_optab
; break;
1771 case BUILT_IN_CEILF
:
1772 case BUILT_IN_CEILL
:
1773 builtin_optab
= ceil_optab
; break;
1774 case BUILT_IN_TRUNC
:
1775 case BUILT_IN_TRUNCF
:
1776 case BUILT_IN_TRUNCL
:
1777 builtin_optab
= trunc_optab
; break;
1778 case BUILT_IN_ROUND
:
1779 case BUILT_IN_ROUNDF
:
1780 case BUILT_IN_ROUNDL
:
1781 builtin_optab
= round_optab
; break;
1782 case BUILT_IN_NEARBYINT
:
1783 case BUILT_IN_NEARBYINTF
:
1784 case BUILT_IN_NEARBYINTL
:
1785 builtin_optab
= nearbyint_optab
; break;
1790 /* Make a suitable register to place result in. */
1791 mode
= TYPE_MODE (TREE_TYPE (exp
));
1792 target
= gen_reg_rtx (mode
);
1794 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1797 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1798 need to expand the argument again. This way, we will not perform
1799 side-effects more the once. */
1800 narg
= save_expr (arg
);
1803 arglist
= build_tree_list (NULL_TREE
, arg
);
1804 exp
= build_function_call_expr (fndecl
, arglist
);
1807 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
1812 /* Compute into TARGET.
1813 Set TARGET to wherever the result comes back. */
1814 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
1816 /* If we were unable to expand via the builtin, stop the sequence
1817 (without outputting the insns) and call to the library function
1818 with the stabilized argument list. */
1822 return expand_call (exp
, target
, target
== const0_rtx
);
1826 expand_errno_check (exp
, target
);
1828 /* Output the entire sequence. */
1829 insns
= get_insns ();
1836 /* Expand a call to the builtin binary math functions (pow and atan2).
1837 Return 0 if a normal call should be emitted rather than expanding the
1838 function in-line. EXP is the expression that is a call to the builtin
1839 function; if convenient, the result should be placed in TARGET.
1840 SUBTARGET may be used as the target for computing one of EXP's
1844 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
1846 optab builtin_optab
;
1847 rtx op0
, op1
, insns
;
1848 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
1849 tree arglist
= TREE_OPERAND (exp
, 1);
1850 tree arg0
, arg1
, temp
, narg
;
1851 enum machine_mode mode
;
1852 bool errno_set
= true;
1855 if (!validate_arglist (arglist
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
1858 arg0
= TREE_VALUE (arglist
);
1859 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
1861 switch (DECL_FUNCTION_CODE (fndecl
))
1866 builtin_optab
= pow_optab
; break;
1867 case BUILT_IN_ATAN2
:
1868 case BUILT_IN_ATAN2F
:
1869 case BUILT_IN_ATAN2L
:
1870 builtin_optab
= atan2_optab
; break;
1875 /* Make a suitable register to place result in. */
1876 mode
= TYPE_MODE (TREE_TYPE (exp
));
1877 target
= gen_reg_rtx (mode
);
1879 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1882 /* Alway stabilize the argument list. */
1883 narg
= save_expr (arg1
);
1886 temp
= build_tree_list (NULL_TREE
, narg
);
1890 temp
= TREE_CHAIN (arglist
);
1892 narg
= save_expr (arg0
);
1895 arglist
= tree_cons (NULL_TREE
, narg
, temp
);
1899 arglist
= tree_cons (NULL_TREE
, arg0
, temp
);
1902 exp
= build_function_call_expr (fndecl
, arglist
);
1904 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
1905 op1
= expand_expr (arg1
, 0, VOIDmode
, 0);
1910 /* Compute into TARGET.
1911 Set TARGET to wherever the result comes back. */
1912 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
1913 target
, 0, OPTAB_DIRECT
);
1915 /* If we were unable to expand via the builtin, stop the sequence
1916 (without outputting the insns) and call to the library function
1917 with the stabilized argument list. */
1921 return expand_call (exp
, target
, target
== const0_rtx
);
1925 expand_errno_check (exp
, target
);
1927 /* Output the entire sequence. */
1928 insns
= get_insns ();
1935 /* To evaluate powi(x,n), the floating point value x raised to the
1936 constant integer exponent n, we use a hybrid algorithm that
1937 combines the "window method" with look-up tables. For an
1938 introduction to exponentiation algorithms and "addition chains",
1939 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1940 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1941 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1942 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1944 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1945 multiplications to inline before calling the system library's pow
1946 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1947 so this default never requires calling pow, powf or powl. */
1949 #ifndef POWI_MAX_MULTS
1950 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1953 /* The size of the "optimal power tree" lookup table. All
1954 exponents less than this value are simply looked up in the
1955 powi_table below. This threshold is also used to size the
1956 cache of pseudo registers that hold intermediate results. */
1957 #define POWI_TABLE_SIZE 256
1959 /* The size, in bits of the window, used in the "window method"
1960 exponentiation algorithm. This is equivalent to a radix of
1961 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1962 #define POWI_WINDOW_SIZE 3
1964 /* The following table is an efficient representation of an
1965 "optimal power tree". For each value, i, the corresponding
1966 value, j, in the table states than an optimal evaluation
1967 sequence for calculating pow(x,i) can be found by evaluating
1968 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1969 100 integers is given in Knuth's "Seminumerical algorithms". */
1971 static const unsigned char powi_table
[POWI_TABLE_SIZE
] =
1973 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1974 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1975 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1976 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1977 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1978 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1979 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1980 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1981 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1982 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1983 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1984 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1985 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1986 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
1987 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
1988 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
1989 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
1990 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
1991 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
1992 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
1993 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
1994 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
1995 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
1996 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
1997 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
1998 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
1999 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2000 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2001 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2002 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2003 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2004 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2008 /* Return the number of multiplications required to calculate
2009 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2010 subroutine of powi_cost. CACHE is an array indicating
2011 which exponents have already been calculated. */
2014 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2016 /* If we've already calculated this exponent, then this evaluation
2017 doesn't require any additional multiplications. */
2022 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2023 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2026 /* Return the number of multiplications required to calculate
2027 powi(x,n) for an arbitrary x, given the exponent N. This
2028 function needs to be kept in sync with expand_powi below. */
2031 powi_cost (HOST_WIDE_INT n
)
2033 bool cache
[POWI_TABLE_SIZE
];
2034 unsigned HOST_WIDE_INT digit
;
2035 unsigned HOST_WIDE_INT val
;
2041 /* Ignore the reciprocal when calculating the cost. */
2042 val
= (n
< 0) ? -n
: n
;
2044 /* Initialize the exponent cache. */
2045 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2050 while (val
>= POWI_TABLE_SIZE
)
2054 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2055 result
+= powi_lookup_cost (digit
, cache
)
2056 + POWI_WINDOW_SIZE
+ 1;
2057 val
>>= POWI_WINDOW_SIZE
;
2066 return result
+ powi_lookup_cost (val
, cache
);
2069 /* Recursive subroutine of expand_powi. This function takes the array,
2070 CACHE, of already calculated exponents and an exponent N and returns
2071 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2074 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2076 unsigned HOST_WIDE_INT digit
;
2080 if (n
< POWI_TABLE_SIZE
)
2085 target
= gen_reg_rtx (mode
);
2088 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2089 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
2093 target
= gen_reg_rtx (mode
);
2094 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2095 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2096 op1
= expand_powi_1 (mode
, digit
, cache
);
2100 target
= gen_reg_rtx (mode
);
2101 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2105 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2106 if (result
!= target
)
2107 emit_move_insn (target
, result
);
2111 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2112 floating point operand in mode MODE, and N is the exponent. This
2113 function needs to be kept in sync with powi_cost above. */
2116 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2118 unsigned HOST_WIDE_INT val
;
2119 rtx cache
[POWI_TABLE_SIZE
];
2123 return CONST1_RTX (mode
);
2125 val
= (n
< 0) ? -n
: n
;
2127 memset (cache
, 0, sizeof(cache
));
2130 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2132 /* If the original exponent was negative, reciprocate the result. */
2134 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2135 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2140 /* Expand a call to the pow built-in mathematical function. Return 0 if
2141 a normal call should be emitted rather than expanding the function
2142 in-line. EXP is the expression that is a call to the builtin
2143 function; if convenient, the result should be placed in TARGET. */
2146 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2148 tree arglist
= TREE_OPERAND (exp
, 1);
2151 if (! validate_arglist (arglist
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2154 arg0
= TREE_VALUE (arglist
);
2155 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
2157 if (flag_unsafe_math_optimizations
2158 && ! flag_errno_math
2160 && TREE_CODE (arg1
) == REAL_CST
2161 && ! TREE_CONSTANT_OVERFLOW (arg1
))
2163 REAL_VALUE_TYPE cint
;
2167 c
= TREE_REAL_CST (arg1
);
2168 n
= real_to_integer (&c
);
2169 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2170 if (real_identical (&c
, &cint
)
2171 && powi_cost (n
) <= POWI_MAX_MULTS
)
2173 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
2174 rtx op
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
2175 op
= force_reg (mode
, op
);
2176 return expand_powi (op
, mode
, n
);
2179 return expand_builtin_mathfn_2 (exp
, target
, NULL_RTX
);
2182 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2183 if we failed the caller should emit a normal call, otherwise
2184 try to get the result in TARGET, if convenient. */
2187 expand_builtin_strlen (tree arglist
, rtx target
,
2188 enum machine_mode target_mode
)
2190 if (!validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
2195 tree len
, src
= TREE_VALUE (arglist
);
2196 rtx result
, src_reg
, char_rtx
, before_strlen
;
2197 enum machine_mode insn_mode
= target_mode
, char_mode
;
2198 enum insn_code icode
= CODE_FOR_nothing
;
2201 /* If the length can be computed at compile-time, return it. */
2202 len
= c_strlen (src
, 0);
2204 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2206 /* If the length can be computed at compile-time and is constant
2207 integer, but there are side-effects in src, evaluate
2208 src for side-effects, then return len.
2209 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2210 can be optimized into: i++; x = 3; */
2211 len
= c_strlen (src
, 1);
2212 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2214 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2215 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2218 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
2220 /* If SRC is not a pointer type, don't do this operation inline. */
2224 /* Bail out if we can't compute strlen in the right mode. */
2225 while (insn_mode
!= VOIDmode
)
2227 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
2228 if (icode
!= CODE_FOR_nothing
)
2231 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
2233 if (insn_mode
== VOIDmode
)
2236 /* Make a place to write the result of the instruction. */
2239 && GET_CODE (result
) == REG
2240 && GET_MODE (result
) == insn_mode
2241 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
2242 result
= gen_reg_rtx (insn_mode
);
2244 /* Make a place to hold the source address. We will not expand
2245 the actual source until we are sure that the expansion will
2246 not fail -- there are trees that cannot be expanded twice. */
2247 src_reg
= gen_reg_rtx (Pmode
);
2249 /* Mark the beginning of the strlen sequence so we can emit the
2250 source operand later. */
2251 before_strlen
= get_last_insn ();
2253 char_rtx
= const0_rtx
;
2254 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
2255 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
2257 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
2259 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
2260 char_rtx
, GEN_INT (align
));
2265 /* Now that we are assured of success, expand the source. */
2267 pat
= memory_address (BLKmode
,
2268 expand_expr (src
, src_reg
, ptr_mode
, EXPAND_SUM
));
2270 emit_move_insn (src_reg
, pat
);
2275 emit_insn_after (pat
, before_strlen
);
2277 emit_insn_before (pat
, get_insns ());
2279 /* Return the value in the proper mode for this function. */
2280 if (GET_MODE (result
) == target_mode
)
2282 else if (target
!= 0)
2283 convert_move (target
, result
, 0);
2285 target
= convert_to_mode (target_mode
, result
, 0);
2291 /* Expand a call to the strstr builtin. Return 0 if we failed the
2292 caller should emit a normal call, otherwise try to get the result
2293 in TARGET, if convenient (and in mode MODE if that's convenient). */
2296 expand_builtin_strstr (tree arglist
, rtx target
, enum machine_mode mode
)
2298 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2302 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2304 const char *p1
, *p2
;
2313 const char *r
= strstr (p1
, p2
);
2318 /* Return an offset into the constant string argument. */
2319 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2320 s1
, ssize_int (r
- p1
))),
2321 target
, mode
, EXPAND_NORMAL
);
2325 return expand_expr (s1
, target
, mode
, EXPAND_NORMAL
);
2330 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
2334 /* New argument list transforming strstr(s1, s2) to
2335 strchr(s1, s2[0]). */
2337 build_tree_list (NULL_TREE
, build_int_2 (p2
[0], 0));
2338 arglist
= tree_cons (NULL_TREE
, s1
, arglist
);
2339 return expand_expr (build_function_call_expr (fn
, arglist
),
2340 target
, mode
, EXPAND_NORMAL
);
2344 /* Expand a call to the strchr builtin. Return 0 if we failed the
2345 caller should emit a normal call, otherwise try to get the result
2346 in TARGET, if convenient (and in mode MODE if that's convenient). */
2349 expand_builtin_strchr (tree arglist
, rtx target
, enum machine_mode mode
)
2351 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2355 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2358 if (TREE_CODE (s2
) != INTEGER_CST
)
2367 if (target_char_cast (s2
, &c
))
2375 /* Return an offset into the constant string argument. */
2376 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2377 s1
, ssize_int (r
- p1
))),
2378 target
, mode
, EXPAND_NORMAL
);
2381 /* FIXME: Should use here strchrM optab so that ports can optimize
2387 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2388 caller should emit a normal call, otherwise try to get the result
2389 in TARGET, if convenient (and in mode MODE if that's convenient). */
2392 expand_builtin_strrchr (tree arglist
, rtx target
, enum machine_mode mode
)
2394 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2398 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2402 if (TREE_CODE (s2
) != INTEGER_CST
)
2411 if (target_char_cast (s2
, &c
))
2414 r
= strrchr (p1
, c
);
2419 /* Return an offset into the constant string argument. */
2420 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2421 s1
, ssize_int (r
- p1
))),
2422 target
, mode
, EXPAND_NORMAL
);
2425 if (! integer_zerop (s2
))
2428 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
2432 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2433 return expand_expr (build_function_call_expr (fn
, arglist
),
2434 target
, mode
, EXPAND_NORMAL
);
2438 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2439 caller should emit a normal call, otherwise try to get the result
2440 in TARGET, if convenient (and in mode MODE if that's convenient). */
2443 expand_builtin_strpbrk (tree arglist
, rtx target
, enum machine_mode mode
)
2445 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2449 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2451 const char *p1
, *p2
;
2460 const char *r
= strpbrk (p1
, p2
);
2465 /* Return an offset into the constant string argument. */
2466 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2467 s1
, ssize_int (r
- p1
))),
2468 target
, mode
, EXPAND_NORMAL
);
2473 /* strpbrk(x, "") == NULL.
2474 Evaluate and ignore the arguments in case they had
2476 expand_expr (s1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2481 return 0; /* Really call strpbrk. */
2483 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
2487 /* New argument list transforming strpbrk(s1, s2) to
2488 strchr(s1, s2[0]). */
2490 build_tree_list (NULL_TREE
, build_int_2 (p2
[0], 0));
2491 arglist
= tree_cons (NULL_TREE
, s1
, arglist
);
2492 return expand_expr (build_function_call_expr (fn
, arglist
),
2493 target
, mode
, EXPAND_NORMAL
);
2497 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2498 bytes from constant string DATA + OFFSET and return it as target
2502 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
2503 enum machine_mode mode
)
2505 const char *str
= (const char *) data
;
2508 || ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
2509 > strlen (str
) + 1))
2510 abort (); /* Attempt to read past the end of constant string. */
2512 return c_readstr (str
+ offset
, mode
);
2515 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2516 Return 0 if we failed, the caller should emit a normal call,
2517 otherwise try to get the result in TARGET, if convenient (and in
2518 mode MODE if that's convenient). */
2520 expand_builtin_memcpy (tree arglist
, rtx target
, enum machine_mode mode
)
2522 if (!validate_arglist (arglist
,
2523 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2527 tree dest
= TREE_VALUE (arglist
);
2528 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2529 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2530 const char *src_str
;
2531 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2532 unsigned int dest_align
2533 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2534 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
2536 /* If DEST is not a pointer type, call the normal function. */
2537 if (dest_align
== 0)
2540 /* If the LEN parameter is zero, return DEST. */
2541 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
2543 /* Evaluate and ignore SRC in case it has side-effects. */
2544 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2545 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2548 /* If either SRC is not a pointer type, don't do this
2549 operation in-line. */
2553 dest_mem
= get_memory_rtx (dest
);
2554 set_mem_align (dest_mem
, dest_align
);
2555 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2556 src_str
= c_getstr (src
);
2558 /* If SRC is a string constant and block move would be done
2559 by pieces, we can avoid loading the string from memory
2560 and only stored the computed constants. */
2562 && GET_CODE (len_rtx
) == CONST_INT
2563 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2564 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2565 (void *) src_str
, dest_align
))
2567 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2568 builtin_memcpy_read_str
,
2569 (void *) src_str
, dest_align
, 0);
2570 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2571 #ifdef POINTERS_EXTEND_UNSIGNED
2572 if (GET_MODE (dest_mem
) != ptr_mode
)
2573 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2578 src_mem
= get_memory_rtx (src
);
2579 set_mem_align (src_mem
, src_align
);
2581 /* Copy word part most expediently. */
2582 dest_addr
= emit_block_move (dest_mem
, src_mem
, len_rtx
,
2587 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2588 #ifdef POINTERS_EXTEND_UNSIGNED
2589 if (GET_MODE (dest_addr
) != ptr_mode
)
2590 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
2597 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2598 Return 0 if we failed the caller should emit a normal call,
2599 otherwise try to get the result in TARGET, if convenient (and in
2600 mode MODE if that's convenient). If ENDP is 0 return the
2601 destination pointer, if ENDP is 1 return the end pointer ala
2602 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2606 expand_builtin_mempcpy (tree arglist
, rtx target
, enum machine_mode mode
,
2609 if (!validate_arglist (arglist
,
2610 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2612 /* If return value is ignored, transform mempcpy into memcpy. */
2613 else if (target
== const0_rtx
)
2615 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2620 return expand_expr (build_function_call_expr (fn
, arglist
),
2621 target
, mode
, EXPAND_NORMAL
);
2625 tree dest
= TREE_VALUE (arglist
);
2626 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2627 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2628 const char *src_str
;
2629 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2630 unsigned int dest_align
2631 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2632 rtx dest_mem
, src_mem
, len_rtx
;
2634 /* If DEST is not a pointer type or LEN is not constant,
2635 call the normal function. */
2636 if (dest_align
== 0 || !host_integerp (len
, 1))
2639 /* If the LEN parameter is zero, return DEST. */
2640 if (tree_low_cst (len
, 1) == 0)
2642 /* Evaluate and ignore SRC in case it has side-effects. */
2643 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2644 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2647 /* If either SRC is not a pointer type, don't do this
2648 operation in-line. */
2652 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2653 src_str
= c_getstr (src
);
2655 /* If SRC is a string constant and block move would be done
2656 by pieces, we can avoid loading the string from memory
2657 and only stored the computed constants. */
2659 && GET_CODE (len_rtx
) == CONST_INT
2660 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2661 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2662 (void *) src_str
, dest_align
))
2664 dest_mem
= get_memory_rtx (dest
);
2665 set_mem_align (dest_mem
, dest_align
);
2666 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2667 builtin_memcpy_read_str
,
2668 (void *) src_str
, dest_align
, endp
);
2669 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2670 #ifdef POINTERS_EXTEND_UNSIGNED
2671 if (GET_MODE (dest_mem
) != ptr_mode
)
2672 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2677 if (GET_CODE (len_rtx
) == CONST_INT
2678 && can_move_by_pieces (INTVAL (len_rtx
),
2679 MIN (dest_align
, src_align
)))
2681 dest_mem
= get_memory_rtx (dest
);
2682 set_mem_align (dest_mem
, dest_align
);
2683 src_mem
= get_memory_rtx (src
);
2684 set_mem_align (src_mem
, src_align
);
2685 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
2686 MIN (dest_align
, src_align
), endp
);
2687 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2688 #ifdef POINTERS_EXTEND_UNSIGNED
2689 if (GET_MODE (dest_mem
) != ptr_mode
)
2690 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2699 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2700 if we failed the caller should emit a normal call. */
2703 expand_builtin_memmove (tree arglist
, rtx target
, enum machine_mode mode
)
2705 if (!validate_arglist (arglist
,
2706 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2710 tree dest
= TREE_VALUE (arglist
);
2711 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2712 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2714 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2715 unsigned int dest_align
2716 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2718 /* If DEST is not a pointer type, call the normal function. */
2719 if (dest_align
== 0)
2722 /* If the LEN parameter is zero, return DEST. */
2723 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
2725 /* Evaluate and ignore SRC in case it has side-effects. */
2726 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2727 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2730 /* If either SRC is not a pointer type, don't do this
2731 operation in-line. */
2735 /* If src is categorized for a readonly section we can use
2737 if (readonly_data_expr (src
))
2739 tree
const fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2742 return expand_expr (build_function_call_expr (fn
, arglist
),
2743 target
, mode
, EXPAND_NORMAL
);
2746 /* Otherwise, call the normal function. */
2751 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2752 if we failed the caller should emit a normal call. */
2755 expand_builtin_bcopy (tree arglist
)
2757 tree src
, dest
, size
, newarglist
;
2759 if (!validate_arglist (arglist
,
2760 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2763 src
= TREE_VALUE (arglist
);
2764 dest
= TREE_VALUE (TREE_CHAIN (arglist
));
2765 size
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2767 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2768 memmove(ptr y, ptr x, size_t z). This is done this way
2769 so that if it isn't expanded inline, we fallback to
2770 calling bcopy instead of memmove. */
2772 newarglist
= build_tree_list (NULL_TREE
, convert (sizetype
, size
));
2773 newarglist
= tree_cons (NULL_TREE
, src
, newarglist
);
2774 newarglist
= tree_cons (NULL_TREE
, dest
, newarglist
);
2776 return expand_builtin_memmove (newarglist
, const0_rtx
, VOIDmode
);
2779 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2780 if we failed the caller should emit a normal call, otherwise try to get
2781 the result in TARGET, if convenient (and in mode MODE if that's
2785 expand_builtin_strcpy (tree arglist
, rtx target
, enum machine_mode mode
)
2787 tree fn
, len
, src
, dst
;
2789 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2792 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2796 src
= TREE_VALUE (TREE_CHAIN (arglist
));
2797 len
= c_strlen (src
, 1);
2798 if (len
== 0 || TREE_SIDE_EFFECTS (len
))
2801 dst
= TREE_VALUE (arglist
);
2802 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
2803 arglist
= build_tree_list (NULL_TREE
, len
);
2804 arglist
= tree_cons (NULL_TREE
, src
, arglist
);
2805 arglist
= tree_cons (NULL_TREE
, dst
, arglist
);
2806 return expand_expr (build_function_call_expr (fn
, arglist
),
2807 target
, mode
, EXPAND_NORMAL
);
2810 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2811 Return 0 if we failed the caller should emit a normal call,
2812 otherwise try to get the result in TARGET, if convenient (and in
2813 mode MODE if that's convenient). */
2816 expand_builtin_stpcpy (tree arglist
, rtx target
, enum machine_mode mode
)
2818 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2824 /* If return value is ignored, transform stpcpy into strcpy. */
2825 if (target
== const0_rtx
)
2827 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
2831 return expand_expr (build_function_call_expr (fn
, arglist
),
2832 target
, mode
, EXPAND_NORMAL
);
2835 /* Ensure we get an actual string whose length can be evaluated at
2836 compile-time, not an expression containing a string. This is
2837 because the latter will potentially produce pessimized code
2838 when used to produce the return value. */
2839 src
= TREE_VALUE (TREE_CHAIN (arglist
));
2840 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
2843 dst
= TREE_VALUE (arglist
);
2844 len
= fold (size_binop (PLUS_EXPR
, len
, ssize_int (1)));
2845 arglist
= build_tree_list (NULL_TREE
, len
);
2846 arglist
= tree_cons (NULL_TREE
, src
, arglist
);
2847 arglist
= tree_cons (NULL_TREE
, dst
, arglist
);
2848 return expand_builtin_mempcpy (arglist
, target
, mode
, /*endp=*/2);
2852 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2853 bytes from constant string DATA + OFFSET and return it as target
2857 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
2858 enum machine_mode mode
)
2860 const char *str
= (const char *) data
;
2862 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
2865 return c_readstr (str
+ offset
, mode
);
2868 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2869 if we failed the caller should emit a normal call. */
2872 expand_builtin_strncpy (tree arglist
, rtx target
, enum machine_mode mode
)
2874 if (!validate_arglist (arglist
,
2875 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2879 tree slen
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)), 1);
2880 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2883 /* We must be passed a constant len parameter. */
2884 if (TREE_CODE (len
) != INTEGER_CST
)
2887 /* If the len parameter is zero, return the dst parameter. */
2888 if (integer_zerop (len
))
2890 /* Evaluate and ignore the src argument in case it has
2892 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)), const0_rtx
,
2893 VOIDmode
, EXPAND_NORMAL
);
2894 /* Return the dst parameter. */
2895 return expand_expr (TREE_VALUE (arglist
), target
, mode
,
2899 /* Now, we must be passed a constant src ptr parameter. */
2900 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
2903 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
2905 /* We're required to pad with trailing zeros if the requested
2906 len is greater than strlen(s2)+1. In that case try to
2907 use store_by_pieces, if it fails, punt. */
2908 if (tree_int_cst_lt (slen
, len
))
2910 tree dest
= TREE_VALUE (arglist
);
2911 unsigned int dest_align
2912 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2913 const char *p
= c_getstr (TREE_VALUE (TREE_CHAIN (arglist
)));
2916 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
2917 || !can_store_by_pieces (tree_low_cst (len
, 1),
2918 builtin_strncpy_read_str
,
2919 (void *) p
, dest_align
))
2922 dest_mem
= get_memory_rtx (dest
);
2923 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
2924 builtin_strncpy_read_str
,
2925 (void *) p
, dest_align
, 0);
2926 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2927 #ifdef POINTERS_EXTEND_UNSIGNED
2928 if (GET_MODE (dest_mem
) != ptr_mode
)
2929 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2934 /* OK transform into builtin memcpy. */
2935 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2938 return expand_expr (build_function_call_expr (fn
, arglist
),
2939 target
, mode
, EXPAND_NORMAL
);
2943 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2944 bytes from constant string DATA + OFFSET and return it as target
2948 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2949 enum machine_mode mode
)
2951 const char *c
= (const char *) data
;
2952 char *p
= alloca (GET_MODE_SIZE (mode
));
2954 memset (p
, *c
, GET_MODE_SIZE (mode
));
2956 return c_readstr (p
, mode
);
2959 /* Callback routine for store_by_pieces. Return the RTL of a register
2960 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2961 char value given in the RTL register data. For example, if mode is
2962 4 bytes wide, return the RTL for 0x01010101*data. */
2965 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2966 enum machine_mode mode
)
2972 size
= GET_MODE_SIZE (mode
);
2977 memset (p
, 1, size
);
2978 coeff
= c_readstr (p
, mode
);
2980 target
= convert_to_mode (mode
, (rtx
) data
, 1);
2981 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
2982 return force_reg (mode
, target
);
2985 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2986 if we failed the caller should emit a normal call, otherwise try to get
2987 the result in TARGET, if convenient (and in mode MODE if that's
2991 expand_builtin_memset (tree arglist
, rtx target
, enum machine_mode mode
)
2993 if (!validate_arglist (arglist
,
2994 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2998 tree dest
= TREE_VALUE (arglist
);
2999 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
3000 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3003 unsigned int dest_align
3004 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3005 rtx dest_mem
, dest_addr
, len_rtx
;
3007 /* If DEST is not a pointer type, don't do this
3008 operation in-line. */
3009 if (dest_align
== 0)
3012 /* If the LEN parameter is zero, return DEST. */
3013 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
3015 /* Evaluate and ignore VAL in case it has side-effects. */
3016 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3017 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3020 if (TREE_CODE (val
) != INTEGER_CST
)
3024 if (!host_integerp (len
, 1))
3027 if (optimize_size
&& tree_low_cst (len
, 1) > 1)
3030 /* Assume that we can memset by pieces if we can store the
3031 * the coefficients by pieces (in the required modes).
3032 * We can't pass builtin_memset_gen_str as that emits RTL. */
3034 if (!can_store_by_pieces (tree_low_cst (len
, 1),
3035 builtin_memset_read_str
,
3039 val
= fold (build1 (CONVERT_EXPR
, unsigned_char_type_node
, val
));
3040 val_rtx
= expand_expr (val
, NULL_RTX
, VOIDmode
, 0);
3041 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3043 dest_mem
= get_memory_rtx (dest
);
3044 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3045 builtin_memset_gen_str
,
3046 val_rtx
, dest_align
, 0);
3047 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3048 #ifdef POINTERS_EXTEND_UNSIGNED
3049 if (GET_MODE (dest_mem
) != ptr_mode
)
3050 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3055 if (target_char_cast (val
, &c
))
3060 if (!host_integerp (len
, 1))
3062 if (!can_store_by_pieces (tree_low_cst (len
, 1),
3063 builtin_memset_read_str
, &c
,
3067 dest_mem
= get_memory_rtx (dest
);
3068 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3069 builtin_memset_read_str
,
3071 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3072 #ifdef POINTERS_EXTEND_UNSIGNED
3073 if (GET_MODE (dest_mem
) != ptr_mode
)
3074 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3079 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3081 dest_mem
= get_memory_rtx (dest
);
3082 set_mem_align (dest_mem
, dest_align
);
3083 dest_addr
= clear_storage (dest_mem
, len_rtx
);
3087 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3088 #ifdef POINTERS_EXTEND_UNSIGNED
3089 if (GET_MODE (dest_addr
) != ptr_mode
)
3090 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3098 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3099 if we failed the caller should emit a normal call. */
3102 expand_builtin_bzero (tree arglist
)
3104 tree dest
, size
, newarglist
;
3106 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3109 dest
= TREE_VALUE (arglist
);
3110 size
= TREE_VALUE (TREE_CHAIN (arglist
));
3112 /* New argument list transforming bzero(ptr x, int y) to
3113 memset(ptr x, int 0, size_t y). This is done this way
3114 so that if it isn't expanded inline, we fallback to
3115 calling bzero instead of memset. */
3117 newarglist
= build_tree_list (NULL_TREE
, convert (sizetype
, size
));
3118 newarglist
= tree_cons (NULL_TREE
, integer_zero_node
, newarglist
);
3119 newarglist
= tree_cons (NULL_TREE
, dest
, newarglist
);
3121 return expand_builtin_memset (newarglist
, const0_rtx
, VOIDmode
);
3124 /* Expand expression EXP, which is a call to the memcmp built-in function.
3125 ARGLIST is the argument list for this call. Return 0 if we failed and the
3126 caller should emit a normal call, otherwise try to get the result in
3127 TARGET, if convenient (and in mode MODE, if that's convenient). */
3130 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED
, tree arglist
, rtx target
,
3131 enum machine_mode mode
)
3133 tree arg1
, arg2
, len
;
3134 const char *p1
, *p2
;
3136 if (!validate_arglist (arglist
,
3137 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3140 arg1
= TREE_VALUE (arglist
);
3141 arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3142 len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3144 /* If the len parameter is zero, return zero. */
3145 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
3147 /* Evaluate and ignore arg1 and arg2 in case they have
3149 expand_expr (arg1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3150 expand_expr (arg2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3154 p1
= c_getstr (arg1
);
3155 p2
= c_getstr (arg2
);
3157 /* If all arguments are constant, and the value of len is not greater
3158 than the lengths of arg1 and arg2, evaluate at compile-time. */
3159 if (host_integerp (len
, 1) && p1
&& p2
3160 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
3161 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
3163 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
3165 return (r
< 0 ? constm1_rtx
: (r
> 0 ? const1_rtx
: const0_rtx
));
3168 /* If len parameter is one, return an expression corresponding to
3169 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3170 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
3172 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
3173 tree cst_uchar_ptr_node
= build_pointer_type (cst_uchar_node
);
3175 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3176 build1 (INDIRECT_REF
, cst_uchar_node
,
3177 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg1
))));
3179 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3180 build1 (INDIRECT_REF
, cst_uchar_node
,
3181 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg2
))));
3182 tree result
= fold (build (MINUS_EXPR
, integer_type_node
, ind1
, ind2
));
3183 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3186 #ifdef HAVE_cmpstrsi
3188 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3193 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3195 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3196 enum machine_mode insn_mode
3197 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3199 /* If we don't have POINTER_TYPE, call the function. */
3200 if (arg1_align
== 0 || arg2_align
== 0)
3203 /* Make a place to write the result of the instruction. */
3206 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
3207 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3208 result
= gen_reg_rtx (insn_mode
);
3210 arg1_rtx
= get_memory_rtx (arg1
);
3211 arg2_rtx
= get_memory_rtx (arg2
);
3212 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3216 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3217 GEN_INT (MIN (arg1_align
, arg2_align
)));
3222 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
3223 TYPE_MODE (integer_type_node
), 3,
3224 XEXP (arg1_rtx
, 0), Pmode
,
3225 XEXP (arg2_rtx
, 0), Pmode
,
3226 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3227 TREE_UNSIGNED (sizetype
)),
3228 TYPE_MODE (sizetype
));
3230 /* Return the value in the proper mode for this function. */
3231 mode
= TYPE_MODE (TREE_TYPE (exp
));
3232 if (GET_MODE (result
) == mode
)
3234 else if (target
!= 0)
3236 convert_move (target
, result
, 0);
3240 return convert_to_mode (mode
, result
, 0);
3247 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3248 if we failed the caller should emit a normal call, otherwise try to get
3249 the result in TARGET, if convenient. */
3252 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
3254 tree arglist
= TREE_OPERAND (exp
, 1);
3256 const char *p1
, *p2
;
3258 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3261 arg1
= TREE_VALUE (arglist
);
3262 arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3264 p1
= c_getstr (arg1
);
3265 p2
= c_getstr (arg2
);
3269 const int i
= strcmp (p1
, p2
);
3270 return (i
< 0 ? constm1_rtx
: (i
> 0 ? const1_rtx
: const0_rtx
));
3273 /* If either arg is "", return an expression corresponding to
3274 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3275 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
3277 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
3278 tree cst_uchar_ptr_node
= build_pointer_type (cst_uchar_node
);
3280 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3281 build1 (INDIRECT_REF
, cst_uchar_node
,
3282 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg1
))));
3284 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3285 build1 (INDIRECT_REF
, cst_uchar_node
,
3286 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg2
))));
3287 tree result
= fold (build (MINUS_EXPR
, integer_type_node
, ind1
, ind2
));
3288 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3291 #ifdef HAVE_cmpstrsi
3294 tree len
, len1
, len2
;
3295 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3299 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3301 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3302 enum machine_mode insn_mode
3303 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3305 len1
= c_strlen (arg1
, 1);
3306 len2
= c_strlen (arg2
, 1);
3309 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3311 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3313 /* If we don't have a constant length for the first, use the length
3314 of the second, if we know it. We don't require a constant for
3315 this case; some cost analysis could be done if both are available
3316 but neither is constant. For now, assume they're equally cheap,
3317 unless one has side effects. If both strings have constant lengths,
3324 else if (TREE_SIDE_EFFECTS (len1
))
3326 else if (TREE_SIDE_EFFECTS (len2
))
3328 else if (TREE_CODE (len1
) != INTEGER_CST
)
3330 else if (TREE_CODE (len2
) != INTEGER_CST
)
3332 else if (tree_int_cst_lt (len1
, len2
))
3337 /* If both arguments have side effects, we cannot optimize. */
3338 if (!len
|| TREE_SIDE_EFFECTS (len
))
3341 /* If we don't have POINTER_TYPE, call the function. */
3342 if (arg1_align
== 0 || arg2_align
== 0)
3345 /* Make a place to write the result of the instruction. */
3348 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
3349 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3350 result
= gen_reg_rtx (insn_mode
);
3352 arg1_rtx
= get_memory_rtx (arg1
);
3353 arg2_rtx
= get_memory_rtx (arg2
);
3354 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3355 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3356 GEN_INT (MIN (arg1_align
, arg2_align
)));
3362 /* Return the value in the proper mode for this function. */
3363 mode
= TYPE_MODE (TREE_TYPE (exp
));
3364 if (GET_MODE (result
) == mode
)
3367 return convert_to_mode (mode
, result
, 0);
3368 convert_move (target
, result
, 0);
3375 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3376 if we failed the caller should emit a normal call, otherwise try to get
3377 the result in TARGET, if convenient. */
3380 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
3382 tree arglist
= TREE_OPERAND (exp
, 1);
3383 tree arg1
, arg2
, arg3
;
3384 const char *p1
, *p2
;
3386 if (!validate_arglist (arglist
,
3387 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3390 arg1
= TREE_VALUE (arglist
);
3391 arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3392 arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3394 /* If the len parameter is zero, return zero. */
3395 if (host_integerp (arg3
, 1) && tree_low_cst (arg3
, 1) == 0)
3397 /* Evaluate and ignore arg1 and arg2 in case they have
3399 expand_expr (arg1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3400 expand_expr (arg2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3404 p1
= c_getstr (arg1
);
3405 p2
= c_getstr (arg2
);
3407 /* If all arguments are constant, evaluate at compile-time. */
3408 if (host_integerp (arg3
, 1) && p1
&& p2
)
3410 const int r
= strncmp (p1
, p2
, tree_low_cst (arg3
, 1));
3411 return (r
< 0 ? constm1_rtx
: (r
> 0 ? const1_rtx
: const0_rtx
));
3414 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3415 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3416 if (host_integerp (arg3
, 1)
3417 && (tree_low_cst (arg3
, 1) == 1
3418 || (tree_low_cst (arg3
, 1) > 1
3419 && ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0')))))
3421 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
3422 tree cst_uchar_ptr_node
= build_pointer_type (cst_uchar_node
);
3424 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3425 build1 (INDIRECT_REF
, cst_uchar_node
,
3426 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg1
))));
3428 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3429 build1 (INDIRECT_REF
, cst_uchar_node
,
3430 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg2
))));
3431 tree result
= fold (build (MINUS_EXPR
, integer_type_node
, ind1
, ind2
));
3432 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3435 /* If c_strlen can determine an expression for one of the string
3436 lengths, and it doesn't have side effects, then emit cmpstrsi
3437 using length MIN(strlen(string)+1, arg3). */
3438 #ifdef HAVE_cmpstrsi
3441 tree len
, len1
, len2
;
3442 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3446 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3448 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3449 enum machine_mode insn_mode
3450 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3452 len1
= c_strlen (arg1
, 1);
3453 len2
= c_strlen (arg2
, 1);
3456 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3458 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3460 /* If we don't have a constant length for the first, use the length
3461 of the second, if we know it. We don't require a constant for
3462 this case; some cost analysis could be done if both are available
3463 but neither is constant. For now, assume they're equally cheap,
3464 unless one has side effects. If both strings have constant lengths,
3471 else if (TREE_SIDE_EFFECTS (len1
))
3473 else if (TREE_SIDE_EFFECTS (len2
))
3475 else if (TREE_CODE (len1
) != INTEGER_CST
)
3477 else if (TREE_CODE (len2
) != INTEGER_CST
)
3479 else if (tree_int_cst_lt (len1
, len2
))
3484 /* If both arguments have side effects, we cannot optimize. */
3485 if (!len
|| TREE_SIDE_EFFECTS (len
))
3488 /* The actual new length parameter is MIN(len,arg3). */
3489 len
= fold (build (MIN_EXPR
, TREE_TYPE (len
), len
, arg3
));
3491 /* If we don't have POINTER_TYPE, call the function. */
3492 if (arg1_align
== 0 || arg2_align
== 0)
3495 /* Make a place to write the result of the instruction. */
3498 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
3499 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3500 result
= gen_reg_rtx (insn_mode
);
3502 arg1_rtx
= get_memory_rtx (arg1
);
3503 arg2_rtx
= get_memory_rtx (arg2
);
3504 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3505 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3506 GEN_INT (MIN (arg1_align
, arg2_align
)));
3512 /* Return the value in the proper mode for this function. */
3513 mode
= TYPE_MODE (TREE_TYPE (exp
));
3514 if (GET_MODE (result
) == mode
)
3517 return convert_to_mode (mode
, result
, 0);
3518 convert_move (target
, result
, 0);
3525 /* Expand expression EXP, which is a call to the strcat builtin.
3526 Return 0 if we failed the caller should emit a normal call,
3527 otherwise try to get the result in TARGET, if convenient. */
3530 expand_builtin_strcat (tree arglist
, rtx target
, enum machine_mode mode
)
3532 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3536 tree dst
= TREE_VALUE (arglist
),
3537 src
= TREE_VALUE (TREE_CHAIN (arglist
));
3538 const char *p
= c_getstr (src
);
3540 /* If the string length is zero, return the dst parameter. */
3541 if (p
&& *p
== '\0')
3542 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
3548 /* Expand expression EXP, which is a call to the strncat builtin.
3549 Return 0 if we failed the caller should emit a normal call,
3550 otherwise try to get the result in TARGET, if convenient. */
3553 expand_builtin_strncat (tree arglist
, rtx target
, enum machine_mode mode
)
3555 if (!validate_arglist (arglist
,
3556 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3560 tree dst
= TREE_VALUE (arglist
),
3561 src
= TREE_VALUE (TREE_CHAIN (arglist
)),
3562 len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3563 const char *p
= c_getstr (src
);
3565 /* If the requested length is zero, or the src parameter string
3566 length is zero, return the dst parameter. */
3567 if (integer_zerop (len
) || (p
&& *p
== '\0'))
3569 /* Evaluate and ignore the src and len parameters in case
3570 they have side-effects. */
3571 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3572 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3573 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
3576 /* If the requested len is greater than or equal to the string
3577 length, call strcat. */
3578 if (TREE_CODE (len
) == INTEGER_CST
&& p
3579 && compare_tree_int (len
, strlen (p
)) >= 0)
3582 = tree_cons (NULL_TREE
, dst
, build_tree_list (NULL_TREE
, src
));
3583 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
3585 /* If the replacement _DECL isn't initialized, don't do the
3590 return expand_expr (build_function_call_expr (fn
, newarglist
),
3591 target
, mode
, EXPAND_NORMAL
);
3597 /* Expand expression EXP, which is a call to the strspn builtin.
3598 Return 0 if we failed the caller should emit a normal call,
3599 otherwise try to get the result in TARGET, if convenient. */
3602 expand_builtin_strspn (tree arglist
, rtx target
, enum machine_mode mode
)
3604 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3608 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
3609 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
3611 /* If both arguments are constants, evaluate at compile-time. */
3614 const size_t r
= strspn (p1
, p2
);
3615 return expand_expr (size_int (r
), target
, mode
, EXPAND_NORMAL
);
3618 /* If either argument is "", return 0. */
3619 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
3621 /* Evaluate and ignore both arguments in case either one has
3623 expand_expr (s1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3624 expand_expr (s2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3631 /* Expand expression EXP, which is a call to the strcspn builtin.
3632 Return 0 if we failed the caller should emit a normal call,
3633 otherwise try to get the result in TARGET, if convenient. */
3636 expand_builtin_strcspn (tree arglist
, rtx target
, enum machine_mode mode
)
3638 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3642 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
3643 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
3645 /* If both arguments are constants, evaluate at compile-time. */
3648 const size_t r
= strcspn (p1
, p2
);
3649 return expand_expr (size_int (r
), target
, mode
, EXPAND_NORMAL
);
3652 /* If the first argument is "", return 0. */
3653 if (p1
&& *p1
== '\0')
3655 /* Evaluate and ignore argument s2 in case it has
3657 expand_expr (s2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3661 /* If the second argument is "", return __builtin_strlen(s1). */
3662 if (p2
&& *p2
== '\0')
3664 tree newarglist
= build_tree_list (NULL_TREE
, s1
),
3665 fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
3667 /* If the replacement _DECL isn't initialized, don't do the
3672 return expand_expr (build_function_call_expr (fn
, newarglist
),
3673 target
, mode
, EXPAND_NORMAL
);
3679 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3680 if that's convenient. */
3683 expand_builtin_saveregs (void)
3687 /* Don't do __builtin_saveregs more than once in a function.
3688 Save the result of the first call and reuse it. */
3689 if (saveregs_value
!= 0)
3690 return saveregs_value
;
3692 /* When this function is called, it means that registers must be
3693 saved on entry to this function. So we migrate the call to the
3694 first insn of this function. */
3698 #ifdef EXPAND_BUILTIN_SAVEREGS
3699 /* Do whatever the machine needs done in this case. */
3700 val
= EXPAND_BUILTIN_SAVEREGS ();
3702 /* ??? We used to try and build up a call to the out of line function,
3703 guessing about what registers needed saving etc. This became much
3704 harder with __builtin_va_start, since we don't have a tree for a
3705 call to __builtin_saveregs to fall back on. There was exactly one
3706 port (i860) that used this code, and I'm unconvinced it could actually
3707 handle the general case. So we no longer try to handle anything
3708 weird and make the backend absorb the evil. */
3710 error ("__builtin_saveregs not supported by this target");
3717 saveregs_value
= val
;
3719 /* Put the insns after the NOTE that starts the function. If this
3720 is inside a start_sequence, make the outer-level insn chain current, so
3721 the code is placed at the start of the function. */
3722 push_topmost_sequence ();
3723 emit_insn_after (seq
, get_insns ());
3724 pop_topmost_sequence ();
3729 /* __builtin_args_info (N) returns word N of the arg space info
3730 for the current function. The number and meanings of words
3731 is controlled by the definition of CUMULATIVE_ARGS. */
3734 expand_builtin_args_info (tree arglist
)
3736 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
3737 int *word_ptr
= (int *) ¤t_function_args_info
;
3739 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
3744 if (!host_integerp (TREE_VALUE (arglist
), 0))
3745 error ("argument of `__builtin_args_info' must be constant");
3748 HOST_WIDE_INT wordnum
= tree_low_cst (TREE_VALUE (arglist
), 0);
3750 if (wordnum
< 0 || wordnum
>= nwords
)
3751 error ("argument of `__builtin_args_info' out of range");
3753 return GEN_INT (word_ptr
[wordnum
]);
3757 error ("missing argument in `__builtin_args_info'");
3762 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3765 expand_builtin_next_arg (tree arglist
)
3767 tree fntype
= TREE_TYPE (current_function_decl
);
3769 if (TYPE_ARG_TYPES (fntype
) == 0
3770 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3773 error ("`va_start' used in function with fixed args");
3779 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
3780 tree arg
= TREE_VALUE (arglist
);
3782 /* Strip off all nops for the sake of the comparison. This
3783 is not quite the same as STRIP_NOPS. It does more.
3784 We must also strip off INDIRECT_EXPR for C++ reference
3786 while (TREE_CODE (arg
) == NOP_EXPR
3787 || TREE_CODE (arg
) == CONVERT_EXPR
3788 || TREE_CODE (arg
) == NON_LVALUE_EXPR
3789 || TREE_CODE (arg
) == INDIRECT_REF
)
3790 arg
= TREE_OPERAND (arg
, 0);
3791 if (arg
!= last_parm
)
3792 warning ("second parameter of `va_start' not last named argument");
3795 /* Evidently an out of date version of <stdarg.h>; can't validate
3796 va_start's second argument, but can still work as intended. */
3797 warning ("`__builtin_next_arg' called without an argument");
3799 return expand_binop (Pmode
, add_optab
,
3800 current_function_internal_arg_pointer
,
3801 current_function_arg_offset_rtx
,
3802 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3805 /* Make it easier for the backends by protecting the valist argument
3806 from multiple evaluations. */
3809 stabilize_va_list (tree valist
, int needs_lvalue
)
3811 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
3813 if (TREE_SIDE_EFFECTS (valist
))
3814 valist
= save_expr (valist
);
3816 /* For this case, the backends will be expecting a pointer to
3817 TREE_TYPE (va_list_type_node), but it's possible we've
3818 actually been given an array (an actual va_list_type_node).
3820 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
3822 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
3823 tree p2
= build_pointer_type (va_list_type_node
);
3825 valist
= build1 (ADDR_EXPR
, p2
, valist
);
3826 valist
= fold (build1 (NOP_EXPR
, p1
, valist
));
3835 if (! TREE_SIDE_EFFECTS (valist
))
3838 pt
= build_pointer_type (va_list_type_node
);
3839 valist
= fold (build1 (ADDR_EXPR
, pt
, valist
));
3840 TREE_SIDE_EFFECTS (valist
) = 1;
3843 if (TREE_SIDE_EFFECTS (valist
))
3844 valist
= save_expr (valist
);
3845 valist
= fold (build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)),
3852 /* The "standard" implementation of va_start: just assign `nextarg' to
3856 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
3860 t
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
3861 make_tree (ptr_type_node
, nextarg
));
3862 TREE_SIDE_EFFECTS (t
) = 1;
3864 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3867 /* Expand ARGLIST, from a call to __builtin_va_start. */
3870 expand_builtin_va_start (tree arglist
)
3875 chain
= TREE_CHAIN (arglist
);
3877 if (TREE_CHAIN (chain
))
3878 error ("too many arguments to function `va_start'");
3880 nextarg
= expand_builtin_next_arg (chain
);
3881 valist
= stabilize_va_list (TREE_VALUE (arglist
), 1);
3883 #ifdef EXPAND_BUILTIN_VA_START
3884 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
3886 std_expand_builtin_va_start (valist
, nextarg
);
3892 /* The "standard" implementation of va_arg: read the value from the
3893 current (padded) address and increment by the (padded) size. */
3896 std_expand_builtin_va_arg (tree valist
, tree type
)
3898 tree addr_tree
, t
, type_size
= NULL
;
3899 tree align
, alignm1
;
3903 /* Compute the rounded size of the type. */
3904 align
= size_int (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3905 alignm1
= size_int (PARM_BOUNDARY
/ BITS_PER_UNIT
- 1);
3906 if (type
== error_mark_node
3907 || (type_size
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
))) == NULL
3908 || TREE_OVERFLOW (type_size
))
3909 rounded_size
= size_zero_node
;
3911 rounded_size
= fold (build (MULT_EXPR
, sizetype
,
3912 fold (build (TRUNC_DIV_EXPR
, sizetype
,
3913 fold (build (PLUS_EXPR
, sizetype
,
3914 type_size
, alignm1
)),
3920 if (PAD_VARARGS_DOWN
&& ! integer_zerop (rounded_size
))
3922 /* Small args are padded downward. */
3923 addr_tree
= fold (build (PLUS_EXPR
, TREE_TYPE (addr_tree
), addr_tree
,
3924 fold (build (COND_EXPR
, sizetype
,
3925 fold (build (GT_EXPR
, sizetype
,
3929 fold (build (MINUS_EXPR
, sizetype
,
3934 addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
3935 addr
= copy_to_reg (addr
);
3937 /* Compute new value for AP. */
3938 if (! integer_zerop (rounded_size
))
3940 t
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
3941 build (PLUS_EXPR
, TREE_TYPE (valist
), valist
,
3943 TREE_SIDE_EFFECTS (t
) = 1;
3944 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3950 /* Expand __builtin_va_arg, which is not really a builtin function, but
3951 a very special sort of operator. */
3954 expand_builtin_va_arg (tree valist
, tree type
)
3957 tree promoted_type
, want_va_type
, have_va_type
;
3959 /* Verify that valist is of the proper type. */
3961 want_va_type
= va_list_type_node
;
3962 have_va_type
= TREE_TYPE (valist
);
3963 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
3965 /* If va_list is an array type, the argument may have decayed
3966 to a pointer type, e.g. by being passed to another function.
3967 In that case, unwrap both types so that we can compare the
3968 underlying records. */
3969 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
3970 || TREE_CODE (have_va_type
) == POINTER_TYPE
)
3972 want_va_type
= TREE_TYPE (want_va_type
);
3973 have_va_type
= TREE_TYPE (have_va_type
);
3976 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
3978 error ("first argument to `va_arg' not of type `va_list'");
3982 /* Generate a diagnostic for requesting data of a type that cannot
3983 be passed through `...' due to type promotion at the call site. */
3984 else if ((promoted_type
= (*lang_hooks
.types
.type_promotes_to
) (type
))
3987 const char *name
= "<anonymous type>", *pname
= 0;
3988 static bool gave_help
;
3990 if (TYPE_NAME (type
))
3992 if (TREE_CODE (TYPE_NAME (type
)) == IDENTIFIER_NODE
)
3993 name
= IDENTIFIER_POINTER (TYPE_NAME (type
));
3994 else if (TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
3995 && DECL_NAME (TYPE_NAME (type
)))
3996 name
= IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
)));
3998 if (TYPE_NAME (promoted_type
))
4000 if (TREE_CODE (TYPE_NAME (promoted_type
)) == IDENTIFIER_NODE
)
4001 pname
= IDENTIFIER_POINTER (TYPE_NAME (promoted_type
));
4002 else if (TREE_CODE (TYPE_NAME (promoted_type
)) == TYPE_DECL
4003 && DECL_NAME (TYPE_NAME (promoted_type
)))
4004 pname
= IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type
)));
4007 /* Unfortunately, this is merely undefined, rather than a constraint
4008 violation, so we cannot make this an error. If this call is never
4009 executed, the program is still strictly conforming. */
4010 warning ("`%s' is promoted to `%s' when passed through `...'",
4015 warning ("(so you should pass `%s' not `%s' to `va_arg')",
4019 /* We can, however, treat "undefined" any way we please.
4020 Call abort to encourage the user to fix the program. */
4021 expand_builtin_trap ();
4023 /* This is dead code, but go ahead and finish so that the
4024 mode of the result comes out right. */
4029 /* Make it easier for the backends by protecting the valist argument
4030 from multiple evaluations. */
4031 valist
= stabilize_va_list (valist
, 0);
4033 #ifdef EXPAND_BUILTIN_VA_ARG
4034 addr
= EXPAND_BUILTIN_VA_ARG (valist
, type
);
4036 addr
= std_expand_builtin_va_arg (valist
, type
);
4040 #ifdef POINTERS_EXTEND_UNSIGNED
4041 if (GET_MODE (addr
) != Pmode
)
4042 addr
= convert_memory_address (Pmode
, addr
);
4045 result
= gen_rtx_MEM (TYPE_MODE (type
), addr
);
4046 set_mem_alias_set (result
, get_varargs_alias_set ());
4051 /* Expand ARGLIST, from a call to __builtin_va_end. */
4054 expand_builtin_va_end (tree arglist
)
4056 tree valist
= TREE_VALUE (arglist
);
4058 #ifdef EXPAND_BUILTIN_VA_END
4059 valist
= stabilize_va_list (valist
, 0);
4060 EXPAND_BUILTIN_VA_END (arglist
);
4062 /* Evaluate for side effects, if needed. I hate macros that don't
4064 if (TREE_SIDE_EFFECTS (valist
))
4065 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4071 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4072 builtin rather than just as an assignment in stdarg.h because of the
4073 nastiness of array-type va_list types. */
4076 expand_builtin_va_copy (tree arglist
)
4080 dst
= TREE_VALUE (arglist
);
4081 src
= TREE_VALUE (TREE_CHAIN (arglist
));
4083 dst
= stabilize_va_list (dst
, 1);
4084 src
= stabilize_va_list (src
, 0);
4086 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4088 t
= build (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4089 TREE_SIDE_EFFECTS (t
) = 1;
4090 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4094 rtx dstb
, srcb
, size
;
4096 /* Evaluate to pointers. */
4097 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4098 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4099 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4100 VOIDmode
, EXPAND_NORMAL
);
4102 #ifdef POINTERS_EXTEND_UNSIGNED
4103 if (GET_MODE (dstb
) != Pmode
)
4104 dstb
= convert_memory_address (Pmode
, dstb
);
4106 if (GET_MODE (srcb
) != Pmode
)
4107 srcb
= convert_memory_address (Pmode
, srcb
);
4110 /* "Dereference" to BLKmode memories. */
4111 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4112 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4113 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4114 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4115 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4116 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
4119 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4125 /* Expand a call to one of the builtin functions __builtin_frame_address or
4126 __builtin_return_address. */
4129 expand_builtin_frame_address (tree fndecl
, tree arglist
)
4131 /* The argument must be a nonnegative integer constant.
4132 It counts the number of frames to scan up the stack.
4133 The value is the return address saved in that frame. */
4135 /* Warning about missing arg was already issued. */
4137 else if (! host_integerp (TREE_VALUE (arglist
), 1))
4139 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4140 error ("invalid arg to `__builtin_frame_address'");
4142 error ("invalid arg to `__builtin_return_address'");
4148 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4149 tree_low_cst (TREE_VALUE (arglist
), 1),
4150 hard_frame_pointer_rtx
);
4152 /* Some ports cannot access arbitrary stack frames. */
4155 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4156 warning ("unsupported arg to `__builtin_frame_address'");
4158 warning ("unsupported arg to `__builtin_return_address'");
4162 /* For __builtin_frame_address, return what we've got. */
4163 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4166 if (GET_CODE (tem
) != REG
4167 && ! CONSTANT_P (tem
))
4168 tem
= copy_to_mode_reg (Pmode
, tem
);
4173 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4174 we failed and the caller should emit a normal call, otherwise try to get
4175 the result in TARGET, if convenient. */
4178 expand_builtin_alloca (tree arglist
, rtx target
)
4183 if (!validate_arglist (arglist
, INTEGER_TYPE
, VOID_TYPE
))
4186 /* Compute the argument. */
4187 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
4189 /* Allocate the desired space. */
4190 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
4192 #ifdef POINTERS_EXTEND_UNSIGNED
4193 if (GET_MODE (result
) != ptr_mode
)
4194 result
= convert_memory_address (ptr_mode
, result
);
4200 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4201 Return 0 if a normal call should be emitted rather than expanding the
4202 function in-line. If convenient, the result should be placed in TARGET.
4203 SUBTARGET may be used as the target for computing one of EXP's operands. */
4206 expand_builtin_unop (enum machine_mode target_mode
, tree arglist
, rtx target
,
4207 rtx subtarget
, optab op_optab
)
4210 if (!validate_arglist (arglist
, INTEGER_TYPE
, VOID_TYPE
))
4213 /* Compute the argument. */
4214 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
4215 /* Compute op, into TARGET if possible.
4216 Set TARGET to wherever the result comes back. */
4217 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
4218 op_optab
, op0
, target
, 1);
4222 return convert_to_mode (target_mode
, target
, 0);
4225 /* If the string passed to fputs is a constant and is one character
4226 long, we attempt to transform this call into __builtin_fputc(). */
4229 expand_builtin_fputs (tree arglist
, int ignore
, int unlocked
)
4232 tree fn_fputc
= unlocked
? implicit_built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
4233 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
4234 tree fn_fwrite
= unlocked
? implicit_built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
4235 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
4237 /* If the return value is used, or the replacement _DECL isn't
4238 initialized, don't do the transformation. */
4239 if (!ignore
|| !fn_fputc
|| !fn_fwrite
)
4242 /* Verify the arguments in the original call. */
4243 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4246 /* Get the length of the string passed to fputs. If the length
4247 can't be determined, punt. */
4248 if (!(len
= c_strlen (TREE_VALUE (arglist
), 1))
4249 || TREE_CODE (len
) != INTEGER_CST
)
4252 switch (compare_tree_int (len
, 1))
4254 case -1: /* length is 0, delete the call entirely . */
4256 /* Evaluate and ignore the argument in case it has
4258 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)), const0_rtx
,
4259 VOIDmode
, EXPAND_NORMAL
);
4262 case 0: /* length is 1, call fputc. */
4264 const char *p
= c_getstr (TREE_VALUE (arglist
));
4268 /* New argument list transforming fputs(string, stream) to
4269 fputc(string[0], stream). */
4271 build_tree_list (NULL_TREE
, TREE_VALUE (TREE_CHAIN (arglist
)));
4273 tree_cons (NULL_TREE
, build_int_2 (p
[0], 0), arglist
);
4279 case 1: /* length is greater than 1, call fwrite. */
4283 /* If optimizing for size keep fputs. */
4286 string_arg
= TREE_VALUE (arglist
);
4287 /* New argument list transforming fputs(string, stream) to
4288 fwrite(string, 1, len, stream). */
4289 arglist
= build_tree_list (NULL_TREE
, TREE_VALUE (TREE_CHAIN (arglist
)));
4290 arglist
= tree_cons (NULL_TREE
, len
, arglist
);
4291 arglist
= tree_cons (NULL_TREE
, size_one_node
, arglist
);
4292 arglist
= tree_cons (NULL_TREE
, string_arg
, arglist
);
4300 return expand_expr (build_function_call_expr (fn
, arglist
),
4301 (ignore
? const0_rtx
: NULL_RTX
),
4302 VOIDmode
, EXPAND_NORMAL
);
4305 /* Expand a call to __builtin_expect. We return our argument and emit a
4306 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4307 a non-jump context. */
4310 expand_builtin_expect (tree arglist
, rtx target
)
4315 if (arglist
== NULL_TREE
4316 || TREE_CHAIN (arglist
) == NULL_TREE
)
4318 exp
= TREE_VALUE (arglist
);
4319 c
= TREE_VALUE (TREE_CHAIN (arglist
));
4321 if (TREE_CODE (c
) != INTEGER_CST
)
4323 error ("second arg to `__builtin_expect' must be a constant");
4324 c
= integer_zero_node
;
4327 target
= expand_expr (exp
, target
, VOIDmode
, EXPAND_NORMAL
);
4329 /* Don't bother with expected value notes for integral constants. */
4330 if (flag_guess_branch_prob
&& GET_CODE (target
) != CONST_INT
)
4332 /* We do need to force this into a register so that we can be
4333 moderately sure to be able to correctly interpret the branch
4335 target
= force_reg (GET_MODE (target
), target
);
4337 rtx_c
= expand_expr (c
, NULL_RTX
, GET_MODE (target
), EXPAND_NORMAL
);
4339 note
= emit_note (NOTE_INSN_EXPECTED_VALUE
);
4340 NOTE_EXPECTED_VALUE (note
) = gen_rtx_EQ (VOIDmode
, target
, rtx_c
);
4346 /* Like expand_builtin_expect, except do this in a jump context. This is
4347 called from do_jump if the conditional is a __builtin_expect. Return either
4348 a list of insns to emit the jump or NULL if we cannot optimize
4349 __builtin_expect. We need to optimize this at jump time so that machines
4350 like the PowerPC don't turn the test into a SCC operation, and then jump
4351 based on the test being 0/1. */
4354 expand_builtin_expect_jump (tree exp
, rtx if_false_label
, rtx if_true_label
)
4356 tree arglist
= TREE_OPERAND (exp
, 1);
4357 tree arg0
= TREE_VALUE (arglist
);
4358 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4361 /* Only handle __builtin_expect (test, 0) and
4362 __builtin_expect (test, 1). */
4363 if (TREE_CODE (TREE_TYPE (arg1
)) == INTEGER_TYPE
4364 && (integer_zerop (arg1
) || integer_onep (arg1
)))
4369 /* If we fail to locate an appropriate conditional jump, we'll
4370 fall back to normal evaluation. Ensure that the expression
4371 can be re-evaluated. */
4372 switch (unsafe_for_reeval (arg0
))
4377 case 1: /* Mildly unsafe. */
4378 arg0
= unsave_expr (arg0
);
4381 case 2: /* Wildly unsafe. */
4385 /* Expand the jump insns. */
4387 do_jump (arg0
, if_false_label
, if_true_label
);
4391 /* Now that the __builtin_expect has been validated, go through and add
4392 the expect's to each of the conditional jumps. If we run into an
4393 error, just give up and generate the 'safe' code of doing a SCC
4394 operation and then doing a branch on that. */
4396 while (insn
!= NULL_RTX
)
4398 rtx next
= NEXT_INSN (insn
);
4400 if (GET_CODE (insn
) == JUMP_INSN
&& any_condjump_p (insn
))
4402 rtx ifelse
= SET_SRC (pc_set (insn
));
4406 if (GET_CODE (XEXP (ifelse
, 1)) == LABEL_REF
)
4409 label
= XEXP (XEXP (ifelse
, 1), 0);
4411 /* An inverted jump reverses the probabilities. */
4412 else if (GET_CODE (XEXP (ifelse
, 2)) == LABEL_REF
)
4415 label
= XEXP (XEXP (ifelse
, 2), 0);
4417 /* We shouldn't have to worry about conditional returns during
4418 the expansion stage, but handle it gracefully anyway. */
4419 else if (GET_CODE (XEXP (ifelse
, 1)) == RETURN
)
4424 /* An inverted return reverses the probabilities. */
4425 else if (GET_CODE (XEXP (ifelse
, 2)) == RETURN
)
4433 /* If the test is expected to fail, reverse the
4435 if (integer_zerop (arg1
))
4438 /* If we are jumping to the false label, reverse the
4440 if (label
== NULL_RTX
)
4441 ; /* conditional return */
4442 else if (label
== if_false_label
)
4444 else if (label
!= if_true_label
)
4448 predict_insn_def (insn
, PRED_BUILTIN_EXPECT
, taken
);
4455 /* If no jumps were modified, fail and do __builtin_expect the normal
4465 expand_builtin_trap (void)
4469 emit_insn (gen_trap ());
4472 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
4476 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4477 Return 0 if a normal call should be emitted rather than expanding
4478 the function inline. If convenient, the result should be placed
4479 in TARGET. SUBTARGET may be used as the target for computing
4483 expand_builtin_fabs (tree arglist
, rtx target
, rtx subtarget
)
4485 enum machine_mode mode
;
4489 if (!validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
4492 arg
= TREE_VALUE (arglist
);
4493 mode
= TYPE_MODE (TREE_TYPE (arg
));
4494 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
4495 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4498 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4499 Return 0 if a normal call should be emitted rather than expanding
4500 the function inline. If convenient, the result should be placed
4504 expand_builtin_cabs (tree arglist
, rtx target
)
4506 enum machine_mode mode
;
4510 if (arglist
== 0 || TREE_CHAIN (arglist
))
4512 arg
= TREE_VALUE (arglist
);
4513 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
4514 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
4517 mode
= TYPE_MODE (TREE_TYPE (arg
));
4518 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, 0);
4519 return expand_complex_abs (mode
, op0
, target
, 0);
4522 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4523 a normal call should be emitted rather than expanding the function
4524 inline. If convenient, the result should be placed in TARGET with
4528 expand_builtin_sprintf (tree arglist
, rtx target
, enum machine_mode mode
)
4530 tree orig_arglist
, dest
, fmt
;
4531 const char *fmt_str
;
4533 orig_arglist
= arglist
;
4535 /* Verify the required arguments in the original call. */
4538 dest
= TREE_VALUE (arglist
);
4539 if (TREE_CODE (TREE_TYPE (dest
)) != POINTER_TYPE
)
4541 arglist
= TREE_CHAIN (arglist
);
4544 fmt
= TREE_VALUE (arglist
);
4545 if (TREE_CODE (TREE_TYPE (dest
)) != POINTER_TYPE
)
4547 arglist
= TREE_CHAIN (arglist
);
4549 /* Check whether the format is a literal string constant. */
4550 fmt_str
= c_getstr (fmt
);
4551 if (fmt_str
== NULL
)
4554 /* If the format doesn't contain % args or %%, use strcpy. */
4555 if (strchr (fmt_str
, '%') == 0)
4557 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
4560 if (arglist
|| ! fn
)
4562 expand_expr (build_function_call_expr (fn
, orig_arglist
),
4563 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4564 if (target
== const0_rtx
)
4566 exp
= build_int_2 (strlen (fmt_str
), 0);
4567 exp
= fold (build1 (NOP_EXPR
, integer_type_node
, exp
));
4568 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
4570 /* If the format is "%s", use strcpy if the result isn't used. */
4571 else if (strcmp (fmt_str
, "%s") == 0)
4574 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
4579 if (! arglist
|| TREE_CHAIN (arglist
))
4581 arg
= TREE_VALUE (arglist
);
4582 if (TREE_CODE (TREE_TYPE (arg
)) != POINTER_TYPE
)
4585 if (target
!= const0_rtx
)
4587 len
= c_strlen (arg
, 1);
4588 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
4594 arglist
= build_tree_list (NULL_TREE
, arg
);
4595 arglist
= tree_cons (NULL_TREE
, dest
, arglist
);
4596 expand_expr (build_function_call_expr (fn
, arglist
),
4597 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4599 if (target
== const0_rtx
)
4601 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
4607 /* Expand an expression EXP that calls a built-in function,
4608 with result going to TARGET if that's convenient
4609 (and in mode MODE if that's convenient).
4610 SUBTARGET may be used as the target for computing one of EXP's operands.
4611 IGNORE is nonzero if the value is to be ignored. */
4614 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
4617 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4618 tree arglist
= TREE_OPERAND (exp
, 1);
4619 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
4620 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
4622 /* Perform postincrements before expanding builtin functions. Â */
4625 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
4626 return (*targetm
.expand_builtin
) (exp
, target
, subtarget
, mode
, ignore
);
4628 /* When not optimizing, generate calls to library functions for a certain
4630 if (!optimize
&& !CALLED_AS_BUILT_IN (fndecl
))
4634 case BUILT_IN_SQRTF
:
4635 case BUILT_IN_SQRTL
:
4652 case BUILT_IN_ATANF
:
4653 case BUILT_IN_ATANL
:
4657 case BUILT_IN_ATAN2
:
4658 case BUILT_IN_ATAN2F
:
4659 case BUILT_IN_ATAN2L
:
4660 case BUILT_IN_MEMSET
:
4661 case BUILT_IN_MEMCPY
:
4662 case BUILT_IN_MEMCMP
:
4663 case BUILT_IN_MEMPCPY
:
4664 case BUILT_IN_MEMMOVE
:
4666 case BUILT_IN_BZERO
:
4667 case BUILT_IN_BCOPY
:
4668 case BUILT_IN_INDEX
:
4669 case BUILT_IN_RINDEX
:
4670 case BUILT_IN_SPRINTF
:
4671 case BUILT_IN_STPCPY
:
4672 case BUILT_IN_STRCHR
:
4673 case BUILT_IN_STRRCHR
:
4674 case BUILT_IN_STRLEN
:
4675 case BUILT_IN_STRCPY
:
4676 case BUILT_IN_STRNCPY
:
4677 case BUILT_IN_STRNCMP
:
4678 case BUILT_IN_STRSTR
:
4679 case BUILT_IN_STRPBRK
:
4680 case BUILT_IN_STRCAT
:
4681 case BUILT_IN_STRNCAT
:
4682 case BUILT_IN_STRSPN
:
4683 case BUILT_IN_STRCSPN
:
4684 case BUILT_IN_STRCMP
:
4686 case BUILT_IN_PUTCHAR
:
4688 case BUILT_IN_PRINTF
:
4689 case BUILT_IN_FPUTC
:
4690 case BUILT_IN_FPUTS
:
4691 case BUILT_IN_FWRITE
:
4692 case BUILT_IN_PUTCHAR_UNLOCKED
:
4693 case BUILT_IN_PUTS_UNLOCKED
:
4694 case BUILT_IN_PRINTF_UNLOCKED
:
4695 case BUILT_IN_FPUTC_UNLOCKED
:
4696 case BUILT_IN_FPUTS_UNLOCKED
:
4697 case BUILT_IN_FWRITE_UNLOCKED
:
4698 case BUILT_IN_FLOOR
:
4699 case BUILT_IN_FLOORF
:
4700 case BUILT_IN_FLOORL
:
4702 case BUILT_IN_CEILF
:
4703 case BUILT_IN_CEILL
:
4704 case BUILT_IN_TRUNC
:
4705 case BUILT_IN_TRUNCF
:
4706 case BUILT_IN_TRUNCL
:
4707 case BUILT_IN_ROUND
:
4708 case BUILT_IN_ROUNDF
:
4709 case BUILT_IN_ROUNDL
:
4710 case BUILT_IN_NEARBYINT
:
4711 case BUILT_IN_NEARBYINTF
:
4712 case BUILT_IN_NEARBYINTL
:
4713 return expand_call (exp
, target
, ignore
);
4719 /* The built-in function expanders test for target == const0_rtx
4720 to determine whether the function's result will be ignored. */
4722 target
= const0_rtx
;
4724 /* If the result of a pure or const built-in function is ignored, and
4725 none of its arguments are volatile, we can avoid expanding the
4726 built-in call and just evaluate the arguments for side-effects. */
4727 if (target
== const0_rtx
4728 && (DECL_IS_PURE (fndecl
) || TREE_READONLY (fndecl
)))
4730 bool volatilep
= false;
4733 for (arg
= arglist
; arg
; arg
= TREE_CHAIN (arg
))
4734 if (TREE_THIS_VOLATILE (TREE_VALUE (arg
)))
4742 for (arg
= arglist
; arg
; arg
= TREE_CHAIN (arg
))
4743 expand_expr (TREE_VALUE (arg
), const0_rtx
,
4744 VOIDmode
, EXPAND_NORMAL
);
4753 case BUILT_IN_LLABS
:
4754 case BUILT_IN_IMAXABS
:
4755 /* build_function_call changes these into ABS_EXPR. */
4759 case BUILT_IN_FABSF
:
4760 case BUILT_IN_FABSL
:
4761 target
= expand_builtin_fabs (arglist
, target
, subtarget
);
4767 case BUILT_IN_CABSF
:
4768 case BUILT_IN_CABSL
:
4769 if (flag_unsafe_math_optimizations
)
4771 target
= expand_builtin_cabs (arglist
, target
);
4778 case BUILT_IN_CONJF
:
4779 case BUILT_IN_CONJL
:
4780 case BUILT_IN_CREAL
:
4781 case BUILT_IN_CREALF
:
4782 case BUILT_IN_CREALL
:
4783 case BUILT_IN_CIMAG
:
4784 case BUILT_IN_CIMAGF
:
4785 case BUILT_IN_CIMAGL
:
4786 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4787 and IMAGPART_EXPR. */
4806 case BUILT_IN_ATANF
:
4807 case BUILT_IN_ATANL
:
4808 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4809 because of possible accuracy problems. */
4810 if (! flag_unsafe_math_optimizations
)
4813 case BUILT_IN_SQRTF
:
4814 case BUILT_IN_SQRTL
:
4815 case BUILT_IN_FLOOR
:
4816 case BUILT_IN_FLOORF
:
4817 case BUILT_IN_FLOORL
:
4819 case BUILT_IN_CEILF
:
4820 case BUILT_IN_CEILL
:
4821 case BUILT_IN_TRUNC
:
4822 case BUILT_IN_TRUNCF
:
4823 case BUILT_IN_TRUNCL
:
4824 case BUILT_IN_ROUND
:
4825 case BUILT_IN_ROUNDF
:
4826 case BUILT_IN_ROUNDL
:
4827 case BUILT_IN_NEARBYINT
:
4828 case BUILT_IN_NEARBYINTF
:
4829 case BUILT_IN_NEARBYINTL
:
4830 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
4838 if (! flag_unsafe_math_optimizations
)
4840 target
= expand_builtin_pow (exp
, target
, subtarget
);
4845 case BUILT_IN_ATAN2
:
4846 case BUILT_IN_ATAN2F
:
4847 case BUILT_IN_ATAN2L
:
4848 if (! flag_unsafe_math_optimizations
)
4850 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
4855 case BUILT_IN_APPLY_ARGS
:
4856 return expand_builtin_apply_args ();
4858 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4859 FUNCTION with a copy of the parameters described by
4860 ARGUMENTS, and ARGSIZE. It returns a block of memory
4861 allocated on the stack into which is stored all the registers
4862 that might possibly be used for returning the result of a
4863 function. ARGUMENTS is the value returned by
4864 __builtin_apply_args. ARGSIZE is the number of bytes of
4865 arguments that must be copied. ??? How should this value be
4866 computed? We'll also need a safe worst case value for varargs
4868 case BUILT_IN_APPLY
:
4869 if (!validate_arglist (arglist
, POINTER_TYPE
,
4870 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4871 && !validate_arglist (arglist
, REFERENCE_TYPE
,
4872 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4880 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
4881 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
4883 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
4886 /* __builtin_return (RESULT) causes the function to return the
4887 value described by RESULT. RESULT is address of the block of
4888 memory returned by __builtin_apply. */
4889 case BUILT_IN_RETURN
:
4890 if (validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
4891 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
4892 NULL_RTX
, VOIDmode
, 0));
4895 case BUILT_IN_SAVEREGS
:
4896 return expand_builtin_saveregs ();
4898 case BUILT_IN_ARGS_INFO
:
4899 return expand_builtin_args_info (arglist
);
4901 /* Return the address of the first anonymous stack arg. */
4902 case BUILT_IN_NEXT_ARG
:
4903 return expand_builtin_next_arg (arglist
);
4905 case BUILT_IN_CLASSIFY_TYPE
:
4906 return expand_builtin_classify_type (arglist
);
4908 case BUILT_IN_CONSTANT_P
:
4909 return expand_builtin_constant_p (arglist
, target_mode
);
4911 case BUILT_IN_FRAME_ADDRESS
:
4912 case BUILT_IN_RETURN_ADDRESS
:
4913 return expand_builtin_frame_address (fndecl
, arglist
);
4915 /* Returns the address of the area where the structure is returned.
4917 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
4919 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
4920 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) != MEM
)
4923 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
4925 case BUILT_IN_ALLOCA
:
4926 target
= expand_builtin_alloca (arglist
, target
);
4933 case BUILT_IN_FFSLL
:
4934 target
= expand_builtin_unop (target_mode
, arglist
, target
,
4935 subtarget
, ffs_optab
);
4942 case BUILT_IN_CLZLL
:
4943 target
= expand_builtin_unop (target_mode
, arglist
, target
,
4944 subtarget
, clz_optab
);
4951 case BUILT_IN_CTZLL
:
4952 target
= expand_builtin_unop (target_mode
, arglist
, target
,
4953 subtarget
, ctz_optab
);
4958 case BUILT_IN_POPCOUNT
:
4959 case BUILT_IN_POPCOUNTL
:
4960 case BUILT_IN_POPCOUNTLL
:
4961 target
= expand_builtin_unop (target_mode
, arglist
, target
,
4962 subtarget
, popcount_optab
);
4967 case BUILT_IN_PARITY
:
4968 case BUILT_IN_PARITYL
:
4969 case BUILT_IN_PARITYLL
:
4970 target
= expand_builtin_unop (target_mode
, arglist
, target
,
4971 subtarget
, parity_optab
);
4976 case BUILT_IN_STRLEN
:
4977 target
= expand_builtin_strlen (arglist
, target
, target_mode
);
4982 case BUILT_IN_STRCPY
:
4983 target
= expand_builtin_strcpy (arglist
, target
, mode
);
4988 case BUILT_IN_STRNCPY
:
4989 target
= expand_builtin_strncpy (arglist
, target
, mode
);
4994 case BUILT_IN_STPCPY
:
4995 target
= expand_builtin_stpcpy (arglist
, target
, mode
);
5000 case BUILT_IN_STRCAT
:
5001 target
= expand_builtin_strcat (arglist
, target
, mode
);
5006 case BUILT_IN_STRNCAT
:
5007 target
= expand_builtin_strncat (arglist
, target
, mode
);
5012 case BUILT_IN_STRSPN
:
5013 target
= expand_builtin_strspn (arglist
, target
, mode
);
5018 case BUILT_IN_STRCSPN
:
5019 target
= expand_builtin_strcspn (arglist
, target
, mode
);
5024 case BUILT_IN_STRSTR
:
5025 target
= expand_builtin_strstr (arglist
, target
, mode
);
5030 case BUILT_IN_STRPBRK
:
5031 target
= expand_builtin_strpbrk (arglist
, target
, mode
);
5036 case BUILT_IN_INDEX
:
5037 case BUILT_IN_STRCHR
:
5038 target
= expand_builtin_strchr (arglist
, target
, mode
);
5043 case BUILT_IN_RINDEX
:
5044 case BUILT_IN_STRRCHR
:
5045 target
= expand_builtin_strrchr (arglist
, target
, mode
);
5050 case BUILT_IN_MEMCPY
:
5051 target
= expand_builtin_memcpy (arglist
, target
, mode
);
5056 case BUILT_IN_MEMPCPY
:
5057 target
= expand_builtin_mempcpy (arglist
, target
, mode
, /*endp=*/ 1);
5062 case BUILT_IN_MEMMOVE
:
5063 target
= expand_builtin_memmove (arglist
, target
, mode
);
5068 case BUILT_IN_BCOPY
:
5069 target
= expand_builtin_bcopy (arglist
);
5074 case BUILT_IN_MEMSET
:
5075 target
= expand_builtin_memset (arglist
, target
, mode
);
5080 case BUILT_IN_BZERO
:
5081 target
= expand_builtin_bzero (arglist
);
5086 case BUILT_IN_STRCMP
:
5087 target
= expand_builtin_strcmp (exp
, target
, mode
);
5092 case BUILT_IN_STRNCMP
:
5093 target
= expand_builtin_strncmp (exp
, target
, mode
);
5099 case BUILT_IN_MEMCMP
:
5100 target
= expand_builtin_memcmp (exp
, arglist
, target
, mode
);
5105 case BUILT_IN_SETJMP
:
5106 target
= expand_builtin_setjmp (arglist
, target
);
5111 /* __builtin_longjmp is passed a pointer to an array of five words.
5112 It's similar to the C library longjmp function but works with
5113 __builtin_setjmp above. */
5114 case BUILT_IN_LONGJMP
:
5115 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5119 rtx buf_addr
= expand_expr (TREE_VALUE (arglist
), subtarget
,
5121 rtx value
= expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)),
5122 NULL_RTX
, VOIDmode
, 0);
5124 if (value
!= const1_rtx
)
5126 error ("__builtin_longjmp second argument must be 1");
5130 expand_builtin_longjmp (buf_addr
, value
);
5135 expand_builtin_trap ();
5138 case BUILT_IN_FPUTS
:
5139 target
= expand_builtin_fputs (arglist
, ignore
,/*unlocked=*/ 0);
5143 case BUILT_IN_FPUTS_UNLOCKED
:
5144 target
= expand_builtin_fputs (arglist
, ignore
,/*unlocked=*/ 1);
5149 case BUILT_IN_SPRINTF
:
5150 target
= expand_builtin_sprintf (arglist
, target
, mode
);
5155 /* Various hooks for the DWARF 2 __throw routine. */
5156 case BUILT_IN_UNWIND_INIT
:
5157 expand_builtin_unwind_init ();
5159 case BUILT_IN_DWARF_CFA
:
5160 return virtual_cfa_rtx
;
5161 #ifdef DWARF2_UNWIND_INFO
5162 case BUILT_IN_DWARF_SP_COLUMN
:
5163 return expand_builtin_dwarf_sp_column ();
5164 case BUILT_IN_INIT_DWARF_REG_SIZES
:
5165 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist
));
5168 case BUILT_IN_FROB_RETURN_ADDR
:
5169 return expand_builtin_frob_return_addr (TREE_VALUE (arglist
));
5170 case BUILT_IN_EXTRACT_RETURN_ADDR
:
5171 return expand_builtin_extract_return_addr (TREE_VALUE (arglist
));
5172 case BUILT_IN_EH_RETURN
:
5173 expand_builtin_eh_return (TREE_VALUE (arglist
),
5174 TREE_VALUE (TREE_CHAIN (arglist
)));
5176 #ifdef EH_RETURN_DATA_REGNO
5177 case BUILT_IN_EH_RETURN_DATA_REGNO
:
5178 return expand_builtin_eh_return_data_regno (arglist
);
5180 case BUILT_IN_VA_START
:
5181 case BUILT_IN_STDARG_START
:
5182 return expand_builtin_va_start (arglist
);
5183 case BUILT_IN_VA_END
:
5184 return expand_builtin_va_end (arglist
);
5185 case BUILT_IN_VA_COPY
:
5186 return expand_builtin_va_copy (arglist
);
5187 case BUILT_IN_EXPECT
:
5188 return expand_builtin_expect (arglist
, target
);
5189 case BUILT_IN_PREFETCH
:
5190 expand_builtin_prefetch (arglist
);
5194 default: /* just do library call, if unknown builtin */
5195 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl
))
5196 error ("built-in function `%s' not currently supported",
5197 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
5200 /* The switch statement above can drop through to cause the function
5201 to be called normally. */
5202 return expand_call (exp
, target
, ignore
);
5205 /* Determine whether a tree node represents a call to a built-in
5206 math function. If the tree T is a call to a built-in function
5207 taking a single real argument, then the return value is the
5208 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
5209 the return value is END_BUILTINS. */
5211 enum built_in_function
5212 builtin_mathfn_code (tree t
)
5214 tree fndecl
, arglist
;
5216 if (TREE_CODE (t
) != CALL_EXPR
5217 || TREE_CODE (TREE_OPERAND (t
, 0)) != ADDR_EXPR
)
5218 return END_BUILTINS
;
5220 fndecl
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
5221 if (TREE_CODE (fndecl
) != FUNCTION_DECL
5222 || ! DECL_BUILT_IN (fndecl
)
5223 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5224 return END_BUILTINS
;
5226 arglist
= TREE_OPERAND (t
, 1);
5228 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
5229 return END_BUILTINS
;
5231 arglist
= TREE_CHAIN (arglist
);
5232 switch (DECL_FUNCTION_CODE (fndecl
))
5237 case BUILT_IN_ATAN2
:
5238 case BUILT_IN_ATAN2F
:
5239 case BUILT_IN_ATAN2L
:
5241 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
5242 || TREE_CHAIN (arglist
))
5243 return END_BUILTINS
;
5248 return END_BUILTINS
;
5252 return DECL_FUNCTION_CODE (fndecl
);
5255 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5256 constant. ARGLIST is the argument list of the call. */
5259 fold_builtin_constant_p (tree arglist
)
5264 arglist
= TREE_VALUE (arglist
);
5266 /* We return 1 for a numeric type that's known to be a constant
5267 value at compile-time or for an aggregate type that's a
5268 literal constant. */
5269 STRIP_NOPS (arglist
);
5271 /* If we know this is a constant, emit the constant of one. */
5272 if (TREE_CODE_CLASS (TREE_CODE (arglist
)) == 'c'
5273 || (TREE_CODE (arglist
) == CONSTRUCTOR
5274 && TREE_CONSTANT (arglist
))
5275 || (TREE_CODE (arglist
) == ADDR_EXPR
5276 && TREE_CODE (TREE_OPERAND (arglist
, 0)) == STRING_CST
))
5277 return integer_one_node
;
5279 /* If we aren't going to be running CSE or this expression
5280 has side effects, show we don't know it to be a constant.
5281 Likewise if it's a pointer or aggregate type since in those
5282 case we only want literals, since those are only optimized
5283 when generating RTL, not later.
5284 And finally, if we are compiling an initializer, not code, we
5285 need to return a definite result now; there's not going to be any
5286 more optimization done. */
5287 if (TREE_SIDE_EFFECTS (arglist
) || cse_not_expected
5288 || AGGREGATE_TYPE_P (TREE_TYPE (arglist
))
5289 || POINTER_TYPE_P (TREE_TYPE (arglist
))
5291 return integer_zero_node
;
5296 /* Fold a call to __builtin_classify_type. */
5299 fold_builtin_classify_type (tree arglist
)
5302 return build_int_2 (no_type_class
, 0);
5304 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist
))), 0);
5307 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5310 fold_builtin_inf (tree type
, int warn
)
5312 REAL_VALUE_TYPE real
;
5314 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
5315 warning ("target format does not support infinity");
5318 return build_real (type
, real
);
5321 /* Fold a call to __builtin_nan or __builtin_nans. */
5324 fold_builtin_nan (tree arglist
, tree type
, int quiet
)
5326 REAL_VALUE_TYPE real
;
5329 if (!validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
5331 str
= c_getstr (TREE_VALUE (arglist
));
5335 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
5338 return build_real (type
, real
);
5341 /* EXP is assumed to me builtin call where truncation can be propagated
5342 across (for instance floor((double)f) == (double)floorf (f).
5343 Do the transformation. */
5345 fold_trunc_transparent_mathfn (tree exp
)
5347 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5348 tree arglist
= TREE_OPERAND (exp
, 1);
5349 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5351 if (optimize
&& validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5353 tree arg0
= strip_float_extensions (TREE_VALUE (arglist
));
5354 tree ftype
= TREE_TYPE (exp
);
5355 tree newtype
= TREE_TYPE (arg0
);
5358 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
5359 && (decl
= mathfn_built_in (newtype
, fcode
)))
5362 build_tree_list (NULL_TREE
, fold (convert (newtype
, arg0
)));
5363 return convert (ftype
,
5364 build_function_call_expr (decl
, arglist
));
5370 /* Fold function call to builtin cabs, cabsf or cabsl. FNDECL is the
5371 function's DECL, ARGLIST is the argument list and TYPE is the return
5372 type. Return NULL_TREE if no simplification can be made. */
5375 fold_builtin_cabs (tree fndecl
, tree arglist
, tree type
)
5379 if (!arglist
|| TREE_CHAIN (arglist
))
5382 arg
= TREE_VALUE (arglist
);
5383 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
5384 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
5387 /* Evaluate cabs of a constant at compile-time. */
5388 if (flag_unsafe_math_optimizations
5389 && TREE_CODE (arg
) == COMPLEX_CST
5390 && TREE_CODE (TREE_REALPART (arg
)) == REAL_CST
5391 && TREE_CODE (TREE_IMAGPART (arg
)) == REAL_CST
5392 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg
))
5393 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg
)))
5395 REAL_VALUE_TYPE r
, i
;
5397 r
= TREE_REAL_CST (TREE_REALPART (arg
));
5398 i
= TREE_REAL_CST (TREE_IMAGPART (arg
));
5400 real_arithmetic (&r
, MULT_EXPR
, &r
, &r
);
5401 real_arithmetic (&i
, MULT_EXPR
, &i
, &i
);
5402 real_arithmetic (&r
, PLUS_EXPR
, &r
, &i
);
5403 if (real_sqrt (&r
, TYPE_MODE (type
), &r
)
5404 || ! flag_trapping_math
)
5405 return build_real (type
, r
);
5408 /* If either part is zero, cabs is fabs of the other. */
5409 if (TREE_CODE (arg
) == COMPLEX_EXPR
5410 && real_zerop (TREE_OPERAND (arg
, 0)))
5411 return fold (build1 (ABS_EXPR
, type
, TREE_OPERAND (arg
, 1)));
5412 if (TREE_CODE (arg
) == COMPLEX_EXPR
5413 && real_zerop (TREE_OPERAND (arg
, 1)))
5414 return fold (build1 (ABS_EXPR
, type
, TREE_OPERAND (arg
, 0)));
5416 if (flag_unsafe_math_optimizations
)
5418 enum built_in_function fcode
;
5421 fcode
= DECL_FUNCTION_CODE (fndecl
);
5422 if (fcode
== BUILT_IN_CABS
)
5423 sqrtfn
= implicit_built_in_decls
[BUILT_IN_SQRT
];
5424 else if (fcode
== BUILT_IN_CABSF
)
5425 sqrtfn
= implicit_built_in_decls
[BUILT_IN_SQRTF
];
5426 else if (fcode
== BUILT_IN_CABSL
)
5427 sqrtfn
= implicit_built_in_decls
[BUILT_IN_SQRTL
];
5431 if (sqrtfn
!= NULL_TREE
)
5433 tree rpart
, ipart
, result
, arglist
;
5435 rpart
= fold (build1 (REALPART_EXPR
, type
, arg
));
5436 ipart
= fold (build1 (IMAGPART_EXPR
, type
, arg
));
5438 rpart
= save_expr (rpart
);
5439 ipart
= save_expr (ipart
);
5441 result
= fold (build (PLUS_EXPR
, type
,
5442 fold (build (MULT_EXPR
, type
,
5444 fold (build (MULT_EXPR
, type
,
5447 arglist
= build_tree_list (NULL_TREE
, result
);
5448 return build_function_call_expr (sqrtfn
, arglist
);
5455 /* Used by constant folding to eliminate some builtin calls early. EXP is
5456 the CALL_EXPR of a call to a builtin function. */
5459 fold_builtin (tree exp
)
5461 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5462 tree arglist
= TREE_OPERAND (exp
, 1);
5463 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
5465 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5468 switch (DECL_FUNCTION_CODE (fndecl
))
5470 case BUILT_IN_CONSTANT_P
:
5471 return fold_builtin_constant_p (arglist
);
5473 case BUILT_IN_CLASSIFY_TYPE
:
5474 return fold_builtin_classify_type (arglist
);
5476 case BUILT_IN_STRLEN
:
5477 if (validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
5479 tree len
= c_strlen (TREE_VALUE (arglist
), 0);
5482 /* Convert from the internal "sizetype" type to "size_t". */
5484 len
= convert (size_type_node
, len
);
5491 case BUILT_IN_FABSF
:
5492 case BUILT_IN_FABSL
:
5493 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5494 return fold (build1 (ABS_EXPR
, type
, TREE_VALUE (arglist
)));
5498 case BUILT_IN_CABSF
:
5499 case BUILT_IN_CABSL
:
5500 return fold_builtin_cabs (fndecl
, arglist
, type
);
5503 case BUILT_IN_SQRTF
:
5504 case BUILT_IN_SQRTL
:
5505 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5507 enum built_in_function fcode
;
5508 tree arg
= TREE_VALUE (arglist
);
5510 /* Optimize sqrt of constant value. */
5511 if (TREE_CODE (arg
) == REAL_CST
5512 && ! TREE_CONSTANT_OVERFLOW (arg
))
5514 REAL_VALUE_TYPE r
, x
;
5516 x
= TREE_REAL_CST (arg
);
5517 if (real_sqrt (&r
, TYPE_MODE (type
), &x
)
5518 || (!flag_trapping_math
&& !flag_errno_math
))
5519 return build_real (type
, r
);
5522 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5523 fcode
= builtin_mathfn_code (arg
);
5524 if (flag_unsafe_math_optimizations
5525 && (fcode
== BUILT_IN_EXP
5526 || fcode
== BUILT_IN_EXPF
5527 || fcode
== BUILT_IN_EXPL
))
5529 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
5530 arg
= fold (build (MULT_EXPR
, type
,
5531 TREE_VALUE (TREE_OPERAND (arg
, 1)),
5532 build_real (type
, dconsthalf
)));
5533 arglist
= build_tree_list (NULL_TREE
, arg
);
5534 return build_function_call_expr (expfn
, arglist
);
5537 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5538 if (flag_unsafe_math_optimizations
5539 && (fcode
== BUILT_IN_POW
5540 || fcode
== BUILT_IN_POWF
5541 || fcode
== BUILT_IN_POWL
))
5543 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
5544 tree arg0
= TREE_VALUE (TREE_OPERAND (arg
, 1));
5545 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg
, 1)));
5546 tree narg1
= fold (build (MULT_EXPR
, type
, arg1
,
5547 build_real (type
, dconsthalf
)));
5548 arglist
= tree_cons (NULL_TREE
, arg0
,
5549 build_tree_list (NULL_TREE
, narg1
));
5550 return build_function_call_expr (powfn
, arglist
);
5558 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5560 tree arg
= TREE_VALUE (arglist
);
5562 /* Optimize sin(0.0) = 0.0. */
5563 if (real_zerop (arg
))
5571 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5573 tree arg
= TREE_VALUE (arglist
);
5575 /* Optimize cos(0.0) = 1.0. */
5576 if (real_zerop (arg
))
5577 return build_real (type
, dconst1
);
5579 /* Optimize cos(-x) into cos(x). */
5580 if (TREE_CODE (arg
) == NEGATE_EXPR
)
5582 tree arglist
= build_tree_list (NULL_TREE
,
5583 TREE_OPERAND (arg
, 0));
5584 return build_function_call_expr (fndecl
, arglist
);
5592 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5594 enum built_in_function fcode
;
5595 tree arg
= TREE_VALUE (arglist
);
5597 /* Optimize exp(0.0) = 1.0. */
5598 if (real_zerop (arg
))
5599 return build_real (type
, dconst1
);
5601 /* Optimize exp(1.0) = e. */
5602 if (real_onep (arg
))
5604 REAL_VALUE_TYPE cst
;
5606 if (! builtin_dconsts_init
)
5607 init_builtin_dconsts ();
5608 real_convert (&cst
, TYPE_MODE (type
), &dconste
);
5609 return build_real (type
, cst
);
5612 /* Attempt to evaluate exp at compile-time. */
5613 if (flag_unsafe_math_optimizations
5614 && TREE_CODE (arg
) == REAL_CST
5615 && ! TREE_CONSTANT_OVERFLOW (arg
))
5617 REAL_VALUE_TYPE cint
;
5621 c
= TREE_REAL_CST (arg
);
5622 n
= real_to_integer (&c
);
5623 real_from_integer (&cint
, VOIDmode
, n
,
5625 if (real_identical (&c
, &cint
))
5629 if (! builtin_dconsts_init
)
5630 init_builtin_dconsts ();
5631 real_powi (&x
, TYPE_MODE (type
), &dconste
, n
);
5632 return build_real (type
, x
);
5636 /* Optimize exp(log(x)) = x. */
5637 fcode
= builtin_mathfn_code (arg
);
5638 if (flag_unsafe_math_optimizations
5639 && (fcode
== BUILT_IN_LOG
5640 || fcode
== BUILT_IN_LOGF
5641 || fcode
== BUILT_IN_LOGL
))
5642 return TREE_VALUE (TREE_OPERAND (arg
, 1));
5649 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5651 enum built_in_function fcode
;
5652 tree arg
= TREE_VALUE (arglist
);
5654 /* Optimize log(1.0) = 0.0. */
5655 if (real_onep (arg
))
5656 return build_real (type
, dconst0
);
5658 /* Optimize log(exp(x)) = x. */
5659 fcode
= builtin_mathfn_code (arg
);
5660 if (flag_unsafe_math_optimizations
5661 && (fcode
== BUILT_IN_EXP
5662 || fcode
== BUILT_IN_EXPF
5663 || fcode
== BUILT_IN_EXPL
))
5664 return TREE_VALUE (TREE_OPERAND (arg
, 1));
5666 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5667 if (flag_unsafe_math_optimizations
5668 && (fcode
== BUILT_IN_SQRT
5669 || fcode
== BUILT_IN_SQRTF
5670 || fcode
== BUILT_IN_SQRTL
))
5672 tree logfn
= build_function_call_expr (fndecl
,
5673 TREE_OPERAND (arg
, 1));
5674 return fold (build (MULT_EXPR
, type
, logfn
,
5675 build_real (type
, dconsthalf
)));
5678 /* Optimize log(pow(x,y)) = y*log(x). */
5679 if (flag_unsafe_math_optimizations
5680 && (fcode
== BUILT_IN_POW
5681 || fcode
== BUILT_IN_POWF
5682 || fcode
== BUILT_IN_POWL
))
5684 tree arg0
, arg1
, logfn
;
5686 arg0
= TREE_VALUE (TREE_OPERAND (arg
, 1));
5687 arg1
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg
, 1)));
5688 arglist
= build_tree_list (NULL_TREE
, arg0
);
5689 logfn
= build_function_call_expr (fndecl
, arglist
);
5690 return fold (build (MULT_EXPR
, type
, arg1
, logfn
));
5698 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5700 enum built_in_function fcode
;
5701 tree arg
= TREE_VALUE (arglist
);
5703 /* Optimize tan(0.0) = 0.0. */
5704 if (real_zerop (arg
))
5707 /* Optimize tan(atan(x)) = x. */
5708 fcode
= builtin_mathfn_code (arg
);
5709 if (flag_unsafe_math_optimizations
5710 && (fcode
== BUILT_IN_ATAN
5711 || fcode
== BUILT_IN_ATANF
5712 || fcode
== BUILT_IN_ATANL
))
5713 return TREE_VALUE (TREE_OPERAND (arg
, 1));
5718 case BUILT_IN_ATANF
:
5719 case BUILT_IN_ATANL
:
5720 if (validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5722 tree arg
= TREE_VALUE (arglist
);
5724 /* Optimize atan(0.0) = 0.0. */
5725 if (real_zerop (arg
))
5728 /* Optimize atan(1.0) = pi/4. */
5729 if (real_onep (arg
))
5731 REAL_VALUE_TYPE cst
;
5733 if (! builtin_dconsts_init
)
5734 init_builtin_dconsts ();
5735 real_convert (&cst
, TYPE_MODE (type
), &dconstpi
);
5737 return build_real (type
, cst
);
5745 if (validate_arglist (arglist
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5747 enum built_in_function fcode
;
5748 tree arg0
= TREE_VALUE (arglist
);
5749 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5751 /* Optimize pow(1.0,y) = 1.0. */
5752 if (real_onep (arg0
))
5753 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
5755 if (TREE_CODE (arg1
) == REAL_CST
5756 && ! TREE_CONSTANT_OVERFLOW (arg1
))
5759 c
= TREE_REAL_CST (arg1
);
5761 /* Optimize pow(x,0.0) = 1.0. */
5762 if (REAL_VALUES_EQUAL (c
, dconst0
))
5763 return omit_one_operand (type
, build_real (type
, dconst1
),
5766 /* Optimize pow(x,1.0) = x. */
5767 if (REAL_VALUES_EQUAL (c
, dconst1
))
5770 /* Optimize pow(x,-1.0) = 1.0/x. */
5771 if (REAL_VALUES_EQUAL (c
, dconstm1
))
5772 return fold (build (RDIV_EXPR
, type
,
5773 build_real (type
, dconst1
),
5776 /* Optimize pow(x,2.0) = x*x. */
5777 if (REAL_VALUES_EQUAL (c
, dconst2
)
5778 && (*lang_hooks
.decls
.global_bindings_p
) () == 0
5779 && ! CONTAINS_PLACEHOLDER_P (arg0
))
5781 arg0
= save_expr (arg0
);
5782 return fold (build (MULT_EXPR
, type
, arg0
, arg0
));
5785 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5786 if (flag_unsafe_math_optimizations
5787 && REAL_VALUES_EQUAL (c
, dconstm2
)
5788 && (*lang_hooks
.decls
.global_bindings_p
) () == 0
5789 && ! CONTAINS_PLACEHOLDER_P (arg0
))
5791 arg0
= save_expr (arg0
);
5792 return fold (build (RDIV_EXPR
, type
,
5793 build_real (type
, dconst1
),
5794 fold (build (MULT_EXPR
, type
,
5798 /* Optimize pow(x,0.5) = sqrt(x). */
5799 if (flag_unsafe_math_optimizations
5800 && REAL_VALUES_EQUAL (c
, dconsthalf
))
5804 fcode
= DECL_FUNCTION_CODE (fndecl
);
5805 if (fcode
== BUILT_IN_POW
)
5806 sqrtfn
= implicit_built_in_decls
[BUILT_IN_SQRT
];
5807 else if (fcode
== BUILT_IN_POWF
)
5808 sqrtfn
= implicit_built_in_decls
[BUILT_IN_SQRTF
];
5809 else if (fcode
== BUILT_IN_POWL
)
5810 sqrtfn
= implicit_built_in_decls
[BUILT_IN_SQRTL
];
5814 if (sqrtfn
!= NULL_TREE
)
5816 tree arglist
= build_tree_list (NULL_TREE
, arg0
);
5817 return build_function_call_expr (sqrtfn
, arglist
);
5821 /* Attempt to evaluate pow at compile-time. */
5822 if (TREE_CODE (arg0
) == REAL_CST
5823 && ! TREE_CONSTANT_OVERFLOW (arg0
))
5825 REAL_VALUE_TYPE cint
;
5828 n
= real_to_integer (&c
);
5829 real_from_integer (&cint
, VOIDmode
, n
,
5831 if (real_identical (&c
, &cint
))
5836 x
= TREE_REAL_CST (arg0
);
5837 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
5838 if (flag_unsafe_math_optimizations
|| !inexact
)
5839 return build_real (type
, x
);
5844 /* Optimize pow(exp(x),y) = exp(x*y). */
5845 fcode
= builtin_mathfn_code (arg0
);
5846 if (flag_unsafe_math_optimizations
5847 && (fcode
== BUILT_IN_EXP
5848 || fcode
== BUILT_IN_EXPF
5849 || fcode
== BUILT_IN_EXPL
))
5851 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
5852 tree arg
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5853 arg
= fold (build (MULT_EXPR
, type
, arg
, arg1
));
5854 arglist
= build_tree_list (NULL_TREE
, arg
);
5855 return build_function_call_expr (expfn
, arglist
);
5858 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5859 if (flag_unsafe_math_optimizations
5860 && (fcode
== BUILT_IN_SQRT
5861 || fcode
== BUILT_IN_SQRTF
5862 || fcode
== BUILT_IN_SQRTL
))
5864 tree narg0
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5865 tree narg1
= fold (build (MULT_EXPR
, type
, arg1
,
5866 build_real (type
, dconsthalf
)));
5868 arglist
= tree_cons (NULL_TREE
, narg0
,
5869 build_tree_list (NULL_TREE
, narg1
));
5870 return build_function_call_expr (fndecl
, arglist
);
5873 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5874 if (flag_unsafe_math_optimizations
5875 && (fcode
== BUILT_IN_POW
5876 || fcode
== BUILT_IN_POWF
5877 || fcode
== BUILT_IN_POWL
))
5879 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5880 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
, 1)));
5881 tree narg1
= fold (build (MULT_EXPR
, type
, arg01
, arg1
));
5882 arglist
= tree_cons (NULL_TREE
, arg00
,
5883 build_tree_list (NULL_TREE
, narg1
));
5884 return build_function_call_expr (fndecl
, arglist
);
5892 return fold_builtin_inf (type
, true);
5894 case BUILT_IN_HUGE_VAL
:
5895 case BUILT_IN_HUGE_VALF
:
5896 case BUILT_IN_HUGE_VALL
:
5897 return fold_builtin_inf (type
, false);
5902 return fold_builtin_nan (arglist
, type
, true);
5905 case BUILT_IN_NANSF
:
5906 case BUILT_IN_NANSL
:
5907 return fold_builtin_nan (arglist
, type
, false);
5909 case BUILT_IN_FLOOR
:
5910 case BUILT_IN_FLOORF
:
5911 case BUILT_IN_FLOORL
:
5913 case BUILT_IN_CEILF
:
5914 case BUILT_IN_CEILL
:
5915 case BUILT_IN_TRUNC
:
5916 case BUILT_IN_TRUNCF
:
5917 case BUILT_IN_TRUNCL
:
5918 case BUILT_IN_ROUND
:
5919 case BUILT_IN_ROUNDF
:
5920 case BUILT_IN_ROUNDL
:
5921 case BUILT_IN_NEARBYINT
:
5922 case BUILT_IN_NEARBYINTF
:
5923 case BUILT_IN_NEARBYINTL
:
5924 return fold_trunc_transparent_mathfn (exp
);
5933 /* Conveniently construct a function call expression. */
5936 build_function_call_expr (tree fn
, tree arglist
)
5940 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
5941 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
5942 call_expr
, arglist
);
5943 TREE_SIDE_EFFECTS (call_expr
) = 1;
5944 return fold (call_expr
);
/* This function validates the types of a function call argument list
   represented as a tree chain of parameters against a specified list
   of tree_codes.  If the last specifier is a 0, that represents an
   ellipses, otherwise the last specifier must be a VOID_TYPE.

   Returns nonzero when ARGLIST matches the expected codes, zero
   otherwise.

   NOTE(review): several body lines of this function were lost in
   extraction and have been restored from the upstream source --
   confirm against the full file.  */

static int
validate_arglist (tree arglist, ...)
{
  enum tree_code code;
  int res = 0;
  va_list ap;

  va_start (ap, arglist);

  do
    {
      /* Fetch the next expected tree_code from the variadic list.  */
      code = va_arg (ap, enum tree_code);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = 1;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = arglist == 0;
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          if (arglist == 0
              || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
            goto end;
          break;
        }
      arglist = TREE_CHAIN (arglist);
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);
  return res;
}
/* Default version of target-specific builtin setup that does nothing.
   Serves as the fallback TARGET_INIT_BUILTINS hook for targets that
   declare no machine-specific builtins.  */

void
default_init_builtins (void)
{
}
/* Default target-specific builtin expander that does nothing.  All
   parameters are ignored; the hook simply declines to expand the
   call by returning NULL_RTX, so the caller falls back to a normal
   library/function call.

   NOTE(review): the return statement was lost in extraction and has
   been restored from the upstream source -- confirm against the full
   file.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Instantiate all remaining CONSTANT_P_RTX nodes.  Walks every insn
   in the current function; wherever a single SET's source is a
   CONSTANT_P_RTX (possibly wrapped in a SUBREG), replaces it with
   const1_rtx if its operand is now a constant, const0_rtx otherwise,
   thereby resolving any __builtin_constant_p that earlier passes
   could not fold.  */

void
purge_builtin_constant_p (void)
{
  rtx insn, set, arg, new, note;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    /* NOTE(review): the INSN_P guard on the line below was lost in
       extraction and restored from the upstream source -- confirm
       against the full file.  The condition deliberately assigns SET
       and ARG as side effects while testing them.  */
    if (INSN_P (insn)
        && (set = single_set (insn)) != NULL_RTX
        && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
            || (GET_CODE (arg) == SUBREG
                && (GET_CODE (arg = SUBREG_REG (arg))
                    == CONSTANT_P_RTX))))
      {
        /* ARG becomes the operand __builtin_constant_p was asked about.  */
        arg = XEXP (arg, 0);
        /* Answer the query now: 1 if ARG is a constant, else 0.  */
        new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
        validate_change (insn, &SET_SRC (set), new, 0);

        /* Remove the REG_EQUAL note from the insn.  */
        if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
          remove_note (insn, note);
      }
}
6040 /* Returns true is EXP represents data that would potentially reside
6041 in a readonly section. */
6044 readonly_data_expr (tree exp
)
6048 if (TREE_CODE (exp
) == ADDR_EXPR
)
6049 return decl_readonly_section (TREE_OPERAND (exp
, 0), 0);