const char *const built_in_class_names[4]
= {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
-#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
+#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
const char *const built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
required to implement the function call in all cases. */
tree implicit_built_in_decls[(int) END_BUILTINS];
-static int get_pointer_alignment PARAMS ((tree, unsigned int));
-static tree c_strlen PARAMS ((tree));
-static const char *c_getstr PARAMS ((tree));
-static rtx c_readstr PARAMS ((const char *,
- enum machine_mode));
-static int target_char_cast PARAMS ((tree, char *));
-static rtx get_memory_rtx PARAMS ((tree));
-static int apply_args_size PARAMS ((void));
-static int apply_result_size PARAMS ((void));
+/* Trigonometric and mathematical constants used in builtin folding. */
+static bool builtin_dconsts_init = 0;
+static REAL_VALUE_TYPE dconstpi;
+static REAL_VALUE_TYPE dconste;
+
+static int get_pointer_alignment (tree, unsigned int);
+static tree c_strlen (tree, int);
+static const char *c_getstr (tree);
+static rtx c_readstr (const char *, enum machine_mode);
+static int target_char_cast (tree, char *);
+static rtx get_memory_rtx (tree);
+static int apply_args_size (void);
+static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
-static rtx result_vector PARAMS ((int, rtx));
+static rtx result_vector (int, rtx);
#endif
-static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
-static void expand_builtin_prefetch PARAMS ((tree));
-static rtx expand_builtin_apply_args PARAMS ((void));
-static rtx expand_builtin_apply_args_1 PARAMS ((void));
-static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
-static void expand_builtin_return PARAMS ((rtx));
-static enum type_class type_to_class PARAMS ((tree));
-static rtx expand_builtin_classify_type PARAMS ((tree));
-static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
-static rtx expand_builtin_constant_p PARAMS ((tree));
-static rtx expand_builtin_args_info PARAMS ((tree));
-static rtx expand_builtin_next_arg PARAMS ((tree));
-static rtx expand_builtin_va_start PARAMS ((tree));
-static rtx expand_builtin_va_end PARAMS ((tree));
-static rtx expand_builtin_va_copy PARAMS ((tree));
-static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
- enum machine_mode));
-static rtx expand_builtin_strcat PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strncat PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strspn PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
- enum machine_mode));
-static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
- enum machine_mode));
-static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
- enum machine_mode));
-static rtx expand_builtin_memset PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_bzero PARAMS ((tree));
-static rtx expand_builtin_strlen PARAMS ((tree, rtx));
-static rtx expand_builtin_strstr PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strchr PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
- enum machine_mode));
-static rtx expand_builtin_alloca PARAMS ((tree, rtx));
-static rtx expand_builtin_unop PARAMS ((tree, rtx, rtx, optab));
-static rtx expand_builtin_frame_address PARAMS ((tree));
-static rtx expand_builtin_fputs PARAMS ((tree, int, int));
-static tree stabilize_va_list PARAMS ((tree, int));
-static rtx expand_builtin_expect PARAMS ((tree, rtx));
-static tree fold_builtin_constant_p PARAMS ((tree));
-static tree fold_builtin_classify_type PARAMS ((tree));
-static tree fold_builtin_inf PARAMS ((tree, int));
-static tree fold_builtin_nan PARAMS ((tree, tree, int));
-static int validate_arglist PARAMS ((tree, ...));
-static tree fold_trunc_transparent_mathfn PARAMS ((tree));
+static rtx expand_builtin_setjmp (tree, rtx);
+static void expand_builtin_prefetch (tree);
+static rtx expand_builtin_apply_args (void);
+static rtx expand_builtin_apply_args_1 (void);
+static rtx expand_builtin_apply (rtx, rtx, rtx);
+static void expand_builtin_return (rtx);
+static enum type_class type_to_class (tree);
+static rtx expand_builtin_classify_type (tree);
+static void expand_errno_check (tree, rtx);
+static rtx expand_builtin_mathfn (tree, rtx, rtx);
+static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
+static rtx expand_builtin_constant_p (tree, enum machine_mode);
+static rtx expand_builtin_args_info (tree);
+static rtx expand_builtin_next_arg (tree);
+static rtx expand_builtin_va_start (tree);
+static rtx expand_builtin_va_end (tree);
+static rtx expand_builtin_va_copy (tree);
+static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
+static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
+static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
+static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
+static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
+static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
+static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
+static rtx expand_builtin_bcopy (tree);
+static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
+static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
+static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
+static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
+static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
+static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
+static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
+static rtx expand_builtin_bzero (tree);
+static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
+static rtx expand_builtin_alloca (tree, rtx);
+static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
+static rtx expand_builtin_frame_address (tree, tree);
+static rtx expand_builtin_fputs (tree, int, int);
+static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
+static tree stabilize_va_list (tree, int);
+static rtx expand_builtin_expect (tree, rtx);
+static tree fold_builtin_constant_p (tree);
+static tree fold_builtin_classify_type (tree);
+static tree fold_builtin_inf (tree, int);
+static tree fold_builtin_nan (tree, tree, int);
+static int validate_arglist (tree, ...);
+static tree fold_trunc_transparent_mathfn (tree);
+static bool readonly_data_expr (tree);
+static rtx expand_builtin_fabs (tree, rtx, rtx);
+static rtx expand_builtin_cabs (tree, rtx);
+static void init_builtin_dconsts (void);
+static tree fold_builtin_cabs (tree, tree, tree);
+
+/* Initialize mathematical constants for constant folding builtins.
+   These constants need to be given to at least 160 bits precision.
+   Fills in DCONSTPI and DCONSTE and sets BUILTIN_DCONSTS_INIT.  */
+
+static void
+init_builtin_dconsts (void)
+{
+  real_from_string (&dconstpi,
+        "3.1415926535897932384626433832795028841971693993751058209749445923078");
+  real_from_string (&dconste,
+        "2.7182818284590452353602874713526624977572470936999595749669676277241");
+
+  /* Note that the constants have now been computed.  */
+  builtin_dconsts_init = true;
+}
/* Return the alignment in bits of EXP, a pointer valued expression.
But don't return more than MAX_ALIGN no matter what.
expression is actually pointing at an object whose alignment is tighter. */
static int
-get_pointer_alignment (exp, max_align)
- tree exp;
- unsigned int max_align;
+get_pointer_alignment (tree exp, unsigned int max_align)
{
unsigned int align, inner;
way, because it could contain a zero byte in the middle.
TREE_STRING_LENGTH is the size of the character array, not the string.
+ ONLY_VALUE should be nonzero if the result is not going to be emitted
+ into the instruction stream and zero if it is going to be expanded.
+ E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
+ is returned, otherwise NULL, since
+ len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
+ evaluate the side-effects.
+
The value returned is of type `ssizetype'.
Unfortunately, string_constant can't access the values of const char
arrays with initializers, so neither can we do so here. */
static tree
-c_strlen (src)
- tree src;
+c_strlen (tree src, int only_value)
{
tree offset_node;
HOST_WIDE_INT offset;
int max;
const char *ptr;
+ STRIP_NOPS (src);
+ if (TREE_CODE (src) == COND_EXPR
+ && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
+ {
+ tree len1, len2;
+
+ len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
+ len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
+ if (tree_int_cst_equal (len1, len2))
+ return len1;
+ }
+
+ if (TREE_CODE (src) == COMPOUND_EXPR
+ && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
+ return c_strlen (TREE_OPERAND (src, 1), only_value);
+
src = string_constant (src, &offset_node);
if (src == 0)
return 0;
or sum of string constant and integer constant. */
static const char *
-c_getstr (src)
- tree src;
+c_getstr (tree src)
{
tree offset_node;
GET_MODE_BITSIZE (MODE) bits from string constant STR. */
static rtx
-c_readstr (str, mode)
- const char *str;
- enum machine_mode mode;
+c_readstr (const char *str, enum machine_mode mode)
{
HOST_WIDE_INT c[2];
HOST_WIDE_INT ch;
P. */
static int
-target_char_cast (cst, p)
- tree cst;
- char *p;
+target_char_cast (tree cst, char *p)
{
unsigned HOST_WIDE_INT val, hostval;
address located within it (depending on FNDECL_CODE). */
rtx
-expand_builtin_return_addr (fndecl_code, count, tem)
- enum built_in_function fndecl_code;
- int count;
- rtx tem;
+expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
+ rtx tem)
{
int i;
handling code. */
void
-expand_builtin_setjmp_setup (buf_addr, receiver_label)
- rtx buf_addr;
- rtx receiver_label;
+expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
rtx stack_save;
This is used directly by sjlj exception handling code. */
void
-expand_builtin_setjmp_receiver (receiver_label)
- rtx receiver_label ATTRIBUTE_UNUSED;
+expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
/* Clobber the FP when we get here, so we have to make sure it's
marked as used by this function. */
them. */
static rtx
-expand_builtin_setjmp (arglist, target)
- tree arglist;
- rtx target;
+expand_builtin_setjmp (tree arglist, rtx target)
{
rtx buf_addr, next_lab, cont_lab;
them. */
void
-expand_builtin_longjmp (buf_addr, value)
- rtx buf_addr, value;
+expand_builtin_longjmp (rtx buf_addr, rtx value)
{
rtx fp, lab, stack, insn, last;
enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
effects. */
static void
-expand_builtin_prefetch (arglist)
- tree arglist;
+expand_builtin_prefetch (tree arglist)
{
tree arg0, arg1, arg2;
rtx op0, op1, op2;
to be used to be used in a string instruction (cmpstrsi, movstrsi, ..). */
static rtx
-get_memory_rtx (exp)
- tree exp;
+get_memory_rtx (tree exp)
{
rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
rtx mem;
needed in objc-act.c. */
int
-apply_args_register_offset (regno)
- int regno;
+apply_args_register_offset (int regno)
{
apply_args_size ();
and initialize apply_args_mode. */
static int
-apply_args_size ()
+apply_args_size (void)
{
static int size = -1;
int align;
and initialize apply_result_mode. */
static int
-apply_result_size ()
+apply_result_size (void)
{
static int size = -1;
int align, regno;
restore the values. */
static rtx
-result_vector (savep, result)
- int savep;
- rtx result;
+result_vector (int savep, rtx result)
{
int regno, size, align, nelts;
enum machine_mode mode;
arguments as were passed to the current function. */
static rtx
-expand_builtin_apply_args_1 ()
+expand_builtin_apply_args_1 (void)
{
rtx registers;
int size, align, regno;
saved. */
static rtx
-expand_builtin_apply_args ()
+expand_builtin_apply_args (void)
{
/* Don't do __builtin_apply_args more than once in a function.
Save the result of the first call and reuse it. */
untyped return of whatever value was returned by the given function. */
static rtx
-expand_builtin_apply (function, arguments, argsize)
- rtx function, arguments, argsize;
+expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
int size, align, regno;
enum machine_mode mode;
do_pending_stack_adjust ();
NO_DEFER_POP;
- /* Save the stack with nonlocal if available */
+ /* Save the stack with nonlocal if available. */
#ifdef HAVE_save_stack_nonlocal
if (HAVE_save_stack_nonlocal)
emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
#endif
abort ();
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
-
- if (! call_insn)
- abort ();
-
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
-
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
-
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ /* Find the CALL insn we just emitted, and attach the register usage
+ information. */
+ call_insn = last_call_insn ();
+ add_function_usage_to (call_insn, call_fusage);
/* Restore the stack. */
#ifdef HAVE_save_stack_nonlocal
/* Perform an untyped return. */
static void
-expand_builtin_return (result)
- rtx result;
+expand_builtin_return (rtx result)
{
int size, align, regno;
enum machine_mode mode;
/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
static enum type_class
-type_to_class (type)
- tree type;
+type_to_class (tree type)
{
switch (TREE_CODE (type))
{
ARGLIST. */
static rtx
-expand_builtin_classify_type (arglist)
- tree arglist;
+expand_builtin_classify_type (tree arglist)
{
if (arglist != 0)
return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
/* Expand expression EXP, which is a call to __builtin_constant_p. */
static rtx
-expand_builtin_constant_p (exp)
- tree exp;
+expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
{
- tree arglist = TREE_OPERAND (exp, 1);
- enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
rtx tmp;
if (arglist == 0)
current_function_calls_constant_p = 1;
tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
- tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
+ tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
return tmp;
}
/* Return mathematic function equivalent to FN but operating directly on TYPE,
if available. */
tree
-mathfn_built_in (type, fn)
- tree type;
- enum built_in_function fn;
+mathfn_built_in (tree type, enum built_in_function fn)
{
enum built_in_function fcode = NOT_BUILT_IN;
if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
case BUILT_IN_EXPL:
fcode = BUILT_IN_EXP;
break;
+ case BUILT_IN_LOG:
+ case BUILT_IN_LOGF:
+ case BUILT_IN_LOGL:
+ fcode = BUILT_IN_LOG;
+ break;
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ fcode = BUILT_IN_TAN;
+ break;
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
+ fcode = BUILT_IN_ATAN;
+ break;
case BUILT_IN_FLOOR:
case BUILT_IN_FLOORF:
case BUILT_IN_FLOORL:
case BUILT_IN_EXPL:
fcode = BUILT_IN_EXPF;
break;
+ case BUILT_IN_LOG:
+ case BUILT_IN_LOGF:
+ case BUILT_IN_LOGL:
+ fcode = BUILT_IN_LOGF;
+ break;
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ fcode = BUILT_IN_TANF;
+ break;
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
+ fcode = BUILT_IN_ATANF;
+ break;
case BUILT_IN_FLOOR:
case BUILT_IN_FLOORF:
case BUILT_IN_FLOORL:
case BUILT_IN_EXPL:
fcode = BUILT_IN_EXPL;
break;
+ case BUILT_IN_LOG:
+ case BUILT_IN_LOGF:
+ case BUILT_IN_LOGL:
+ fcode = BUILT_IN_LOGL;
+ break;
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ fcode = BUILT_IN_TANL;
+ break;
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
+ fcode = BUILT_IN_ATANL;
+ break;
case BUILT_IN_FLOOR:
case BUILT_IN_FLOORF:
case BUILT_IN_FLOORL:
return implicit_built_in_decls[fcode];
}
+/* If errno must be maintained, expand the RTL to check if the result,
+   TARGET, of a built-in function call, EXP, is NaN, and if so set
+   errno to EDOM.  */
+
+static void
+expand_errno_check (tree exp, rtx target)
+{
+  rtx lab = gen_label_rtx ();
+
+  /* Test the result; if it is NaN, set errno=EDOM because
+     the argument was not in the domain.  A NaN compares unequal to
+     itself, so the EQ branch to LAB is taken for every non-NaN
+     result and the errno code below is skipped.  */
+  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
+			   0, lab);
+
+#ifdef TARGET_EDOM
+  /* If this built-in doesn't throw an exception, set errno directly.  */
+  if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
+    {
+#ifdef GEN_ERRNO_RTX
+      rtx errno_rtx = GEN_ERRNO_RTX;
+#else
+      rtx errno_rtx
+	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
+#endif
+      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
+      emit_label (lab);
+      return;
+    }
+#endif
+
+  /* We can't set errno=EDOM directly; let the library call do it.
+     Pop the arguments right away in case the call gets deleted.  */
+  NO_DEFER_POP;
+  expand_call (exp, target, 0);
+  OK_DEFER_POP;
+  emit_label (lab);
+}
+
+
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
Return 0 if a normal call should be emitted rather than expanding the
function in-line. EXP is the expression that is a call to the builtin
SUBTARGET may be used as the target for computing one of EXP's operands. */
static rtx
-expand_builtin_mathfn (exp, target, subtarget)
- tree exp;
- rtx target, subtarget;
+expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
optab builtin_optab;
rtx op0, insns;
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
tree arglist = TREE_OPERAND (exp, 1);
- enum machine_mode argmode;
- bool errno_set = true;
+ enum machine_mode mode;
+ bool errno_set = false;
+ tree arg, narg;
if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
return 0;
- /* Stabilize and compute the argument. */
- if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
- && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
- {
- exp = copy_node (exp);
- TREE_OPERAND (exp, 1) = arglist;
- /* Wrap the computation of the argument in a SAVE_EXPR. That
- way, if we need to expand the argument again (as in the
- flag_errno_math case below where we cannot directly set
- errno), we will not perform side-effects more than once.
- Note that here we're mutating the original EXP as well as the
- copy; that's the right thing to do in case the original EXP
- is expanded later. */
- TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
- arglist = copy_node (arglist);
- }
- op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
-
- /* Make a suitable register to place result in. */
- target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
-
- emit_queue ();
- start_sequence ();
+ arg = TREE_VALUE (arglist);
switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_SQRT:
case BUILT_IN_SQRTF:
case BUILT_IN_SQRTL:
- builtin_optab = sqrt_optab; break;
+ errno_set = ! tree_expr_nonnegative_p (arg);
+ builtin_optab = sqrt_optab;
+ break;
case BUILT_IN_EXP:
case BUILT_IN_EXPF:
case BUILT_IN_EXPL:
- builtin_optab = exp_optab; break;
+ errno_set = true; builtin_optab = exp_optab; break;
case BUILT_IN_LOG:
case BUILT_IN_LOGF:
case BUILT_IN_LOGL:
- builtin_optab = log_optab; break;
+ errno_set = true; builtin_optab = log_optab; break;
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ builtin_optab = tan_optab; break;
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
+ builtin_optab = atan_optab; break;
case BUILT_IN_FLOOR:
case BUILT_IN_FLOORF:
case BUILT_IN_FLOORL:
- errno_set = false ; builtin_optab = floor_optab; break;
+ builtin_optab = floor_optab; break;
case BUILT_IN_CEIL:
case BUILT_IN_CEILF:
case BUILT_IN_CEILL:
- errno_set = false ; builtin_optab = ceil_optab; break;
+ builtin_optab = ceil_optab; break;
case BUILT_IN_TRUNC:
case BUILT_IN_TRUNCF:
case BUILT_IN_TRUNCL:
- errno_set = false ; builtin_optab = trunc_optab; break;
+ builtin_optab = trunc_optab; break;
case BUILT_IN_ROUND:
case BUILT_IN_ROUNDF:
case BUILT_IN_ROUNDL:
- errno_set = false ; builtin_optab = round_optab; break;
+ builtin_optab = round_optab; break;
case BUILT_IN_NEARBYINT:
case BUILT_IN_NEARBYINTF:
case BUILT_IN_NEARBYINTL:
- errno_set = false ; builtin_optab = nearbyint_optab; break;
+ builtin_optab = nearbyint_optab; break;
default:
abort ();
}
+ /* Make a suitable register to place result in. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+ target = gen_reg_rtx (mode);
+
+ if (! flag_errno_math || ! HONOR_NANS (mode))
+ errno_set = false;
+
+ /* Wrap the computation of the argument in a SAVE_EXPR, as we may
+ need to expand the argument again. This way, we will not perform
+     side-effects more than once.  */
+ narg = save_expr (arg);
+ if (narg != arg)
+ {
+ arglist = build_tree_list (NULL_TREE, arg);
+ exp = build_function_call_expr (fndecl, arglist);
+ }
+
+ op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+
+ emit_queue ();
+ start_sequence ();
+
/* Compute into TARGET.
Set TARGET to wherever the result comes back. */
- argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
- target = expand_unop (argmode, builtin_optab, op0, target, 0);
+ target = expand_unop (mode, builtin_optab, op0, target, 0);
- /* If we were unable to expand via the builtin, stop the
- sequence (without outputting the insns) and return 0, causing
- a call to the library function. */
+ /* If we were unable to expand via the builtin, stop the sequence
+ (without outputting the insns) and call to the library function
+ with the stabilized argument list. */
if (target == 0)
{
end_sequence ();
- return 0;
+ return expand_call (exp, target, target == const0_rtx);
+ }
+
+ if (errno_set)
+ expand_errno_check (exp, target);
+
+ /* Output the entire sequence. */
+ insns = get_insns ();
+ end_sequence ();
+ emit_insn (insns);
+
+ return target;
+}
+
+/* Expand a call to the builtin binary math functions (pow and atan2).
+ Return 0 if a normal call should be emitted rather than expanding the
+ function in-line. EXP is the expression that is a call to the builtin
+ function; if convenient, the result should be placed in TARGET.
+ SUBTARGET may be used as the target for computing one of EXP's
+ operands. */
+
+static rtx
+expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
+{
+ optab builtin_optab;
+ rtx op0, op1, insns;
+ tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+ tree arglist = TREE_OPERAND (exp, 1);
+ tree arg0, arg1, temp, narg;
+ enum machine_mode mode;
+ bool errno_set = true;
+ bool stable = true;
+
+ if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ return 0;
+
+ arg0 = TREE_VALUE (arglist);
+ arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_POW:
+ case BUILT_IN_POWF:
+ case BUILT_IN_POWL:
+ builtin_optab = pow_optab; break;
+ case BUILT_IN_ATAN2:
+ case BUILT_IN_ATAN2F:
+ case BUILT_IN_ATAN2L:
+ builtin_optab = atan2_optab; break;
+ default:
+ abort ();
}
- /* If errno must be maintained, we must set it to EDOM for NaN results. */
+ /* Make a suitable register to place result in. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+ target = gen_reg_rtx (mode);
+
+ if (! flag_errno_math || ! HONOR_NANS (mode))
+ errno_set = false;
- if (flag_errno_math && errno_set && HONOR_NANS (argmode))
+  /* Always stabilize the argument list.  */
+ narg = save_expr (arg1);
+ if (narg != arg1)
{
- rtx lab1;
+ temp = build_tree_list (NULL_TREE, narg);
+ stable = false;
+ }
+ else
+ temp = TREE_CHAIN (arglist);
- lab1 = gen_label_rtx ();
+ narg = save_expr (arg0);
+ if (narg != arg0)
+ {
+ arglist = tree_cons (NULL_TREE, narg, temp);
+ stable = false;
+ }
+ else if (! stable)
+ arglist = tree_cons (NULL_TREE, arg0, temp);
- /* Test the result; if it is NaN, set errno=EDOM because
- the argument was not in the domain. */
- emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
- 0, lab1);
+ if (! stable)
+ exp = build_function_call_expr (fndecl, arglist);
-#ifdef TARGET_EDOM
- {
-#ifdef GEN_ERRNO_RTX
- rtx errno_rtx = GEN_ERRNO_RTX;
-#else
- rtx errno_rtx
- = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
-#endif
+ op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
+ op1 = expand_expr (arg1, 0, VOIDmode, 0);
- emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
- }
-#else
- /* We can't set errno=EDOM directly; let the library call do it.
- Pop the arguments right away in case the call gets deleted. */
- NO_DEFER_POP;
- expand_call (exp, target, 0);
- OK_DEFER_POP;
-#endif
+ emit_queue ();
+ start_sequence ();
+
+ /* Compute into TARGET.
+ Set TARGET to wherever the result comes back. */
+ target = expand_binop (mode, builtin_optab, op0, op1,
+ target, 0, OPTAB_DIRECT);
- emit_label (lab1);
+ /* If we were unable to expand via the builtin, stop the sequence
+ (without outputting the insns) and call to the library function
+ with the stabilized argument list. */
+ if (target == 0)
+ {
+ end_sequence ();
+ return expand_call (exp, target, target == const0_rtx);
}
+ if (errno_set)
+ expand_errno_check (exp, target);
+
/* Output the entire sequence. */
insns = get_insns ();
end_sequence ();
return target;
}
+/* To evaluate powi(x,n), the floating point value x raised to the
+ constant integer exponent n, we use a hybrid algorithm that
+ combines the "window method" with look-up tables. For an
+ introduction to exponentiation algorithms and "addition chains",
+ see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
+ "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
+ 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
+ Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
+
+/* Provide a default value for POWI_MAX_MULTS, the maximum number of
+ multiplications to inline before calling the system library's pow
+ function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
+ so this default never requires calling pow, powf or powl. */
+
+#ifndef POWI_MAX_MULTS
+#define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
+#endif
+
+/* The size of the "optimal power tree" lookup table. All
+ exponents less than this value are simply looked up in the
+ powi_table below. This threshold is also used to size the
+ cache of pseudo registers that hold intermediate results. */
+#define POWI_TABLE_SIZE 256
+
+/* The size, in bits of the window, used in the "window method"
+ exponentiation algorithm. This is equivalent to a radix of
+ (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
+#define POWI_WINDOW_SIZE 3
+
+/* The following table is an efficient representation of an
+ "optimal power tree". For each value, i, the corresponding
+   value, j, in the table states that an optimal evaluation
+ sequence for calculating pow(x,i) can be found by evaluating
+ pow(x,j)*pow(x,i-j). An optimal power tree for the first
+ 100 integers is given in Knuth's "Seminumerical algorithms". */
+
+static const unsigned char powi_table[POWI_TABLE_SIZE] =
+ {
+ 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
+ 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
+ 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
+ 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
+ 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
+ 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
+ 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
+ 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
+ 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
+ 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
+ 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
+ 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
+ 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
+ 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
+ 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
+ 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
+ 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
+ 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
+ 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
+ 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
+ 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
+ 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
+ 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
+ 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
+ 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
+ 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
+ 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
+ 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
+ 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
+ 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
+ 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
+ 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
+ };
+
+
+/* Return the number of multiplications required to calculate
+   powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
+   subroutine of powi_cost.  CACHE is an array indicating
+   which exponents have already been calculated.  */
+
+static int
+powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
+{
+  /* If we've already calculated this exponent, then this evaluation
+     doesn't require any additional multiplications.  */
+  if (cache[n])
+    return 0;
+
+  cache[n] = true;
+  /* Otherwise the cost is one multiplication plus the cost of the
+     two factors that powi_table decomposes N into.  */
+  return powi_lookup_cost (n - powi_table[n], cache)
+	 + powi_lookup_cost (powi_table[n], cache) + 1;
+}
+
+/* Return the number of multiplications required to calculate
+   powi(x,n) for an arbitrary x, given the exponent N.  This
+   function needs to be kept in sync with expand_powi below.  */
+
+static int
+powi_cost (HOST_WIDE_INT n)
+{
+  bool cache[POWI_TABLE_SIZE];
+  unsigned HOST_WIDE_INT digit;
+  unsigned HOST_WIDE_INT val;
+  int result;
+
+  if (n == 0)
+    return 0;
+
+  /* Ignore the reciprocal when calculating the cost.  */
+  val = (n < 0) ? -n : n;
+
+  /* Initialize the exponent cache.  */
+  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
+  cache[1] = true;
+
+  result = 0;
+
+  /* Reduce VAL with the "window method" until it fits in the lookup
+     table: an odd value consumes a POWI_WINDOW_SIZE-bit digit, an
+     even value is halved for the cost of one multiplication.  */
+  while (val >= POWI_TABLE_SIZE)
+    {
+      if (val & 1)
+	{
+	  digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
+	  result += powi_lookup_cost (digit, cache)
+		    + POWI_WINDOW_SIZE + 1;
+	  val >>= POWI_WINDOW_SIZE;
+	}
+      else
+	{
+	  val >>= 1;
+	  result++;
+	}
+    }
+
+  return result + powi_lookup_cost (val, cache);
+}
+
+/* Recursive subroutine of expand_powi.  This function takes the array,
+   CACHE, of already calculated exponents and an exponent N and returns
+   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */
+
+static rtx
+expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
+{
+  unsigned HOST_WIDE_INT digit;
+  rtx target, result;
+  rtx op0, op1;
+
+  if (n < POWI_TABLE_SIZE)
+    {
+      if (cache[n])
+	return cache[n];
+
+      /* Memoize the result register before recursing, so repeated
+	 uses of this exponent share the same pseudo.  */
+      target = gen_reg_rtx (mode);
+      cache[n] = target;
+
+      /* Split N into the two factors given by the "optimal power
+	 tree" table.  */
+      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
+      op1 = expand_powi_1 (mode, powi_table[n], cache);
+    }
+  else if (n & 1)
+    {
+      /* Odd exponent: peel off the low POWI_WINDOW_SIZE bits.  */
+      target = gen_reg_rtx (mode);
+      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
+      op0 = expand_powi_1 (mode, n - digit, cache);
+      op1 = expand_powi_1 (mode, digit, cache);
+    }
+  else
+    {
+      /* Even exponent: x**n is (x**(n/2)) squared.  */
+      target = gen_reg_rtx (mode);
+      op0 = expand_powi_1 (mode, n >> 1, cache);
+      op1 = op0;
+    }
+
+  result = expand_mult (mode, op0, op1, target, 0);
+  if (result != target)
+    emit_move_insn (target, result);
+  return target;
+}
+
+/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
+   floating point operand in mode MODE, and N is the exponent.  This
+   function needs to be kept in sync with powi_cost above.  */
+
+static rtx
+expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
+{
+  unsigned HOST_WIDE_INT val;
+  rtx cache[POWI_TABLE_SIZE];
+  rtx result;
+
+  if (n == 0)
+    return CONST1_RTX (mode);
+
+  /* Compute with the absolute value of the exponent; a negative N is
+     handled by reciprocating the result below.  */
+  val = (n < 0) ? -n : n;
+
+  memset (cache, 0, sizeof (cache));
+  cache[1] = x;
+
+  result = expand_powi_1 (mode, val, cache);
+
+  /* If the original exponent was negative, reciprocate the result.  */
+  if (n < 0)
+    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
+			   result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
+
+  return result;
+}
+
+/* Expand a call to the pow built-in mathematical function.  Return 0 if
+   a normal call should be emitted rather than expanding the function
+   in-line.  EXP is the expression that is a call to the builtin
+   function; if convenient, the result should be placed in TARGET.  */
+
+static rtx
+expand_builtin_pow (tree exp, rtx target, rtx subtarget)
+{
+  tree arglist = TREE_OPERAND (exp, 1);
+  tree arg0, arg1;
+
+  if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+    return 0;
+
+  arg0 = TREE_VALUE (arglist);
+  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+
+  /* Open-code pow with a constant exponent as a sequence of
+     multiplications only when that cannot change observable behavior
+     (unsafe-math allowed, errno not required) and when code size is
+     not the priority.  */
+  if (flag_unsafe_math_optimizations
+      && ! flag_errno_math
+      && ! optimize_size
+      && TREE_CODE (arg1) == REAL_CST
+      && ! TREE_CONSTANT_OVERFLOW (arg1))
+    {
+      REAL_VALUE_TYPE cint;
+      REAL_VALUE_TYPE c;
+      HOST_WIDE_INT n;
+
+      /* Verify the exponent is an exact integer by round-tripping it
+	 through a HOST_WIDE_INT and comparing bit-for-bit, and that
+	 the multiply sequence is short enough to be worthwhile.  */
+      c = TREE_REAL_CST (arg1);
+      n = real_to_integer (&c);
+      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
+      if (real_identical (&c, &cint)
+	  && powi_cost (n) <= POWI_MAX_MULTS)
+	{
+	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+	  rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
+	  op = force_reg (mode, op);
+	  return expand_powi (op, mode, n);
+	}
+    }
+  /* Otherwise fall back to the generic two-operand expander.  */
+  return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
+}
+
/* Expand expression EXP which is a call to the strlen builtin. Return 0
if we failed the caller should emit a normal call, otherwise
try to get the result in TARGET, if convenient. */
static rtx
-expand_builtin_strlen (exp, target)
- tree exp;
- rtx target;
+expand_builtin_strlen (tree arglist, rtx target,
+ enum machine_mode target_mode)
{
- tree arglist = TREE_OPERAND (exp, 1);
- enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
-
if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
return 0;
else
{
rtx pat;
- tree src = TREE_VALUE (arglist);
-
- int align
- = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
-
+ tree len, src = TREE_VALUE (arglist);
rtx result, src_reg, char_rtx, before_strlen;
- enum machine_mode insn_mode = value_mode, char_mode;
+ enum machine_mode insn_mode = target_mode, char_mode;
enum insn_code icode = CODE_FOR_nothing;
+ int align;
+
+ /* If the length can be computed at compile-time, return it. */
+ len = c_strlen (src, 0);
+ if (len)
+ return expand_expr (len, target, target_mode, EXPAND_NORMAL);
+
+      /* If the length can be computed at compile-time and is a constant
+ integer, but there are side-effects in src, evaluate
+ src for side-effects, then return len.
+ E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
+ can be optimized into: i++; x = 3; */
+ len = c_strlen (src, 1);
+ if (len && TREE_CODE (len) == INTEGER_CST)
+ {
+ expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ return expand_expr (len, target, target_mode, EXPAND_NORMAL);
+ }
+
+ align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
/* If SRC is not a pointer type, don't do this operation inline. */
if (align == 0)
emit_insn_before (pat, get_insns ());
/* Return the value in the proper mode for this function. */
- if (GET_MODE (result) == value_mode)
+ if (GET_MODE (result) == target_mode)
target = result;
else if (target != 0)
convert_move (target, result, 0);
else
- target = convert_to_mode (value_mode, result, 0);
+ target = convert_to_mode (target_mode, result, 0);
return target;
}
in TARGET, if convenient (and in mode MODE if that's convenient). */
static rtx
-expand_builtin_strstr (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return 0;
in TARGET, if convenient (and in mode MODE if that's convenient). */
static rtx
-expand_builtin_strchr (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return 0;
in TARGET, if convenient (and in mode MODE if that's convenient). */
static rtx
-expand_builtin_strrchr (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return 0;
in TARGET, if convenient (and in mode MODE if that's convenient). */
static rtx
-expand_builtin_strpbrk (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return 0;
constant. */
static rtx
-builtin_memcpy_read_str (data, offset, mode)
- PTR data;
- HOST_WIDE_INT offset;
- enum machine_mode mode;
+builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
+ enum machine_mode mode)
{
const char *str = (const char *) data;
}
/* Expand a call to the memcpy builtin, with arguments in ARGLIST.
- Return 0 if we failed, the caller should emit a normal call, otherwise
- try to get the result in TARGET, if convenient (and in mode MODE if
- that's convenient). */
-
+   Return 0 if we failed; the caller should emit a normal call,
+ otherwise try to get the result in TARGET, if convenient (and in
+ mode MODE if that's convenient). */
static rtx
-expand_builtin_memcpy (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
tree src = TREE_VALUE (TREE_CHAIN (arglist));
tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
const char *src_str;
-
unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
unsigned int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
&& GET_CODE (len_rtx) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (PTR) src_str, dest_align))
+ (void *) src_str, dest_align))
{
- store_by_pieces (dest_mem, INTVAL (len_rtx),
- builtin_memcpy_read_str,
- (PTR) src_str, dest_align);
+ dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
+ builtin_memcpy_read_str,
+ (void *) src_str, dest_align, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (dest_mem) != ptr_mode)
dest_addr = convert_memory_address (ptr_mode, dest_addr);
#endif
}
-
return dest_addr;
}
}
-/* Expand expression EXP, which is a call to the strcpy builtin. Return 0
- if we failed the caller should emit a normal call, otherwise try to get
- the result in TARGET, if convenient (and in mode MODE if that's
- convenient). */
-
-static rtx
-expand_builtin_strcpy (exp, target, mode)
- tree exp;
- rtx target;
- enum machine_mode mode;
-{
- tree arglist = TREE_OPERAND (exp, 1);
- tree fn, len;
-
- if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
- return 0;
-
- fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
- if (!fn)
- return 0;
-
- len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
- if (len == 0)
- return 0;
-
- len = size_binop (PLUS_EXPR, len, ssize_int (1));
- chainon (arglist, build_tree_list (NULL_TREE, len));
- return expand_expr (build_function_call_expr (fn, arglist),
- target, mode, EXPAND_NORMAL);
-}
-
-/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
- bytes from constant string DATA + OFFSET and return it as target
- constant. */
-
-static rtx
-builtin_strncpy_read_str (data, offset, mode)
- PTR data;
- HOST_WIDE_INT offset;
- enum machine_mode mode;
-{
- const char *str = (const char *) data;
-
- if ((unsigned HOST_WIDE_INT) offset > strlen (str))
- return const0_rtx;
-
- return c_readstr (str + offset, mode);
-}
-
-/* Expand expression EXP, which is a call to the strncpy builtin. Return 0
- if we failed the caller should emit a normal call. */
+/* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
+   Return 0 if we failed; the caller should emit a normal call,
+   otherwise try to get the result in TARGET, if convenient (and in
+   mode MODE if that's convenient).  If ENDP is 0 return the
+   destination pointer, if ENDP is 1 return the end pointer ala
+   mempcpy, and if ENDP is 2 return the end pointer minus one ala
+   stpcpy.  */
static rtx
-expand_builtin_strncpy (arglist, target, mode)
-     tree arglist;
-     rtx target;
-     enum machine_mode mode;
+expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
+			int endp)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
-  else
+  /* If return value is ignored, transform mempcpy into memcpy.  */
+  else if (target == const0_rtx)
    {
-      tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
-      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
-      tree fn;
+      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
-      /* We must be passed a constant len parameter.  */
-      if (TREE_CODE (len) != INTEGER_CST)
+      if (!fn)
+	return 0;
+
+      return expand_expr (build_function_call_expr (fn, arglist),
+			  target, mode, EXPAND_NORMAL);
+    }
+  else
+    {
+      tree dest = TREE_VALUE (arglist);
+      tree src = TREE_VALUE (TREE_CHAIN (arglist));
+      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+      const char *src_str;
+      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+      unsigned int dest_align
+	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+      rtx dest_mem, src_mem, len_rtx;
+
+      /* If DEST is not a pointer type or LEN is not constant,
+	 call the normal function.  */
+      if (dest_align == 0 || !host_integerp (len, 1))
+	return 0;
+
+      /* If the LEN parameter is zero, return DEST.  */
+      if (tree_low_cst (len, 1) == 0)
+	{
+	  /* Evaluate and ignore SRC in case it has side-effects.  */
+	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
+	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
+	}
+
+      /* If SRC is not a pointer type, don't do this
+	 operation in-line.  */
+      if (src_align == 0)
+	return 0;
+
+      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+      src_str = c_getstr (src);
+
+      /* If SRC is a string constant and block move would be done
+	 by pieces, we can avoid loading the string from memory
+	 and only store the computed constants.  */
+      if (src_str
+	  && GET_CODE (len_rtx) == CONST_INT
+	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
+	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
+				  (void *) src_str, dest_align))
+	{
+	  dest_mem = get_memory_rtx (dest);
+	  set_mem_align (dest_mem, dest_align);
+	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
+				      builtin_memcpy_read_str,
+				      (void *) src_str, dest_align, endp);
+	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
+#ifdef POINTERS_EXTEND_UNSIGNED
+	  if (GET_MODE (dest_mem) != ptr_mode)
+	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
+#endif
+	  return dest_mem;
+	}
+
+      /* Otherwise, if LEN is a small enough constant, emit the copy
+	 as an in-line piecewise move.  */
+      if (GET_CODE (len_rtx) == CONST_INT
+	  && can_move_by_pieces (INTVAL (len_rtx),
+				 MIN (dest_align, src_align)))
+	{
+	  dest_mem = get_memory_rtx (dest);
+	  set_mem_align (dest_mem, dest_align);
+	  src_mem = get_memory_rtx (src);
+	  set_mem_align (src_mem, src_align);
+	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
+				     MIN (dest_align, src_align), endp);
+	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
+#ifdef POINTERS_EXTEND_UNSIGNED
+	  if (GET_MODE (dest_mem) != ptr_mode)
+	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
+#endif
+	  return dest_mem;
+	}
+
+      return 0;
+    }
+}
+
+/* Expand a call to the memmove builtin, with arguments in ARGLIST.
+   Return 0 if we failed; the caller should emit a normal call.  */
+
+static rtx
+expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
+{
+  if (!validate_arglist (arglist,
+			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
+    return 0;
+  else
+    {
+      tree dest = TREE_VALUE (arglist);
+      tree src = TREE_VALUE (TREE_CHAIN (arglist));
+      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+
+      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+      unsigned int dest_align
+	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+
+      /* If DEST is not a pointer type, call the normal function.  */
+      if (dest_align == 0)
+	return 0;
+
+      /* If the LEN parameter is zero, return DEST.  */
+      if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
+	{
+	  /* Evaluate and ignore SRC in case it has side-effects.  */
+	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
+	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
+	}
+
+      /* If SRC is not a pointer type, don't do this
+	 operation in-line.  */
+      if (src_align == 0)
+	return 0;
+
+      /* If src is categorized for a readonly section we can use
+	 normal memcpy; the source cannot overlap a writable
+	 destination.  */
+      if (readonly_data_expr (src))
+	{
+	  tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+	  if (!fn)
+	    return 0;
+	  return expand_expr (build_function_call_expr (fn, arglist),
+			      target, mode, EXPAND_NORMAL);
+	}
+
+      /* Otherwise, call the normal function.  */
+      return 0;
+    }
+}
+
+/* Expand a call to the bcopy builtin, with arguments in ARGLIST.
+   Return 0 if we failed; the caller should emit a normal call.  */
+
+static rtx
+expand_builtin_bcopy (tree arglist)
+{
+  tree src, dest, size, newarglist;
+
+  if (!validate_arglist (arglist,
+			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
+    return NULL_RTX;
+
+  /* Note bcopy's argument order: source first, then destination.  */
+  src = TREE_VALUE (arglist);
+  dest = TREE_VALUE (TREE_CHAIN (arglist));
+  size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+
+  /* New argument list transforming bcopy(ptr x, ptr y, int z) to
+     memmove(ptr y, ptr x, size_t z).   This is done this way
+     so that if it isn't expanded inline, we fall back to
+     calling bcopy instead of memmove.  */
+
+  newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
+  newarglist = tree_cons (NULL_TREE, src, newarglist);
+  newarglist = tree_cons (NULL_TREE, dest, newarglist);
+
+  return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
+}
+
+/* Expand a call to the strcpy builtin, with arguments in ARGLIST.
+   Return 0 if we failed; the caller should emit a normal call,
+   otherwise try to get the result in TARGET, if convenient (and in
+   mode MODE if that's convenient).  */
+
+static rtx
+expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
+{
+  tree fn, len, src, dst;
+
+  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
+    return 0;
+
+  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+  if (!fn)
+    return 0;
+
+  /* Transform strcpy (DST, SRC) into memcpy (DST, SRC, strlen (SRC) + 1)
+     when SRC's length is known at compile time and has no
+     side-effects.  */
+  src = TREE_VALUE (TREE_CHAIN (arglist));
+  len = c_strlen (src, 1);
+  if (len == 0 || TREE_SIDE_EFFECTS (len))
+    return 0;
+
+  dst = TREE_VALUE (arglist);
+  len = size_binop (PLUS_EXPR, len, ssize_int (1));
+  arglist = build_tree_list (NULL_TREE, len);
+  arglist = tree_cons (NULL_TREE, src, arglist);
+  arglist = tree_cons (NULL_TREE, dst, arglist);
+  return expand_expr (build_function_call_expr (fn, arglist),
+		      target, mode, EXPAND_NORMAL);
+}
+
+/* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
+   Return 0 if we failed; the caller should emit a normal call,
+   otherwise try to get the result in TARGET, if convenient (and in
+   mode MODE if that's convenient).  */
+
+static rtx
+expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
+{
+  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
+    return 0;
+  else
+    {
+      tree dst, src, len;
+
+      /* If return value is ignored, transform stpcpy into strcpy.  */
+      if (target == const0_rtx)
+	{
+	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
+	  if (!fn)
+	    return 0;
+
+	  return expand_expr (build_function_call_expr (fn, arglist),
+			      target, mode, EXPAND_NORMAL);
+	}
+
+      /* Ensure we get an actual string whose length can be evaluated at
+	 compile-time, not an expression containing a string.  This is
+	 because the latter will potentially produce pessimized code
+	 when used to produce the return value.  */
+      src = TREE_VALUE (TREE_CHAIN (arglist));
+      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
+	return 0;
+
+      dst = TREE_VALUE (arglist);
+      len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
+      arglist = build_tree_list (NULL_TREE, len);
+      arglist = tree_cons (NULL_TREE, src, arglist);
+      arglist = tree_cons (NULL_TREE, dst, arglist);
+      /* ENDP == 2: mempcpy returns the end pointer minus one, i.e.
+	 a pointer to the terminating NUL, as stpcpy requires.  */
+      return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
+    }
+}
+
+/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
+   bytes from constant string DATA + OFFSET and return it as target
+   constant.  Bytes past the end of the string read as zero, matching
+   strncpy's NUL padding.  */
+
+static rtx
+builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
+			  enum machine_mode mode)
+{
+  const char *str = (const char *) data;
+
+  /* Everything beyond the terminating NUL is zero-filled.  */
+  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
+    return const0_rtx;
+
+  return c_readstr (str + offset, mode);
+}
+
+/* Expand a call to the strncpy builtin, with arguments in ARGLIST.  Return 0
+ if we failed the caller should emit a normal call. */
+
+static rtx
+expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
+{
+ if (!validate_arglist (arglist,
+ POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
+ return 0;
+ else
+ {
+ tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree fn;
+
+ /* We must be passed a constant len parameter. */
+ if (TREE_CODE (len) != INTEGER_CST)
return 0;
/* If the len parameter is zero, return the dst parameter. */
if (!p || dest_align == 0 || !host_integerp (len, 1)
|| !can_store_by_pieces (tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (PTR) p, dest_align))
+ (void *) p, dest_align))
return 0;
dest_mem = get_memory_rtx (dest);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (PTR) p, dest_align);
+ (void *) p, dest_align, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (dest_mem) != ptr_mode)
constant. */
static rtx
-builtin_memset_read_str (data, offset, mode)
- PTR data;
- HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
- enum machine_mode mode;
+builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
+ enum machine_mode mode)
{
const char *c = (const char *) data;
char *p = alloca (GET_MODE_SIZE (mode));
4 bytes wide, return the RTL for 0x01010101*data. */
static rtx
-builtin_memset_gen_str (data, offset, mode)
- PTR data;
- HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
- enum machine_mode mode;
+builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
+ enum machine_mode mode)
{
rtx target, coeff;
size_t size;
convenient). */
static rtx
-expand_builtin_memset (exp, target, mode)
- tree exp;
- rtx target;
- enum machine_mode mode;
+expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
{
- tree arglist = TREE_OPERAND (exp, 1);
-
if (!validate_arglist (arglist,
POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
return 0;
c = 1;
if (!can_store_by_pieces (tree_low_cst (len, 1),
builtin_memset_read_str,
- (PTR) &c, dest_align))
+ &c, dest_align))
return 0;
val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
dest_mem = get_memory_rtx (dest);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_memset_gen_str,
- (PTR) val_rtx, dest_align);
+ val_rtx, dest_align, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (dest_mem) != ptr_mode)
if (!host_integerp (len, 1))
return 0;
if (!can_store_by_pieces (tree_low_cst (len, 1),
- builtin_memset_read_str, (PTR) &c,
+ builtin_memset_read_str, &c,
dest_align))
return 0;
dest_mem = get_memory_rtx (dest);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_memset_read_str,
- (PTR) &c, dest_align);
+ &c, dest_align, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (dest_mem) != ptr_mode)
if we failed the caller should emit a normal call. */
static rtx
-expand_builtin_bzero (exp)
- tree exp;
+expand_builtin_bzero (tree arglist)
{
- tree arglist = TREE_OPERAND (exp, 1);
tree dest, size, newarglist;
- rtx result;
if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
newarglist = tree_cons (NULL_TREE, dest, newarglist);
- TREE_OPERAND (exp, 1) = newarglist;
- result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
-
- /* Always restore the original arguments. */
- TREE_OPERAND (exp, 1) = arglist;
-
- return result;
+ return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
}
-/* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
+/* Expand expression EXP, which is a call to the memcmp built-in function.
ARGLIST is the argument list for this call. Return 0 if we failed and the
caller should emit a normal call, otherwise try to get the result in
TARGET, if convenient (and in mode MODE, if that's convenient). */
static rtx
-expand_builtin_memcmp (exp, arglist, target, mode)
- tree exp ATTRIBUTE_UNUSED;
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
+ enum machine_mode mode)
{
tree arg1, arg2, len;
const char *p1, *p2;
the result in TARGET, if convenient. */
static rtx
-expand_builtin_strcmp (exp, target, mode)
- tree exp;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
tree arglist = TREE_OPERAND (exp, 1);
- tree arg1, arg2, len, len2, fn;
+ tree arg1, arg2;
const char *p1, *p2;
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return expand_expr (result, target, mode, EXPAND_NORMAL);
}
- len = c_strlen (arg1);
- len2 = c_strlen (arg2);
+#ifdef HAVE_cmpstrsi
+ if (HAVE_cmpstrsi)
+ {
+ tree len, len1, len2;
+ rtx arg1_rtx, arg2_rtx, arg3_rtx;
+ rtx result, insn;
- if (len)
- len = size_binop (PLUS_EXPR, ssize_int (1), len);
+ int arg1_align
+ = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ int arg2_align
+ = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ enum machine_mode insn_mode
+ = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
- if (len2)
- len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
+ len1 = c_strlen (arg1, 1);
+ len2 = c_strlen (arg2, 1);
+
+ if (len1)
+ len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
+ if (len2)
+ len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
+
+ /* If we don't have a constant length for the first, use the length
+ of the second, if we know it. We don't require a constant for
+ this case; some cost analysis could be done if both are available
+ but neither is constant. For now, assume they're equally cheap,
+ unless one has side effects. If both strings have constant lengths,
+ use the smaller. */
+
+ if (!len1)
+ len = len2;
+ else if (!len2)
+ len = len1;
+ else if (TREE_SIDE_EFFECTS (len1))
+ len = len2;
+ else if (TREE_SIDE_EFFECTS (len2))
+ len = len1;
+ else if (TREE_CODE (len1) != INTEGER_CST)
+ len = len2;
+ else if (TREE_CODE (len2) != INTEGER_CST)
+ len = len1;
+ else if (tree_int_cst_lt (len1, len2))
+ len = len1;
+ else
+ len = len2;
- /* If we don't have a constant length for the first, use the length
- of the second, if we know it. We don't require a constant for
- this case; some cost analysis could be done if both are available
- but neither is constant. For now, assume they're equally cheap
- unless one has side effects.
+ /* If both arguments have side effects, we cannot optimize. */
+ if (!len || TREE_SIDE_EFFECTS (len))
+ return 0;
- If both strings have constant lengths, use the smaller. This
- could arise if optimization results in strcpy being called with
- two fixed strings, or if the code was machine-generated. We should
- add some code to the `memcmp' handler below to deal with such
- situations, someday. */
+ /* If we don't have POINTER_TYPE, call the function. */
+ if (arg1_align == 0 || arg2_align == 0)
+ return 0;
- if (!len || TREE_CODE (len) != INTEGER_CST)
- {
- if (len2 && !TREE_SIDE_EFFECTS (len2))
- len = len2;
- else if (len == 0)
- return 0;
- }
- else if (len2 && TREE_CODE (len2) == INTEGER_CST
- && tree_int_cst_lt (len2, len))
- len = len2;
+ /* Make a place to write the result of the instruction. */
+ result = target;
+ if (! (result != 0
+ && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
+ && REGNO (result) >= FIRST_PSEUDO_REGISTER))
+ result = gen_reg_rtx (insn_mode);
- /* If both arguments have side effects, we cannot optimize. */
- if (TREE_SIDE_EFFECTS (len))
- return 0;
+ arg1_rtx = get_memory_rtx (arg1);
+ arg2_rtx = get_memory_rtx (arg2);
+ arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
+ GEN_INT (MIN (arg1_align, arg2_align)));
+ if (!insn)
+ return 0;
- fn = implicit_built_in_decls[BUILT_IN_MEMCMP];
- if (!fn)
- return 0;
+ emit_insn (insn);
- chainon (arglist, build_tree_list (NULL_TREE, len));
- return expand_expr (build_function_call_expr (fn, arglist),
- target, mode, EXPAND_NORMAL);
+ /* Return the value in the proper mode for this function. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+ if (GET_MODE (result) == mode)
+ return result;
+ if (target == 0)
+ return convert_to_mode (mode, result, 0);
+ convert_move (target, result, 0);
+ return target;
+ }
+#endif
+ return 0;
}
/* Expand expression EXP, which is a call to the strncmp builtin. Return 0
the result in TARGET, if convenient. */
static rtx
-expand_builtin_strncmp (exp, target, mode)
- tree exp;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
tree arglist = TREE_OPERAND (exp, 1);
- tree fn, newarglist, len = 0;
tree arg1, arg2, arg3;
const char *p1, *p2;
}
/* If c_strlen can determine an expression for one of the string
- lengths, and it doesn't have side effects, then call
- expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
-
- /* Perhaps one of the strings is really constant, if so prefer
- that constant length over the other string's length. */
- if (p1)
- len = c_strlen (arg1);
- else if (p2)
- len = c_strlen (arg2);
-
- /* If we still don't have a len, try either string arg as long
- as they don't have side effects. */
- if (!len && !TREE_SIDE_EFFECTS (arg1))
- len = c_strlen (arg1);
- if (!len && !TREE_SIDE_EFFECTS (arg2))
- len = c_strlen (arg2);
- /* If we still don't have a length, punt. */
- if (!len)
- return 0;
+ lengths, and it doesn't have side effects, then emit cmpstrsi
+ using length MIN(strlen(string)+1, arg3). */
+#ifdef HAVE_cmpstrsi
+ if (HAVE_cmpstrsi)
+ {
+ tree len, len1, len2;
+ rtx arg1_rtx, arg2_rtx, arg3_rtx;
+ rtx result, insn;
- fn = implicit_built_in_decls[BUILT_IN_MEMCMP];
- if (!fn)
- return 0;
+ int arg1_align
+ = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ int arg2_align
+ = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ enum machine_mode insn_mode
+ = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
+
+ len1 = c_strlen (arg1, 1);
+ len2 = c_strlen (arg2, 1);
+
+ if (len1)
+ len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
+ if (len2)
+ len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
+
+ /* If we don't have a constant length for the first, use the length
+ of the second, if we know it. We don't require a constant for
+ this case; some cost analysis could be done if both are available
+ but neither is constant. For now, assume they're equally cheap,
+ unless one has side effects. If both strings have constant lengths,
+ use the smaller. */
+
+ if (!len1)
+ len = len2;
+ else if (!len2)
+ len = len1;
+ else if (TREE_SIDE_EFFECTS (len1))
+ len = len2;
+ else if (TREE_SIDE_EFFECTS (len2))
+ len = len1;
+ else if (TREE_CODE (len1) != INTEGER_CST)
+ len = len2;
+ else if (TREE_CODE (len2) != INTEGER_CST)
+ len = len1;
+ else if (tree_int_cst_lt (len1, len2))
+ len = len1;
+ else
+ len = len2;
- /* Add one to the string length. */
- len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
+ /* If both arguments have side effects, we cannot optimize. */
+ if (!len || TREE_SIDE_EFFECTS (len))
+ return 0;
- /* The actual new length parameter is MIN(len,arg3). */
- len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
+ /* The actual new length parameter is MIN(len,arg3). */
+ len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
- newarglist = build_tree_list (NULL_TREE, len);
- newarglist = tree_cons (NULL_TREE, arg2, newarglist);
- newarglist = tree_cons (NULL_TREE, arg1, newarglist);
- return expand_expr (build_function_call_expr (fn, newarglist),
- target, mode, EXPAND_NORMAL);
+ /* If we don't have POINTER_TYPE, call the function. */
+ if (arg1_align == 0 || arg2_align == 0)
+ return 0;
+
+ /* Make a place to write the result of the instruction. */
+ result = target;
+ if (! (result != 0
+ && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
+ && REGNO (result) >= FIRST_PSEUDO_REGISTER))
+ result = gen_reg_rtx (insn_mode);
+
+ arg1_rtx = get_memory_rtx (arg1);
+ arg2_rtx = get_memory_rtx (arg2);
+ arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
+ GEN_INT (MIN (arg1_align, arg2_align)));
+ if (!insn)
+ return 0;
+
+ emit_insn (insn);
+
+ /* Return the value in the proper mode for this function. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+ if (GET_MODE (result) == mode)
+ return result;
+ if (target == 0)
+ return convert_to_mode (mode, result, 0);
+ convert_move (target, result, 0);
+ return target;
+ }
+#endif
+ return 0;
}
/* Expand expression EXP, which is a call to the strcat builtin.
otherwise try to get the result in TARGET, if convenient. */
static rtx
-expand_builtin_strcat (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return 0;
otherwise try to get the result in TARGET, if convenient. */
static rtx
-expand_builtin_strncat (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
otherwise try to get the result in TARGET, if convenient. */
static rtx
-expand_builtin_strspn (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return 0;
otherwise try to get the result in TARGET, if convenient. */
static rtx
-expand_builtin_strcspn (arglist, target, mode)
- tree arglist;
- rtx target;
- enum machine_mode mode;
+expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
{
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return 0;
if that's convenient. */
rtx
-expand_builtin_saveregs ()
+expand_builtin_saveregs (void)
{
rtx val, seq;
is controlled by the definition of CUMULATIVE_ARGS. */
static rtx
-expand_builtin_args_info (exp)
- tree exp;
+expand_builtin_args_info (tree arglist)
{
- tree arglist = TREE_OPERAND (exp, 1);
int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
int *word_ptr = (int *) ¤t_function_args_info;
-#if 0
- /* These are used by the code below that is if 0'ed away */
- int i;
- tree type, elts, result;
-#endif
if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
abort ();
error ("missing argument in `__builtin_args_info'");
return const0_rtx;
-
-#if 0
- for (i = 0; i < nwords; i++)
- elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
-
- type = build_array_type (integer_type_node,
- build_index_type (build_int_2 (nwords, 0)));
- result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
- TREE_CONSTANT (result) = 1;
- TREE_STATIC (result) = 1;
- result = build1 (INDIRECT_REF, build_pointer_type (type), result);
- TREE_CONSTANT (result) = 1;
- return expand_expr (result, NULL_RTX, VOIDmode, 0);
-#endif
}
/* Expand ARGLIST, from a call to __builtin_next_arg. */
static rtx
-expand_builtin_next_arg (arglist)
- tree arglist;
+expand_builtin_next_arg (tree arglist)
{
tree fntype = TREE_TYPE (current_function_decl);
from multiple evaluations. */
static tree
-stabilize_va_list (valist, needs_lvalue)
- tree valist;
- int needs_lvalue;
+stabilize_va_list (tree valist, int needs_lvalue)
{
if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
{
the variable. */
void
-std_expand_builtin_va_start (valist, nextarg)
- tree valist;
- rtx nextarg;
+std_expand_builtin_va_start (tree valist, rtx nextarg)
{
tree t;
/* Expand ARGLIST, from a call to __builtin_va_start. */
static rtx
-expand_builtin_va_start (arglist)
- tree arglist;
+expand_builtin_va_start (tree arglist)
{
rtx nextarg;
tree chain, valist;
current (padded) address and increment by the (padded) size. */
rtx
-std_expand_builtin_va_arg (valist, type)
- tree valist, type;
+std_expand_builtin_va_arg (tree valist, tree type)
{
tree addr_tree, t, type_size = NULL;
tree align, alignm1;
a very special sort of operator. */
rtx
-expand_builtin_va_arg (valist, type)
- tree valist, type;
+expand_builtin_va_arg (tree valist, tree type)
{
rtx addr, result;
tree promoted_type, want_va_type, have_va_type;
/* Expand ARGLIST, from a call to __builtin_va_end. */
static rtx
-expand_builtin_va_end (arglist)
- tree arglist;
+expand_builtin_va_end (tree arglist)
{
tree valist = TREE_VALUE (arglist);
nastiness of array-type va_list types. */
static rtx
-expand_builtin_va_copy (arglist)
- tree arglist;
+expand_builtin_va_copy (tree arglist)
{
tree dst, src, t;
__builtin_return_address. */
static rtx
-expand_builtin_frame_address (exp)
- tree exp;
+expand_builtin_frame_address (tree fndecl, tree arglist)
{
- tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
- tree arglist = TREE_OPERAND (exp, 1);
-
/* The argument must be a nonnegative integer constant.
It counts the number of frames to scan up the stack.
The value is the return address saved in that frame. */
the result in TARGET, if convenient. */
static rtx
-expand_builtin_alloca (arglist, target)
- tree arglist;
- rtx target;
+expand_builtin_alloca (tree arglist, rtx target)
{
rtx op0;
rtx result;
SUBTARGET may be used as the target for computing one of EXP's operands. */
static rtx
-expand_builtin_unop (arglist, target, subtarget, op_optab)
- tree arglist;
- rtx target, subtarget;
- optab op_optab;
+expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
+ rtx subtarget, optab op_optab)
{
rtx op0;
if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
op_optab, op0, target, 1);
if (target == 0)
abort ();
- return target;
+
+ return convert_to_mode (target_mode, target, 0);
}
/* If the string passed to fputs is a constant and is one character
long, we attempt to transform this call into __builtin_fputc(). */
static rtx
-expand_builtin_fputs (arglist, ignore, unlocked)
- tree arglist;
- int ignore;
- int unlocked;
+expand_builtin_fputs (tree arglist, int ignore, int unlocked)
{
tree len, fn;
tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
/* Get the length of the string passed to fputs. If the length
can't be determined, punt. */
- if (!(len = c_strlen (TREE_VALUE (arglist)))
+ if (!(len = c_strlen (TREE_VALUE (arglist), 1))
|| TREE_CODE (len) != INTEGER_CST)
return 0;
/* FALLTHROUGH */
case 1: /* length is greater than 1, call fwrite. */
{
- tree string_arg = TREE_VALUE (arglist);
+ tree string_arg;
+ /* If optimizing for size keep fputs. */
+ if (optimize_size)
+ return 0;
+ string_arg = TREE_VALUE (arglist);
/* New argument list transforming fputs(string, stream) to
fwrite(string, 1, len, stream). */
arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
a non-jump context. */
static rtx
-expand_builtin_expect (arglist, target)
- tree arglist;
- rtx target;
+expand_builtin_expect (tree arglist, rtx target)
{
tree exp, c;
rtx note, rtx_c;
target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
/* Don't bother with expected value notes for integral constants. */
- if (GET_CODE (target) != CONST_INT)
+ if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
{
/* We do need to force this into a register so that we can be
moderately sure to be able to correctly interpret the branch
rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
- note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
+ note = emit_note (NOTE_INSN_EXPECTED_VALUE);
NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
}
based on the test being 0/1. */
rtx
-expand_builtin_expect_jump (exp, if_false_label, if_true_label)
- tree exp;
- rtx if_false_label;
- rtx if_true_label;
+expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
tree arglist = TREE_OPERAND (exp, 1);
tree arg0 = TREE_VALUE (arglist);
while (insn != NULL_RTX)
{
rtx next = NEXT_INSN (insn);
- rtx pattern;
- if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
- && (pattern = pc_set (insn)) != NULL_RTX)
+ if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
{
- rtx ifelse = SET_SRC (pattern);
+ rtx ifelse = SET_SRC (pc_set (insn));
rtx label;
int taken;
- if (GET_CODE (ifelse) != IF_THEN_ELSE)
- goto do_next_insn;
-
if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
{
taken = 1;
if (integer_zerop (arg1))
taken = 1 - taken;
- /* If we are jumping to the false label, reverse the
- probabilities. */
- if (label == NULL_RTX)
- ; /* conditional return */
- else if (label == if_false_label)
- taken = 1 - taken;
- else if (label != if_true_label)
- goto do_next_insn;
+ /* If we are jumping to the false label, reverse the
+ probabilities. */
+ if (label == NULL_RTX)
+ ; /* conditional return */
+ else if (label == if_false_label)
+ taken = 1 - taken;
+ else if (label != if_true_label)
+ goto do_next_insn;
+
+ num_jumps++;
+ predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
+ }
+
+ do_next_insn:
+ insn = next;
+ }
+
+ /* If no jumps were modified, fail and do __builtin_expect the normal
+ way. */
+ if (num_jumps == 0)
+ ret = NULL_RTX;
+ }
+
+ return ret;
+}
+
+/* Expand a call to __builtin_trap.  Emit the target's trap instruction
+   when one is available, otherwise fall back to a library call to
+   abort.  Either way control does not return, so close the current
+   basic block with a barrier.  */
+void
+expand_builtin_trap (void)
+{
+#ifdef HAVE_trap
+  if (HAVE_trap)
+    emit_insn (gen_trap ());
+  else
+#endif
+  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
+  emit_barrier ();
+}
+
+/* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
+ Return 0 if a normal call should be emitted rather than expanding
+ the function inline. If convenient, the result should be placed
+ in TARGET. SUBTARGET may be used as the target for computing
+ the operand. */
+
+static rtx
+expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
+{
+  enum machine_mode mode;
+  tree arg;
+  rtx op0;
+
+  /* Exactly one argument of real (floating point) type is required.  */
+  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+    return 0;
+
+  arg = TREE_VALUE (arglist);
+  mode = TYPE_MODE (TREE_TYPE (arg));
+  op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+  /* Only reuse TARGET when it is safe from being clobbered while
+     evaluating ARG; expand_abs checks this to decide whether it may
+     write partial results into TARGET.  */
+  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
+}
+
+/* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
+ Return 0 if a normal call should be emitted rather than expanding
+ the function inline. If convenient, the result should be placed
+ in target. */
+
+static rtx
+expand_builtin_cabs (tree arglist, rtx target)
+{
+  enum machine_mode mode;
+  tree arg;
+  rtx op0;
+
+  /* Require exactly one argument whose type is complex with a real
+     (floating point) component; checked by hand since the component
+     type must be inspected as well as the outer COMPLEX_TYPE.  */
+  if (arglist == 0 || TREE_CHAIN (arglist))
+    return 0;
+  arg = TREE_VALUE (arglist);
+  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
+      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
+    return 0;
+
+  /* MODE is the mode of the complex value; expand_complex_abs
+     produces the scalar magnitude.  */
+  mode = TYPE_MODE (TREE_TYPE (arg));
+  op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
+  return expand_complex_abs (mode, op0, target, 0);
+}
+
+/* Expand a call to sprintf with argument list ARGLIST.  Return 0 if
+   a normal call should be emitted rather than expanding the function
+   inline.  If convenient, the result should be placed in TARGET with
+   mode MODE.  */
+
+static rtx
+expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
+{
+  tree orig_arglist, dest, fmt;
+  const char *fmt_str;
+
+  orig_arglist = arglist;
+
+  /* Verify the required arguments in the original call.  */
+  if (! arglist)
+    return 0;
+  dest = TREE_VALUE (arglist);
+  if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
+    return 0;
+  arglist = TREE_CHAIN (arglist);
+  if (! arglist)
+    return 0;
+  fmt = TREE_VALUE (arglist);
+  /* Validate FMT here, not DEST a second time.  */
+  if (TREE_CODE (TREE_TYPE (fmt)) != POINTER_TYPE)
+    return 0;
+  arglist = TREE_CHAIN (arglist);
+
+  /* Check whether the format is a literal string constant.  */
+  fmt_str = c_getstr (fmt);
+  if (fmt_str == NULL)
+    return 0;
+
+  /* If the format doesn't contain % args or %%, use strcpy.  */
+  if (strchr (fmt_str, '%') == 0)
+    {
+      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
+      tree exp;
+
+      if (arglist || ! fn)
+	return 0;
+      expand_expr (build_function_call_expr (fn, orig_arglist),
+		   const0_rtx, VOIDmode, EXPAND_NORMAL);
+      if (target == const0_rtx)
+	return const0_rtx;
+      /* sprintf returns the number of characters written, here the
+	 length of the literal format string.  */
+      exp = build_int_2 (strlen (fmt_str), 0);
+      exp = fold (build1 (NOP_EXPR, integer_type_node, exp));
+      return expand_expr (exp, target, mode, EXPAND_NORMAL);
+    }
+  /* If the format is "%s", use strcpy if the result isn't used.  */
+  else if (strcmp (fmt_str, "%s") == 0)
+    {
+      tree fn, arg, len;
+      fn = implicit_built_in_decls[BUILT_IN_STRCPY];
+
+      if (! fn)
+	return 0;
-	  num_jumps++;
-	  predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
-	}
+      if (! arglist || TREE_CHAIN (arglist))
+	return 0;
+      arg = TREE_VALUE (arglist);
+      if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
+	return 0;
-    do_next_insn:
-      insn = next;
+      if (target != const0_rtx)
+	{
+	  /* The return value is the string length, which must be
+	     known at compile time for the transformation to win.  */
+	  len = c_strlen (arg, 1);
+	  if (! len || TREE_CODE (len) != INTEGER_CST)
+	    return 0;
 	}
+      else
+	len = NULL_TREE;
-  /* If no jumps were modified, fail and do __builtin_expect the normal
-     way.  */
-  if (num_jumps == 0)
-    ret = NULL_RTX;
-    }
+      arglist = build_tree_list (NULL_TREE, arg);
+      arglist = tree_cons (NULL_TREE, dest, arglist);
+      expand_expr (build_function_call_expr (fn, arglist),
+		   const0_rtx, VOIDmode, EXPAND_NORMAL);
-  return ret;
-}
+      if (target == const0_rtx)
+	return const0_rtx;
+      return expand_expr (len, target, mode, EXPAND_NORMAL);
+    }
-void
-expand_builtin_trap ()
-{
-#ifdef HAVE_trap
-  if (HAVE_trap)
-    emit_insn (gen_trap ());
-  else
-#endif
-  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
-  emit_barrier ();
+  return 0;
 }
\f
/* Expand an expression EXP that calls a built-in function,
IGNORE is nonzero if the value is to be ignored. */
rtx
-expand_builtin (exp, target, subtarget, mode, ignore)
- tree exp;
- rtx target;
- rtx subtarget;
- enum machine_mode mode;
- int ignore;
+expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
+ int ignore)
{
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
tree arglist = TREE_OPERAND (exp, 1);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+ enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
+
+  /* Perform postincrements before expanding builtin functions.  */
+ emit_queue ();
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
case BUILT_IN_EXP:
case BUILT_IN_EXPF:
case BUILT_IN_EXPL:
+ case BUILT_IN_LOG:
+ case BUILT_IN_LOGF:
+ case BUILT_IN_LOGL:
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
+ case BUILT_IN_POW:
+ case BUILT_IN_POWF:
+ case BUILT_IN_POWL:
+ case BUILT_IN_ATAN2:
+ case BUILT_IN_ATAN2F:
+ case BUILT_IN_ATAN2L:
case BUILT_IN_MEMSET:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMCMP:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_MEMMOVE:
case BUILT_IN_BCMP:
case BUILT_IN_BZERO:
+ case BUILT_IN_BCOPY:
case BUILT_IN_INDEX:
case BUILT_IN_RINDEX:
+ case BUILT_IN_SPRINTF:
+ case BUILT_IN_STPCPY:
case BUILT_IN_STRCHR:
case BUILT_IN_STRRCHR:
case BUILT_IN_STRLEN:
break;
}
+ /* The built-in function expanders test for target == const0_rtx
+ to determine whether the function's result will be ignored. */
+ if (ignore)
+ target = const0_rtx;
+
+ /* If the result of a pure or const built-in function is ignored, and
+ none of its arguments are volatile, we can avoid expanding the
+ built-in call and just evaluate the arguments for side-effects. */
+ if (target == const0_rtx
+ && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
+ {
+ bool volatilep = false;
+ tree arg;
+
+ for (arg = arglist; arg; arg = TREE_CHAIN (arg))
+ if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
+ {
+ volatilep = true;
+ break;
+ }
+
+ if (! volatilep)
+ {
+ for (arg = arglist; arg; arg = TREE_CHAIN (arg))
+ expand_expr (TREE_VALUE (arg), const0_rtx,
+ VOIDmode, EXPAND_NORMAL);
+ return const0_rtx;
+ }
+ }
+
switch (fcode)
{
case BUILT_IN_ABS:
case BUILT_IN_LABS:
case BUILT_IN_LLABS:
case BUILT_IN_IMAXABS:
+ /* build_function_call changes these into ABS_EXPR. */
+ abort ();
+
case BUILT_IN_FABS:
case BUILT_IN_FABSF:
case BUILT_IN_FABSL:
- /* build_function_call changes these into ABS_EXPR. */
- abort ();
+ target = expand_builtin_fabs (arglist, target, subtarget);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_CABS:
+ case BUILT_IN_CABSF:
+ case BUILT_IN_CABSL:
+ if (flag_unsafe_math_optimizations)
+ {
+ target = expand_builtin_cabs (arglist, target);
+ if (target)
+ return target;
+ }
+ break;
case BUILT_IN_CONJ:
case BUILT_IN_CONJF:
case BUILT_IN_LOG:
case BUILT_IN_LOGF:
case BUILT_IN_LOGL:
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
/* Treat these like sqrt only if unsafe math optimizations are allowed,
because of possible accuracy problems. */
if (! flag_unsafe_math_optimizations)
return target;
break;
+ case BUILT_IN_POW:
+ case BUILT_IN_POWF:
+ case BUILT_IN_POWL:
+ if (! flag_unsafe_math_optimizations)
+ break;
+ target = expand_builtin_pow (exp, target, subtarget);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_ATAN2:
+ case BUILT_IN_ATAN2F:
+ case BUILT_IN_ATAN2L:
+ if (! flag_unsafe_math_optimizations)
+ break;
+ target = expand_builtin_mathfn_2 (exp, target, subtarget);
+ if (target)
+ return target;
+ break;
+
case BUILT_IN_APPLY_ARGS:
return expand_builtin_apply_args ();
return expand_builtin_saveregs ();
case BUILT_IN_ARGS_INFO:
- return expand_builtin_args_info (exp);
+ return expand_builtin_args_info (arglist);
/* Return the address of the first anonymous stack arg. */
case BUILT_IN_NEXT_ARG:
return expand_builtin_classify_type (arglist);
case BUILT_IN_CONSTANT_P:
- return expand_builtin_constant_p (exp);
+ return expand_builtin_constant_p (arglist, target_mode);
case BUILT_IN_FRAME_ADDRESS:
case BUILT_IN_RETURN_ADDRESS:
- return expand_builtin_frame_address (exp);
+ return expand_builtin_frame_address (fndecl, arglist);
/* Returns the address of the area where the structure is returned.
0 otherwise. */
case BUILT_IN_FFS:
case BUILT_IN_FFSL:
case BUILT_IN_FFSLL:
- target = expand_builtin_unop (arglist, target, subtarget, ffs_optab);
+ target = expand_builtin_unop (target_mode, arglist, target,
+ subtarget, ffs_optab);
if (target)
return target;
break;
case BUILT_IN_CLZ:
case BUILT_IN_CLZL:
case BUILT_IN_CLZLL:
- target = expand_builtin_unop (arglist, target, subtarget, clz_optab);
+ target = expand_builtin_unop (target_mode, arglist, target,
+ subtarget, clz_optab);
if (target)
return target;
break;
case BUILT_IN_CTZ:
case BUILT_IN_CTZL:
case BUILT_IN_CTZLL:
- target = expand_builtin_unop (arglist, target, subtarget, ctz_optab);
+ target = expand_builtin_unop (target_mode, arglist, target,
+ subtarget, ctz_optab);
if (target)
return target;
break;
case BUILT_IN_POPCOUNT:
case BUILT_IN_POPCOUNTL:
case BUILT_IN_POPCOUNTLL:
- target = expand_builtin_unop (arglist, target, subtarget,
- popcount_optab);
+ target = expand_builtin_unop (target_mode, arglist, target,
+ subtarget, popcount_optab);
if (target)
return target;
break;
case BUILT_IN_PARITY:
case BUILT_IN_PARITYL:
case BUILT_IN_PARITYLL:
- target = expand_builtin_unop (arglist, target, subtarget, parity_optab);
+ target = expand_builtin_unop (target_mode, arglist, target,
+ subtarget, parity_optab);
if (target)
return target;
break;
case BUILT_IN_STRLEN:
- target = expand_builtin_strlen (exp, target);
+ target = expand_builtin_strlen (arglist, target, target_mode);
if (target)
return target;
break;
case BUILT_IN_STRCPY:
- target = expand_builtin_strcpy (exp, target, mode);
+ target = expand_builtin_strcpy (arglist, target, mode);
if (target)
return target;
break;
return target;
break;
+ case BUILT_IN_STPCPY:
+ target = expand_builtin_stpcpy (arglist, target, mode);
+ if (target)
+ return target;
+ break;
+
case BUILT_IN_STRCAT:
target = expand_builtin_strcat (arglist, target, mode);
if (target)
return target;
break;
+ case BUILT_IN_MEMPCPY:
+ target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_MEMMOVE:
+ target = expand_builtin_memmove (arglist, target, mode);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_BCOPY:
+ target = expand_builtin_bcopy (arglist);
+ if (target)
+ return target;
+ break;
+
case BUILT_IN_MEMSET:
- target = expand_builtin_memset (exp, target, mode);
+ target = expand_builtin_memset (arglist, target, mode);
if (target)
return target;
break;
case BUILT_IN_BZERO:
- target = expand_builtin_bzero (exp);
+ target = expand_builtin_bzero (arglist);
if (target)
return target;
break;
return target;
break;
+ case BUILT_IN_SPRINTF:
+ target = expand_builtin_sprintf (arglist, target, mode);
+ if (target)
+ return target;
+ break;
+
/* Various hooks for the DWARF 2 __throw routine. */
case BUILT_IN_UNWIND_INIT:
expand_builtin_unwind_init ();
case BUILT_IN_DWARF_CFA:
return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
- case BUILT_IN_DWARF_FP_REGNUM:
- return expand_builtin_dwarf_fp_regnum ();
+ case BUILT_IN_DWARF_SP_COLUMN:
+ return expand_builtin_dwarf_sp_column ();
case BUILT_IN_INIT_DWARF_REG_SIZES:
expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
return const0_rtx;
taking a single real argument, then the return value is the
DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
the return value is END_BUILTINS. */
-
+
enum built_in_function
-builtin_mathfn_code (t)
- tree t;
+builtin_mathfn_code (tree t)
{
tree fndecl, arglist;
arglist = TREE_OPERAND (t, 1);
if (! arglist
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
- || TREE_CHAIN (arglist))
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
return END_BUILTINS;
+ arglist = TREE_CHAIN (arglist);
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_POW:
+ case BUILT_IN_POWF:
+ case BUILT_IN_POWL:
+ case BUILT_IN_ATAN2:
+ case BUILT_IN_ATAN2F:
+ case BUILT_IN_ATAN2L:
+ if (! arglist
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
+ || TREE_CHAIN (arglist))
+ return END_BUILTINS;
+ break;
+
+ default:
+ if (arglist)
+ return END_BUILTINS;
+ break;
+ }
+
return DECL_FUNCTION_CODE (fndecl);
}
constant. ARGLIST is the argument list of the call. */
static tree
-fold_builtin_constant_p (arglist)
- tree arglist;
+fold_builtin_constant_p (tree arglist)
{
if (arglist == 0)
return 0;
/* Fold a call to __builtin_classify_type. */
static tree
-fold_builtin_classify_type (arglist)
- tree arglist;
+fold_builtin_classify_type (tree arglist)
{
if (arglist == 0)
return build_int_2 (no_type_class, 0);
/* Fold a call to __builtin_inf or __builtin_huge_val. */
static tree
-fold_builtin_inf (type, warn)
- tree type;
- int warn;
+fold_builtin_inf (tree type, int warn)
{
REAL_VALUE_TYPE real;
/* Fold a call to __builtin_nan or __builtin_nans. */
static tree
-fold_builtin_nan (arglist, type, quiet)
- tree arglist, type;
- int quiet;
+fold_builtin_nan (tree arglist, tree type, int quiet)
{
REAL_VALUE_TYPE real;
const char *str;
across (for instance floor((double)f) == (double)floorf (f).
Do the transformation. */
static tree
-fold_trunc_transparent_mathfn (exp)
- tree exp;
+fold_trunc_transparent_mathfn (tree exp)
{
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
tree arglist = TREE_OPERAND (exp, 1);
return 0;
}
+/* Fold function call to builtin cabs, cabsf or cabsl.  FNDECL is the
+   function's DECL, ARGLIST is the argument list and TYPE is the return
+   type.  Return NULL_TREE if no simplification can be made.  */
+
+static tree
+fold_builtin_cabs (tree fndecl, tree arglist, tree type)
+{
+  tree arg;
+
+  /* Exactly one argument is required.  */
+  if (!arglist || TREE_CHAIN (arglist))
+    return NULL_TREE;
+
+  arg = TREE_VALUE (arglist);
+  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
+      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
+    return NULL_TREE;
+
+  /* Evaluate cabs of a constant at compile-time.  */
+  if (flag_unsafe_math_optimizations
+      && TREE_CODE (arg) == COMPLEX_CST
+      && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
+      && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
+      && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
+      && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
+    {
+      REAL_VALUE_TYPE r, i;
+
+      r = TREE_REAL_CST (TREE_REALPART (arg));
+      i = TREE_REAL_CST (TREE_IMAGPART (arg));
+
+      /* Compute sqrt (r*r + i*i) in the target format; fold an
+	 inexact result only when -fno-trapping-math.  */
+      real_arithmetic (&r, MULT_EXPR, &r, &r);
+      real_arithmetic (&i, MULT_EXPR, &i, &i);
+      real_arithmetic (&r, PLUS_EXPR, &r, &i);
+      if (real_sqrt (&r, TYPE_MODE (type), &r)
+	  || ! flag_trapping_math)
+	return build_real (type, r);
+    }
+
+  /* If either part is zero, cabs is fabs of the other.  */
+  if (TREE_CODE (arg) == COMPLEX_EXPR
+      && real_zerop (TREE_OPERAND (arg, 0)))
+    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
+  if (TREE_CODE (arg) == COMPLEX_EXPR
+      && real_zerop (TREE_OPERAND (arg, 1)))
+    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
+
+  /* Transform cabs(z) into sqrt (realpart(z)**2 + imagpart(z)**2).  */
+  if (flag_unsafe_math_optimizations)
+    {
+      enum built_in_function fcode;
+      tree sqrtfn;
+
+      fcode = DECL_FUNCTION_CODE (fndecl);
+      if (fcode == BUILT_IN_CABS)
+	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
+      else if (fcode == BUILT_IN_CABSF)
+	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
+      else if (fcode == BUILT_IN_CABSL)
+	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
+      else
+	sqrtfn = NULL_TREE;
+
+      if (sqrtfn != NULL_TREE)
+	{
+	  /* Use a distinct name for the sqrt argument list so it does
+	     not shadow the ARGLIST parameter.  */
+	  tree rpart, ipart, result, sqrt_arglist;
+
+	  rpart = fold (build1 (REALPART_EXPR, type, arg));
+	  ipart = fold (build1 (IMAGPART_EXPR, type, arg));
+
+	  /* Each part is used twice below; evaluate it only once.  */
+	  rpart = save_expr (rpart);
+	  ipart = save_expr (ipart);
+
+	  result = fold (build (PLUS_EXPR, type,
+				fold (build (MULT_EXPR, type,
+					     rpart, rpart)),
+				fold (build (MULT_EXPR, type,
+					     ipart, ipart))));
+
+	  sqrt_arglist = build_tree_list (NULL_TREE, result);
+	  return build_function_call_expr (sqrtfn, sqrt_arglist);
+	}
+    }
+
+  return NULL_TREE;
+}
+
/* Used by constant folding to eliminate some builtin calls early. EXP is
the CALL_EXPR of a call to a builtin function. */
tree
-fold_builtin (exp)
- tree exp;
+fold_builtin (tree exp)
{
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
tree arglist = TREE_OPERAND (exp, 1);
- enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+ tree type = TREE_TYPE (TREE_TYPE (fndecl));
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
return 0;
- switch (fcode)
+ switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_CONSTANT_P:
return fold_builtin_constant_p (arglist);
case BUILT_IN_STRLEN:
if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
{
- tree len = c_strlen (TREE_VALUE (arglist));
+ tree len = c_strlen (TREE_VALUE (arglist), 0);
if (len)
{
/* Convert from the internal "sizetype" type to "size_t". */
}
break;
+ case BUILT_IN_FABS:
+ case BUILT_IN_FABSF:
+ case BUILT_IN_FABSL:
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
+ break;
+
+ case BUILT_IN_CABS:
+ case BUILT_IN_CABSF:
+ case BUILT_IN_CABSL:
+ return fold_builtin_cabs (fndecl, arglist, type);
+
case BUILT_IN_SQRT:
case BUILT_IN_SQRTF:
case BUILT_IN_SQRTL:
if (TREE_CODE (arg) == REAL_CST
&& ! TREE_CONSTANT_OVERFLOW (arg))
{
- enum machine_mode mode;
REAL_VALUE_TYPE r, x;
x = TREE_REAL_CST (arg);
- mode = TYPE_MODE (TREE_TYPE (arg));
- if (real_sqrt (&r, mode, &x)
+ if (real_sqrt (&r, TYPE_MODE (type), &x)
|| (!flag_trapping_math && !flag_errno_math))
- return build_real (TREE_TYPE (arg), r);
+ return build_real (type, r);
}
- /* Optimize sqrt(exp(x)) = exp(x/2.0). */
+ /* Optimize sqrt(exp(x)) = exp(x*0.5). */
fcode = builtin_mathfn_code (arg);
if (flag_unsafe_math_optimizations
&& (fcode == BUILT_IN_EXP
|| fcode == BUILT_IN_EXPL))
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
- arg = build (RDIV_EXPR, TREE_TYPE (arg),
- TREE_VALUE (TREE_OPERAND (arg, 1)),
- build_real (TREE_TYPE (arg), dconst2));
+ arg = fold (build (MULT_EXPR, type,
+ TREE_VALUE (TREE_OPERAND (arg, 1)),
+ build_real (type, dconsthalf)));
arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (expfn, arglist);
}
+
+ /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
+ if (flag_unsafe_math_optimizations
+ && (fcode == BUILT_IN_POW
+ || fcode == BUILT_IN_POWF
+ || fcode == BUILT_IN_POWL))
+ {
+ tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
+ tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
+ tree narg1 = fold (build (MULT_EXPR, type, arg1,
+ build_real (type, dconsthalf)));
+ arglist = tree_cons (NULL_TREE, arg0,
+ build_tree_list (NULL_TREE, narg1));
+ return build_function_call_expr (powfn, arglist);
+ }
+ }
+ break;
+
+ case BUILT_IN_SIN:
+ case BUILT_IN_SINF:
+ case BUILT_IN_SINL:
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ {
+ tree arg = TREE_VALUE (arglist);
+
+ /* Optimize sin(0.0) = 0.0. */
+ if (real_zerop (arg))
+ return arg;
+ }
+ break;
+
+ case BUILT_IN_COS:
+ case BUILT_IN_COSF:
+ case BUILT_IN_COSL:
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ {
+ tree arg = TREE_VALUE (arglist);
+
+ /* Optimize cos(0.0) = 1.0. */
+ if (real_zerop (arg))
+ return build_real (type, dconst1);
+
+ /* Optimize cos(-x) into cos(x). */
+ if (TREE_CODE (arg) == NEGATE_EXPR)
+ {
+ tree arglist = build_tree_list (NULL_TREE,
+ TREE_OPERAND (arg, 0));
+ return build_function_call_expr (fndecl, arglist);
+ }
}
break;
/* Optimize exp(0.0) = 1.0. */
if (real_zerop (arg))
- return build_real (TREE_TYPE (arg), dconst1);
+ return build_real (type, dconst1);
+
+ /* Optimize exp(1.0) = e. */
+ if (real_onep (arg))
+ {
+ REAL_VALUE_TYPE cst;
+
+ if (! builtin_dconsts_init)
+ init_builtin_dconsts ();
+ real_convert (&cst, TYPE_MODE (type), &dconste);
+ return build_real (type, cst);
+ }
+
+ /* Attempt to evaluate exp at compile-time. */
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg) == REAL_CST
+ && ! TREE_CONSTANT_OVERFLOW (arg))
+ {
+ REAL_VALUE_TYPE cint;
+ REAL_VALUE_TYPE c;
+ HOST_WIDE_INT n;
+
+ c = TREE_REAL_CST (arg);
+ n = real_to_integer (&c);
+ real_from_integer (&cint, VOIDmode, n,
+ n < 0 ? -1 : 0, 0);
+ if (real_identical (&c, &cint))
+ {
+ REAL_VALUE_TYPE x;
+
+ if (! builtin_dconsts_init)
+ init_builtin_dconsts ();
+ real_powi (&x, TYPE_MODE (type), &dconste, n);
+ return build_real (type, x);
+ }
+ }
/* Optimize exp(log(x)) = x. */
fcode = builtin_mathfn_code (arg);
/* Optimize log(1.0) = 0.0. */
if (real_onep (arg))
- return build_real (TREE_TYPE (arg), dconst0);
+ return build_real (type, dconst0);
/* Optimize log(exp(x)) = x. */
fcode = builtin_mathfn_code (arg);
|| fcode == BUILT_IN_EXPL))
return TREE_VALUE (TREE_OPERAND (arg, 1));
- /* Optimize log(sqrt(x)) = log(x)/2.0. */
+ /* Optimize log(sqrt(x)) = log(x)*0.5. */
if (flag_unsafe_math_optimizations
&& (fcode == BUILT_IN_SQRT
|| fcode == BUILT_IN_SQRTF
{
tree logfn = build_function_call_expr (fndecl,
TREE_OPERAND (arg, 1));
- return fold (build (RDIV_EXPR, TREE_TYPE (arg), logfn,
- build_real (TREE_TYPE (arg), dconst2)));
+ return fold (build (MULT_EXPR, type, logfn,
+ build_real (type, dconsthalf)));
+ }
+
+ /* Optimize log(pow(x,y)) = y*log(x). */
+ if (flag_unsafe_math_optimizations
+ && (fcode == BUILT_IN_POW
+ || fcode == BUILT_IN_POWF
+ || fcode == BUILT_IN_POWL))
+ {
+ tree arg0, arg1, logfn;
+
+ arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
+ arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
+ arglist = build_tree_list (NULL_TREE, arg0);
+ logfn = build_function_call_expr (fndecl, arglist);
+ return fold (build (MULT_EXPR, type, arg1, logfn));
+ }
+ }
+ break;
+
+ case BUILT_IN_TAN:
+ case BUILT_IN_TANF:
+ case BUILT_IN_TANL:
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ {
+ enum built_in_function fcode;
+ tree arg = TREE_VALUE (arglist);
+
+ /* Optimize tan(0.0) = 0.0. */
+ if (real_zerop (arg))
+ return arg;
+
+ /* Optimize tan(atan(x)) = x. */
+ fcode = builtin_mathfn_code (arg);
+ if (flag_unsafe_math_optimizations
+ && (fcode == BUILT_IN_ATAN
+ || fcode == BUILT_IN_ATANF
+ || fcode == BUILT_IN_ATANL))
+ return TREE_VALUE (TREE_OPERAND (arg, 1));
+ }
+ break;
+
+ case BUILT_IN_ATAN:
+ case BUILT_IN_ATANF:
+ case BUILT_IN_ATANL:
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ {
+ tree arg = TREE_VALUE (arglist);
+
+ /* Optimize atan(0.0) = 0.0. */
+ if (real_zerop (arg))
+ return arg;
+
+ /* Optimize atan(1.0) = pi/4. */
+ if (real_onep (arg))
+ {
+ REAL_VALUE_TYPE cst;
+
+ if (! builtin_dconsts_init)
+ init_builtin_dconsts ();
+ real_convert (&cst, TYPE_MODE (type), &dconstpi);
+ cst.exp -= 2;
+ return build_real (type, cst);
+ }
+ }
+ break;
+
+ case BUILT_IN_POW:
+ case BUILT_IN_POWF:
+ case BUILT_IN_POWL:
+ if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ {
+ enum built_in_function fcode;
+ tree arg0 = TREE_VALUE (arglist);
+ tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+
+ /* Optimize pow(1.0,y) = 1.0. */
+ if (real_onep (arg0))
+ return omit_one_operand (type, build_real (type, dconst1), arg1);
+
+ if (TREE_CODE (arg1) == REAL_CST
+ && ! TREE_CONSTANT_OVERFLOW (arg1))
+ {
+ REAL_VALUE_TYPE c;
+ c = TREE_REAL_CST (arg1);
+
+ /* Optimize pow(x,0.0) = 1.0. */
+ if (REAL_VALUES_EQUAL (c, dconst0))
+ return omit_one_operand (type, build_real (type, dconst1),
+ arg0);
+
+ /* Optimize pow(x,1.0) = x. */
+ if (REAL_VALUES_EQUAL (c, dconst1))
+ return arg0;
+
+ /* Optimize pow(x,-1.0) = 1.0/x. */
+ if (REAL_VALUES_EQUAL (c, dconstm1))
+ return fold (build (RDIV_EXPR, type,
+ build_real (type, dconst1),
+ arg0));
+
+ /* Optimize pow(x,2.0) = x*x. */
+ if (REAL_VALUES_EQUAL (c, dconst2)
+ && (*lang_hooks.decls.global_bindings_p) () == 0
+ && ! CONTAINS_PLACEHOLDER_P (arg0))
+ {
+ arg0 = save_expr (arg0);
+ return fold (build (MULT_EXPR, type, arg0, arg0));
+ }
+
+ /* Optimize pow(x,-2.0) = 1.0/(x*x). */
+ if (flag_unsafe_math_optimizations
+ && REAL_VALUES_EQUAL (c, dconstm2)
+ && (*lang_hooks.decls.global_bindings_p) () == 0
+ && ! CONTAINS_PLACEHOLDER_P (arg0))
+ {
+ arg0 = save_expr (arg0);
+ return fold (build (RDIV_EXPR, type,
+ build_real (type, dconst1),
+ fold (build (MULT_EXPR, type,
+ arg0, arg0))));
+ }
+
+ /* Optimize pow(x,0.5) = sqrt(x). */
+ if (flag_unsafe_math_optimizations
+ && REAL_VALUES_EQUAL (c, dconsthalf))
+ {
+ tree sqrtfn;
+
+ fcode = DECL_FUNCTION_CODE (fndecl);
+ if (fcode == BUILT_IN_POW)
+ sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
+ else if (fcode == BUILT_IN_POWF)
+ sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
+ else if (fcode == BUILT_IN_POWL)
+ sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
+ else
+ sqrtfn = NULL_TREE;
+
+ if (sqrtfn != NULL_TREE)
+ {
+ tree arglist = build_tree_list (NULL_TREE, arg0);
+ return build_function_call_expr (sqrtfn, arglist);
+ }
+ }
+
+ /* Attempt to evaluate pow at compile-time. */
+ if (TREE_CODE (arg0) == REAL_CST
+ && ! TREE_CONSTANT_OVERFLOW (arg0))
+ {
+ REAL_VALUE_TYPE cint;
+ HOST_WIDE_INT n;
+
+ n = real_to_integer (&c);
+ real_from_integer (&cint, VOIDmode, n,
+ n < 0 ? -1 : 0, 0);
+ if (real_identical (&c, &cint))
+ {
+ REAL_VALUE_TYPE x;
+ bool inexact;
+
+ x = TREE_REAL_CST (arg0);
+ inexact = real_powi (&x, TYPE_MODE (type), &x, n);
+ if (flag_unsafe_math_optimizations || !inexact)
+ return build_real (type, x);
+ }
+ }
+ }
+
+ /* Optimize pow(exp(x),y) = exp(x*y). */
+ fcode = builtin_mathfn_code (arg0);
+ if (flag_unsafe_math_optimizations
+ && (fcode == BUILT_IN_EXP
+ || fcode == BUILT_IN_EXPF
+ || fcode == BUILT_IN_EXPL))
+ {
+ tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
+ arg = fold (build (MULT_EXPR, type, arg, arg1));
+ arglist = build_tree_list (NULL_TREE, arg);
+ return build_function_call_expr (expfn, arglist);
+ }
+
+ /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
+ if (flag_unsafe_math_optimizations
+ && (fcode == BUILT_IN_SQRT
+ || fcode == BUILT_IN_SQRTF
+ || fcode == BUILT_IN_SQRTL))
+ {
+ tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
+ tree narg1 = fold (build (MULT_EXPR, type, arg1,
+ build_real (type, dconsthalf)));
+
+ arglist = tree_cons (NULL_TREE, narg0,
+ build_tree_list (NULL_TREE, narg1));
+ return build_function_call_expr (fndecl, arglist);
+ }
+
+ /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
+ if (flag_unsafe_math_optimizations
+ && (fcode == BUILT_IN_POW
+ || fcode == BUILT_IN_POWF
+ || fcode == BUILT_IN_POWL))
+ {
+ tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
+ tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
+ tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
+ arglist = tree_cons (NULL_TREE, arg00,
+ build_tree_list (NULL_TREE, narg1));
+ return build_function_call_expr (fndecl, arglist);
}
}
break;
case BUILT_IN_INF:
case BUILT_IN_INFF:
case BUILT_IN_INFL:
- return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), true);
+ return fold_builtin_inf (type, true);
case BUILT_IN_HUGE_VAL:
case BUILT_IN_HUGE_VALF:
case BUILT_IN_HUGE_VALL:
- return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), false);
+ return fold_builtin_inf (type, false);
case BUILT_IN_NAN:
case BUILT_IN_NANF:
case BUILT_IN_NANL:
- return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), true);
+ return fold_builtin_nan (arglist, type, true);
case BUILT_IN_NANS:
case BUILT_IN_NANSF:
case BUILT_IN_NANSL:
- return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), false);
+ return fold_builtin_nan (arglist, type, false);
case BUILT_IN_FLOOR:
case BUILT_IN_FLOORF:
/* Conveniently construct a function call expression. */
tree
-build_function_call_expr (fn, arglist)
- tree fn, arglist;
+build_function_call_expr (tree fn, tree arglist)
{
tree call_expr;
ellipses, otherwise the last specifier must be a VOID_TYPE. */
static int
-validate_arglist VPARAMS ((tree arglist, ...))
+validate_arglist (tree arglist, ...)
{
enum tree_code code;
int res = 0;
+ va_list ap;
- VA_OPEN (ap, arglist);
- VA_FIXEDARG (ap, tree, arglist);
+ va_start (ap, arglist);
do
{
/* We need gotos here since we can only have one VA_CLOSE in a
function. */
end: ;
- VA_CLOSE (ap);
+ va_end (ap);
return res;
}
/* Default version of target-specific builtin setup that does nothing. */
void
-default_init_builtins ()
+default_init_builtins (void)
{
}
/* Default target-specific builtin expander that does nothing. */
rtx
-default_expand_builtin (exp, target, subtarget, mode, ignore)
- tree exp ATTRIBUTE_UNUSED;
- rtx target ATTRIBUTE_UNUSED;
- rtx subtarget ATTRIBUTE_UNUSED;
- enum machine_mode mode ATTRIBUTE_UNUSED;
- int ignore ATTRIBUTE_UNUSED;
+default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
+ rtx target ATTRIBUTE_UNUSED,
+ rtx subtarget ATTRIBUTE_UNUSED,
+ enum machine_mode mode ATTRIBUTE_UNUSED,
+ int ignore ATTRIBUTE_UNUSED)
{
return NULL_RTX;
}
/* Instantiate all remaining CONSTANT_P_RTX nodes. */
void
-purge_builtin_constant_p ()
+purge_builtin_constant_p (void)
{
rtx insn, set, arg, new, note;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (set = single_set (insn)) != NULL_RTX
- && GET_CODE (SET_SRC (set)) == CONSTANT_P_RTX)
+ && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
+ || (GET_CODE (arg) == SUBREG
+ && (GET_CODE (arg = SUBREG_REG (arg))
+ == CONSTANT_P_RTX))))
{
- arg = XEXP (SET_SRC (set), 0);
+ arg = XEXP (arg, 0);
new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
validate_change (insn, &SET_SRC (set), new, 0);
}
}
+/* Return true if EXP represents data that would potentially reside
+   in a read-only section.  */
+
+static bool
+readonly_data_expr (tree exp)
+{
+  /* Look through no-op conversions around the expression.  */
+  STRIP_NOPS (exp);
+
+  /* Only the address of an object placed in a read-only section
+     qualifies.  */
+  return (TREE_CODE (exp) == ADDR_EXPR
+	  && decl_readonly_section (TREE_OPERAND (exp, 0), 0));
+}