/* Subroutines for manipulating rtx's in semantically interesting ways.
- Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
- 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 1987-2014 Free Software Foundation, Inc.
This file is part of GCC.
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
+#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
+#include "hashtab.h"
+#include "hash-set.h"
+#include "vec.h"
+#include "machmode.h"
+#include "hard-reg-set.h"
+#include "input.h"
#include "function.h"
#include "expr.h"
+#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
-#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
/* Truncate and perhaps sign-extend C as appropriate for MODE. */
HOST_WIDE_INT
-trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
+trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
int width = GET_MODE_PRECISION (mode);
/* You want to truncate to a _what_? */
- gcc_assert (SCALAR_INT_MODE_P (mode));
+ gcc_assert (SCALAR_INT_MODE_P (mode)
+ || POINTER_BOUNDS_MODE_P (mode));
/* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
if (mode == BImode)
}
/* Return an rtx for the sum of X and the integer C, given that X has
- mode MODE. */
+ mode MODE. INPLACE is true if X can be modified inplace or false
+ if it must be treated as immutable. */
rtx
-plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c)
+plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
+ bool inplace)
{
RTX_CODE code;
rtx y;
switch (code)
{
- case CONST_INT:
- if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
- {
- unsigned HOST_WIDE_INT l1 = INTVAL (x);
- HOST_WIDE_INT h1 = (l1 >> (HOST_BITS_PER_WIDE_INT - 1)) ? -1 : 0;
- unsigned HOST_WIDE_INT l2 = c;
- HOST_WIDE_INT h2 = c < 0 ? -1 : 0;
- unsigned HOST_WIDE_INT lv;
- HOST_WIDE_INT hv;
-
- if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
- gcc_unreachable ();
-
- return immed_double_const (lv, hv, VOIDmode);
- }
-
- return GEN_INT (INTVAL (x) + c);
-
- case CONST_DOUBLE:
- {
- unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
- HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
- unsigned HOST_WIDE_INT l2 = c;
- HOST_WIDE_INT h2 = c < 0 ? -1 : 0;
- unsigned HOST_WIDE_INT lv;
- HOST_WIDE_INT hv;
-
- if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
- /* Sorry, we have no way to represent overflows this wide.
- To fix, add constant support wider than CONST_DOUBLE. */
- gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_DOUBLE_INT);
-
- return immed_double_const (lv, hv, VOIDmode);
- }
-
+ CASE_CONST_SCALAR_INT:
+ return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
+ mode);
case MEM:
/* If this is a reference to the constant pool, try replacing it with
a reference to a new constant. If the resulting address isn't
case CONST:
/* If adding to something entirely constant, set a flag
so that we can add a CONST around the result. */
+ if (inplace && shared_const_p (x))
+ inplace = false;
x = XEXP (x, 0);
all_constant = 1;
goto restart;
if (CONSTANT_P (XEXP (x, 1)))
{
- x = gen_rtx_PLUS (mode, XEXP (x, 0),
- plus_constant (mode, XEXP (x, 1), c));
+ rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
+ if (term == const0_rtx)
+ x = XEXP (x, 0);
+ else if (inplace)
+ XEXP (x, 1) = term;
+ else
+ x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
c = 0;
}
- else if (find_constant_term_loc (&y))
+ else if (rtx *const_loc = find_constant_term_loc (&y))
{
- /* We need to be careful since X may be shared and we can't
- modify it in place. */
- rtx copy = copy_rtx (x);
- rtx *const_loc = find_constant_term_loc (&copy);
-
- *const_loc = plus_constant (mode, *const_loc, c);
- x = copy;
+ if (!inplace)
+ {
+ /* We need to be careful since X may be shared and we can't
+ modify it in place. */
+ x = copy_rtx (x);
+ const_loc = find_constant_term_loc (&x);
+ }
+ *const_loc = plus_constant (mode, *const_loc, c, true);
c = 0;
}
break;
}
if (c != 0)
- x = gen_rtx_PLUS (mode, x, GEN_INT (c));
+ x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
return x;
return x;
}
+/* Returns a tree for the size of EXP in bytes.  For a DECL with a
+   recorded DECL_SIZE_UNIT, that value is returned directly; otherwise
+   the size is computed from EXP's type.  */
+
+static tree
+tree_expr_size (const_tree exp)
+{
+  /* Prefer the decl's own size-in-bytes when it has been laid out.  */
+  if (DECL_P (exp)
+      && DECL_SIZE_UNIT (exp) != 0)
+    return DECL_SIZE_UNIT (exp);
+  else
+    /* Fall back to the size implied by the expression's type.  */
+    return size_in_bytes (TREE_TYPE (exp));
+}
+
/* Return an rtx for the size in bytes of the value of EXP. */
rtx
gcc_assert (size);
}
- if (size == 0 || !host_integerp (size, 0))
+ if (size == 0 || !tree_fits_shwi_p (size))
return -1;
- return tree_low_cst (size, 0);
+ return tree_to_shwi (size);
}
\f
/* Return a copy of X in which all memory references
an address in the address space's address mode, or vice versa (TO_MODE says
which way). We take advantage of the fact that pointers are not allowed to
overflow by commuting arithmetic operations over conversions so that address
- arithmetic insns can be used. */
+ arithmetic insns can be used. IN_CONST is true if this conversion is inside
+ a CONST. */
-rtx
-convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
- rtx x, addr_space_t as ATTRIBUTE_UNUSED)
+static rtx
+convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
+ rtx x, addr_space_t as ATTRIBUTE_UNUSED,
+ bool in_const ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
- enum machine_mode pointer_mode, address_mode, from_mode;
+ machine_mode pointer_mode, address_mode, from_mode;
rtx temp;
enum rtx_code code;
to the default case. */
switch (GET_CODE (x))
{
- case CONST_INT:
- case CONST_DOUBLE:
+ CASE_CONST_SCALAR_INT:
if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
code = TRUNCATE;
else if (POINTERS_EXTEND_UNSIGNED < 0)
break;
case LABEL_REF:
- temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
+ temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
return temp;
break;
case CONST:
return gen_rtx_CONST (to_mode,
- convert_memory_address_addr_space
- (to_mode, XEXP (x, 0), as));
+ convert_memory_address_addr_space_1
+ (to_mode, XEXP (x, 0), as, true));
break;
case PLUS:
case MULT:
- /* FIXME: For addition, we used to permute the conversion and
- addition operation only if one operand is a constant and
- converting the constant does not change it or if one operand
- is a constant and we are using a ptr_extend instruction
- (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
- may overflow/underflow. We relax the condition to include
- zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
- parts of the compiler depend on it. See PR 49721.
-
+ /* For addition we can safely permute the conversion and addition
+ operation if one operand is a constant and converting the constant
+ does not change it or if one operand is a constant and we are
+ using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
We can always safely permute them if we are making the address
- narrower. */
+ narrower. Inside a CONST RTL, this is safe for both pointers
+ zero or sign extended as pointers cannot wrap. */
if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
|| (GET_CODE (x) == PLUS
&& CONST_INT_P (XEXP (x, 1))
- && (POINTERS_EXTEND_UNSIGNED != 0
- || XEXP (x, 1) == convert_memory_address_addr_space
- (to_mode, XEXP (x, 1), as))))
+ && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
+ || XEXP (x, 1) == convert_memory_address_addr_space_1
+ (to_mode, XEXP (x, 1), as, in_const)
+ || POINTERS_EXTEND_UNSIGNED < 0)))
return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
- convert_memory_address_addr_space
- (to_mode, XEXP (x, 0), as),
+ convert_memory_address_addr_space_1
+ (to_mode, XEXP (x, 0), as, in_const),
XEXP (x, 1));
break;
x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
+
+/* Given X, a memory address in address space AS' pointer mode, convert it to
+   an address in the address space's address mode, or vice versa (TO_MODE says
+   which way).  We take advantage of the fact that pointers are not allowed to
+   overflow by commuting arithmetic operations over conversions so that address
+   arithmetic insns can be used.
+
+   This is the public entry point; it delegates to the _1 worker with
+   IN_CONST false, since at this point we are not inside a CONST rtx.  */
+
+rtx
+convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
+{
+  return convert_memory_address_addr_space_1 (to_mode, x, as, false);
+}
\f
/* Return something equivalent to X but valid as a memory address for something
of mode MODE in the named address space AS. When X is not itself valid,
this works by copying X or subexpressions of it into registers. */
rtx
-memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
+memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
rtx oldx = x;
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
x = convert_memory_address_addr_space (address_mode, x, as);
return x;
}
-/* Convert a mem ref into one with a valid memory address.
- Pass through anything else unchanged. */
+/* If REF is a MEM with an invalid address, change it into a valid address.
+ Pass through anything else unchanged. REF must be an unshared rtx and
+ the function may modify it in-place. */
rtx
validize_mem (rtx ref)
MEM_ADDR_SPACE (ref)))
return ref;
- /* Don't alter REF itself, since that is probably a stack slot. */
- return replace_equiv_address (ref, XEXP (ref, 0));
+ return replace_equiv_address (ref, XEXP (ref, 0), true);
}
/* If X is a memory reference to a member of an object block, try rewriting
{
rtx base;
HOST_WIDE_INT offset;
- enum machine_mode mode;
+ machine_mode mode;
if (!flag_section_anchors)
return x;
in case X is a constant. */
rtx
-copy_to_mode_reg (enum machine_mode mode, rtx x)
+copy_to_mode_reg (machine_mode mode, rtx x)
{
rtx temp = gen_reg_rtx (mode);
since we mark it as a "constant" register. */
rtx
-force_reg (enum machine_mode mode, rtx x)
+force_reg (machine_mode mode, rtx x)
{
- rtx temp, insn, set;
+ rtx temp, set;
+ rtx_insn *insn;
if (REG_P (x))
return x;
MODE is the mode to use for X in case it is a constant. */
rtx
-copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
+copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
rtx temp;
FOR_RETURN is nonzero if the caller is promoting the return value
of FNDECL, else it is for promoting args. */
-enum machine_mode
-promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
+machine_mode
+promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
const_tree funtype, int for_return)
{
/* Called without a type node for a libcall. */
PUNSIGNEDP points to the signedness of the type and may be adjusted
to show what signedness to use on extension operations. */
-enum machine_mode
-promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
+machine_mode
+promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
of DECL after promotion. */
-enum machine_mode
+machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
tree type = TREE_TYPE (decl);
int unsignedp = TYPE_UNSIGNED (type);
- enum machine_mode mode = DECL_MODE (decl);
- enum machine_mode pmode;
+ machine_mode mode = DECL_MODE (decl);
+ machine_mode pmode;
if (TREE_CODE (decl) == RESULT_DECL
|| TREE_CODE (decl) == PARM_DECL)
static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
- rtx temp, insn;
+ rtx temp;
+ rtx_insn *insn;
#ifndef STACK_GROWS_DOWNWARD
/* Hereafter anti_p means subtract_p. */
rtx sa = *psave;
/* The default is that we use a move insn and save in a Pmode object. */
rtx (*fcn) (rtx, rtx) = gen_move_insn;
- enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
+ machine_mode mode = STACK_SAVEAREA_MODE (save_level);
/* See if this machine has anything special to do for this kind of save. */
switch (save_level)
unsigned required_align, bool cannot_accumulate)
{
HOST_WIDE_INT stack_usage_size = -1;
- rtx final_label, final_target, target;
+ rtx_code_label *final_label;
+ rtx final_target, target;
unsigned extra_align = 0;
bool must_align;
{
/* Look into the last emitted insn and see if we can deduce
something for the register. */
- rtx insn, set, note;
+ rtx_insn *insn;
+ rtx set, note;
insn = get_last_insn ();
if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
{
current_function_has_unbounded_dynamic_stack_size = 1;
}
- final_label = NULL_RTX;
+ final_label = NULL;
final_target = NULL_RTX;
/* If we are splitting the stack, we need to ask the backend whether
least it doesn't cause a stack overflow. */
if (flag_split_stack)
{
- rtx available_label, ask, space, func;
+ rtx_code_label *available_label;
+ rtx ask, space, func;
- available_label = NULL_RTX;
+ available_label = NULL;
#ifdef HAVE_split_stack_space_check
if (HAVE_split_stack_space_check)
else
{
ask = expand_binop (Pmode, add_optab, size,
- GEN_INT (required_align / BITS_PER_UNIT - 1),
+ gen_int_mode (required_align / BITS_PER_UNIT - 1,
+ Pmode),
NULL_RTX, 1, OPTAB_LIB_WIDEN);
must_align = true;
}
if (crtl->limit_stack)
{
rtx available;
- rtx space_available = gen_label_rtx ();
+ rtx_code_label *space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
available = expand_binop (Pmode, sub_optab,
stack_pointer_rtx, stack_limit_rtx,
but we know it can't. So add ourselves and then do
TRUNC_DIV_EXPR. */
target = expand_binop (Pmode, add_optab, target,
- GEN_INT (required_align / BITS_PER_UNIT - 1),
+ gen_int_mode (required_align / BITS_PER_UNIT - 1,
+ Pmode),
NULL_RTX, 1, OPTAB_LIB_WIDEN);
target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
- GEN_INT (required_align / BITS_PER_UNIT),
+ gen_int_mode (required_align / BITS_PER_UNIT,
+ Pmode),
NULL_RTX, 1);
target = expand_mult (Pmode, target,
- GEN_INT (required_align / BITS_PER_UNIT),
+ gen_int_mode (required_align / BITS_PER_UNIT,
+ Pmode),
NULL_RTX, 1);
}
else
{
rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
- rtx loop_lab = gen_label_rtx ();
- rtx end_lab = gen_label_rtx ();
-
+ rtx_code_label *loop_lab = gen_label_rtx ();
+ rtx_code_label *end_lab = gen_label_rtx ();
/* Step 1: round SIZE to the previous multiple of the interval. */
/* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
rounded_size
- = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
+ = simplify_gen_binary (AND, Pmode, size,
+ gen_int_mode (-PROBE_INTERVAL, Pmode));
rounded_size_op = force_operand (rounded_size, NULL_RTX);
/* TEST_ADDR = SP + FIRST. */
test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
stack_pointer_rtx,
- GEN_INT (first)), NULL_RTX);
+ gen_int_mode (first, Pmode)),
+ NULL_RTX);
/* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
/* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
- GEN_INT (PROBE_INTERVAL), test_addr,
+ gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1, OPTAB_WIDEN);
gcc_assert (temp == test_addr);
emit_stack_probe (addr);
}
}
+
+ /* Make sure nothing is scheduled before we are done. */
+ emit_insn (gen_blockage ());
}
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
else
{
rtx rounded_size, rounded_size_op, last_addr, temp;
- rtx loop_lab = gen_label_rtx ();
- rtx end_lab = gen_label_rtx ();
+ rtx_code_label *loop_lab = gen_label_rtx ();
+ rtx_code_label *end_lab = gen_label_rtx ();
/* Step 1: round SIZE to the previous multiple of the interval. */
/* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
rounded_size
- = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
+ = simplify_gen_binary (AND, Pmode, size,
+ gen_int_mode (-PROBE_INTERVAL, Pmode));
rounded_size_op = force_operand (rounded_size, NULL_RTX);
&& GET_MODE (val) == BLKmode)
{
unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
- enum machine_mode tmpmode;
+ machine_mode tmpmode;
/* int_size_in_bytes can return -1. We don't need a check here
since the value of bytes will then be large enough that no
in which a scalar value of mode MODE was returned by a library call. */
rtx
-hard_libcall_value (enum machine_mode mode, rtx fun)
+hard_libcall_value (machine_mode mode, rtx fun)
{
  /* The register (or parallel) for a libcall's return value is entirely
     target-specific, so defer to the target hook.  */
  return targetm.calls.libcall_value (mode, fun);
}