* alias.c (can_address_p): No longer static.
* emit-rtl.c (get_mem_attrs): New parameter, MODE; all callers changed.
Return 0 if all parameters are default values.
(set_mem_attributes): Set MEM_KEEP_ALIAS_SET_P.
(adjust_address_1): Try harder to compute a size.
* expr.c (expand_assignment, store_constructor): If can't address,
set MEM_KEEP_ALIAS_SET_P.
(store_constructor_field): Don't change set if MEM_KEEP_ALIAS_SET_P.
(store_field): Likewise.
(store_constructor): Simplify call to store_constructor_field.
* expr.h (can_address_p): New declaration.
* gensupport.c (gen_rtx_CONST_INT): New function.
* rtl.h (MEM_KEEP_ALIAS_SET_P): New macro.
(MEM_SIZE): Get size from mode, if not set and not BLKmode.
(MEM_COPY_ATTRIBUTES): Copy MEM_KEEP_ALIAS_SET_P.
From-SVN: r46487
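
Taken together, the expr.c and rtl.h changes below amount to a small mark-and-respect protocol around the new MEM_KEEP_ALIAS_SET_P flag.  The fragment here is only a condensed sketch assembled from the hunks that follow (to, to_rtx and alias_set are the locals used there), not a new function added by the patch:

    /* Marking side (expand_assignment, store_constructor): if the
       reference cannot be addressed directly, copy the MEM and flag
       the copy so that later stores leave its alias set alone.  */
    if (! can_address_p (to))
      {
        to_rtx = copy_rtx (to_rtx);
        MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
      }

    /* Consuming side (store_field, store_constructor_field): install a
       new alias set only when the MEM is not flagged and already has a
       nonzero alias set.  */
    if (GET_CODE (to_rtx) == MEM
        && ! MEM_KEEP_ALIAS_SET_P (to_rtx)
        && MEM_ALIAS_SET (to_rtx) != 0)
      set_mem_alias_set (to_rtx, alias_set);
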
Thu Oct 25 08:46:06 2001 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
+ * alias.c (can_address_p): No longer static.
+ * emit-rtl.c (get_mem_attrs): New parameter, MODE; all callers changed.
+ Return 0 if all parameters are default values.
+ (set_mem_attributes): Set MEM_KEEP_ALIAS_SET_P.
+ (adjust_address_1): Try harder to compute a size.
+ * expr.c (expand_assignment, store_constructor): If can't address,
+ set MEM_KEEP_ALIAS_SET_P.
+ (store_constructor_field): Don't change set if MEM_KEEP_ALIAS_SET_P.
+ (store_field): Likewise.
+ (store_constructor): Simplify call to store_constructor_field.
+ * expr.h (can_address_p): New declaration.
+ * gensupport.c (gen_rtx_CONST_INT): New function.
+ * rtl.h (MEM_KEEP_ALIAS_SET_P): New macro.
+ (MEM_SIZE): Get size from mode, if not set and not BLKmode.
+ (MEM_COPY_ATTRIBUTES): Copy MEM_KEEP_ALIAS_SET_P.
+
* stmt.c (expand_end_case): Remove orig_minval and use tree_low_cst.
2001-10-24 Christopher Faylor <cgf@redhat.com>
static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
enum machine_mode));
static int handled_component_p PARAMS ((tree));
-static int can_address_p PARAMS ((tree));
static rtx find_base_value PARAMS ((rtx));
static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
static int insert_subset_children PARAMS ((splay_tree_node, void*));
/* Return 1 if all the nested component references handled by
get_inner_reference in T are such that we can address the object in T. */
-static int
+int
can_address_p (t)
tree t;
{
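
The body of can_address_p is unchanged here; only its linkage is.  For orientation, its comment together with the checks this patch adds in expr.c suggest a shape roughly like the following; this is a paraphrase of the documented behaviour, not the alias.c source:

    /* Walk the handled component references; the object is addressable
       unless some level is a bit-field or a component that the front
       end marked non-addressable.  */
    for (; handled_component_p (t); t = TREE_OPERAND (t, 0))
      if (TREE_CODE (t) == BIT_FIELD_REF
          || (TREE_CODE (t) == COMPONENT_REF
              && DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
          || (TREE_CODE (t) == ARRAY_REF
              && TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))))
        return 0;

    return 1;
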
const void *));
static void mem_attrs_mark PARAMS ((const void *));
static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
- rtx, unsigned int));
+ rtx, unsigned int,
+ enum machine_mode));
/* Probability of the conditional branch currently processed by try_split.
Set to -1 otherwise. */
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
- one identical to it is not already in the table. */
+   one identical to it is not already in the table.  We are doing this for
+   a MEM of mode MODE.  */
static mem_attrs *
-get_mem_attrs (alias, decl, offset, size, align)
+get_mem_attrs (alias, decl, offset, size, align, mode)
HOST_WIDE_INT alias;
tree decl;
rtx offset;
rtx size;
unsigned int align;
+ enum machine_mode mode;
{
mem_attrs attrs;
void **slot;
+ /* If everything is the default, we can just return zero. */
+ if (alias == 0 && decl == 0 && offset == 0
+ && (size == 0
+ || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
+ && (align == 1
+ || (mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
+ return 0;
+
attrs.alias = alias;
attrs.decl = decl;
attrs.offset = offset;
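
As a concrete instance of the default test above: on a typical 32-bit target, where GET_MODE_ALIGNMENT (SImode) is 32, a call with all-default attributes allocates nothing at all (the call below is illustrative, not one made by the patch):

    /* Alias set 0, no decl, no offset, size == mode size (4 for SImode),
       alignment == mode alignment: get_mem_attrs returns 0, so the MEM
       is left with MEM_ATTRS == 0.  */
    mem_attrs *p = get_mem_attrs (0, NULL_TREE, 0, GEN_INT (4), 32, SImode);
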
|| TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
t = TREE_OPERAND (t, 0);
+ /* If this expression can't be addressed (e.g., it contains a reference
+   to a non-addressable field), note that we must not change its alias set.  */
+ if (! can_address_p (t))
+ MEM_KEEP_ALIAS_SET_P (ref) = 1;
+
/* If this is a decl, set the attributes of the MEM from it. */
if (DECL_P (t))
{
}
/* Now set the attributes we computed above. */
- MEM_ATTRS (ref) = get_mem_attrs (alias, decl, offset, size, align);
+ MEM_ATTRS (ref)
+ = get_mem_attrs (alias, decl, offset, size, align, GET_MODE (ref));
/* If this is already known to be a scalar or aggregate, we are done. */
if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
#endif
MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
- MEM_SIZE (mem), MEM_ALIGN (mem));
+ MEM_SIZE (mem), MEM_ALIGN (mem),
+ GET_MODE (mem));
}
/* Set the alignment of MEM to ALIGN bits. */
unsigned int align;
{
MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_DECL (mem),
- MEM_OFFSET (mem), MEM_SIZE (mem), align);
+ MEM_OFFSET (mem), MEM_SIZE (mem), align,
+ GET_MODE (mem));
}
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
= get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
(mmode == BLKmode ? 1
- : GET_MODE_ALIGNMENT (mmode) / BITS_PER_UNIT));
+ : GET_MODE_ALIGNMENT (mmode) / BITS_PER_UNIT),
+ mmode);
return new;
}
rtx addr = XEXP (memref, 0);
rtx new;
rtx memoffset = MEM_OFFSET (memref);
+ rtx size = 0;
unsigned int memalign = MEM_ALIGN (memref);
/* If MEMREF is a LO_SUM and the offset is within the alignment of the
if (offset != 0)
memalign = MIN (memalign, (offset & -offset) * BITS_PER_UNIT);
- MEM_ATTRS (new)
- = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref), memoffset,
- mode == BLKmode
- ? 0 : GEN_INT (GET_MODE_SIZE (mode)), memalign);
+ /* We can compute the size in a number of ways. */
+ if (mode != BLKmode)
+ size = GEN_INT (GET_MODE_SIZE (mode));
+ else if (MEM_SIZE (memref))
+ size = plus_constant (MEM_SIZE (memref), -offset);
+
+ MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref),
+ memoffset, size, memalign, GET_MODE (new));
/* At some point, we should validate that this offset is within the object,
if all the appropriate values are known. */
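
A worked instance of the two size cases, assuming the adjust_address macro in its (MEMREF, MODE, OFFSET) form and a BLKmode mem whose MEM_SIZE is GEN_INT (16) (both are assumptions for illustration, not values from the patch):

    rtx word = adjust_address (mem, SImode, 4);
    /* mode != BLKmode, so the new size is GEN_INT (GET_MODE_SIZE (SImode)),
       i.e. GEN_INT (4).  */

    rtx rest = adjust_address (mem, BLKmode, 4);
    /* Still BLKmode, but the old size is known, so the new size is
       plus_constant (GEN_INT (16), -4), i.e. GEN_INT (12).  */
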
we don't know. */
MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref),
0, 0, MIN (MEM_ALIGN (memref),
- pow2 * BITS_PER_UNIT));
+ pow2 * BITS_PER_UNIT),
+ GET_MODE (new));
return new;
}
}
else
{
+ if (! can_address_p (to))
+ {
+ to_rtx = copy_rtx (to_rtx);
+ MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
+ }
+
result = store_field (to_rtx, bitsize, bitpos, mode1, from,
(want_value
/* Spurious cast for HPUX compiler. */
set, if required. */
if (bitpos != 0)
align = MIN (align, (unsigned int) bitpos & - bitpos);
- if (GET_CODE (target) == MEM)
+
+ if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
+ && MEM_ALIAS_SET (target) != 0)
set_mem_alias_set (target, alias_set);
store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
mode = word_mode;
}
#endif
+
+ if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
+ && DECL_NONADDRESSABLE_P (field))
+ {
+ to_rtx = copy_rtx (to_rtx);
+ MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
+ }
+
store_constructor_field (to_rtx, bitsize, bitpos, mode,
TREE_VALUE (elt), type, align, cleared,
- (DECL_NONADDRESSABLE_P (field)
- && GET_CODE (to_rtx) == MEM)
- ? MEM_ALIAS_SET (to_rtx)
- : get_alias_set (TREE_TYPE (field)));
+ get_alias_set (TREE_TYPE (field)));
}
}
else if (TREE_CODE (type) == ARRAY_TYPE)
for (; lo <= hi; lo++)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
+
+ if (GET_CODE (target) == MEM
+ && !MEM_KEEP_ALIAS_SET_P (target)
+ && TYPE_NONALIASED_COMPONENT (type))
+ {
+ target = copy_rtx (target);
+ MEM_KEEP_ALIAS_SET_P (target) = 1;
+ }
+
store_constructor_field
(target, bitsize, bitpos, mode, value, type, align,
- cleared,
- TYPE_NONALIASED_COMPONENT (type)
- ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
+ cleared, get_alias_set (elttype));
}
}
else
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
+ if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
+ && TYPE_NONALIASED_COMPONENT (type))
+ {
+ target = copy_rtx (target);
+ MEM_KEEP_ALIAS_SET_P (target) = 1;
+ }
+
store_constructor_field (target, bitsize, bitpos, mode, value,
type, align, cleared,
- TYPE_NONALIASED_COMPONENT (type)
- && GET_CODE (target) == MEM
- ? MEM_ALIAS_SET (target) :
get_alias_set (elttype));
}
bitpos / BITS_PER_UNIT));
MEM_SET_IN_STRUCT_P (to_rtx, 1);
- set_mem_alias_set (to_rtx, alias_set);
+ if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
+ set_mem_alias_set (to_rtx, alias_set);
return store_expr (exp, to_rtx, value_mode != VOIDmode);
}
extern void record_alias_subset PARAMS ((HOST_WIDE_INT,
HOST_WIDE_INT));
extern HOST_WIDE_INT new_alias_set PARAMS ((void));
+extern int can_address_p PARAMS ((tree));
\f
/* Functions from expr.c: */
VA_CLOSE (ap);
}
+
+/* Make a version of gen_rtx_CONST_INT so that GEN_INT can be used in
+ the gensupport programs. */
+
+rtx
+gen_rtx_CONST_INT (mode, arg)
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+ HOST_WIDE_INT arg;
+{
+ rtx rt = rtx_alloc (CONST_INT);
+
+ XWINT (rt, 0) = arg;
+ return rt;
+}
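
GEN_INT is defined in rtl.h as gen_rtx_CONST_INT (VOIDmode, (N)), so with this definition a generator program can build integer constants the same way compiler code does; unlike the emit-rtl.c gen_rtx_CONST_INT, this one makes no attempt to share small constants, it simply allocates.  A hypothetical use in a gen* program:

    /* Build the rtx for the constant 1.  The macro expands to
       gen_rtx_CONST_INT (VOIDmode, 1), which now resolves to the
       function above instead of requiring emit-rtl.c.  */
    rtx one = GEN_INT (1);
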
\f
/* Queue PATTERN on LIST_TAIL. */
/* 1 in an INSN if it can alter flow of control
within this function.
+ MEM_KEEP_ALIAS_SET_P in a MEM.
LINK_COST_ZERO in an INSN_LIST. */
unsigned int jump : 1;
/* 1 in an INSN if it can call another function.
#define ASM_OPERANDS_SOURCE_FILE(RTX) XCSTR ((RTX), 5, ASM_OPERANDS)
#define ASM_OPERANDS_SOURCE_LINE(RTX) XCINT ((RTX), 6, ASM_OPERANDS)
+/* For a MEM rtx, 1 if we should keep the alias set for this mem
+   unchanged when we access a component.  Set to 1, for example, when we
+ are already in a non-addressable component of an aggregate. */
+#define MEM_KEEP_ALIAS_SET_P(RTX) ((RTX)->jump)
+
/* For a MEM rtx, 1 if it's a volatile reference.
Also in an ASM_OPERANDS rtx. */
#define MEM_VOLATILE_P(RTX) ((RTX)->volatil)
/* For a MEM rtx, the size in bytes of the MEM, if known, as an RTX that
is always a CONST_INT. */
-#define MEM_SIZE(RTX) (MEM_ATTRS (RTX) == 0 ? 0 : MEM_ATTRS (RTX)->size)
+#define MEM_SIZE(RTX) \
+(MEM_ATTRS (RTX) != 0 ? MEM_ATTRS (RTX)->size \
+ : GET_MODE (RTX) != BLKmode ? GEN_INT (GET_MODE_SIZE (GET_MODE (RTX))) \
+ : 0)
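
With this fallback, MEM_SIZE is meaningful even for a MEM that carries no attribute block, provided its mode pins down the size; a brief illustration (mem and bytes are hypothetical locals):

    /* SImode MEM with MEM_ATTRS == 0: MEM_SIZE yields GEN_INT (4).
       BLKmode MEM with MEM_ATTRS == 0: MEM_SIZE yields 0, i.e. unknown.  */
    if (MEM_SIZE (mem) != 0)
      bytes = INTVAL (MEM_SIZE (mem));
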
/* For a MEM rtx, the alignment in bits. */
#define MEM_ALIGN(RTX) \
/* Copy the attributes that apply to memory locations from RHS to LHS. */
-#define MEM_COPY_ATTRIBUTES(LHS, RHS) \
- (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS), \
- MEM_IN_STRUCT_P (LHS) = MEM_IN_STRUCT_P (RHS), \
- MEM_SCALAR_P (LHS) = MEM_SCALAR_P (RHS), \
- RTX_UNCHANGING_P (LHS) = RTX_UNCHANGING_P (RHS), \
+#define MEM_COPY_ATTRIBUTES(LHS, RHS) \
+ (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS), \
+ MEM_IN_STRUCT_P (LHS) = MEM_IN_STRUCT_P (RHS), \
+ MEM_SCALAR_P (LHS) = MEM_SCALAR_P (RHS), \
+ RTX_UNCHANGING_P (LHS) = RTX_UNCHANGING_P (RHS), \
+ MEM_KEEP_ALIAS_SET_P (LHS) = MEM_KEEP_ALIAS_SET_P (RHS), \
MEM_ATTRS (LHS) = MEM_ATTRS (RHS))
/* For a LABEL_REF, 1 means that this reference is to a label outside the