+2011-07-19 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * doc/rtl.texi (MEM_SIZE_KNOWN_P): Document.
+ (MEM_SIZE): Change from returning an rtx to returning a HOST_WIDE_INT.
+ * rtl.h (MEM_SIZE_KNOWN_P): New macro.
+ (MEM_SIZE): Return a HOST_WIDE_INT rather than an rtx.
+ * emit-rtl.h (set_mem_size): Take a HOST_WIDE_INT rather than an rtx.
+ (clear_mem_size): Declare.
+ * emit-rtl.c (set_mem_size): Take a HOST_WIDE_INT rather than an rtx.
+ (clear_mem_size): New function.
+ * alias.c (ao_ref_from_mem): Adjust uses of MEM_SIZE, using
+ MEM_SIZE_KNOWN_P to test whether the size is known, and MEM_SIZE
+ to get a HOST_WIDE_INT size. Adjust calls to set_mem_size,
+ passing a HOST_WIDE_INT rather than an rtx. Use clear_mem_size
+ to clear the size.
+ (nonoverlapping_memrefs_p): Likewise.
+ * builtins.c (get_memory_rtx, expand_builtin_memcmp): Likewise.
+ (expand_builtin_init_trampoline): Likewise.
+ * calls.c (compute_argument_addresses): Likewise.
+ * cfgcleanup.c (merge_memattrs): Likewise.
+ * dce.c (find_call_stack_args): Likewise.
+ * dse.c (record_store, scan_insn): Likewise.
+ * dwarf2out.c (dw_sra_loc_expr): Likewise.
+ * expr.c (emit_block_move_hints): Likewise.
+ * function.c (assign_parm_find_stack_rtl): Likewise.
+ * print-rtl.c (print_rtx): Likewise.
+ * reload.c (find_reloads_subreg_address): Likewise.
+ * rtlanal.c (may_trap_p_1): Likewise.
+ * var-tracking.c (track_expr_p): Likewise.
+ * varasm.c (assemble_trampoline_template): Likewise.
+ * config/arm/arm.c (arm_print_operand): Likewise.
+ * config/h8300/h8300.c (h8sx_emit_movmd): Likewise.
+ * config/i386/i386.c (expand_movmem_via_rep_mov): Likewise.
+ (expand_setmem_via_rep_stos, expand_constant_movmem_prologue)
+ (expand_constant_setmem_prologue): Likewise.
+ * config/mips/mips.c (mips_get_unaligned_mem): Likewise.
+ (mips_mem_fits_mode_p): Likewise.
+ * config/rs6000/rs6000.c (expand_block_move): Likewise.
+ (adjacent_mem_locations): Likewise.
+ * config/s390/s390.c (s390_expand_setmem): Likewise.
+ (s390_expand_insv): Likewise.
+ * config/s390/s390.md (*extzv<mode>, *extv<mode>): Likewise.
+ (*extendqi<mode>2_short_displ): Likewise.
+ * config/sh/sh.c (expand_block_move): Likewise.
+ * config/sh/sh.md (extv, extzv): Likewise.
+
2011-07-19 Richard Sandiford <rdsandiford@googlemail.com>
* emit-rtl.c (mem_attrs_eq_p): New function, split out from...
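In short, the patch replaces the rtx-based MEM size accessors with integer-based ones throughout. A minimal before/after sketch of the idiom, using placeholder names `mem` and `nbytes` (the calls are the GCC-internal interfaces listed above, so the fragment only makes sense inside the GCC tree):

    /* Old interface: the size was a CONST_INT rtx, or NULL_RTX if unknown.  */
    if (MEM_SIZE (mem))
      nbytes = INTVAL (MEM_SIZE (mem));
    set_mem_size (mem, GEN_INT (nbytes));   /* record a size */
    set_mem_size (mem, NULL_RTX);           /* forget the size */

    /* New interface: the size is a plain HOST_WIDE_INT, with an explicit
       "known" predicate and a dedicated function for clearing it.  */
    if (MEM_SIZE_KNOWN_P (mem))
      nbytes = MEM_SIZE (mem);
    set_mem_size (mem, nbytes);             /* record a size */
    clear_mem_size (mem);                   /* forget the size */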
ref->ref_alias_set = MEM_ALIAS_SET (mem);
- /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
+ /* If MEM_OFFSET or MEM_SIZE is unknown, we have to punt.
Keep points-to related information though. */
if (!MEM_OFFSET (mem)
- || !MEM_SIZE (mem))
+ || !MEM_SIZE_KNOWN_P (mem))
{
ref->ref = NULL_TREE;
ref->offset = 0;
case of promoted subregs on bigendian targets. Trust the MEM_EXPR
here. */
if (INTVAL (MEM_OFFSET (mem)) < 0
- && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
+ && ((MEM_SIZE (mem) + INTVAL (MEM_OFFSET (mem)))
* BITS_PER_UNIT) == ref->size)
return true;
ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
- ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
+ ref->size = MEM_SIZE (mem) * BITS_PER_UNIT;
/* The MEM may extend into adjacent fields, so adjust max_size if
necessary. */
return 0;
sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
- : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
+ : MEM_SIZE_KNOWN_P (rtlx) ? MEM_SIZE (rtlx)
: -1);
sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
- : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
- -1);
+ : MEM_SIZE_KNOWN_P (rtly) ? MEM_SIZE (rtly)
+ : -1);
/* If we have an offset for either memref, it can update the values computed
above. */
/* If a memref has both a size and an offset, we can use the smaller size.
We can't do this if the offset isn't known because we must view this
memref as being anywhere inside the DECL's MEM. */
- if (MEM_SIZE (x) && moffsetx)
- sizex = INTVAL (MEM_SIZE (x));
- if (MEM_SIZE (y) && moffsety)
- sizey = INTVAL (MEM_SIZE (y));
+ if (MEM_SIZE_KNOWN_P (x) && moffsetx)
+ sizex = MEM_SIZE (x);
+ if (MEM_SIZE_KNOWN_P (y) && moffsety)
+ sizey = MEM_SIZE (y);
/* Put the values of the memref with the lower offset in X's values. */
if (offsetx > offsety)
}
}
set_mem_alias_set (mem, 0);
- set_mem_size (mem, NULL_RTX);
+ clear_mem_size (mem);
}
return mem;
/* Set MEM_SIZE as appropriate. */
if (CONST_INT_P (arg3_rtx))
{
- set_mem_size (arg1_rtx, arg3_rtx);
- set_mem_size (arg2_rtx, arg3_rtx);
+ set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
+ set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
}
#ifdef HAVE_cmpmemsi
{
m_tramp = change_address (m_tramp, BLKmode, tmp);
set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
- set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
+ set_mem_size (m_tramp, TRAMPOLINE_SIZE);
}
/* The FUNC argument should be the address of the nested function.
partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
MODE_INT, 1);
args[i].stack = gen_rtx_MEM (partial_mode, addr);
- set_mem_size (args[i].stack, GEN_INT (units_on_stack));
+ set_mem_size (args[i].stack, units_on_stack);
}
else
{
Generate a simple memory reference of the correct size.
*/
args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
- set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
+ set_mem_size (args[i].stack_slot, units_on_stack);
}
else
{
MEM_ATTRS (x) = 0;
else
{
- rtx mem_size;
+ HOST_WIDE_INT mem_size;
if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
{
set_mem_offset (y, 0);
}
- if (!MEM_SIZE (x))
- mem_size = NULL_RTX;
- else if (!MEM_SIZE (y))
- mem_size = NULL_RTX;
+ if (MEM_SIZE_KNOWN_P (x) && MEM_SIZE_KNOWN_P (y))
+ {
+ mem_size = MAX (MEM_SIZE (x), MEM_SIZE (y));
+ set_mem_size (x, mem_size);
+ set_mem_size (y, mem_size);
+ }
else
- mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
- INTVAL (MEM_SIZE (y))));
- set_mem_size (x, mem_size);
- set_mem_size (y, mem_size);
+ {
+ clear_mem_size (x);
+ clear_mem_size (y);
+ }
set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
set_mem_align (y, MEM_ALIGN (x));
instruction (for some alignments) as an aid to the memory subsystem
of the target. */
align = MEM_ALIGN (x) >> 3;
- memsize = INTVAL (MEM_SIZE (x));
+ memsize = MEM_SIZE (x);
/* Only certain alignment specifiers are supported by the hardware. */
if (memsize == 16 && (align % 32) == 0)
first_dest = replace_equiv_address (dest, dest_reg);
first_src = replace_equiv_address (src, src_reg);
- set_mem_size (first_dest, GEN_INT (n & -factor));
- set_mem_size (first_src, GEN_INT (n & -factor));
+ set_mem_size (first_dest, n & -factor);
+ set_mem_size (first_src, n & -factor);
length = copy_to_mode_reg (HImode, gen_int_mode (n / factor, HImode));
emit_insn (gen_movmd (first_dest, first_src, length, GEN_INT (factor)));
rtx destexp;
rtx srcexp;
rtx countreg;
+ HOST_WIDE_INT rounded_count;
/* If the size is known, it is shorter to use rep movs. */
if (mode == QImode && CONST_INT_P (count)
}
if (CONST_INT_P (count))
{
- count = GEN_INT (INTVAL (count)
+ rounded_count = (INTVAL (count)
& ~((HOST_WIDE_INT) GET_MODE_SIZE (mode) - 1));
destmem = shallow_copy_rtx (destmem);
srcmem = shallow_copy_rtx (srcmem);
- set_mem_size (destmem, count);
- set_mem_size (srcmem, count);
+ set_mem_size (destmem, rounded_count);
+ set_mem_size (srcmem, rounded_count);
}
else
{
- if (MEM_SIZE (destmem))
- set_mem_size (destmem, NULL_RTX);
- if (MEM_SIZE (srcmem))
- set_mem_size (srcmem, NULL_RTX);
+ if (MEM_SIZE_KNOWN_P (destmem))
+ clear_mem_size (destmem);
+ if (MEM_SIZE_KNOWN_P (srcmem))
+ clear_mem_size (srcmem);
}
emit_insn (gen_rep_mov (destptr, destmem, srcptr, srcmem, countreg,
destexp, srcexp));
{
rtx destexp;
rtx countreg;
+ HOST_WIDE_INT rounded_count;
if (destptr != XEXP (destmem, 0) || GET_MODE (destmem) != BLKmode)
destmem = adjust_automodify_address_nv (destmem, BLKmode, destptr, 0);
destexp = gen_rtx_PLUS (Pmode, destptr, countreg);
if (orig_value == const0_rtx && CONST_INT_P (count))
{
- count = GEN_INT (INTVAL (count)
+ rounded_count = (INTVAL (count)
& ~((HOST_WIDE_INT) GET_MODE_SIZE (mode) - 1));
destmem = shallow_copy_rtx (destmem);
- set_mem_size (destmem, count);
+ set_mem_size (destmem, rounded_count);
}
- else if (MEM_SIZE (destmem))
- set_mem_size (destmem, NULL_RTX);
+ else if (MEM_SIZE_KNOWN_P (destmem))
+ clear_mem_size (destmem);
emit_insn (gen_rep_stos (destptr, countreg, destmem, value, destexp));
}
int desired_align, int align_bytes)
{
rtx src = *srcp;
- rtx src_size, dst_size;
+ rtx orig_dst = dst;
+ rtx orig_src = src;
int off = 0;
int src_align_bytes = get_mem_align_offset (src, desired_align * BITS_PER_UNIT);
if (src_align_bytes >= 0)
src_align_bytes = desired_align - src_align_bytes;
- src_size = MEM_SIZE (src);
- dst_size = MEM_SIZE (dst);
if (align_bytes & 1)
{
dst = adjust_automodify_address_nv (dst, QImode, destreg, 0);
if (MEM_ALIGN (src) < src_align * BITS_PER_UNIT)
set_mem_align (src, src_align * BITS_PER_UNIT);
}
- if (dst_size)
- set_mem_size (dst, GEN_INT (INTVAL (dst_size) - align_bytes));
- if (src_size)
- set_mem_size (dst, GEN_INT (INTVAL (src_size) - align_bytes));
+ if (MEM_SIZE_KNOWN_P (orig_dst))
+ set_mem_size (dst, MEM_SIZE (orig_dst) - align_bytes);
+ if (MEM_SIZE_KNOWN_P (orig_src))
+ set_mem_size (src, MEM_SIZE (orig_src) - align_bytes);
*srcp = src;
return dst;
}
int desired_align, int align_bytes)
{
int off = 0;
- rtx dst_size = MEM_SIZE (dst);
+ rtx orig_dst = dst;
if (align_bytes & 1)
{
dst = adjust_automodify_address_nv (dst, QImode, destreg, 0);
dst = adjust_automodify_address_nv (dst, BLKmode, destreg, off);
if (MEM_ALIGN (dst) < (unsigned int) desired_align * BITS_PER_UNIT)
set_mem_align (dst, desired_align * BITS_PER_UNIT);
- if (dst_size)
- set_mem_size (dst, GEN_INT (INTVAL (dst_size) - align_bytes));
+ if (MEM_SIZE_KNOWN_P (orig_dst))
+ set_mem_size (dst, MEM_SIZE (orig_dst) - align_bytes);
return dst;
}
/* Adjust *OP to refer to the whole field. This also has the effect
of legitimizing *OP's address for BLKmode, possibly simplifying it. */
*op = adjust_address (*op, BLKmode, 0);
- set_mem_size (*op, GEN_INT (width / BITS_PER_UNIT));
+ set_mem_size (*op, width / BITS_PER_UNIT);
/* Get references to both ends of the field. We deliberately don't
use the original QImode *OP for FIRST since the new BLKmode one
bool
mips_mem_fits_mode_p (enum machine_mode mode, rtx x)
{
- rtx size;
-
- if (!MEM_P (x))
- return false;
-
- size = MEM_SIZE (x);
- return size && INTVAL (size) == GET_MODE_SIZE (mode);
+ return (MEM_P (x)
+ && MEM_SIZE_KNOWN_P (x)
+ && MEM_SIZE (x) == GET_MODE_SIZE (mode));
}
/* Return true if (zero_extract OP WIDTH BITPOS) can be used as the
rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
src = replace_equiv_address (src, src_reg);
}
- set_mem_size (src, GEN_INT (move_bytes));
+ set_mem_size (src, move_bytes);
if (!REG_P (XEXP (dest, 0)))
{
rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
dest = replace_equiv_address (dest, dest_reg);
}
- set_mem_size (dest, GEN_INT (move_bytes));
+ set_mem_size (dest, move_bytes);
emit_insn ((*gen_func.movmemsi) (dest, src,
GEN_INT (move_bytes & 31),
val_diff = val1 - val0;
return ((REGNO (reg0) == REGNO (reg1))
- && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
- || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
+ && ((MEM_SIZE_KNOWN_P (a) && val_diff == MEM_SIZE (a))
+ || (MEM_SIZE_KNOWN_P (b) && val_diff == -MEM_SIZE (b))));
}
return false;
DST is set to size 1 so the rest of the memory location
does not count as source operand. */
rtx dstp1 = adjust_address (dst, VOIDmode, 1);
- set_mem_size (dst, const1_rtx);
+ set_mem_size (dst, 1);
emit_insn (gen_movmem_short (dstp1, dst,
GEN_INT (INTVAL (len) - 2)));
else
{
dstp1 = adjust_address (dst, VOIDmode, 1);
- set_mem_size (dst, const1_rtx);
+ set_mem_size (dst, 1);
/* Initialize memory by storing the first byte. */
emit_move_insn (adjust_address (dst, QImode, 0), val);
GET_MODE_SIZE (word_mode) - size);
dest = adjust_address (dest, BLKmode, 0);
- set_mem_size (dest, GEN_INT (size));
+ set_mem_size (dest, size);
s390_expand_movmem (dest, src_mem, GEN_INT (size));
}
emit_move_insn (adjust_address (dest, SImode, size),
gen_lowpart (SImode, src));
- set_mem_size (dest, GEN_INT (size));
+ set_mem_size (dest, size);
emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
(stcmh_width), const0_rtx),
gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
int mask = ((1ul << size) - 1) << (GET_MODE_SIZE (SImode) - size);
operands[1] = adjust_address (operands[1], BLKmode, 0);
- set_mem_size (operands[1], GEN_INT (size));
+ set_mem_size (operands[1], size);
operands[2] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode) - bitsize);
operands[3] = GEN_INT (mask);
})
int mask = ((1ul << size) - 1) << (GET_MODE_SIZE (SImode) - size);
operands[1] = adjust_address (operands[1], BLKmode, 0);
- set_mem_size (operands[1], GEN_INT (size));
+ set_mem_size (operands[1], size);
operands[2] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode) - bitsize);
operands[3] = GEN_INT (mask);
})
(clobber (reg:CC CC_REGNUM))])]
{
operands[1] = adjust_address (operands[1], BLKmode, 0);
- set_mem_size (operands[1], GEN_INT (GET_MODE_SIZE (QImode)));
+ set_mem_size (operands[1], GET_MODE_SIZE (QImode));
operands[2] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode)
- GET_MODE_BITSIZE (QImode));
})
rtx from = adjust_automodify_address (src, BLKmode,
src_addr, copied);
- set_mem_size (from, GEN_INT (4));
+ set_mem_size (from, 4);
emit_insn (gen_movua (temp, from));
emit_move_insn (src_addr, plus_constant (src_addr, 4));
emit_move_insn (to, temp);
&& MEM_P (operands[1]) && MEM_ALIGN (operands[1]) < 32)
{
rtx src = adjust_address (operands[1], BLKmode, 0);
- set_mem_size (src, GEN_INT (4));
+ set_mem_size (src, 4);
emit_insn (gen_movua (operands[0], src));
DONE;
}
&& MEM_P (operands[1]) && MEM_ALIGN (operands[1]) < 32)
{
rtx src = adjust_address (operands[1], BLKmode, 0);
- set_mem_size (src, GEN_INT (4));
+ set_mem_size (src, 4);
emit_insn (gen_movua (operands[0], src));
DONE;
}
if (GET_CODE (XEXP (p, 0)) == USE
&& MEM_P (XEXP (XEXP (p, 0), 0)))
{
- rtx mem = XEXP (XEXP (p, 0), 0), addr, size;
- HOST_WIDE_INT off = 0;
- size = MEM_SIZE (mem);
- if (size == NULL_RTX)
+ rtx mem = XEXP (XEXP (p, 0), 0), addr;
+ HOST_WIDE_INT off = 0, size;
+ if (!MEM_SIZE_KNOWN_P (mem))
return false;
+ size = MEM_SIZE (mem);
addr = XEXP (mem, 0);
if (GET_CODE (addr) == PLUS
&& REG_P (XEXP (addr, 0))
return false;
}
min_sp_off = MIN (min_sp_off, off);
- max_sp_off = MAX (max_sp_off, off + INTVAL (size));
+ max_sp_off = MAX (max_sp_off, off + size);
}
if (min_sp_off >= max_sp_off)
set = single_set (DF_REF_INSN (defs->ref));
off += INTVAL (XEXP (SET_SRC (set), 1));
}
- for (byte = off; byte < off + INTVAL (MEM_SIZE (mem)); byte++)
+ for (byte = off; byte < off + MEM_SIZE (mem); byte++)
{
if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
gcc_unreachable ();
@item MEM_OFFSET (@var{x})
The offset from the start of @code{MEM_EXPR} as a @code{CONST_INT} rtx.
+@findex MEM_SIZE_KNOWN_P
+@item MEM_SIZE_KNOWN_P (@var{x})
+True if the size of the memory reference is known.
+@samp{MEM_SIZE (@var{x})} provides its size if so.
+
@findex MEM_SIZE
@item MEM_SIZE (@var{x})
-The size in bytes of the memory reference as a @code{CONST_INT} rtx.
+The size in bytes of the memory reference.
This is mostly relevant for @code{BLKmode} references as otherwise
-the size is implied by the mode.
+the size is implied by the mode. The value is only valid if
+@samp{MEM_SIZE_KNOWN_P (@var{x})} is true.
@findex MEM_ALIGN
@item MEM_ALIGN (@var{x})
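As a concrete illustration of the documented contract, here is a hypothetical helper (illustration only, not part of the patch) that computes the byte size of a MEM under these rules, mirroring the guarded uses in dse.c and dwarf2out.c below:

    /* Hypothetical helper: byte size of MEM, or -1 if unknown.
       Non-BLKmode references take their size from the mode; BLKmode
       references must consult MEM_SIZE, which is only valid when
       MEM_SIZE_KNOWN_P is true.  */
    static HOST_WIDE_INT
    mem_byte_size (rtx mem)
    {
      if (GET_MODE (mem) != BLKmode)
        return GET_MODE_SIZE (GET_MODE (mem));
      if (MEM_SIZE_KNOWN_P (mem))
        return MEM_SIZE (mem);
      return -1;
    }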
}
/* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
as memset (addr, 0, 36); */
- else if (!MEM_SIZE (mem)
- || !CONST_INT_P (MEM_SIZE (mem))
+ else if (!MEM_SIZE_KNOWN_P (mem)
+ || MEM_SIZE (mem) <= 0
+ || MEM_SIZE (mem) > MAX_OFFSET
|| GET_CODE (body) != SET
- || INTVAL (MEM_SIZE (mem)) <= 0
- || INTVAL (MEM_SIZE (mem)) > MAX_OFFSET
|| !CONST_INT_P (SET_SRC (body)))
{
if (!store_is_unused)
}
if (GET_MODE (mem) == BLKmode)
- width = INTVAL (MEM_SIZE (mem));
+ width = MEM_SIZE (mem);
else
{
width = GET_MODE_SIZE (GET_MODE (mem));
&& INTVAL (args[2]) > 0)
{
rtx mem = gen_rtx_MEM (BLKmode, args[0]);
- set_mem_size (mem, args[2]);
+ set_mem_size (mem, INTVAL (args[2]));
body = gen_rtx_SET (VOIDmode, mem, args[1]);
mems_found += record_store (body, bb_info);
if (dump_file)
if (MEM_P (varloc))
{
unsigned HOST_WIDE_INT memsize
- = INTVAL (MEM_SIZE (varloc)) * BITS_PER_UNIT;
+ = MEM_SIZE (varloc) * BITS_PER_UNIT;
if (memsize != bitsize)
{
if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
/* Set the size of MEM to SIZE. */
void
-set_mem_size (rtx mem, rtx size)
+set_mem_size (rtx mem, HOST_WIDE_INT size)
{
struct mem_attrs attrs;
attrs = *get_mem_attrs (mem);
- attrs.size = size;
+ attrs.size = GEN_INT (size);
+ set_mem_attrs (mem, &attrs);
+}
+
+/* Clear the size of MEM. */
+
+void
+clear_mem_size (rtx mem)
+{
+ struct mem_attrs attrs;
+
+ attrs = *get_mem_attrs (mem);
+ attrs.size = NULL_RTX;
set_mem_attrs (mem, &attrs);
}
\f
extern void set_mem_offset (rtx, rtx);
/* Set the size for MEM to SIZE. */
-extern void set_mem_size (rtx, rtx);
+extern void set_mem_size (rtx, HOST_WIDE_INT);
+
+/* Clear the size recorded for MEM. */
+extern void clear_mem_size (rtx);
/* Set the attributes for MEM appropriate for a spill slot. */
extern void set_mem_attrs_for_spill (rtx);
{
x = shallow_copy_rtx (x);
y = shallow_copy_rtx (y);
- set_mem_size (x, size);
- set_mem_size (y, size);
+ set_mem_size (x, INTVAL (size));
+ set_mem_size (y, INTVAL (size));
}
if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
if (data->promoted_mode != BLKmode
&& data->promoted_mode != DECL_MODE (parm))
{
- set_mem_size (stack_parm,
- GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
+ set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
{
int offset = subreg_lowpart_offset (DECL_MODE (parm),
fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC,
INTVAL (MEM_OFFSET (in_rtx)));
- if (MEM_SIZE (in_rtx))
- fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC,
- INTVAL (MEM_SIZE (in_rtx)));
+ if (MEM_SIZE_KNOWN_P (in_rtx))
+ fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC, MEM_SIZE (in_rtx));
if (MEM_ALIGN (in_rtx) != 1)
fprintf (outfile, " A%u", MEM_ALIGN (in_rtx));
PUT_MODE (tem, GET_MODE (x));
if (MEM_OFFSET (tem))
set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
- if (MEM_SIZE (tem)
- && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
- set_mem_size (tem, GEN_INT (outer_size));
+ if (MEM_SIZE_KNOWN_P (tem)
+ && MEM_SIZE (tem) != (HOST_WIDE_INT) outer_size)
+ set_mem_size (tem, outer_size);
/* If this was a paradoxical subreg that we replaced, the
resulting memory must be sufficiently aligned to allow
/* For a MEM rtx, the address space. */
#define MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace)
-/* For a MEM rtx, the size in bytes of the MEM, if known, as an RTX that
- is always a CONST_INT. */
-#define MEM_SIZE(RTX) (get_mem_attrs (RTX)->size)
+/* For a MEM rtx, true if its MEM_SIZE is known. */
+#define MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size != NULL_RTX)
+
+/* For a MEM rtx, the size in bytes of the MEM. */
+#define MEM_SIZE(RTX) INTVAL (get_mem_attrs (RTX)->size)
/* For a MEM rtx, the alignment in bits. We can use the alignment of the
mode as a default when STRICT_ALIGNMENT, but not if not. */
code_changed
|| !MEM_NOTRAP_P (x))
{
- HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
+ HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
GET_MODE (x), code_changed);
}
if (GET_MODE (decl_rtl) == BLKmode
|| AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
return 0;
- if (MEM_SIZE (decl_rtl)
- && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
+ if (MEM_SIZE_KNOWN_P (decl_rtl)
+ && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
return 0;
}
initial_trampoline = gen_const_mem (BLKmode, symbol);
set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
- set_mem_size (initial_trampoline, GEN_INT (TRAMPOLINE_SIZE));
+ set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);
return initial_trampoline;
}