From d05d755107df11e675a57ac4371fd0031c7d68a4 Mon Sep 17 00:00:00 2001
From: Richard Sandiford
Date: Wed, 20 Dec 2017 12:53:23 +0000
Subject: [PATCH] poly_int: MEM_OFFSET and MEM_SIZE

This patch changes the MEM_OFFSET and MEM_SIZE memory attributes
from HOST_WIDE_INT to poly_int64.  Most of it is mechanical, but
there is one non-obvious change in widen_memory_access.  Previously
the main while loop broke with:

      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
	       && (! attrs.offset_known_p || attrs.offset >= 0))
	break;

but it seemed wrong to optimistically assume the best case when the
offset isn't known (and thus might be negative).  As it happens,
the "! attrs.offset_known_p" condition was always false, because
we'd already nullified attrs.expr in that case:

  /* If we don't know what offset we were at within the expression,
     then we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

The patch therefore drops "! attrs.offset_known_p ||" when converting
the offset check to the may/must interface.

2017-12-20  Richard Sandiford
	    Alan Hayward
	    David Sherwood

gcc/
	* rtl.h (mem_attrs): Add a default constructor.  Change size and
	offset from HOST_WIDE_INT to poly_int64.
	* emit-rtl.h (set_mem_offset, set_mem_size, adjust_address_1)
	(adjust_automodify_address_1, set_mem_attributes_minus_bitpos)
	(widen_memory_access): Take the sizes and offsets as poly_int64s
	rather than HOST_WIDE_INTs.
	* alias.c (ao_ref_from_mem): Handle the new form of MEM_OFFSET.
	(offset_overlap_p): Take poly_int64s rather than HOST_WIDE_INTs
	and ints.
	(adjust_offset_for_component_ref): Change the offset from a
	HOST_WIDE_INT to a poly_int64.
	(nonoverlapping_memrefs_p): Track polynomial offsets and sizes.
	* cfgcleanup.c (merge_memattrs): Update after mem_attrs changes.
	* dce.c (find_call_stack_args): Likewise.
	* dse.c (record_store): Likewise.
	* dwarf2out.c (tls_mem_loc_descriptor, dw_sra_loc_expr): Likewise.
	* print-rtl.c (rtx_writer::print_rtx): Likewise.
	* read-rtl-function.c (test_loading_mem): Likewise.
	* rtlanal.c (may_trap_p_1): Likewise.
	* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
	* var-tracking.c (int_mem_offset, track_expr_p): Likewise.
	* emit-rtl.c (mem_attrs_eq_p, get_mem_align_offset): Likewise.
	(mem_attrs::mem_attrs): New function.
	(set_mem_attributes_minus_bitpos): Change bitpos from a
	HOST_WIDE_INT to poly_int64.
	(set_mem_alias_set, set_mem_addr_space, set_mem_align, set_mem_expr)
	(clear_mem_offset, clear_mem_size, change_address)
	(get_spill_slot_decl, set_mem_attrs_for_spill): Directly
	initialize mem_attrs.
	(set_mem_offset, set_mem_size, adjust_address_1)
	(adjust_automodify_address_1, offset_address, widen_memory_access):
	Likewise.  Take poly_int64s rather than HOST_WIDE_INT.
Co-Authored-By: Alan Hayward Co-Authored-By: David Sherwood From-SVN: r255875 --- gcc/ChangeLog | 37 +++++++++ gcc/alias.c | 42 ++++------ gcc/cfgcleanup.c | 19 +++-- gcc/dce.c | 13 +-- gcc/dse.c | 8 +- gcc/dwarf2out.c | 8 +- gcc/emit-rtl.c | 178 +++++++++++++++++++--------------------- gcc/emit-rtl.h | 14 ++-- gcc/print-rtl.c | 10 ++- gcc/read-rtl-function.c | 8 +- gcc/rtl.h | 6 +- gcc/rtlanal.c | 2 +- gcc/simplify-rtx.c | 18 ++-- gcc/var-tracking.c | 7 +- 14 files changed, 201 insertions(+), 169 deletions(-) diff --git a/gcc/ChangeLog b/gcc/ChangeLog index 48865ad23b2..9b04467a8de 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,3 +1,40 @@ +2017-12-20 Richard Sandiford + Alan Hayward + David Sherwood + + * rtl.h (mem_attrs): Add a default constructor. Change size and + offset from HOST_WIDE_INT to poly_int64. + * emit-rtl.h (set_mem_offset, set_mem_size, adjust_address_1) + (adjust_automodify_address_1, set_mem_attributes_minus_bitpos) + (widen_memory_access): Take the sizes and offsets as poly_int64s + rather than HOST_WIDE_INTs. + * alias.c (ao_ref_from_mem): Handle the new form of MEM_OFFSET. + (offset_overlap_p): Take poly_int64s rather than HOST_WIDE_INTs + and ints. + (adjust_offset_for_component_ref): Change the offset from a + HOST_WIDE_INT to a poly_int64. + (nonoverlapping_memrefs_p): Track polynomial offsets and sizes. + * cfgcleanup.c (merge_memattrs): Update after mem_attrs changes. + * dce.c (find_call_stack_args): Likewise. + * dse.c (record_store): Likewise. + * dwarf2out.c (tls_mem_loc_descriptor, dw_sra_loc_expr): Likewise. + * print-rtl.c (rtx_writer::print_rtx): Likewise. + * read-rtl-function.c (test_loading_mem): Likewise. + * rtlanal.c (may_trap_p_1): Likewise. + * simplify-rtx.c (delegitimize_mem_from_attrs): Likewise. + * var-tracking.c (int_mem_offset, track_expr_p): Likewise. + * emit-rtl.c (mem_attrs_eq_p, get_mem_align_offset): Likewise. + (mem_attrs::mem_attrs): New function. + (set_mem_attributes_minus_bitpos): Change bitpos from a + HOST_WIDE_INT to poly_int64. + (set_mem_alias_set, set_mem_addr_space, set_mem_align, set_mem_expr) + (clear_mem_offset, clear_mem_size, change_address) + (get_spill_slot_decl, set_mem_attrs_for_spill): Directly + initialize mem_attrs. + (set_mem_offset, set_mem_size, adjust_address_1) + (adjust_automodify_address_1, offset_address, widen_memory_access): + Likewise. Take poly_int64s rather than HOST_WIDE_INT. + 2017-12-20 Richard Sandiford Alan Hayward David Sherwood diff --git a/gcc/alias.c b/gcc/alias.c index b1ff8fdd887..072fba207e7 100644 --- a/gcc/alias.c +++ b/gcc/alias.c @@ -330,7 +330,7 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem) /* If MEM_OFFSET/MEM_SIZE get us outside of ref->offset/ref->max_size drop ref->ref. */ - if (MEM_OFFSET (mem) < 0 + if (maybe_lt (MEM_OFFSET (mem), 0) || (ref->max_size_known_p () && maybe_gt ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT, ref->max_size))) @@ -2331,12 +2331,15 @@ addr_side_effect_eval (rtx addr, int size, int n_refs) absolute value of the sizes as the actual sizes. */ static inline bool -offset_overlap_p (HOST_WIDE_INT c, int xsize, int ysize) +offset_overlap_p (poly_int64 c, poly_int64 xsize, poly_int64 ysize) { - return (xsize == 0 || ysize == 0 - || (c >= 0 - ? (abs (xsize) > c) - : (abs (ysize) > -c))); + if (known_eq (xsize, 0) || known_eq (ysize, 0)) + return true; + + if (maybe_ge (c, 0)) + return maybe_gt (maybe_lt (xsize, 0) ? -xsize : xsize, c); + else + return maybe_gt (maybe_lt (ysize, 0) ? 
-ysize : ysize, -c); } /* Return one if X and Y (memory addresses) reference the @@ -2667,7 +2670,7 @@ decl_for_component_ref (tree x) static void adjust_offset_for_component_ref (tree x, bool *known_p, - HOST_WIDE_INT *offset) + poly_int64 *offset) { if (!*known_p) return; @@ -2708,8 +2711,8 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant) rtx rtlx, rtly; rtx basex, basey; bool moffsetx_known_p, moffsety_known_p; - HOST_WIDE_INT moffsetx = 0, moffsety = 0; - HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey; + poly_int64 moffsetx = 0, moffsety = 0; + poly_int64 offsetx = 0, offsety = 0, sizex, sizey; /* Unless both have exprs, we can't tell anything. */ if (exprx == 0 || expry == 0) @@ -2811,12 +2814,10 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant) we can avoid overlap is if we can deduce that they are nonoverlapping pieces of that decl, which is very rare. */ basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx; - if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1))) - offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0); + basex = strip_offset_and_add (basex, &offsetx); basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly; - if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1))) - offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0); + basey = strip_offset_and_add (basey, &offsety); /* If the bases are different, we know they do not overlap if both are constants or if one is a constant and the other a pointer into the @@ -2837,10 +2838,10 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant) declarations are necessarily different (i.e. compare_base_decls (exprx, expry) == -1) */ - sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx)) + sizex = (!MEM_P (rtlx) ? poly_int64 (GET_MODE_SIZE (GET_MODE (rtlx))) : MEM_SIZE_KNOWN_P (rtlx) ? MEM_SIZE (rtlx) : -1); - sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly)) + sizey = (!MEM_P (rtly) ? poly_int64 (GET_MODE_SIZE (GET_MODE (rtly))) : MEM_SIZE_KNOWN_P (rtly) ? MEM_SIZE (rtly) : -1); @@ -2859,16 +2860,7 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant) if (MEM_SIZE_KNOWN_P (y) && moffsety_known_p) sizey = MEM_SIZE (y); - /* Put the values of the memref with the lower offset in X's values. */ - if (offsetx > offsety) - { - std::swap (offsetx, offsety); - std::swap (sizex, sizey); - } - - /* If we don't know the size of the lower-offset value, we can't tell - if they conflict. Otherwise, we do the test. */ - return sizex >= 0 && offsety >= offsetx + sizex; + return !ranges_maybe_overlap_p (offsetx, sizex, offsety, sizey); } /* Helper for true_dependence and canon_true_dependence. 
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c index 754e52fe799..f470f18c074 100644 --- a/gcc/cfgcleanup.c +++ b/gcc/cfgcleanup.c @@ -864,8 +864,6 @@ merge_memattrs (rtx x, rtx y) MEM_ATTRS (x) = 0; else { - HOST_WIDE_INT mem_size; - if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y)) { set_mem_alias_set (x, 0); @@ -881,20 +879,23 @@ merge_memattrs (rtx x, rtx y) } else if (MEM_OFFSET_KNOWN_P (x) != MEM_OFFSET_KNOWN_P (y) || (MEM_OFFSET_KNOWN_P (x) - && MEM_OFFSET (x) != MEM_OFFSET (y))) + && maybe_ne (MEM_OFFSET (x), MEM_OFFSET (y)))) { clear_mem_offset (x); clear_mem_offset (y); } - if (MEM_SIZE_KNOWN_P (x) && MEM_SIZE_KNOWN_P (y)) - { - mem_size = MAX (MEM_SIZE (x), MEM_SIZE (y)); - set_mem_size (x, mem_size); - set_mem_size (y, mem_size); - } + if (!MEM_SIZE_KNOWN_P (x)) + clear_mem_size (y); + else if (!MEM_SIZE_KNOWN_P (y)) + clear_mem_size (x); + else if (known_le (MEM_SIZE (x), MEM_SIZE (y))) + set_mem_size (x, MEM_SIZE (y)); + else if (known_le (MEM_SIZE (y), MEM_SIZE (x))) + set_mem_size (y, MEM_SIZE (x)); else { + /* The sizes aren't ordered, so we can't merge them. */ clear_mem_size (x); clear_mem_size (y); } diff --git a/gcc/dce.c b/gcc/dce.c index 6fd9548015c..b41a4432fc7 100644 --- a/gcc/dce.c +++ b/gcc/dce.c @@ -293,9 +293,8 @@ find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast, { rtx mem = XEXP (XEXP (p, 0), 0), addr; HOST_WIDE_INT off = 0, size; - if (!MEM_SIZE_KNOWN_P (mem)) + if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size)) return false; - size = MEM_SIZE (mem); addr = XEXP (mem, 0); if (GET_CODE (addr) == PLUS && REG_P (XEXP (addr, 0)) @@ -360,7 +359,9 @@ find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast, && MEM_P (XEXP (XEXP (p, 0), 0))) { rtx mem = XEXP (XEXP (p, 0), 0), addr; - HOST_WIDE_INT off = 0, byte; + HOST_WIDE_INT off = 0, byte, size; + /* Checked in the previous iteration. */ + size = MEM_SIZE (mem).to_constant (); addr = XEXP (mem, 0); if (GET_CODE (addr) == PLUS && REG_P (XEXP (addr, 0)) @@ -386,7 +387,7 @@ find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast, set = single_set (DF_REF_INSN (defs->ref)); off += INTVAL (XEXP (SET_SRC (set), 1)); } - for (byte = off; byte < off + MEM_SIZE (mem); byte++) + for (byte = off; byte < off + size; byte++) { if (!bitmap_set_bit (sp_bytes, byte - min_sp_off)) gcc_unreachable (); @@ -469,8 +470,10 @@ find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast, break; } + HOST_WIDE_INT size; if (!MEM_SIZE_KNOWN_P (mem) - || !check_argument_store (MEM_SIZE (mem), off, min_sp_off, + || !MEM_SIZE (mem).is_constant (&size) + || !check_argument_store (size, off, min_sp_off, max_sp_off, sp_bytes)) break; diff --git a/gcc/dse.c b/gcc/dse.c index d196b79d41d..3e0a4168fd2 100644 --- a/gcc/dse.c +++ b/gcc/dse.c @@ -1365,6 +1365,7 @@ record_store (rtx body, bb_info_t bb_info) /* At this point we know mem is a mem. */ if (GET_MODE (mem) == BLKmode) { + HOST_WIDE_INT const_size; if (GET_CODE (XEXP (mem, 0)) == SCRATCH) { if (dump_file && (dump_flags & TDF_DETAILS)) @@ -1376,8 +1377,11 @@ record_store (rtx body, bb_info_t bb_info) /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0)) as memset (addr, 0, 36); */ else if (!MEM_SIZE_KNOWN_P (mem) - || MEM_SIZE (mem) <= 0 - || MEM_SIZE (mem) > MAX_OFFSET + || maybe_le (MEM_SIZE (mem), 0) + /* This is a limit on the bitmap size, which is only relevant + for constant-sized MEMs. 
*/ + || (MEM_SIZE (mem).is_constant (&const_size) + && const_size > MAX_OFFSET) || GET_CODE (body) != SET || !CONST_INT_P (SET_SRC (body))) { diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c index 6e94ad32a33..ed79fd019d4 100644 --- a/gcc/dwarf2out.c +++ b/gcc/dwarf2out.c @@ -13774,7 +13774,7 @@ tls_mem_loc_descriptor (rtx mem) if (loc_result == NULL) return NULL; - if (MEM_OFFSET (mem)) + if (maybe_ne (MEM_OFFSET (mem), 0)) loc_descr_plus_const (&loc_result, MEM_OFFSET (mem)); return loc_result; @@ -16377,8 +16377,10 @@ dw_sra_loc_expr (tree decl, rtx loc) adjustment. */ if (MEM_P (varloc)) { - unsigned HOST_WIDE_INT memsize - = MEM_SIZE (varloc) * BITS_PER_UNIT; + unsigned HOST_WIDE_INT memsize; + if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize)) + goto discard_descr; + memsize *= BITS_PER_UNIT; if (memsize != bitsize) { if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c index ff3585e305a..e1ed2b0b336 100644 --- a/gcc/emit-rtl.c +++ b/gcc/emit-rtl.c @@ -355,9 +355,9 @@ mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q) return false; return (p->alias == q->alias && p->offset_known_p == q->offset_known_p - && (!p->offset_known_p || p->offset == q->offset) + && (!p->offset_known_p || known_eq (p->offset, q->offset)) && p->size_known_p == q->size_known_p - && (!p->size_known_p || p->size == q->size) + && (!p->size_known_p || known_eq (p->size, q->size)) && p->align == q->align && p->addrspace == q->addrspace && (p->expr == q->expr @@ -1773,6 +1773,17 @@ operand_subword_force (rtx op, unsigned int offset, machine_mode mode) return result; } +mem_attrs::mem_attrs () + : expr (NULL_TREE), + offset (0), + size (0), + alias (0), + align (0), + addrspace (ADDR_SPACE_GENERIC), + offset_known_p (false), + size_known_p (false) +{} + /* Returns 1 if both MEM_EXPR can be considered equal and 0 otherwise. */ @@ -1799,7 +1810,7 @@ int get_mem_align_offset (rtx mem, unsigned int align) { tree expr; - unsigned HOST_WIDE_INT offset; + poly_uint64 offset; /* This function can't use if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem) @@ -1841,12 +1852,13 @@ get_mem_align_offset (rtx mem, unsigned int align) tree byte_offset = component_ref_field_offset (expr); tree bit_offset = DECL_FIELD_BIT_OFFSET (field); + poly_uint64 suboffset; if (!byte_offset - || !tree_fits_uhwi_p (byte_offset) + || !poly_int_tree_p (byte_offset, &suboffset) || !tree_fits_uhwi_p (bit_offset)) return -1; - offset += tree_to_uhwi (byte_offset); + offset += suboffset; offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT; if (inner == NULL_TREE) @@ -1870,7 +1882,10 @@ get_mem_align_offset (rtx mem, unsigned int align) else return -1; - return offset & ((align / BITS_PER_UNIT) - 1); + HOST_WIDE_INT misalign; + if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign)) + return -1; + return misalign; } /* Given REF (a MEM) and T, either the type of X or the expression @@ -1880,9 +1895,9 @@ get_mem_align_offset (rtx mem, unsigned int align) void set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, - HOST_WIDE_INT bitpos) + poly_int64 bitpos) { - HOST_WIDE_INT apply_bitpos = 0; + poly_int64 apply_bitpos = 0; tree type; struct mem_attrs attrs, *defattrs, *refattrs; addr_space_t as; @@ -1903,8 +1918,6 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, set_mem_attributes. 
*/ gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); - memset (&attrs, 0, sizeof (attrs)); - /* Get the alias set from the expression or type (perhaps using a front-end routine) and use it. */ attrs.alias = get_alias_set (t); @@ -2074,10 +2087,9 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, { attrs.expr = t2; attrs.offset_known_p = false; - if (tree_fits_uhwi_p (off_tree)) + if (poly_int_tree_p (off_tree, &attrs.offset)) { attrs.offset_known_p = true; - attrs.offset = tree_to_uhwi (off_tree); apply_bitpos = bitpos; } } @@ -2098,27 +2110,29 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, unsigned int obj_align; unsigned HOST_WIDE_INT obj_bitpos; get_object_alignment_1 (t, &obj_align, &obj_bitpos); - obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1); - if (obj_bitpos != 0) - obj_align = least_bit_hwi (obj_bitpos); + unsigned int diff_align = known_alignment (obj_bitpos - bitpos); + if (diff_align != 0) + obj_align = MIN (obj_align, diff_align); attrs.align = MAX (attrs.align, obj_align); } - if (tree_fits_uhwi_p (new_size)) + poly_uint64 const_size; + if (poly_int_tree_p (new_size, &const_size)) { attrs.size_known_p = true; - attrs.size = tree_to_uhwi (new_size); + attrs.size = const_size; } /* If we modified OFFSET based on T, then subtract the outstanding bit position offset. Similarly, increase the size of the accessed object to contain the negative offset. */ - if (apply_bitpos) + if (maybe_ne (apply_bitpos, 0)) { gcc_assert (attrs.offset_known_p); - attrs.offset -= apply_bitpos / BITS_PER_UNIT; + poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos); + attrs.offset -= bytepos; if (attrs.size_known_p) - attrs.size += apply_bitpos / BITS_PER_UNIT; + attrs.size += bytepos; } /* Now set the attributes we computed above. */ @@ -2137,11 +2151,9 @@ set_mem_attributes (rtx ref, tree t, int objectp) void set_mem_alias_set (rtx mem, alias_set_type set) { - struct mem_attrs attrs; - /* If the new and old alias sets don't conflict, something is wrong. */ gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.alias = set; set_mem_attrs (mem, &attrs); } @@ -2151,9 +2163,7 @@ set_mem_alias_set (rtx mem, alias_set_type set) void set_mem_addr_space (rtx mem, addr_space_t addrspace) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.addrspace = addrspace; set_mem_attrs (mem, &attrs); } @@ -2163,9 +2173,7 @@ set_mem_addr_space (rtx mem, addr_space_t addrspace) void set_mem_align (rtx mem, unsigned int align) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.align = align; set_mem_attrs (mem, &attrs); } @@ -2175,9 +2183,7 @@ set_mem_align (rtx mem, unsigned int align) void set_mem_expr (rtx mem, tree expr) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.expr = expr; set_mem_attrs (mem, &attrs); } @@ -2185,11 +2191,9 @@ set_mem_expr (rtx mem, tree expr) /* Set the offset of MEM to OFFSET. 
*/ void -set_mem_offset (rtx mem, HOST_WIDE_INT offset) +set_mem_offset (rtx mem, poly_int64 offset) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.offset_known_p = true; attrs.offset = offset; set_mem_attrs (mem, &attrs); @@ -2200,9 +2204,7 @@ set_mem_offset (rtx mem, HOST_WIDE_INT offset) void clear_mem_offset (rtx mem) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.offset_known_p = false; set_mem_attrs (mem, &attrs); } @@ -2210,11 +2212,9 @@ clear_mem_offset (rtx mem) /* Set the size of MEM to SIZE. */ void -set_mem_size (rtx mem, HOST_WIDE_INT size) +set_mem_size (rtx mem, poly_int64 size) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.size_known_p = true; attrs.size = size; set_mem_attrs (mem, &attrs); @@ -2225,9 +2225,7 @@ set_mem_size (rtx mem, HOST_WIDE_INT size) void clear_mem_size (rtx mem) { - struct mem_attrs attrs; - - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.size_known_p = false; set_mem_attrs (mem, &attrs); } @@ -2290,9 +2288,9 @@ change_address (rtx memref, machine_mode mode, rtx addr) { rtx new_rtx = change_address_1 (memref, mode, addr, 1, false); machine_mode mmode = GET_MODE (new_rtx); - struct mem_attrs attrs, *defattrs; + struct mem_attrs *defattrs; - attrs = *get_mem_attrs (memref); + mem_attrs attrs (*get_mem_attrs (memref)); defattrs = mode_mem_attrs[(int) mmode]; attrs.expr = NULL_TREE; attrs.offset_known_p = false; @@ -2327,15 +2325,14 @@ change_address (rtx memref, machine_mode mode, rtx addr) has no inherent size. */ rtx -adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, +adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset, int validate, int adjust_address, int adjust_object, - HOST_WIDE_INT size) + poly_int64 size) { rtx addr = XEXP (memref, 0); rtx new_rtx; scalar_int_mode address_mode; - int pbits; - struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs; + struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs; unsigned HOST_WIDE_INT max_align; #ifdef POINTERS_EXTEND_UNSIGNED scalar_int_mode pointer_mode @@ -2352,8 +2349,10 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, size = defattrs->size; /* If there are no changes, just return the original memory reference. */ - if (mode == GET_MODE (memref) && !offset - && (size == 0 || (attrs.size_known_p && attrs.size == size)) + if (mode == GET_MODE (memref) + && known_eq (offset, 0) + && (known_eq (size, 0) + || (attrs.size_known_p && known_eq (attrs.size, size))) && (!validate || memory_address_addr_space_p (mode, addr, attrs.addrspace))) return memref; @@ -2366,22 +2365,17 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, /* Convert a possibly large offset to a signed value within the range of the target address space. */ address_mode = get_address_mode (memref); - pbits = GET_MODE_BITSIZE (address_mode); - if (HOST_BITS_PER_WIDE_INT > pbits) - { - int shift = HOST_BITS_PER_WIDE_INT - pbits; - offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift)) - >> shift); - } + offset = trunc_int_for_mode (offset, address_mode); if (adjust_address) { /* If MEMREF is a LO_SUM and the offset is within the alignment of the object, we can merge it into the LO_SUM. 
*/ - if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM - && offset >= 0 - && (unsigned HOST_WIDE_INT) offset - < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) + if (GET_MODE (memref) != BLKmode + && GET_CODE (addr) == LO_SUM + && known_in_range_p (offset, + 0, (GET_MODE_ALIGNMENT (GET_MODE (memref)) + / BITS_PER_UNIT))) addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0), plus_constant (address_mode, XEXP (addr, 1), offset)); @@ -2392,7 +2386,7 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, else if (POINTERS_EXTEND_UNSIGNED > 0 && GET_CODE (addr) == ZERO_EXTEND && GET_MODE (XEXP (addr, 0)) == pointer_mode - && trunc_int_for_mode (offset, pointer_mode) == offset) + && known_eq (trunc_int_for_mode (offset, pointer_mode), offset)) addr = gen_rtx_ZERO_EXTEND (address_mode, plus_constant (pointer_mode, XEXP (addr, 0), offset)); @@ -2405,7 +2399,7 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, /* If the address is a REG, change_address_1 rightfully returns memref, but this would destroy memref's MEM_ATTRS. */ - if (new_rtx == memref && offset != 0) + if (new_rtx == memref && maybe_ne (offset, 0)) new_rtx = copy_rtx (new_rtx); /* Conservatively drop the object if we don't know where we start from. */ @@ -2422,7 +2416,7 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, attrs.offset += offset; /* Drop the object if the new left end is not within its bounds. */ - if (adjust_object && attrs.offset < 0) + if (adjust_object && maybe_lt (attrs.offset, 0)) { attrs.expr = NULL_TREE; attrs.alias = 0; @@ -2432,16 +2426,16 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, /* Compute the new alignment by taking the MIN of the alignment and the lowest-order set bit in OFFSET, but don't change the alignment if OFFSET if zero. */ - if (offset != 0) + if (maybe_ne (offset, 0)) { - max_align = least_bit_hwi (offset) * BITS_PER_UNIT; + max_align = known_alignment (offset) * BITS_PER_UNIT; attrs.align = MIN (attrs.align, max_align); } - if (size) + if (maybe_ne (size, 0)) { /* Drop the object if the new right end is not within its bounds. */ - if (adjust_object && (offset + size) > attrs.size) + if (adjust_object && maybe_gt (offset + size, attrs.size)) { attrs.expr = NULL_TREE; attrs.alias = 0; @@ -2469,7 +2463,7 @@ adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, rtx adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr, - HOST_WIDE_INT offset, int validate) + poly_int64 offset, int validate) { memref = change_address_1 (memref, VOIDmode, addr, validate, false); return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0); @@ -2484,9 +2478,9 @@ offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) { rtx new_rtx, addr = XEXP (memref, 0); machine_mode address_mode; - struct mem_attrs attrs, *defattrs; + struct mem_attrs *defattrs; - attrs = *get_mem_attrs (memref); + mem_attrs attrs (*get_mem_attrs (memref)); address_mode = get_address_mode (memref); new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); @@ -2554,17 +2548,16 @@ replace_equiv_address_nv (rtx memref, rtx addr, bool inplace) operations plus masking logic. 
*/ rtx -widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset) +widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset) { rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0); - struct mem_attrs attrs; unsigned int size = GET_MODE_SIZE (mode); /* If there are no changes, just return the original memory reference. */ if (new_rtx == memref) return new_rtx; - attrs = *get_mem_attrs (new_rtx); + mem_attrs attrs (*get_mem_attrs (new_rtx)); /* If we don't know what offset we were at within the expression, then we can't know if we've overstepped the bounds. */ @@ -2586,28 +2579,30 @@ widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset) /* Is the field at least as large as the access? If so, ok, otherwise strip back to the containing structure. */ - if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST - && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0 - && attrs.offset >= 0) + if (poly_int_tree_p (DECL_SIZE_UNIT (field)) + && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size) + && known_ge (attrs.offset, 0)) break; - if (! tree_fits_uhwi_p (offset)) + poly_uint64 suboffset; + if (!poly_int_tree_p (offset, &suboffset)) { attrs.expr = NULL_TREE; break; } attrs.expr = TREE_OPERAND (attrs.expr, 0); - attrs.offset += tree_to_uhwi (offset); + attrs.offset += suboffset; attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) / BITS_PER_UNIT); } /* Similarly for the decl. */ else if (DECL_P (attrs.expr) && DECL_SIZE_UNIT (attrs.expr) - && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST - && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0 - && (! attrs.offset_known_p || attrs.offset >= 0)) + && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr)) + && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)), + size) + && known_ge (attrs.offset, 0)) break; else { @@ -2638,7 +2633,6 @@ get_spill_slot_decl (bool force_build_p) { tree d = spill_slot_decl; rtx rd; - struct mem_attrs attrs; if (d || !force_build_p) return d; @@ -2652,7 +2646,7 @@ get_spill_slot_decl (bool force_build_p) rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); MEM_NOTRAP_P (rd) = 1; - attrs = *mode_mem_attrs[(int) BLKmode]; + mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]); attrs.alias = new_alias_set (); attrs.expr = d; set_mem_attrs (rd, &attrs); @@ -2670,10 +2664,9 @@ get_spill_slot_decl (bool force_build_p) void set_mem_attrs_for_spill (rtx mem) { - struct mem_attrs attrs; rtx addr; - attrs = *get_mem_attrs (mem); + mem_attrs attrs (*get_mem_attrs (mem)); attrs.expr = get_spill_slot_decl (true); attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr)); attrs.addrspace = ADDR_SPACE_GENERIC; @@ -2683,10 +2676,7 @@ set_mem_attrs_for_spill (rtx mem) with perhaps the plus missing for offset = 0. */ addr = XEXP (mem, 0); attrs.offset_known_p = true; - attrs.offset = 0; - if (GET_CODE (addr) == PLUS - && CONST_INT_P (XEXP (addr, 1))) - attrs.offset = INTVAL (XEXP (addr, 1)); + strip_offset (addr, &attrs.offset); set_mem_attrs (mem, &attrs); MEM_NOTRAP_P (mem) = 1; diff --git a/gcc/emit-rtl.h b/gcc/emit-rtl.h index 9deff00d679..3e0192123f5 100644 --- a/gcc/emit-rtl.h +++ b/gcc/emit-rtl.h @@ -333,13 +333,13 @@ extern void set_mem_addr_space (rtx, addr_space_t); extern void set_mem_expr (rtx, tree); /* Set the offset for MEM to OFFSET. */ -extern void set_mem_offset (rtx, HOST_WIDE_INT); +extern void set_mem_offset (rtx, poly_int64); /* Clear the offset recorded for MEM. */ extern void clear_mem_offset (rtx); /* Set the size for MEM to SIZE. 
*/ -extern void set_mem_size (rtx, HOST_WIDE_INT); +extern void set_mem_size (rtx, poly_int64); /* Clear the size recorded for MEM. */ extern void clear_mem_size (rtx); @@ -489,10 +489,10 @@ extern rtx change_address (rtx, machine_mode, rtx); #define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \ adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0) -extern rtx adjust_address_1 (rtx, machine_mode, HOST_WIDE_INT, int, int, - int, HOST_WIDE_INT); +extern rtx adjust_address_1 (rtx, machine_mode, poly_int64, int, int, + int, poly_int64); extern rtx adjust_automodify_address_1 (rtx, machine_mode, rtx, - HOST_WIDE_INT, int); + poly_int64, int); /* Return a memory reference like MEMREF, but whose address is changed by adding OFFSET, an RTX, to it. POW2 is the highest power of two factor @@ -507,7 +507,7 @@ extern void set_mem_attributes (rtx, tree, int); /* Similar, except that BITPOS has not yet been applied to REF, so if we alter MEM_OFFSET according to T then we should subtract BITPOS expecting that it'll be added back in later. */ -extern void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT); +extern void set_mem_attributes_minus_bitpos (rtx, tree, int, poly_int64); /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or @@ -516,7 +516,7 @@ extern int get_mem_align_offset (rtx, unsigned int); /* Return a memory reference like MEMREF, but with its mode widened to MODE and adjusted by OFFSET. */ -extern rtx widen_memory_access (rtx, machine_mode, HOST_WIDE_INT); +extern rtx widen_memory_access (rtx, machine_mode, poly_int64); extern void maybe_set_max_label_num (rtx_code_label *x); diff --git a/gcc/print-rtl.c b/gcc/print-rtl.c index 7e0a0a01574..1cf2604a867 100644 --- a/gcc/print-rtl.c +++ b/gcc/print-rtl.c @@ -894,10 +894,16 @@ rtx_writer::print_rtx (const_rtx in_rtx) fputc (' ', m_outfile); if (MEM_OFFSET_KNOWN_P (in_rtx)) - fprintf (m_outfile, "+" HOST_WIDE_INT_PRINT_DEC, MEM_OFFSET (in_rtx)); + { + fprintf (m_outfile, "+"); + print_poly_int (m_outfile, MEM_OFFSET (in_rtx)); + } if (MEM_SIZE_KNOWN_P (in_rtx)) - fprintf (m_outfile, " S" HOST_WIDE_INT_PRINT_DEC, MEM_SIZE (in_rtx)); + { + fprintf (m_outfile, " S"); + print_poly_int (m_outfile, MEM_SIZE (in_rtx)); + } if (MEM_ALIGN (in_rtx) != 1) fprintf (m_outfile, " A%u", MEM_ALIGN (in_rtx)); diff --git a/gcc/read-rtl-function.c b/gcc/read-rtl-function.c index 8ddf0ecaabc..9be88ae0a61 100644 --- a/gcc/read-rtl-function.c +++ b/gcc/read-rtl-function.c @@ -2143,9 +2143,9 @@ test_loading_mem () ASSERT_EQ (42, MEM_ALIAS_SET (mem1)); /* "+17". */ ASSERT_TRUE (MEM_OFFSET_KNOWN_P (mem1)); - ASSERT_EQ (17, MEM_OFFSET (mem1)); + ASSERT_KNOWN_EQ (17, MEM_OFFSET (mem1)); /* "S8". */ - ASSERT_EQ (8, MEM_SIZE (mem1)); + ASSERT_KNOWN_EQ (8, MEM_SIZE (mem1)); /* "A128. */ ASSERT_EQ (128, MEM_ALIGN (mem1)); /* "AS5. */ @@ -2159,9 +2159,9 @@ test_loading_mem () ASSERT_EQ (43, MEM_ALIAS_SET (mem2)); /* "+18". */ ASSERT_TRUE (MEM_OFFSET_KNOWN_P (mem2)); - ASSERT_EQ (18, MEM_OFFSET (mem2)); + ASSERT_KNOWN_EQ (18, MEM_OFFSET (mem2)); /* "S9". */ - ASSERT_EQ (9, MEM_SIZE (mem2)); + ASSERT_KNOWN_EQ (9, MEM_SIZE (mem2)); /* "AS6. */ ASSERT_EQ (6, MEM_ADDR_SPACE (mem2)); } diff --git a/gcc/rtl.h b/gcc/rtl.h index ed944a4d989..408298a8f7a 100644 --- a/gcc/rtl.h +++ b/gcc/rtl.h @@ -147,6 +147,8 @@ struct addr_diff_vec_flags they cannot be modified in place. */ struct GTY(()) mem_attrs { + mem_attrs (); + /* The expression that the MEM accesses, or null if not known. 
This expression might be larger than the memory reference itself. (In other words, the MEM might access only part of the object.) */ @@ -154,11 +156,11 @@ struct GTY(()) mem_attrs /* The offset of the memory reference from the start of EXPR. Only valid if OFFSET_KNOWN_P. */ - HOST_WIDE_INT offset; + poly_int64 offset; /* The size of the memory reference in bytes. Only valid if SIZE_KNOWN_P. */ - HOST_WIDE_INT size; + poly_int64 size; /* The alias set of the memory reference. */ alias_set_type alias; diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c index 54f94df80da..2116907bb13 100644 --- a/gcc/rtlanal.c +++ b/gcc/rtlanal.c @@ -2796,7 +2796,7 @@ may_trap_p_1 (const_rtx x, unsigned flags) code_changed || !MEM_NOTRAP_P (x)) { - HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1; + poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1; return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size, GET_MODE (x), code_changed); } diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c index 58cf2c56918..d4b70850c16 100644 --- a/gcc/simplify-rtx.c +++ b/gcc/simplify-rtx.c @@ -289,7 +289,7 @@ delegitimize_mem_from_attrs (rtx x) { tree decl = MEM_EXPR (x); machine_mode mode = GET_MODE (x); - HOST_WIDE_INT offset = 0; + poly_int64 offset = 0; switch (TREE_CODE (decl)) { @@ -346,6 +346,7 @@ delegitimize_mem_from_attrs (rtx x) if (MEM_P (newx)) { rtx n = XEXP (newx, 0), o = XEXP (x, 0); + poly_int64 n_offset, o_offset; /* Avoid creating a new MEM needlessly if we already had the same address. We do if there's no OFFSET and the @@ -353,21 +354,14 @@ delegitimize_mem_from_attrs (rtx x) form (plus NEWX OFFSET), or the NEWX is of the form (plus Y (const_int Z)) and X is that with the offset added: (plus Y (const_int Z+OFFSET)). */ - if (!((offset == 0 - || (GET_CODE (o) == PLUS - && GET_CODE (XEXP (o, 1)) == CONST_INT - && (offset == INTVAL (XEXP (o, 1)) - || (GET_CODE (n) == PLUS - && GET_CODE (XEXP (n, 1)) == CONST_INT - && (INTVAL (XEXP (n, 1)) + offset - == INTVAL (XEXP (o, 1))) - && (n = XEXP (n, 0)))) - && (o = XEXP (o, 0)))) + n = strip_offset (n, &n_offset); + o = strip_offset (o, &o_offset); + if (!(known_eq (o_offset, n_offset + offset) && rtx_equal_p (o, n))) x = adjust_address_nv (newx, mode, offset); } else if (GET_MODE (x) == GET_MODE (newx) - && offset == 0) + && known_eq (offset, 0)) x = newx; } } diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c index 2f68298ac78..1a4caaa09b4 100644 --- a/gcc/var-tracking.c +++ b/gcc/var-tracking.c @@ -395,8 +395,9 @@ struct variable static inline HOST_WIDE_INT int_mem_offset (const_rtx mem) { - if (MEM_OFFSET_KNOWN_P (mem)) - return MEM_OFFSET (mem); + HOST_WIDE_INT offset; + if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset)) + return offset; return 0; } @@ -5256,7 +5257,7 @@ track_expr_p (tree expr, bool need_rtl) && !tracked_record_parameter_p (realdecl)) return 0; if (MEM_SIZE_KNOWN_P (decl_rtl) - && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS) + && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS)) return 0; } -- 2.30.2
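
For readers less familiar with the may/must predicates that the patch switches
the MEM_OFFSET/MEM_SIZE checks to, the sketch below models their comparison
semantics.  It is an illustrative simplification only and does not use GCC's
poly-int.h: the poly_value type, its two-coefficient layout and the assumption
that the runtime indeterminate is non-negative are all stand-ins for the real
poly_int64 machinery.

    /* Illustrative model: a degree-1 "poly" value c0 + c1*x, where x is an
       unknown runtime quantity assumed to be >= 0 (e.g. a scalable vector
       length).  This mirrors, but does not use, GCC's poly_int64.  */

    #include <cstdint>
    #include <iostream>

    struct poly_value
    {
      std::int64_t c0;  /* constant part */
      std::int64_t c1;  /* coefficient of the runtime indeterminate x */
    };

    /* "Must" predicate: a <= b for every possible x >= 0.  */
    static bool
    known_le (poly_value a, poly_value b)
    {
      return a.c0 <= b.c0 && a.c1 <= b.c1;
    }

    /* "May" predicate: a < b for at least one possible x >= 0.  */
    static bool
    maybe_lt (poly_value a, poly_value b)
    {
      return a.c0 < b.c0 || a.c1 < b.c1;
    }

    int
    main ()
    {
      poly_value four = { 4, 0 };   /* compile-time constant 4 */
      poly_value vl = { 0, 16 };    /* 16 * x bytes, unknown until runtime */

      /* Neither size is known to be <= the other (x could be 0, or large),
	 so code like the new merge_memattrs must give up rather than pick
	 a "maximum" of the two.  */
      std::cout << known_le (four, vl) << ' '    /* 0 */
		<< known_le (vl, four) << '\n';  /* 0 */

      /* Both "may" be smaller, which is why conservative checks such as the
	 old "MEM_OFFSET (mem) < 0" become maybe_lt (MEM_OFFSET (mem), 0).  */
      std::cout << maybe_lt (four, vl) << ' '    /* 1 */
		<< maybe_lt (vl, four) << '\n';  /* 1 */
      return 0;
    }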