static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
+static rtx autoinc_split (rtx, rtx *, machine_mode);
+
+#define PRESERVED_VALUE_P(RTX) \
+ (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
+
+#define SP_BASED_VALUE_P(RTX) \
+ (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
+
+#define SP_DERIVED_VALUE_P(RTX) \
+ (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)
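These three accessors reuse existing rtx flag bits (unchanging, jump, call) on VALUE rtxes, and RTL_FLAG_CHECK1 asserts in checking builds that the rtx really has code VALUE. Below is a minimal standalone C sketch of the same checked-flag-accessor pattern; every name in it (demo_rtx, DEMO_SP_DERIVED_VALUE_P, ...) is hypothetical, not GCC API.

#include <assert.h>
#include <stdio.h>

/* Hypothetical miniature of an rtx with multi-purpose flag bits, mirroring
   how SP_DERIVED_VALUE_P above overlays the "call" bit on VALUE rtxes.  */
enum demo_code { DEMO_VALUE, DEMO_REG };

struct demo_rtx
{
  enum demo_code code;
  unsigned call : 1;	/* Reused as "sp derived" on DEMO_VALUE.  */
};

/* Checked access, like RTL_FLAG_CHECK1 in a checking build: assert the
   rtx has the expected code before exposing the flag.  */
static struct demo_rtx *
demo_flag_check (struct demo_rtx *x, enum demo_code c)
{
  assert (x->code == c);
  return x;
}

#define DEMO_SP_DERIVED_VALUE_P(X) \
  (demo_flag_check ((X), DEMO_VALUE)->call)

int
main (void)
{
  struct demo_rtx v = { DEMO_VALUE, 0 };
  DEMO_SP_DERIVED_VALUE_P (&v) = 1;
  printf ("sp derived: %u\n", (unsigned) DEMO_SP_DERIVED_VALUE_P (&v));
  return 0;
}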
struct expand_value_data
{
if (GET_CODE (x) == VALUE)
return x == v->val_rtx;
+ if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
+ {
+ rtx xoff = NULL;
+ if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
+ return true;
+ }
+
/* We don't guarantee that distinct rtx's have different hash values,
so we need to do a comparison. */
for (l = v->locs; l; l = l->next)
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
int n_sets);
-#define PRESERVED_VALUE_P(RTX) \
- (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
-
-#define SP_BASED_VALUE_P(RTX) \
- (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
-
\f
/* Allocate a struct elt_list and fill in its two elements with the
};
cselib_val **slot
= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
- v->hash, INSERT);
+ v->hash, INSERT);
gcc_assert (!*slot);
*slot = v;
}
max_value_regs
= hard_regno_nregs (regno,
GET_MODE (cfa_base_preserved_val->locs->loc));
+
+ /* If cfa_base is sp + const_int, we also need to preserve the
+ SP_DERIVED_VALUE_P value.  */
+ for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
+ l; l = l->next)
+ if (GET_CODE (l->loc) == PLUS
+ && GET_CODE (XEXP (l->loc, 0)) == VALUE
+ && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
+ && CONST_INT_P (XEXP (l->loc, 1)))
+ {
+ if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
+ {
+ rtx val = cfa_base_preserved_val->val_rtx;
+ rtx_insn *save_cselib_current_insn = cselib_current_insn;
+ cselib_current_insn = l->setting_insn;
+ new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
+ plus_constant (Pmode, val,
+ -UINTVAL (XEXP (l->loc, 1))));
+ cselib_current_insn = save_cselib_current_insn;
+ }
+ break;
+ }
}
else
{
}
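As a hand-worked illustration of the loop above (the VALUE numbers and the offset are invented): suppose cfa_base_preserved_val is V2 with a location (plus (value V1) (const_int 16)), where V1 is the SP_DERIVED_VALUE_P value for the stack pointer on entry. If V1 is not otherwise invariant or equivalent to something preserved, the loop records the inverse location on V1:

  V2 (cfa base, preserved): loc (plus (value V1) (const_int 16))
  added by the loop:        V1 gets loc (plus (value V2) (const_int -16))

so V1 remains expressible in terms of the preserved cfa base after the tables are flushed.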
if (cselib_preserve_constants)
- cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
- (NULL);
+ cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
else
{
cselib_hash_table->empty ();
{
case PLUS:
*off = XEXP (x, 1);
- return XEXP (x, 0);
+ x = XEXP (x, 0);
+ break;
case PRE_DEC:
if (memmode == VOIDmode)
return x;
*off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
- return XEXP (x, 0);
+ x = XEXP (x, 0);
+ break;
case PRE_INC:
if (memmode == VOIDmode)
return x;
*off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
- return XEXP (x, 0);
+ x = XEXP (x, 0);
+ break;
case PRE_MODIFY:
- return XEXP (x, 1);
+ x = XEXP (x, 1);
+ break;
case POST_DEC:
case POST_INC:
case POST_MODIFY:
- return XEXP (x, 0);
+ x = XEXP (x, 0);
+ break;
default:
- return x;
+ break;
+ }
+
+ if (GET_MODE (x) == Pmode
+ && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
+ && (*off == NULL_RTX || CONST_INT_P (*off)))
+ {
+ cselib_val *e;
+ if (GET_CODE (x) == VALUE)
+ e = CSELIB_VAL_PTR (x);
+ else
+ e = cselib_lookup (x, GET_MODE (x), 0, memmode);
+ if (e)
+ for (struct elt_loc_list *l = e->locs; l; l = l->next)
+ if (GET_CODE (l->loc) == PLUS
+ && GET_CODE (XEXP (l->loc, 0)) == VALUE
+ && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
+ && CONST_INT_P (XEXP (l->loc, 1)))
+ {
+ if (*off == NULL_RTX)
+ *off = XEXP (l->loc, 1);
+ else
+ *off = plus_constant (Pmode, *off,
+ INTVAL (XEXP (l->loc, 1)));
+ if (*off == const0_rtx)
+ *off = NULL_RTX;
+ return XEXP (l->loc, 0);
+ }
}
+ return x;
}
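A hand-worked pass through the rewritten autoinc_split (VALUE numbers and offsets invented, Pmode assumed to be SImode for the sizes):

  x = (pre_dec:SI (reg sp)), memmode = SImode
    switch:  x becomes (reg sp), *off = (const_int -4)
    lookup:  (reg sp) -> V3, whose locs include
             (plus (value V1) (const_int -8)) with SP_DERIVED_VALUE_P (V1)
    re-base: *off = (const_int -12), return (value V1)

If the folded offset comes out as const0_rtx it is canonicalized back to NULL_RTX, so equal addresses always split into the identical (base, offset) pair no matter how they were originally spelled.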
/* Return nonzero if we can prove that X and Y contain the same value,
if (GET_CODE (y) == VALUE)
return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
+ if ((SP_DERIVED_VALUE_P (x)
+ || SP_DERIVED_VALUE_P (e->val_rtx))
+ && GET_MODE (y) == Pmode)
+ {
+ rtx yoff = NULL;
+ rtx yr = autoinc_split (y, &yoff, memmode);
+ if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
+ return 1;
+ }
+
if (depth == 128)
return 0;
cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
struct elt_loc_list *l;
+ if ((SP_DERIVED_VALUE_P (y)
+ || SP_DERIVED_VALUE_P (e->val_rtx))
+ && GET_MODE (x) == Pmode)
+ {
+ rtx xoff = NULL;
+ rtx xr = autoinc_split (x, &xoff, memmode);
+ if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
+ return 1;
+ }
+
if (depth == 128)
return 0;
if (GET_MODE (x) != GET_MODE (y))
return 0;
- if (GET_CODE (x) != GET_CODE (y))
+ if (GET_CODE (x) != GET_CODE (y)
+ || (GET_CODE (x) == PLUS
+ && GET_MODE (x) == Pmode
+ && CONST_INT_P (XEXP (x, 1))
+ && CONST_INT_P (XEXP (y, 1))))
{
rtx xorig = x, yorig = y;
rtx xoff = NULL, yoff = NULL;
x = autoinc_split (x, &xoff, memmode);
y = autoinc_split (y, &yoff, memmode);
- if (!xoff != !yoff)
- return 0;
-
- if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
- return 0;
-
/* Don't recurse if nothing changed. */
if (x != xorig || y != yorig)
- return rtx_equal_for_cselib_1 (x, y, memmode, depth);
+ {
+ if (!xoff != !yoff)
+ return 0;
+
+ if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
+ return 0;
+
+ return rtx_equal_for_cselib_1 (x, y, memmode, depth);
+ }

- return 0;
+ if (GET_CODE (xorig) != GET_CODE (yorig))
+ return 0;
}
/* These won't be handled correctly by the code below. */
return 1;
}
+/* Helper function for cselib_hash_rtx.  Arguments are as for
+ cselib_hash_rtx, except that it hashes (plus:P x c).  */
+
+static unsigned int
+cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
+ machine_mode memmode)
+{
+ cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
+ if (! e)
+ return 0;
+
+ if (! SP_DERIVED_VALUE_P (e->val_rtx))
+ for (struct elt_loc_list *l = e->locs; l; l = l->next)
+ if (GET_CODE (l->loc) == PLUS
+ && GET_CODE (XEXP (l->loc, 0)) == VALUE
+ && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
+ && CONST_INT_P (XEXP (l->loc, 1)))
+ {
+ e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
+ c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
+ break;
+ }
+ if (c == 0)
+ return e->hash;
+
+ unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
+ hash += e->hash;
+ unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
+ tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
+ if (tem_hash == 0)
+ tem_hash = (unsigned int) CONST_INT;
+ hash += tem_hash;
+ return hash ? hash : 1 + (unsigned int) PLUS;
+}
+
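The mixing above is plain unsigned arithmetic; the point is that a (plus:P x c) re-based onto an SP_DERIVED_VALUE_P anchor hashes exactly as cselib_hash_rtx would hash the equivalent explicit PLUS. Below is a standalone sketch of just the combination step, with made-up stand-in constants for the rtx codes and mode (none of these names exist in GCC):

#include <stdio.h>

/* Made-up stand-ins for GCC's rtx code / machine mode enumerators; only
   their role as small stable integers matters here.  */
enum { DEMO_PLUS = 4, DEMO_CONST_INT = 7, DEMO_VOIDMODE = 0, DEMO_PMODE = 9 };

/* Mirror of the mixing in cselib_hash_plus_const_int: combine the hash of
   the base VALUE with a CONST_INT hashed the way cselib_hash_rtx hashes
   one, so (plus base c) gets the same hash whichever path produced it.  */
static unsigned int
demo_hash_plus_const_int (unsigned int base_hash, long long c)
{
  if (c == 0)
    return base_hash;

  unsigned int hash = (unsigned int) DEMO_PLUS + (unsigned int) DEMO_PMODE;
  hash += base_hash;
  unsigned int tem_hash = (unsigned int) DEMO_CONST_INT
			  + (unsigned int) DEMO_VOIDMODE;
  tem_hash += ((unsigned int) DEMO_CONST_INT << 7)
	      + (unsigned long long) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) DEMO_CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) DEMO_PLUS;
}

int
main (void)
{
  printf ("%u\n", demo_hash_plus_const_int (0xdeadbeefu, 16));
  printf ("%u\n", demo_hash_plus_const_int (0xdeadbeefu, 0));
  return 0;
}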
/* Hash an rtx. Return 0 if we couldn't hash the rtx.
For registers and memory locations, we look up their cselib_val structure
and return its VALUE element.
offset = -offset;
/* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
like (mem:MEMMODE (plus (reg) (const_int I))). */
- hash += (unsigned) PLUS - (unsigned)code
- + cselib_hash_rtx (XEXP (x, 0), create, memmode)
- + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
- create, memmode);
+ if (GET_MODE (x) == Pmode
+ && (REG_P (XEXP (x, 0))
+ || MEM_P (XEXP (x, 0))
+ || GET_CODE (XEXP (x, 0)) == VALUE))
+ {
+ HOST_WIDE_INT c;
+ if (offset.is_constant (&c))
+ return cselib_hash_plus_const_int (XEXP (x, 0),
+ trunc_int_for_mode (c, Pmode),
+ create, memmode);
+ }
+ hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
+ + cselib_hash_rtx (XEXP (x, 0), create, memmode)
+ + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
+ create, memmode));
return hash ? hash : 1 + (unsigned) PLUS;
case PRE_MODIFY:
break;
+ case PLUS:
+ if (GET_MODE (x) == Pmode
+ && (REG_P (XEXP (x, 0))
+ || MEM_P (XEXP (x, 0))
+ || GET_CODE (XEXP (x, 0)) == VALUE)
+ && CONST_INT_P (XEXP (x, 1)))
+ return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
+ create, memmode);
+ break;
+
default:
break;
}
gcc_assert (memmode != VOIDmode);
return cselib_subst_to_values (XEXP (x, 0), memmode);
+ case PLUS:
+ if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
+ {
+ rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
+ if (GET_CODE (t) == VALUE)
+ for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
+ l; l = l->next)
+ if (GET_CODE (l->loc) == PLUS
+ && GET_CODE (XEXP (l->loc, 0)) == VALUE
+ && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
+ && CONST_INT_P (XEXP (l->loc, 1)))
+ return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
+ if (t != XEXP (x, 0))
+ {
+ copy = shallow_copy_rtx (x);
+ XEXP (copy, 0) = t;
+ }
+ return copy;
+ }
+
default:
break;
}
}
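Hand-worked again (invented VALUEs): substituting into (plus:P (reg r1) (const_int 8)), where (reg r1) maps to V4 and V4 has a location (plus (value V1) (const_int -16)) with SP_DERIVED_VALUE_P (V1):

  (plus:P (reg r1) (const_int 8))
    -> XEXP 0 substitutes to (value V4)
    -> V4's loc (plus (value V1) (const_int -16)) matches, so
       plus_constant folds to (plus (value V1) (const_int -8))

i.e. the sum is re-expressed directly against the sp-derived anchor instead of wrapping one VALUE around another.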
e = new_cselib_val (next_uid, GET_MODE (x), x);
+ if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
+ SP_DERIVED_VALUE_P (e->val_rtx) = 1;
new_elt_loc_list (e, x);
scalar_int_mode int_mode;
the hash table is inconsistent until we do so, and
cselib_subst_to_values will need to do lookups. */
*slot = e;
- new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
+ rtx v = cselib_subst_to_values (x, memmode);
+
+ /* If cselib_preserve_constants is set, we might get an SP_DERIVED_VALUE_P
+ VALUE that is no longer in the hash tables.  */
+ if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
+ PRESERVED_VALUE_P (e->val_rtx) = 1;
+
+ new_elt_loc_list (e, v);
return e;
}
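This is where the anchor is created: the first lookup of the stack pointer itself produces the VALUE that the rest of the patch re-bases sp + offset addresses onto. A sketch of the flow under that reading (the VALUE number is invented):

  cselib_lookup (stack_pointer_rtx, Pmode, 1, VOIDmode)
    -> new VALUE V1 with SP_DERIVED_VALUE_P (V1) = 1
  cselib_lookup ((plus:P (reg sp) (const_int -16)), Pmode, 1, VOIDmode)
    -> a VALUE whose loc list contains (plus (value V1) (const_int -16))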
if (note)
return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
insn_info) == 0;
+
+ /* Punt on stack pushes; those don't have REG_INC notes, and we are
+ unprepared to deal with distribution of REG_ARGS_SIZE notes etc.  */
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
+ return false;
+ }
+
return true;
}
if (note)
return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
&insn_info) == 0;
+
+ /* Punt on stack pushes; those don't have REG_INC notes, and we are
+ unprepared to deal with distribution of REG_ARGS_SIZE notes etc.  */
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
+ return false;
+ }
+
return true;
}
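FOR_EACH_SUBRTX here is just a preorder walk over every sub-rtx of the pattern, punting as soon as any RTX_AUTOINC code shows up. Below is a standalone sketch of the same walk over a toy expression type (everything in it is hypothetical, not GCC API):

#include <stdbool.h>
#include <stdio.h>

/* Toy expression codes; DEMO_PRE_DEC plays the RTX_AUTOINC role that
   makes check_for_inc_dec punt above.  */
enum demo_code { DEMO_REG, DEMO_MEM, DEMO_PLUS, DEMO_PRE_DEC };

struct demo_expr
{
  enum demo_code code;
  struct demo_expr *ops[2];	/* Up to two operands; unused ones NULL.  */
};

/* Recursive stand-in for the FOR_EACH_SUBRTX walk: true iff any
   sub-expression is an auto-increment.  */
static bool
demo_contains_autoinc (const struct demo_expr *x)
{
  if (!x)
    return false;
  if (x->code == DEMO_PRE_DEC)
    return true;
  return (demo_contains_autoinc (x->ops[0])
	  || demo_contains_autoinc (x->ops[1]));
}

int
main (void)
{
  struct demo_expr sp = { DEMO_REG, { NULL, NULL } };
  struct demo_expr push = { DEMO_PRE_DEC, { &sp, NULL } };
  struct demo_expr mem = { DEMO_MEM, { &push, NULL } };
  printf ("punt: %d\n", demo_contains_autoinc (&mem));	/* punt: 1 */
  return 0;
}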
point. This does occasionally happen, see PR 37922. */
bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
- for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
- note_stores (this_insn, look_for_hardregs, regs_set);
+ for (this_insn = insns;
+ this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
+ {
+ if (insn_invalid_p (this_insn, false))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, " -- replacing the loaded MEM with ");
+ print_simple_rtl (dump_file, read_reg);
+ fprintf (dump_file, " led to an invalid instruction\n");
+ }
+ BITMAP_FREE (regs_set);
+ return false;
+ }
+ note_stores (this_insn, look_for_hardregs, regs_set);
+ }
bitmap_and_into (regs_set, regs_live);
if (!bitmap_empty_p (regs_set))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file,
- "abandoning replacement because sequence clobbers live hardregs:");
+ fprintf (dump_file, "abandoning replacement because sequence "
+ "clobbers live hardregs:");
df_print_regset (dump_file, regs_set);
}
BITMAP_FREE (regs_set);
}
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " -- replacing the MEM failed due to address "
+ "side-effects\n");
+ return false;
+ }
+ }
+
if (validate_change (read_insn->insn, loc, read_reg, 0))
{
deferred_change *change = deferred_change_pool.allocate ();