/* Common subexpression elimination library for GNU compiler.
- Copyright (C) 1987-2017 Free Software Foundation, Inc.
+ Copyright (C) 1987-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
-#include "params.h"
+#include "function-abi.h"
/* A list of cselib_val structures. */
struct elt_list
if (memmode == VOIDmode)
return x;
- *off = GEN_INT (-GET_MODE_SIZE (memmode));
+ *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
return XEXP (x, 0);
case PRE_INC:
if (memmode == VOIDmode)
return x;
- *off = GEN_INT (GET_MODE_SIZE (memmode));
+ *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
return XEXP (x, 0);
case PRE_MODIFY:
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
cselib_val *e;
+ poly_int64 offset;
int i, j;
enum rtx_code code;
const char *fmt;
int units;
rtx elt;
- units = CONST_VECTOR_NUNITS (x);
+ units = const_vector_encoded_nelts (x);
for (i = 0; i < units; ++i)
{
- elt = CONST_VECTOR_ELT (x, i);
+ elt = CONST_VECTOR_ENCODED_ELT (x, i);
hash += cselib_hash_rtx (elt, 0, memmode);
}
case PRE_INC:
/* We can't compute these without knowing the MEM mode. */
gcc_assert (memmode != VOIDmode);
- i = GET_MODE_SIZE (memmode);
+ offset = GET_MODE_SIZE (memmode);
if (code == PRE_DEC)
- i = -i;
+ offset = -offset;
/* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
like (mem:MEMMODE (plus (reg) (const_int I))). */
hash += (unsigned) PLUS - (unsigned)code
+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
- + cselib_hash_rtx (GEN_INT (i), create, memmode);
+ + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
+ create, memmode);
return hash ? hash : 1 + (unsigned) PLUS;
case PRE_MODIFY:
struct elt_list *l;
rtx copy = x;
int i;
+ poly_int64 offset;
switch (code)
{
case PRE_DEC:
case PRE_INC:
gcc_assert (memmode != VOIDmode);
- i = GET_MODE_SIZE (memmode);
+ offset = GET_MODE_SIZE (memmode);
if (code == PRE_DEC)
- i = -i;
+ offset = -offset;
return cselib_subst_to_values (plus_constant (GET_MODE (x),
- XEXP (x, 0), i),
+ XEXP (x, 0), offset),
memmode);
case PRE_MODIFY:
return ret;
}
+/* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */
+
+static void
+cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
+{
+ cselib_val *v = (*l)->elt;
+ if (*l == REG_VALUES (regno))
+ {
+ /* Maintain the invariant that the first entry of
+ REG_VALUES, if present, must be the value used to set
+ the register, or NULL. This is also nice because
+ then we won't push the same regno onto user_regs
+ multiple times. */
+ (*l)->elt = NULL;
+ l = &(*l)->next;
+ }
+ else
+ unchain_one_elt_list (l);
+
+ /* NOTE(review): presumably maps V to its canonical equivalent so the
+ reg->value unchaining below finds the location list that actually
+ holds REGNO -- confirm against canonical_cselib_val's contract. */
+ v = canonical_cselib_val (v);
+
+ /* Remember whether V had locations before we remove REGNO's, so we
+ can tell below whether this invalidation emptied the list. */
+ bool had_locs = v->locs != NULL;
+ rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
+
+ /* Now, we clear the mapping from value to reg. It must exist, so
+ this code will crash intentionally if it doesn't. */
+ for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
+ {
+ rtx x = (*p)->loc;
+
+ if (REG_P (x) && REGNO (x) == regno)
+ {
+ unchain_one_elt_loc_list (p);
+ break;
+ }
+ }
+
+ /* If removing REGNO left V with no locations and V is not preserved,
+ count it as useless (debug-only values are tallied separately). */
+ if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
+ {
+ if (setting_insn && DEBUG_INSN_P (setting_insn))
+ n_useless_debug_values++;
+ else
+ n_useless_values++;
+ }
+}
+
/* Invalidate any entries in reg_values that overlap REGNO. This is called
if REGNO is changing. MODE is the mode of the assignment to REGNO, which
is used to determine how many hard registers are being changed. If MODE
while (*l)
{
cselib_val *v = (*l)->elt;
- bool had_locs;
- rtx_insn *setting_insn;
- struct elt_loc_list **p;
unsigned int this_last = i;
if (i < FIRST_PSEUDO_REGISTER && v != NULL)
}
/* We have an overlap. */
- if (*l == REG_VALUES (i))
- {
- /* Maintain the invariant that the first entry of
- REG_VALUES, if present, must be the value used to set
- the register, or NULL. This is also nice because
- then we won't push the same regno onto user_regs
- multiple times. */
- (*l)->elt = NULL;
- l = &(*l)->next;
- }
- else
- unchain_one_elt_list (l);
-
- v = canonical_cselib_val (v);
-
- had_locs = v->locs != NULL;
- setting_insn = v->locs ? v->locs->setting_insn : NULL;
-
- /* Now, we clear the mapping from value to reg. It must exist, so
- this code will crash intentionally if it doesn't. */
- for (p = &v->locs; ; p = &(*p)->next)
- {
- rtx x = (*p)->loc;
-
- if (REG_P (x) && REGNO (x) == i)
- {
- unchain_one_elt_loc_list (p);
- break;
- }
- }
-
- if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
- {
- if (setting_insn && DEBUG_INSN_P (setting_insn))
- n_useless_debug_values++;
- else
- n_useless_values++;
- }
+ cselib_invalidate_regno_val (i, l);
}
}
}
p = &(*p)->next;
continue;
}
- if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
+ if (num_mems < param_max_cselib_memory_locations
&& ! canon_anti_dependence (x, false, mem_rtx,
GET_MODE (mem_rtx), mem_addr))
{
*vp = &dummy_val;
}
-/* Invalidate DEST, which is being assigned to or clobbered. */
+/* Invalidate DEST. */
void
cselib_invalidate_rtx (rtx dest)
/* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
static void
-cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
+cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
void *data ATTRIBUTE_UNUSED)
{
cselib_invalidate_rtx (dest);
int n_sets = 0;
int i;
struct cselib_set sets[MAX_SETS];
- rtx body = PATTERN (insn);
rtx cond = 0;
int n_sets_before_autoinc;
+ int n_strict_low_parts = 0;
struct cselib_record_autoinc_data data;
- body = PATTERN (insn);
+ rtx body = PATTERN (insn);
if (GET_CODE (body) == COND_EXEC)
{
cond = COND_EXEC_TEST (body);
for (i = 0; i < n_sets; i++)
{
rtx dest = sets[i].dest;
+ rtx orig = dest;
/* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
the low part after invalidating any knowledge about larger modes. */
else
sets[i].dest_addr_elt = 0;
}
+
+ /* Improve handling of STRICT_LOW_PART if the current value is known
+ to be const0_rtx, then the low bits will be set to dest and higher
+ bits will remain zero. Used in code like:
+
+ {di:SI=0;clobber flags:CC;}
+ flags:CCNO=cmp(bx:SI,0)
+ strict_low_part(di:QI)=flags:CCNO<=0
+
+ where we can note both that di:QI=flags:CCNO<=0 and
+ also that because di:SI is known to be 0 and strict_low_part(di:QI)
+ preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0). */
+ scalar_int_mode mode;
+ if (dest != orig
+ && cselib_record_sets_hook
+ && REG_P (dest)
+ && HARD_REGISTER_P (dest)
+ && sets[i].src_elt
+ && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
+ && n_sets + n_strict_low_parts < MAX_SETS)
+ {
+ opt_scalar_int_mode wider_mode_iter;
+ FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
+ {
+ scalar_int_mode wider_mode = wider_mode_iter.require ();
+ if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
+ break;
+
+ rtx reg = gen_lowpart (wider_mode, dest);
+ if (!REG_P (reg))
+ break;
+
+ cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
+ if (!v)
+ continue;
+
+ struct elt_loc_list *l;
+ for (l = v->locs; l; l = l->next)
+ if (l->loc == const0_rtx)
+ break;
+
+ if (!l)
+ continue;
+
+ sets[n_sets + n_strict_low_parts].dest = reg;
+ sets[n_sets + n_strict_low_parts].src = dest;
+ sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
+ break;
+ }
+ }
}
if (cselib_record_sets_hook)
/* Invalidate all locations written by this insn. Note that the elts we
looked up in the previous loop aren't affected, just some of their
locations may go away. */
- note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
+ note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
for (i = n_sets_before_autoinc; i < n_sets; i++)
cselib_invalidate_rtx (sets[i].dest);
|| (MEM_P (dest) && cselib_record_memory))
cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
}
+
+ /* And deal with STRICT_LOW_PART. */
+ for (i = 0; i < n_strict_low_parts; i++)
+ {
+ if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
+ continue;
+ machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
+ cselib_val *v
+ = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
+ cselib_preserve_value (v);
+ rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
+ sets[n_sets + i].src_elt->val_rtx);
+ cselib_add_permanent_equiv (v, r, insn);
+ }
}
/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
return true;
}
+/* V is one of the values in REG_VALUES (REGNO). Return true if it
+ would be invalidated by CALLEE_ABI. */
+
+static bool
+cselib_invalidated_by_call_p (const function_abi &callee_abi,
+ unsigned int regno, cselib_val *v)
+{
+ machine_mode mode = GET_MODE (v->val_rtx);
+ if (mode == VOIDmode)
+ {
+ /* V has no mode of its own (e.g. a modeless constant); fall back
+ to the mode of the value that set REGNO, i.e. the first entry
+ of REG_VALUES (regno). */
+ v = REG_VALUES (regno)->elt;
+ if (!v)
+ /* If we don't know what the mode of the constant value is, and we
+ don't know what mode the register was set in, conservatively
+ assume that the register is clobbered. The value's going to be
+ essentially useless in this case anyway. */
+ return true;
+ mode = GET_MODE (v->val_rtx);
+ }
+ /* Ask the callee's ABI whether a call clobbers REGNO when it holds a
+ value of MODE (this also covers partial clobbers of wide modes). */
+ return callee_abi.clobbers_reg_p (mode, regno);
+}
+
/* Record the effects of INSN. */
void
memory. */
if (CALL_P (insn))
{
+ function_abi callee_abi = insn_callee_abi (insn);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_regs[i]
- || (REG_VALUES (i) && REG_VALUES (i)->elt
- && (targetm.hard_regno_call_part_clobbered
- (i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
- cselib_invalidate_regno (i, reg_raw_mode[i]);
+ {
+ elt_list **l = ®_VALUES (i);
+ while (*l)
+ {
+ cselib_val *v = (*l)->elt;
+ if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
+ cselib_invalidate_regno_val (i, l);
+ else
+ l = &(*l)->next;
+ }
+ }
/* Since it is not clear how cselib is going to be used, be
conservative here and treat looping pure or const functions
for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
- /* Flush evertything on setjmp. */
+
+ /* Flush everything on setjmp. */
if (cselib_preserve_constants
&& find_reg_note (insn, REG_SETJMP, NULL))
{
}
used_regs = XNEWVEC (unsigned int, cselib_nregs);
n_used_regs = 0;
- cselib_hash_table = new hash_table<cselib_hasher> (31);
+ /* FIXME: enable sanitization (PR87845) */
+ cselib_hash_table
+ = new hash_table<cselib_hasher> (31, /* ggc */ false,
+ /* sanitize_eq_and_hash */ false);
if (cselib_preserve_constants)
- cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
+ cselib_preserved_hash_table
+ = new hash_table<cselib_hasher> (31, /* ggc */ false,
+ /* sanitize_eq_and_hash */ false);
next_uid = 1;
}