/* Common subexpression elimination library for GNU compiler.
- Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
- 2012 Free Software Foundation, Inc.
+ Copyright (C) 1987-2014 Free Software Foundation, Inc.
This file is part of GCC.
#include "tm.h"
#include "rtl.h"
+#include "tree.h"/* FIXME: For hashing DEBUG_EXPR & friends. */
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
-#include "output.h"
#include "ggc.h"
-#include "hashtab.h"
-#include "tree-pass.h"
+#include "hash-table.h"
+#include "dumpfile.h"
#include "cselib.h"
+#include "valtrack.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
cselib_val *elt;
};
+/* See the documentation of cselib_find_slot below. */
+static enum machine_mode find_slot_memmode;
+
static bool cselib_record_memory;
static bool cselib_preserve_constants;
-static int entry_and_rtx_equal_p (const void *, const void *);
-static hashval_t get_value_hash (const void *);
+static bool cselib_any_perm_equivs;
+static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
-static int discard_useless_locs (void **, void *);
-static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
this involves walking the table entries for a given value and comparing
the locations of the entries with the rtx we are looking up. */
+/* Hash table descriptor for cselib_val entries.  Deriving from
+   typed_noop_remove means the table itself never frees entries.  */
+struct cselib_hasher : typed_noop_remove <cselib_val>
+{
+ typedef cselib_val value_type;
+ typedef rtx_def compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+};
+
+/* The hash function for our hash table. The value is always computed with
+ cselib_hash_rtx when adding an element; this function just extracts the
+ hash value from a cselib_val structure. */
+
+inline hashval_t
+cselib_hasher::hash (const value_type *v)
+{
+ return v->hash;
+}
+
+/* The equality test for our hash table. The first argument V is a table
+ element (i.e. a cselib_val), while the second arg X is an rtx. We know
+ that all callers of find_slot_with_hash will wrap CONST_INTs into a
+ CONST of an appropriate mode. */
+
+inline bool
+cselib_hasher::equal (const value_type *v, const compare_type *x_arg)
+{
+ struct elt_loc_list *l;
+ rtx x = CONST_CAST_RTX (x_arg);
+ enum machine_mode mode = GET_MODE (x);
+
+ gcc_assert (!CONST_SCALAR_INT_P (x) && GET_CODE (x) != CONST_FIXED);
+
+ if (mode != GET_MODE (v->val_rtx))
+ return false;
+
+ /* Unwrap X if necessary. */
+ if (GET_CODE (x) == CONST
+ && (CONST_SCALAR_INT_P (XEXP (x, 0))
+ || GET_CODE (XEXP (x, 0)) == CONST_FIXED))
+ x = XEXP (x, 0);
+
+ /* A VALUE rtx can only match its own table entry.  */
+ if (GET_CODE (x) == VALUE)
+ return x == v->val_rtx;
+
+ /* We don't guarantee that distinct rtx's have different hash values,
+ so we need to do a comparison.  Autoinc rtxs in X are resolved via
+ the global FIND_SLOT_MEMMODE set up by cselib_find_slot.  */
+ for (l = v->locs; l; l = l->next)
+ if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
+ {
+ /* See promote_debug_loc for why a match may promote L.  */
+ promote_debug_loc (l);
+ return true;
+ }
+
+ return false;
+}
+
/* A table that enables us to look up elts by their value. */
-static htab_t cselib_hash_table;
+static hash_table <cselib_hasher> cselib_hash_table;
+
+/* A table to hold preserved values. */
+static hash_table <cselib_hasher> cselib_preserved_hash_table;
/* This is a global so we don't have to pass this through every function.
It is used in new_elt_loc_list to set SETTING_INSN. */
int n_sets);
#define PRESERVED_VALUE_P(RTX) \
- (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
+ (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
+
+#define SP_BASED_VALUE_P(RTX) \
+ (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
\f
next = val->locs = CSELIB_VAL_PTR (loc)->locs;
}
+ if (CSELIB_VAL_PTR (loc)->addr_list)
+ {
+ /* Bring in addr_list into canonical node. */
+ struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
+ while (last->next)
+ last = last->next;
+ last->next = val->addr_list;
+ val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
+ CSELIB_VAL_PTR (loc)->addr_list = NULL;
+ }
+
+ if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
+ && val->next_containing_mem == NULL)
+ {
+ /* Add VAL to the containing_mem list after LOC. LOC will
+ be removed when we notice it doesn't contain any
+ MEMs. */
+ val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
+ CSELIB_VAL_PTR (loc)->next_containing_mem = val;
+ }
+
/* Chain LOC back to VAL. */
el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
el->loc = val->val_rtx;
static inline void
promote_debug_loc (struct elt_loc_list *l)
{
- if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
+ if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
&& (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
{
n_debug_values--;
cselib_reset_table (1);
}
-/* Remove from hash table all VALUEs except constants
- and function invariants. */
+/* Return TRUE if V is a constant, a function invariant or a VALUE
+ equivalence; FALSE otherwise. */
-static int
-preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
+static bool
+invariant_or_equiv_p (cselib_val *v)
{
- cselib_val *v = (cselib_val *)*x;
struct elt_loc_list *l;
+ if (v == cfa_base_preserved_val)
+ return true;
+
+ /* Keep VALUE equivalences around. */
+ for (l = v->locs; l; l = l->next)
+ if (GET_CODE (l->loc) == VALUE)
+ return true;
+
if (v->locs != NULL
&& v->locs->next == NULL)
{
if (CONSTANT_P (v->locs->loc)
&& (GET_CODE (v->locs->loc) != CONST
|| !references_value_p (v->locs->loc, 0)))
- return 1;
+ return true;
/* Although a debug expr may be bound to different expressions,
we can preserve it as if it was constant, to get unification
and proper merging within var-tracking. */
|| GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
|| GET_CODE (v->locs->loc) == ENTRY_VALUE
|| GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
- return 1;
- if (cfa_base_preserved_val)
- {
- if (v == cfa_base_preserved_val)
- return 1;
- if (GET_CODE (v->locs->loc) == PLUS
- && CONST_INT_P (XEXP (v->locs->loc, 1))
- && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
- return 1;
- }
+ return true;
+
+ /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
+ if (GET_CODE (v->locs->loc) == PLUS
+ && CONST_INT_P (XEXP (v->locs->loc, 1))
+ && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
+ && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
+ return true;
}
- /* Keep VALUE equivalences around. */
- for (l = v->locs; l; l = l->next)
- if (GET_CODE (l->loc) == VALUE)
- return 1;
+ return false;
+}
+
+/* Remove from hash table all VALUEs except constants, function
+ invariants and VALUE equivalences.  Invariant values are moved
+ into CSELIB_PRESERVED_HASH_TABLE; everything leaves the main
+ table.  Called via hash_table::traverse.  */
+
+int
+preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
+{
+ cselib_val *v = *x;
+
+ if (invariant_or_equiv_p (v))
+ {
+ cselib_val **slot
+ = cselib_preserved_hash_table.find_slot_with_hash (v->val_rtx,
+ v->hash, INSERT);
+ /* Each value may be inserted into the preserved table only once.  */
+ gcc_assert (!*slot);
+ *slot = v;
+ }
+
+ cselib_hash_table.clear_slot (x);
- htab_clear_slot (cselib_hash_table, x);
 return 1;
}
}
if (cselib_preserve_constants)
- htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
+ cselib_hash_table.traverse <void *, preserve_constants_and_equivs> (NULL);
else
- htab_empty (cselib_hash_table);
+ {
+ cselib_hash_table.empty ();
+ gcc_checking_assert (!cselib_any_perm_equivs);
+ }
n_useless_values = 0;
n_useless_debug_values = 0;
return next_uid;
}
-/* See the documentation of cselib_find_slot below. */
-static enum machine_mode find_slot_memmode;
-
/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
 INSERTing if requested. When X is part of the address of a MEM,
 MEMMODE should specify the mode of the MEM. While searching the
 table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
 in X can be resolved. */
-static void **
+static cselib_val **
cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
 enum machine_mode memmode)
{
- void **slot;
+ cselib_val **slot = NULL;
 find_slot_memmode = memmode;
- slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
+ /* Preserved values are consulted first, but never inserted here
+ (NO_INSERT); new entries always go into the main table.  */
+ if (cselib_preserve_constants)
+ slot = cselib_preserved_hash_table.find_slot_with_hash (x, hash,
+ NO_INSERT);
+ if (!slot)
+ slot = cselib_hash_table.find_slot_with_hash (x, hash, insert);
 find_slot_memmode = VOIDmode;
 return slot;
}
-/* The equality test for our hash table. The first argument ENTRY is a table
- element (i.e. a cselib_val), while the second arg X is an rtx. We know
- that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
- CONST of an appropriate mode. */
-
-static int
-entry_and_rtx_equal_p (const void *entry, const void *x_arg)
-{
- struct elt_loc_list *l;
- const cselib_val *const v = (const cselib_val *) entry;
- rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
- enum machine_mode mode = GET_MODE (x);
-
- gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
- && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
-
- if (mode != GET_MODE (v->val_rtx))
- return 0;
-
- /* Unwrap X if necessary. */
- if (GET_CODE (x) == CONST
- && (CONST_INT_P (XEXP (x, 0))
- || GET_CODE (XEXP (x, 0)) == CONST_FIXED
- || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
- x = XEXP (x, 0);
-
- /* We don't guarantee that distinct rtx's have different hash values,
- so we need to do a comparison. */
- for (l = v->locs; l; l = l->next)
- if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
- {
- promote_debug_loc (l);
- return 1;
- }
-
- return 0;
-}
-
-/* The hash function for our hash table. The value is always computed with
- cselib_hash_rtx when adding an element; this function just extracts the
- hash value from a cselib_val structure. */
-
-static hashval_t
-get_value_hash (const void *entry)
-{
- const cselib_val *const v = (const cselib_val *) entry;
- return v->hash;
-}
-
/* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
only return true for values which point to a cselib_val whose value
element has been set to zero, which implies the cselib_val will be
values (i.e. values without any location). Called through
htab_traverse. */
-static int
-discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
+int
+discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
- cselib_val *v = (cselib_val *)*x;
+ cselib_val *v = *x;
struct elt_loc_list **p = &v->locs;
bool had_locs = v->locs != NULL;
rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
/* If X is a value with no locations, remove it from the hashtable. */
-static int
-discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
+int
+discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
- cselib_val *v = (cselib_val *)*x;
+ cselib_val *v = *x;
if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
{
cselib_discard_hook (v);
CSELIB_VAL_PTR (v->val_rtx) = NULL;
- htab_clear_slot (cselib_hash_table, x);
+ cselib_hash_table.clear_slot (x);
unchain_one_value (v);
n_useless_values--;
}
do
{
values_became_useless = 0;
- htab_traverse (cselib_hash_table, discard_useless_locs, 0);
+ cselib_hash_table.traverse <void *, discard_useless_locs> (NULL);
}
while (values_became_useless);
p = &first_containing_mem;
for (v = *p; v != &dummy_val; v = v->next_containing_mem)
- if (v->locs)
+ if (v->locs && v == canonical_cselib_val (v))
{
*p = v;
p = &(*p)->next_containing_mem;
n_debug_values -= n_useless_debug_values;
n_useless_debug_values = 0;
- htab_traverse (cselib_hash_table, discard_useless_values, 0);
+ cselib_hash_table.traverse <void *, discard_useless_values> (NULL);
gcc_assert (!n_useless_values);
}
gcc_assert (first_containing_mem == &dummy_val);
}
+/* Arrange for a value to be marked as based on stack pointer
+ for find_base_term purposes. */
+
+void
+cselib_set_value_sp_based (cselib_val *v)
+{
+ /* Stored in the VALUE rtx's "jump" flag; see SP_BASED_VALUE_P.  */
+ SP_BASED_VALUE_P (v->val_rtx) = 1;
+}
+
+/* Test whether a value is based on stack pointer for
+ find_base_term purposes. */
+
+bool
+cselib_sp_based_value_p (cselib_val *v)
+{
+ /* Reads back the flag set by cselib_set_value_sp_based.  */
+ return SP_BASED_VALUE_P (v->val_rtx);
+}
+
/* Return the mode in which a register was last set. If X is not a
register, return its mode. If the mode in which the register was
set is not known, or the value was already clobbered, return
static rtx
wrap_constant (enum machine_mode mode, rtx x)
{
- if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
- && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
+ if (!CONST_SCALAR_INT_P (x) && GET_CODE (x) != CONST_FIXED)
return x;
gcc_assert (mode != VOIDmode);
return gen_rtx_CONST (mode, x);
switch (code)
{
+ case VALUE:
+ e = CSELIB_VAL_PTR (x);
+ return e->hash;
+
case MEM:
case REG:
e = cselib_lookup (x, GET_MODE (x), create, memmode);
{
struct elt_loc_list *l;
+ addr_elt = canonical_cselib_val (addr_elt);
mem_elt = canonical_cselib_val (mem_elt);
/* Avoid duplicates. */
{
enum machine_mode mode = GET_MODE (x);
enum machine_mode addr_mode;
- void **slot;
+ cselib_val **slot;
cselib_val *addr;
cselib_val *mem_elt;
struct elt_list *l;
if (! addr)
return 0;
+ addr = canonical_cselib_val (addr);
/* Find a value that describes a value of our mode at that address. */
for (l = addr->addr_list; l; l = l->next)
if (GET_MODE (l->elt->val_rtx) == mode)
return mem_elt;
}
-/* Search thru the possible substitutions in P. We prefer a non reg
+/* Search through the possible substitutions in P. We prefer a non reg
substitution because this allows us to expand the tree further. If
we find, just a reg, take the lowest regno. There may be several
non-reg results, we just take the first one because they will all
unsigned int regno = UINT_MAX;
struct elt_loc_list *p_in = p;
- for (; p; p = p -> next)
+ for (; p; p = p->next)
{
+ /* Return these right away to avoid returning stack pointer based
+ expressions for frame pointer and vice versa, which is something
+ that would confuse DSE. See the comment in cselib_expand_value_rtx_1
+ for more details. */
+ if (REG_P (p->loc)
+ && (REGNO (p->loc) == STACK_POINTER_REGNUM
+ || REGNO (p->loc) == FRAME_POINTER_REGNUM
+ || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
+ || REGNO (p->loc) == cfa_base_preserved_regno))
+ return p->loc;
/* Avoid infinite recursion trying to expand a reg into a
the same reg. */
if ((REG_P (p->loc))
}
}
- case CONST_INT:
- case CONST_DOUBLE:
- case CONST_VECTOR:
+ CASE_CONST_ANY:
case SYMBOL_REF:
case CODE_LABEL:
case PC:
break;
return e->val_rtx;
- case CONST_DOUBLE:
- case CONST_VECTOR:
- case CONST_INT:
- case CONST_FIXED:
+ CASE_CONST_ANY:
return x;
case PRE_DEC:
i = GET_MODE_SIZE (memmode);
if (code == PRE_DEC)
i = -i;
- return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
+ return cselib_subst_to_values (plus_constant (GET_MODE (x),
+ XEXP (x, 0), i),
memmode);
case PRE_MODIFY:
return copy;
}
+/* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
+
+rtx
+cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
+{
+ rtx ret;
+ /* CSELIB_CURRENT_INSN is a global consulted during substitution
+ (e.g. by new_elt_loc_list); nesting is not allowed.  */
+ gcc_assert (!cselib_current_insn);
+ cselib_current_insn = insn;
+ ret = cselib_subst_to_values (x, memmode);
+ cselib_current_insn = NULL;
+ return ret;
+}
+
/* Look up the rtl expression X in our tables and return the value it
has. If CREATE is zero, we return NULL if we don't know the value.
Otherwise, we create a new one if possible, using mode MODE if X
cselib_lookup_1 (rtx x, enum machine_mode mode,
int create, enum machine_mode memmode)
{
- void **slot;
+ cselib_val **slot;
cselib_val *e;
unsigned int hashval;
/* We have to fill the slot before calling cselib_subst_to_values:
the hash table is inconsistent until we do so, and
cselib_subst_to_values will need to do lookups. */
- *slot = (void *) e;
+ *slot = e;
new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
return e;
}
}
}
\f
-/* Return 1 if X has a value that can vary even between two
- executions of the program. 0 means X can be compared reliably
- against certain constants or near-constants. */
-
-static bool
-cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
-{
- /* We actually don't need to verify very hard. This is because
- if X has actually changed, we invalidate the memory anyway,
- so assume that all common memory addresses are
- invariant. */
- return 0;
-}
-
/* Invalidate any locations in the table which are changed because of a
store to MEM_RTX. If this is called because of a non-const call
instruction, MEM_RTX is (mem:BLK const0_rtx). */
continue;
}
if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
- && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
- x, NULL_RTX, cselib_rtx_varies_p))
+ && ! canon_anti_dependence (x, false, mem_rtx,
+ GET_MODE (mem_rtx), mem_addr))
{
has_mem = true;
num_mems++;
/* We must have a mapping from this MEM's address to the
value (E). Remove that, too. */
addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
+ addr = canonical_cselib_val (addr);
+ gcc_checking_assert (v == canonical_cselib_val (v));
mem_chain = &addr->addr_list;
for (;;)
{
- if ((*mem_chain)->elt == v)
+ cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
+
+ if (canon == v)
{
unchain_one_elt_list (mem_chain);
break;
}
+ /* Record canonicalized elt. */
+ (*mem_chain)->elt = canon;
+
mem_chain = &(*mem_chain)->next;
}
if (nelt != elt)
{
+ cselib_any_perm_equivs = true;
+
if (!PRESERVED_VALUE_P (nelt->val_rtx))
cselib_preserve_value (nelt);
cselib_current_insn = save_cselib_current_insn;
}
+/* Return TRUE if any permanent equivalences have been recorded since
+ the table was last initialized. */
+bool
+cselib_have_permanent_equivalences (void)
+{
+ /* Set when a permanent equivalence is recorded; cleared again at
+ cselib_init / cselib_finish time.  */
+ return cselib_any_perm_equivs;
+}
+
/* There is no good way to determine how many elements there can be
in a PARALLEL. Since it's fairly cheap, use a really large number. */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
if (MEM_P (dest))
{
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
+ enum machine_mode address_mode = get_address_mode (dest);
sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
address_mode, 1,
}
}
+/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
+
+bool
+fp_setter_insn (rtx insn)
+{
+ rtx expr, pat = NULL_RTX;
+
+ /* Only frame-related insns can set up the frame pointer.  */
+ if (!RTX_FRAME_RELATED_P (insn))
+ return false;
+
+ /* Prefer the pattern attached to a REG_FRAME_RELATED_EXPR note,
+ when present, over the insn's own pattern.  */
+ expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
+ if (expr)
+ pat = XEXP (expr, 0);
+ if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
+ return false;
+
+ /* Don't return true for frame pointer restores in the epilogue. */
+ if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
+ return false;
+ return true;
+}
+
/* Record the effects of INSN. */
void
cselib_current_insn = insn;
- /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
- if (LABEL_P (insn)
- || (CALL_P (insn)
- && find_reg_note (insn, REG_SETJMP, NULL))
- || (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
- && MEM_VOLATILE_P (PATTERN (insn))))
+ /* Forget everything at a CODE_LABEL, a volatile insn, or a setjmp. */
+ if ((LABEL_P (insn)
+ || (CALL_P (insn)
+ && find_reg_note (insn, REG_SETJMP, NULL))
+ || (NONJUMP_INSN_P (insn)
+ && volatile_insn_p (PATTERN (insn))))
+ && !cselib_preserve_constants)
{
cselib_reset_table (next_uid);
cselib_current_insn = NULL_RTX;
/* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
after we have processed the insn. */
if (CALL_P (insn))
- for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
- if (GET_CODE (XEXP (x, 0)) == CLOBBER)
- cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
+ {
+ for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
+ if (GET_CODE (XEXP (x, 0)) == CLOBBER)
+ cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
+ /* Flush everything on setjmp. */
+ if (cselib_preserve_constants
+ && find_reg_note (insn, REG_SETJMP, NULL))
+ {
+ cselib_preserve_only_values ();
+ cselib_reset_table (next_uid);
+ }
+ }
+
+ /* On setter of the hard frame pointer if frame_pointer_needed,
+ invalidate stack_pointer_rtx, so that sp and {,h}fp based
+ VALUEs are distinct. */
+ if (reload_completed
+ && frame_pointer_needed
+ && fp_setter_insn (insn))
+ cselib_invalidate_rtx (stack_pointer_rtx);
cselib_current_insn = NULL_RTX;
quadratic behavior for very large hashtables with very few
useless elements. */
&& ((unsigned int)n_useless_values
- > (cselib_hash_table->n_elements
- - cselib_hash_table->n_deleted
- - n_debug_values) / 4))
+ > (cselib_hash_table.elements () - n_debug_values) / 4))
remove_useless_values ();
}
value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
+ cselib_any_perm_equivs = false;
/* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
see canon_true_dependence. This is only created once. */
}
used_regs = XNEWVEC (unsigned int, cselib_nregs);
n_used_regs = 0;
- cselib_hash_table = htab_create (31, get_value_hash,
- entry_and_rtx_equal_p, NULL);
+ cselib_hash_table.create (31);
+ if (cselib_preserve_constants)
+ cselib_preserved_hash_table.create (31);
next_uid = 1;
}
void
cselib_finish (void)
{
+ bool preserved = cselib_preserve_constants;
cselib_discard_hook = NULL;
cselib_preserve_constants = false;
+ cselib_any_perm_equivs = false;
cfa_base_preserved_val = NULL;
cfa_base_preserved_regno = INVALID_REGNUM;
free_alloc_pool (elt_list_pool);
free_alloc_pool (cselib_val_pool);
free_alloc_pool (value_pool);
cselib_clear_table ();
- htab_delete (cselib_hash_table);
+ cselib_hash_table.dispose ();
+ if (preserved)
+ cselib_preserved_hash_table.dispose ();
free (used_regs);
used_regs = 0;
- cselib_hash_table = 0;
n_useless_values = 0;
n_useless_debug_values = 0;
n_debug_values = 0;
next_uid = 0;
}
-/* Dump the cselib_val *X to FILE *info. */
+/* Dump the cselib_val *X to FILE *OUT. */
-static int
-dump_cselib_val (void **x, void *info)
+int
+dump_cselib_val (cselib_val **x, FILE *out)
{
- cselib_val *v = (cselib_val *)*x;
- FILE *out = (FILE *)info;
+ cselib_val *v = *x;
bool need_lf = true;
print_inline_rtx (out, v->val_rtx, 0);
fputs (" locs:", out);
do
{
- fprintf (out, "\n from insn %i ",
- INSN_UID (l->setting_insn));
+ if (l->setting_insn)
+ fprintf (out, "\n from insn %i ",
+ INSN_UID (l->setting_insn));
+ else
+ fprintf (out, "\n ");
print_inline_rtx (out, l->loc, 4);
}
while ((l = l->next));
dump_cselib_table (FILE *out)
{
fprintf (out, "cselib hash table:\n");
- htab_traverse (cselib_hash_table, dump_cselib_val, out);
+ cselib_hash_table.traverse <FILE *, dump_cselib_val> (out);
+ fprintf (out, "cselib preserved hash table:\n");
+ cselib_preserved_hash_table.traverse <FILE *, dump_cselib_val> (out);
if (first_containing_mem != &dummy_val)
{
fputs ("first mem ", out);