+2014-08-22 David Malcolm <dmalcolm@redhat.com>
+
+ * cse.c (struct qty_table_elem): Strengthen field "const_insn"
+ from rtx to rtx_insn *.
+ (struct change_cc_mode_args): Likewise for field "insn".
+ (this_insn): Strengthen from rtx to rtx_insn *.
+ (make_new_qty): Replace use of NULL_RTX with NULL when dealing
+ with insn.
+ (validate_canon_reg): Strengthen param "insn" from rtx to
+ rtx_insn *.
+ (canon_reg): Likewise.
+ (fold_rtx): Likewise. Replace use of NULL_RTX with NULL when
+ dealing with insn.
+ (record_jump_equiv): Strengthen param "insn" from rtx to
+ rtx_insn *.
+ (try_back_substitute_reg): Likewise, also for locals "prev",
+ "bb_head".
+ (find_sets_in_insn): Likewise for param "insn".
+ (canonicalize_insn): Likewise.
+ (cse_insn): Likewise. Add a checked cast.
+ (invalidate_from_clobbers): Likewise for param "insn".
+ (invalidate_from_sets_and_clobbers): Likewise.
+ (cse_process_notes_1): Replace use of NULL_RTX with NULL when
+ dealing with insn.
+ (cse_prescan_path): Strengthen local "insn" from rtx to
+ rtx_insn *.
+ (cse_extended_basic_block): Likewise for locals "insn" and
+ "prev_insn".
+ (cse_main): Likewise for param "f".
+ (check_for_label_ref): Likewise for local "insn".
+ (set_live_p): Likewise for second param ("insn").
+ (insn_live_p): Likewise for first param ("insn") and for local
+ "next".
+ (cse_change_cc_mode_insn): Likewise for first param "insn".
+ (cse_change_cc_mode_insns): Likewise for first and second params
+ "start" and "end".
+ (cse_cc_succs): Likewise for locals "insns", "last_insns", "insn"
+ and "end".
+ (cse_condition_code_reg): Likewise for locals "last_insn", "insn",
+ "cc_src_insn".
+
2014-08-22 Alexander Ivchenko <alexander.ivchenko@intel.com>
Maxim Kuznetsov <maxim.kuznetsov@intel.com>
Anna Tikhonova <anna.tikhonova@intel.com>
struct qty_table_elem
{
rtx const_rtx;
- rtx const_insn;
+ rtx_insn *const_insn;
rtx comparison_const;
int comparison_qty;
unsigned int first_reg, last_reg;
cse_change_cc_mode. */
struct change_cc_mode_args
{
- rtx insn;
+ rtx_insn *insn;
rtx newreg;
};
/* Insn being scanned. */
-static rtx this_insn;
+static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;
/* Index by register number, gives the number of the next (or
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);
-static rtx canon_reg (rtx, rtx);
+static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
enum machine_mode *,
enum machine_mode *);
-static rtx fold_rtx (rtx, rtx);
+static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
-static void record_jump_equiv (rtx, bool);
+static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
int);
-static void cse_insn (rtx);
+static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
-static void invalidate_from_clobbers (rtx);
-static void invalidate_from_sets_and_clobbers (rtx);
+static void invalidate_from_clobbers (rtx_insn *);
+static void invalidate_from_sets_and_clobbers (rtx_insn *);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static int check_for_label_ref (rtx *, void *);
static int check_dependence (rtx *, void *);
static void flush_hash_table (void);
-static bool insn_live_p (rtx, int *);
-static bool set_live_p (rtx, rtx, int *);
+static bool insn_live_p (rtx_insn *, int *);
+static bool set_live_p (rtx, rtx_insn *, int *);
static int cse_change_cc_mode (rtx *, void *);
-static void cse_change_cc_mode_insn (rtx, rtx);
-static void cse_change_cc_mode_insns (rtx, rtx, rtx);
+static void cse_change_cc_mode_insn (rtx_insn *, rtx);
+static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
bool);
\f
ent->first_reg = reg;
ent->last_reg = reg;
ent->mode = mode;
- ent->const_rtx = ent->const_insn = NULL_RTX;
+ ent->const_rtx = ent->const_insn = NULL;
ent->comparison_code = UNKNOWN;
 eqv = &reg_eqv_table[reg];
the result if necessary. INSN is as for canon_reg. */
static void
-validate_canon_reg (rtx *xloc, rtx insn)
+validate_canon_reg (rtx *xloc, rtx_insn *insn)
{
if (*xloc)
{
generally be discarded since the changes we are making are optional. */
static rtx
-canon_reg (rtx x, rtx insn)
+canon_reg (rtx x, rtx_insn *insn)
{
int i;
enum rtx_code code;
of X before modifying it. */
static rtx
-fold_rtx (rtx x, rtx insn)
+fold_rtx (rtx x, rtx_insn *insn)
{
enum rtx_code code;
enum machine_mode mode;
for (p = p->first_same_value; p; p = p->next_same_value)
if (REG_P (p->exp))
return simplify_gen_binary (MINUS, mode, folded_arg0,
- canon_reg (p->exp, NULL_RTX));
+ canon_reg (p->exp, NULL));
}
goto from_plus;
if (y && CONST_INT_P (XEXP (y, 1)))
return fold_rtx (plus_constant (mode, copy_rtx (y),
-INTVAL (const_arg1)),
- NULL_RTX);
+ NULL);
}
/* Fall through. */
comparison is seen later, we will know its value. */
static void
-record_jump_equiv (rtx insn, bool taken)
+record_jump_equiv (rtx_insn *insn, bool taken)
{
int cond_known_true;
rtx op0, op1;
This is the last transformation that cse_insn will try to do. */
static void
-try_back_substitute_reg (rtx set, rtx insn)
+try_back_substitute_reg (rtx set, rtx_insn *insn)
{
rtx dest = SET_DEST (set);
rtx src = SET_SRC (set);
{
/* Scan for the previous nonnote insn, but stop at a basic
block boundary. */
- rtx prev = insn;
- rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
+ rtx_insn *prev = insn;
+ rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
do
{
prev = PREV_INSN (prev);
/* Record all the SETs in this instruction into SETS_PTR,
and return the number of recorded sets. */
static int
-find_sets_in_insn (rtx insn, struct set **psets)
+find_sets_in_insn (rtx_insn *insn, struct set **psets)
{
struct set *sets = *psets;
int n_sets = 0;
see canon_reg. */
static void
-canonicalize_insn (rtx insn, struct set **psets, int n_sets)
+canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
{
struct set *sets = *psets;
rtx tem;
of available values. */
static void
-cse_insn (rtx insn)
+cse_insn (rtx_insn *insn)
{
rtx x = PATTERN (insn);
int i;
}
delete_insn_and_edges (insn);
- insn = new_rtx;
+ insn = as_a <rtx_insn *> (new_rtx);
}
else
INSN_CODE (insn) = -1;
alias with something that is SET or CLOBBERed. */
static void
-invalidate_from_clobbers (rtx insn)
+invalidate_from_clobbers (rtx_insn *insn)
{
rtx x = PATTERN (insn);
alias with something that is SET or CLOBBERed. */
static void
-invalidate_from_sets_and_clobbers (rtx insn)
+invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
rtx tem;
rtx x = PATTERN (insn);
}
/* Otherwise, canonicalize this register. */
- return canon_reg (x, NULL_RTX);
+ return canon_reg (x, NULL);
default:
break;
for (path_entry = 0; path_entry < path_size; path_entry++)
{
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
bb = data->path[path_entry].bb;
for (path_entry = 0; path_entry < path_size; path_entry++)
{
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
bb = ebb_data->path[path_entry].bb;
Here we use fact that nothing expects CC0 to be
valid over an insn, which is true until the final
pass. */
- rtx prev_insn, tem;
+ rtx_insn *prev_insn;
+ rtx tem;
prev_insn = prev_nonnote_nondebug_insn (insn);
if (prev_insn && NONJUMP_INSN_P (prev_insn)
Return 0 otherwise. */
static int
-cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
+cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
struct cse_basic_block_data ebb_data;
basic_block bb;
static int
check_for_label_ref (rtx *rtl, void *data)
{
- rtx insn = (rtx) data;
+ rtx_insn *insn = (rtx_insn *) data;
/* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
note for it, we must rerun jump since it needs to place the note. If
/* Return true if set is live. */
static bool
-set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
+set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
int *counts)
{
#ifdef HAVE_cc0
/* Return true if insn is live. */
static bool
-insn_live_p (rtx insn, int *counts)
+insn_live_p (rtx_insn *insn, int *counts)
{
int i;
if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
}
else if (DEBUG_INSN_P (insn))
{
- rtx next;
+ rtx_insn *next;
for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
if (NOTE_P (next))
GET_MODE (NEWREG) in INSN. */
static void
-cse_change_cc_mode_insn (rtx insn, rtx newreg)
+cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
{
struct change_cc_mode_args args;
int success;
any instruction which modifies NEWREG. */
static void
-cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
+cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
{
- rtx insn;
+ rtx_insn *insn;
for (insn = start; insn != end; insn = NEXT_INSN (insn))
{
enum machine_mode mode;
unsigned int insn_count;
edge e;
- rtx insns[2];
+ rtx_insn *insns[2];
enum machine_mode modes[2];
- rtx last_insns[2];
+ rtx_insn *last_insns[2];
unsigned int i;
rtx newreg;
edge_iterator ei;
insn_count = 0;
FOR_EACH_EDGE (e, ei, bb->succs)
{
- rtx insn;
- rtx end;
+ rtx_insn *insn;
+ rtx_insn *end;
if (e->flags & EDGE_COMPLEX)
continue;
FOR_EACH_BB_FN (bb, cfun)
{
- rtx last_insn;
+ rtx_insn *last_insn;
rtx cc_reg;
- rtx insn;
- rtx cc_src_insn;
+ rtx_insn *insn;
+ rtx_insn *cc_src_insn;
rtx cc_src;
enum machine_mode mode;
enum machine_mode orig_mode;
else
continue;
- cc_src_insn = NULL_RTX;
+ cc_src_insn = NULL;
cc_src = NULL_RTX;
for (insn = PREV_INSN (last_insn);
insn && insn != PREV_INSN (BB_HEAD (bb));