+2014-08-22 David Malcolm <dmalcolm@redhat.com>
+
+ * ira-int.h (struct ira_allocno_copy): Strengthen field "insn"
+ from rtx to rtx_insn *.
+ (ira_create_copy): Strengthen param "insn" from rtx to rtx_insn *.
+ (ira_add_allocno_copy): Likewise.
+ * ira-build.c (find_allocno_copy): Strengthen param "insn" from
+ rtx to rtx_insn *.
+ (ira_create_copy): Likewise.
+ (ira_add_allocno_copy): Likewise.
+ (create_bb_allocnos): Likewise for local "insn".
+ * ira-conflicts.c (process_regs_for_copy): Likewise for param "insn".
+ (process_reg_shuffles): Update NULL_RTX to NULL in invocation of
+ process_regs_for_copy for rtx_insn * param.
+ (add_insn_allocno_copies): Strengthen param "insn" from rtx to
+ rtx_insn *. Update NULL_RTX to NULL in invocation of
+ process_regs_for_copy for rtx_insn * param.
+ (add_copies): Strengthen local "insn" from rtx to rtx_insn *.
+ * ira-costs.c (record_reg_classes): Likewise for param "insn".
+ (record_operand_costs): Likewise.
+ (scan_one_insn): Likewise for return type, and for param "insn".
+ (process_bb_for_costs): Likewise for local "insn".
+ (process_bb_node_for_hard_reg_moves): Likewise.
+ * ira-emit.c (struct move): Likewise for field "insn".
+ (create_move): Eliminate use of NULL_RTX when dealing with an
+ rtx_insn *.
+ (emit_move_list): Strengthen return type and locals "result",
+ "insn" from rtx to rtx_insn *insn.
+ (emit_moves): Likewise for locals "insns", "tmp".
+ (ira_emit): Likewise for local "insn".
+ * ira-lives.c (mark_hard_reg_early_clobbers): Likewise for param
+ "insn".
+ (find_call_crossed_cheap_reg): Likewise.
+ (process_bb_node_lives): Likewise for local "insn".
+ * ira.c (decrease_live_ranges_number): Likewise.
+ (compute_regs_asm_clobbered): Likewise.
+ (build_insn_chain): Likewise.
+ (find_moveable_pseudos): Likewise, also for locals "def_insn",
+ "use_insn", "x". Also strengthen local "closest_uses" from rtx *
+ to rtx_insn **. Add a checked cast when assigning from
+ "closest_use" into closest_uses array in a region where we know
+ it's a non-NULL insn.
+ (interesting_dest_for_shprep): Strengthen param "insn" from rtx
+ to rtx_insn *.
+ (split_live_ranges_for_shrink_wrap): Likewise for locals "insn",
+ "last_interesting_insn", "uin".
+ (move_unallocated_pseudos): Likewise for locals "def_insn",
+ "move_insn", "newinsn".
+
2014-08-22 David Malcolm <dmalcolm@redhat.com>
* internal-fn.c (ubsan_expand_si_overflow_addsub_check):
#include "ira-int.h"
#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
-static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
+static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx_insn *,
ira_loop_tree_node_t);
/* The root of the loop tree corresponding to the whole function. */
/* Return the copy connecting A1 and A2 and originating from INSN of
LOOP_TREE_NODE, if any. */
static ira_copy_t
-find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
+find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx_insn *insn,
ira_loop_tree_node_t loop_tree_node)
{
ira_copy_t cp, next_cp;
SECOND, FREQ, CONSTRAINT_P, and INSN. */
ira_copy_t
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
- bool constraint_p, rtx insn,
+ bool constraint_p, rtx_insn *insn,
ira_loop_tree_node_t loop_tree_node)
{
ira_copy_t cp;
LOOP_TREE_NODE. */
ira_copy_t
ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
- bool constraint_p, rtx insn,
+ bool constraint_p, rtx_insn *insn,
ira_loop_tree_node_t loop_tree_node)
{
ira_copy_t cp;
create_bb_allocnos (ira_loop_tree_node_t bb_node)
{
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
unsigned int i;
bitmap_iterator bi;
FALSE. */
static bool
process_regs_for_copy (rtx reg1, rtx reg2, bool constraint_p,
- rtx insn, int freq)
+ rtx_insn *insn, int freq)
{
int allocno_preferenced_hard_regno, cost, index, offset1, offset2;
bool only_regs_p;
|| bound_p[i])
continue;
- process_regs_for_copy (reg, another_reg, false, NULL_RTX, freq);
+ process_regs_for_copy (reg, another_reg, false, NULL, freq);
}
}
it might be because INSN is a pseudo-register move or INSN is a
two-operand insn. */
static void
-add_insn_allocno_copies (rtx insn)
+add_insn_allocno_copies (rtx_insn *insn)
{
rtx set, operand, dup;
bool bound_p[MAX_RECOG_OPERANDS];
REG_P (operand)
? operand
: SUBREG_REG (operand)) != NULL_RTX)
- process_regs_for_copy (operand, dup, true, NULL_RTX,
+ process_regs_for_copy (operand, dup, true, NULL,
freq);
}
}
add_copies (ira_loop_tree_node_t loop_tree_node)
{
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
bb = loop_tree_node->bb;
if (bb == NULL)
static void
record_reg_classes (int n_alts, int n_ops, rtx *ops,
enum machine_mode *modes, const char **constraints,
- rtx insn, enum reg_class *pref)
+ rtx_insn *insn, enum reg_class *pref)
{
int alt;
int i, j, k;
/* Calculate the costs of insn operands. */
static void
-record_operand_costs (rtx insn, enum reg_class *pref)
+record_operand_costs (rtx_insn *insn, enum reg_class *pref)
{
const char *constraints[MAX_RECOG_OPERANDS];
enum machine_mode modes[MAX_RECOG_OPERANDS];
/* Process one insn INSN. Scan it and record each time it would save
code to put a certain allocno in a certain class. Return the last
insn processed, so that the scan can be continued from there. */
-static rtx
-scan_one_insn (rtx insn)
+static rtx_insn *
+scan_one_insn (rtx_insn *insn)
{
enum rtx_code pat_code;
rtx set, note;
static void
process_bb_for_costs (basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
frequency = REG_FREQ_FROM_BB (bb);
if (frequency == 0)
ira_loop_tree_node_t curr_loop_tree_node;
enum reg_class rclass;
basic_block bb;
- rtx insn, set, src, dst;
+ rtx_insn *insn;
+ rtx set, src, dst;
bb = loop_tree_node->bb;
if (bb == NULL)
dependencies. */
move_t *deps;
/* First insn generated for the move. */
- rtx insn;
+ rtx_insn *insn;
};
/* Array of moves (indexed by BB index) which should be put at the
move->to = to;
move->from = from;
move->next = NULL;
- move->insn = NULL_RTX;
+ move->insn = NULL;
move->visited_p = false;
return move;
}
/* Generate RTX move insns from the move list LIST. This updates
allocation cost using move execution frequency FREQ. */
-static rtx
+static rtx_insn *
emit_move_list (move_t list, int freq)
{
rtx to, from, dest;
int to_regno, from_regno, cost, regno;
- rtx result, insn, set;
+ rtx_insn *result, *insn;
+ rtx set;
enum machine_mode mode;
enum reg_class aclass;
basic_block bb;
edge_iterator ei;
edge e;
- rtx insns, tmp;
+ rtx_insn *insns, *tmp;
FOR_EACH_BB_FN (bb, cfun)
{
ira_emit (bool loops_p)
{
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
edge_iterator ei;
edge e;
ira_allocno_t a;
for the copy created to remove a register shuffle is NULL. In the
latter case the copy frequency is smaller than the corresponding insn
execution frequency. */
- rtx insn;
+ rtx_insn *insn;
/* All copies with the same allocno as FIRST are linked by the two
following members. */
ira_copy_t prev_first_allocno_copy, next_first_allocno_copy;
extern void ira_remove_pref (ira_pref_t);
extern void ira_remove_allocno_prefs (ira_allocno_t);
extern ira_copy_t ira_create_copy (ira_allocno_t, ira_allocno_t,
- int, bool, rtx, ira_loop_tree_node_t);
+ int, bool, rtx_insn *,
+ ira_loop_tree_node_t);
extern ira_copy_t ira_add_allocno_copy (ira_allocno_t, ira_allocno_t, int,
- bool, rtx, ira_loop_tree_node_t);
+ bool, rtx_insn *,
+ ira_loop_tree_node_t);
extern int *ira_allocate_cost_vector (reg_class_t);
extern void ira_free_cost_vector (int *, reg_class_t);
/* Mark early clobber hard registers of the current INSN as live (if
LIVE_P) or dead. Return true if there are such registers. */
static bool
-mark_hard_reg_early_clobbers (rtx insn, bool live_p)
+mark_hard_reg_early_clobbers (rtx_insn *insn, bool live_p)
{
df_ref def;
bool set_p = false;
we find a SET rtx that we can use to deduce that a register can be cheaply
caller-saved. Return such a register, or NULL_RTX if none is found. */
static rtx
-find_call_crossed_cheap_reg (rtx insn)
+find_call_crossed_cheap_reg (rtx_insn *insn)
{
rtx cheap_reg = NULL_RTX;
rtx exp = CALL_INSN_FUNCTION_USAGE (insn);
int i, freq;
unsigned int j;
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
bitmap_iterator bi;
bitmap reg_live_out;
unsigned int px;
decrease_live_ranges_number (void)
{
basic_block bb;
- rtx insn, set, src, dest, dest_death, p, q, note;
+ rtx_insn *insn;
+ rtx set, src, dest, dest_death, p, q, note;
int sregno, dregno;
if (! flag_expensive_optimizations)
FOR_EACH_BB_FN (bb, cfun)
{
- rtx insn;
+ rtx_insn *insn;
FOR_BB_INSNS_REVERSE (bb, insn)
{
df_ref def;
FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
bitmap_iterator bi;
- rtx insn;
+ rtx_insn *insn;
CLEAR_REG_SET (live_relevant_regs);
bitmap_clear (live_subregs_used);
int max_uid = get_max_uid ();
basic_block bb;
int *uid_luid = XNEWVEC (int, max_uid);
- rtx *closest_uses = XNEWVEC (rtx, max_regs);
+ rtx_insn **closest_uses = XNEWVEC (rtx_insn *, max_regs);
/* A set of registers which are live but not modified throughout a block. */
bitmap_head *bb_transp_live = XNEWVEC (bitmap_head,
last_basic_block_for_fn (cfun));
bitmap_initialize (&unusable_as_input, 0);
FOR_EACH_BB_FN (bb, cfun)
{
- rtx insn;
+ rtx_insn *insn;
bitmap transp = bb_transp_live + bb->index;
bitmap moveable = bb_moveable_reg_sets + bb->index;
bitmap local = bb_local + bb->index;
FOR_EACH_BB_FN (bb, cfun)
{
bitmap local = bb_local + bb->index;
- rtx insn;
+ rtx_insn *insn;
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
{
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- rtx def_insn, closest_use, note;
+ rtx_insn *def_insn;
+ rtx closest_use, note;
df_ref def, use;
unsigned regno;
bool all_dominated, all_local;
closest_use = NULL_RTX;
for (; use; use = DF_REF_NEXT_REG (use))
{
- rtx insn;
+ rtx_insn *insn;
if (!DF_REF_INSN_INFO (use))
{
all_dominated = false;
}
#endif
bitmap_set_bit (&interesting, regno);
- closest_uses[regno] = closest_use;
+ /* If we get here, we know closest_use is a non-NULL insn
+ (as opposed to const0_rtx). */
+ closest_uses[regno] = as_a <rtx_insn *> (closest_use);
if (dump_file && (all_local || all_dominated))
{
EXECUTE_IF_SET_IN_BITMAP (&interesting, 0, i, bi)
{
df_ref def = DF_REG_DEF_CHAIN (i);
- rtx def_insn = DF_REF_INSN (def);
+ rtx_insn *def_insn = DF_REF_INSN (def);
basic_block def_block = BLOCK_FOR_INSN (def_insn);
bitmap def_bb_local = bb_local + def_block->index;
bitmap def_bb_moveable = bb_moveable_reg_sets + def_block->index;
bitmap def_bb_transp = bb_transp_live + def_block->index;
bool local_to_bb_p = bitmap_bit_p (def_bb_local, i);
- rtx use_insn = closest_uses[i];
+ rtx_insn *use_insn = closest_uses[i];
df_ref use;
bool all_ok = true;
bool all_transp = true;
{
if (modified_between_p (DF_REF_REG (use), def_insn, use_insn))
{
- rtx x = NEXT_INSN (def_insn);
+ rtx_insn *x = NEXT_INSN (def_insn);
while (!modified_in_p (DF_REF_REG (use), x))
{
gcc_assert (x != use_insn);
Otherwise return NULL. */
static rtx
-interesting_dest_for_shprep (rtx insn, basic_block call_dom)
+interesting_dest_for_shprep (rtx_insn *insn, basic_block call_dom)
{
if (!INSN_P (insn))
return NULL;
{
basic_block bb, call_dom = NULL;
basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
- rtx insn, last_interesting_insn = NULL;
+ rtx_insn *insn, *last_interesting_insn = NULL;
bitmap_head need_new, reachable;
vec<basic_block> queue;
df_ref use, next;
for (use = DF_REG_USE_CHAIN (REGNO (dest)); use; use = next)
{
- rtx uin = DF_REF_INSN (use);
+ rtx_insn *uin = DF_REF_INSN (use);
next = DF_REF_NEXT_REG (use);
basic_block ubb = BLOCK_FOR_INSN (uin);
{
int idx = i - first_moveable_pseudo;
rtx other_reg = pseudo_replaced_reg[idx];
- rtx def_insn = DF_REF_INSN (DF_REG_DEF_CHAIN (i));
+ rtx_insn *def_insn = DF_REF_INSN (DF_REG_DEF_CHAIN (i));
/* The use must follow all definitions of OTHER_REG, so we can
insert the new definition immediately after any of them. */
df_ref other_def = DF_REG_DEF_CHAIN (REGNO (other_reg));
- rtx move_insn = DF_REF_INSN (other_def);
- rtx newinsn = emit_insn_after (PATTERN (def_insn), move_insn);
+ rtx_insn *move_insn = DF_REF_INSN (other_def);
+ rtx_insn *newinsn = emit_insn_after (PATTERN (def_insn), move_insn);
rtx set;
int success;