+2014-08-28 David Malcolm <dmalcolm@redhat.com>
+
+ * cfgexpand.c (pass_expand::execute): Strengthen local "after"
+ from rtx to rtx_insn *.
+ * cfgrtl.c (force_nonfallthru_and_redirect): Replace use of local
+ rtx "note" with new local rtx_insn * "new_head" when calculating
+ head insn of new basic block.
+ * combine.c (combine_split_insns): Strengthen return type and local
+ "ret" from rtx to rtx_insn *.
+ (likely_spilled_retval_p): Likewise for locals "use" and "p".
+ (try_combine): Eliminate local "m_split", splitting into new
+ locals "m_split_insn" and "m_split_pat".
+	(find_split_point): Strengthen local "seq" from rtx to
+ rtx_insn *.
+ * config/spu/spu.c (spu_machine_dependent_reorg): Likewise for
+ locals "label", "branch".
+ * config/spu/spu.md (define_expand "smulsi3_highpart"): Likewise
+ for local "insn".
+ (define_expand "umulsi3_highpart"): Likewise for local "insn".
+ * dse.c (note_add_store_info): Likewise for fields "first",
+ "current".
+ (note_add_store): Likewise for local "insn".
+ (emit_inc_dec_insn_before): Likewise for locals "insn",
+ "new_insn", "cur".
+ (find_shift_sequence): Likewise for locals "shift_seq", "insn".
+ (replace_read): Likewise for locals "insns", "this_insn".
+ * dwarf2cfi.c (dw_trace_info): Likewise for field "eh_head".
+ (notice_eh_throw): Likewise for param "insn".
+ (before_next_cfi_note): Likewise for return type, param, and local
+ "prev".
+ (connect_traces): Likewise for local "note".
+ * emit-rtl.c (reset_all_used_flags): Likewise for local "p".
+ (verify_rtl_sharing): Likewise.
+ (unshare_all_rtl_in_chain): Likewise for param "insn".
+ (get_first_nonnote_insn): Likewise for local "insn".
+ (get_last_nonnote_insn): Likewise. Introduce local rtx_sequence *
+ "seq" and use its methods to clarify things.
+ (next_insn): Strengthen return type from rtx to rtx_insn *.
+ Rename param "insn" to "uncast_insn" and reintroduce "insn" as a
+ local rtx_insn * using a checked cast, dropping a checked cast
+ made redundant by this change. Use a cast to and method of
+ rtx_sequence to clarify the code.
+ (previous_insn): Rename param "insn" to "uncast_insn" and
+ reintroduce "insn" as a local rtx_insn * using a checked cast,
+ dropping a checked cast made redundant by this change. Use a cast
+ to and method of rtx_sequence to clarify the code.
+ (next_nonnote_insn): Rename param "insn" to "uncast_insn" and
+ reintroduce "insn" as a local rtx_insn * using a checked cast,
+ dropping a checked cast made redundant by this change.
+ (next_nonnote_insn_bb): Likewise.
+ (prev_nonnote_insn): Likewise.
+ (prev_nonnote_insn_bb): Likewise.
+ (next_nondebug_insn): Likewise.
+ (prev_nondebug_insn): Likewise.
+ (next_nonnote_nondebug_insn): Likewise.
+ (prev_nonnote_nondebug_insn): Likewise.
+ (next_real_insn): Likewise.
+ (prev_real_insn): Likewise.
+ (next_active_insn): Likewise.
+ (prev_active_insn): Likewise.
+ (next_cc0_user): Likewise. Use rtx_sequence and a method for
+ clarity.
+ (prev_cc0_setter): Likewise.
+ (try_split): Rename param "trial" to "uncast_trial" and
+ reintroduce "insn" as a local rtx_insn * using a checked cast,
+ dropping checked casts made redundant by this change.
+ Strengthen locals "seq", "tem", "insn_last", "insn", "next" from
+ rtx to rtx_insn *.
+ (remove_insn): Rename param "insn" to "uncast_insn" and
+ reintroduce "insn" as a local rtx_insn * using a checked cast.
+ (emit_pattern_after_setloc): Likewise for param "after", as
+ "uncast_after".
+ (emit_pattern_after): Likewise. Strengthen local "prev" from
+ rtx to rtx_insn *.
+ (emit_pattern_before_setloc): Rename param "before" to
+ "uncast_before" and reintroduce "before" as a local rtx_insn *
+ using a checked cast. Strengthen locals "first", "last" from
+ rtx to rtx_insn *.
+ (emit_pattern_before): Likewise rename/cast param "before" to
+ "uncast_before". Strengthen local "next" from rtx to rtx_insn *.
+ * except.c (copy_reg_eh_region_note_forward): Strengthen param
+ "first" and local "insn" from rtx to rtx_insn *.
+ (copy_reg_eh_region_note_backward): Likewise for param "last"
+ and local "insn".
+ * expr.c (fixup_args_size_notes): Rename param "last" to
+ "uncast_last" and reintroduce "last" as a local rtx_insn *
+ using a checked cast. Strengthen local "insn" from rtx to
+ rtx_insn *.
+ * function.c (set_insn_locations): Strengthen param "insn" from
+ rtx to rtx_insn *.
+ (record_insns): Likewise for param "insns" and local "tmp".
+ (active_insn_between): Rename param "tail" to
+ "uncast_tail" and reintroduce "tail" as a local rtx_insn *
+ using a checked cast.
+ (thread_prologue_and_epilogue_insns): Split out top-level local
+ rtx "seq" into three different rtx_insn * locals. Strengthen
+ local "prologue_seq" from rtx to rtx_insn *.
+	* gcse.c (insert_insn_end_basic_block): Strengthen local "insn"
+ from rtx to rtx_insn *.
+ * haifa-sched.c (initiate_bb_reg_pressure_info): Likewise.
+ (priority): Likewise for locals "prev_first", "twin".
+ (setup_insn_max_reg_pressure): Likewise for param "after".
+ (sched_setup_bb_reg_pressure_info): Likewise.
+ (no_real_insns_p): Strengthen params from const_rtx to
+ const rtx_insn *.
+ (schedule_block): Strengthen local "next_tail" from rtx to
+ rtx_insn *.
+ * ifcvt.c (find_active_insn_before): Strengthen return type and
+ param "insn" from rtx to rtx_insn *.
+ (find_active_insn_after): Likewise.
+ (cond_exec_process_insns): Likewise for param "start" and local "insn".
+ (cond_exec_process_if_block): Likewise for locals "then_start",
+ "then_end", "else_start", "else_end", "insn", "start", "end", "from".
+ (noce_process_if_block): Likewise for local "jump".
+ (merge_if_block): Likewise for two locals named "end".
+ (cond_exec_find_if_block): Likewise for local "last_insn".
+ * jump.c (delete_related_insns): Rename param "insn" to
+ "uncast_insn" and reintroduce "insn" as a local rtx_insn * using a
+ checked cast. Strengthen local "p" from rtx to rtx_insn *.
+ * lra-constraints.c (inherit_reload_reg): Replace NULL_RTX with
+ NULL.
+ (split_reg): Likewise.
+ * lra.c (lra_process_new_insns): Likewise.
+ * modulo-sched.c (permute_partial_schedule): Strengthen param
+ "last" from rtx to rtx_insn *.
+ * optabs.c (add_equal_note): Likewise for param "insns" and local
+ "last_insn".
+ (expand_binop_directly): Add checked casts to rtx_insn * within
+ NEXT_INSN (pat) uses.
+ (expand_unop_direct): Likewise.
+ (maybe_emit_unop_insn): Likewise.
+ * recog.c (peep2_attempt): Strengthen locals "last",
+ "before_try", "x" from rtx to rtx_insn *.
+ * reorg.c (optimize_skip): Strengthen return type and local
+ "delay_list" from rtx to rtx_insn_list *. Strengthen param "insn"
+ and locals "trial", "next_trial" from rtx to rtx_insn *.
+ * resource.c (next_insn_no_annul): Strengthen return type and
+ param "insn" from rtx to rtx_insn *. Use a cast to and method of
+ rtx_sequence to clarify the code.
+ (mark_referenced_resources): Add a checked cast to rtx_insn *
+ within PREV_INSN (x).
+ (find_dead_or_set_registers): Strengthen return type, param
+ "target", locals "insn", "next", "jump_insn", "this_jump_insn"
+ from rtx to rtx_insn *. Strengthen param "jump_target" from rtx *
+ to rtx_insn **.
+ (mark_target_live_regs): Strengthen params "insns" and "target",
+ locals "insn", "jump_target", "start_insn", "stop_insn" from rtx
+ to rtx_insn *. Use cast to and method of rtx_sequence to clarify
+ the code.
+ * resource.h (mark_target_live_regs): Strengthen params 1 and 2
+ from rtx to rtx_insn *.
+ * rtl.h (copy_reg_eh_region_note_forward): Strengthen second param
+ from rtx to rtx_insn *.
+ (copy_reg_eh_region_note_backward): Likewise.
+ (unshare_all_rtl_in_chain): Likewise for sole param.
+ (dump_rtl_slim): Strengthen second and third params from const_rtx
+ to const rtx_insn *.
+ * sched-deps.c (sched_free_deps): Strengthen params "head" and
+ "tail" and locals "insn", "next_tail" from rtx to rtx_insn *.
+ * sched-ebb.c (init_ready_list): Strengthen locals "prev_head",
+ "next_tail" from rtx to rtx_insn *.
+ (begin_move_insn): Likewise for local "next".
+ * sched-int.h (sched_free_deps): Likewise for first and second
+ params.
+ (no_real_insns_p): Strengthen both params from const_rtx to
+ const rtx_insn *.
+	(sched_setup_bb_reg_pressure_info): Strengthen second param from
+ rtx to rtx_insn *.
+ * sched-rgn.c (init_ready_list): Likewise for locals "prev_head",
+ "next_tail".
+ * sched-vis.c (dump_rtl_slim): Strengthen params "first", "last"
+ and locals "insn", "tail" from const_rtx to const rtx_insn *.
+ (rtl_dump_bb_for_graph): Strengthen local "insn" from rtx to
+ rtx_insn *.
+ (debug_rtl_slim): Strengthen params "first" and "last" from
+ const_rtx to const rtx_insn *.
+ * shrink-wrap.c (try_shrink_wrapping): Strengthen param
+ "prologue_seq" and locals "seq", "p_insn" from rtx to rtx_insn *.
+ (convert_to_simple_return): Likewise for param "returnjump".
+ * shrink-wrap.h (try_shrink_wrapping): Likewise for param
+ "prologue_seq".
+ (convert_to_simple_return): Likewise for param "returnjump".
+ * valtrack.c (propagate_for_debug): Likewise for params
+ "insn", "last".
+ * valtrack.h (propagate_for_debug): Likewise for second param.
+
2014-08-28 David Malcolm <dmalcolm@redhat.com>
* output.h (insn_current_reference_address): Strengthen param
if (var_ret_seq)
{
- rtx after = return_label;
+ rtx_insn *after = return_label;
rtx_insn *next = NEXT_INSN (after);
if (next && NOTE_INSN_BASIC_BLOCK_P (next))
after = next;
if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
{
+ rtx_insn *new_head;
gcov_type count = e->count;
int probability = e->probability;
/* Create the new structures. */
forward from the last instruction of the old block. */
rtx_jump_table_data *table;
if (tablejump_p (BB_END (e->src), NULL, &table))
- note = table;
+ new_head = table;
else
- note = BB_END (e->src);
- note = NEXT_INSN (note);
+ new_head = BB_END (e->src);
+ new_head = NEXT_INSN (new_head);
- jump_block = create_basic_block (note, NULL, e->src);
+ jump_block = create_basic_block (new_head, NULL, e->src);
jump_block->count = count;
jump_block->frequency = EDGE_FREQUENCY (e);
reg_stat vector is made larger if the splitter creates a new
register. */
-static rtx
+static rtx_insn *
combine_split_insns (rtx pattern, rtx insn)
{
- rtx ret;
+ rtx_insn *ret;
unsigned int nregs;
- ret = split_insns (pattern, insn);
+ ret = safe_as_a <rtx_insn *> (split_insns (pattern, insn));
nregs = max_reg_num ();
if (nregs > reg_stat.length ())
reg_stat.safe_grow_cleared (nregs);
static int
likely_spilled_retval_p (rtx_insn *insn)
{
- rtx use = BB_END (this_basic_block);
- rtx reg, p;
+ rtx_insn *use = BB_END (this_basic_block);
+ rtx reg;
+ rtx_insn *p;
unsigned regno, nregs;
/* We assume here that no machine mode needs more than
32 hard registers when the value overlaps with a register
if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
&& asm_noperands (newpat) < 0)
{
- rtx parallel, m_split, *split;
+ rtx parallel, *split;
+ rtx_insn *m_split_insn;
/* See if the MD file can split NEWPAT. If it can't, see if letting it
use I2DEST as a scratch register will help. In the latter case,
convert I2DEST to the mode of the source of NEWPAT if we can. */
- m_split = combine_split_insns (newpat, i3);
+ m_split_insn = combine_split_insns (newpat, i3);
/* We can only use I2DEST as a scratch reg if it doesn't overlap any
inputs of NEWPAT. */
possible to try that as a scratch reg. This would require adding
more code to make it work though. */
- if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
+ if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
{
enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
gen_rtvec (2, newpat,
gen_rtx_CLOBBER (VOIDmode,
i2dest)));
- m_split = combine_split_insns (parallel, i3);
+ m_split_insn = combine_split_insns (parallel, i3);
/* If that didn't work, try changing the mode of I2DEST if
we can. */
- if (m_split == 0
+ if (m_split_insn == 0
&& new_mode != GET_MODE (i2dest)
&& new_mode != VOIDmode
&& can_change_dest_mode (i2dest, added_sets_2, new_mode))
gen_rtvec (2, newpat,
gen_rtx_CLOBBER (VOIDmode,
ni2dest))));
- m_split = combine_split_insns (parallel, i3);
+ m_split_insn = combine_split_insns (parallel, i3);
- if (m_split == 0
+ if (m_split_insn == 0
&& REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
{
struct undo *buf;
}
}
- i2scratch = m_split != 0;
+ i2scratch = m_split_insn != 0;
}
/* If recog_for_combine has discarded clobbers, try to use them
again for the split. */
- if (m_split == 0 && newpat_vec_with_clobbers)
+ if (m_split_insn == 0 && newpat_vec_with_clobbers)
{
parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
- m_split = combine_split_insns (parallel, i3);
+ m_split_insn = combine_split_insns (parallel, i3);
}
- if (m_split && NEXT_INSN (m_split) == NULL_RTX)
+ if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
{
- m_split = PATTERN (m_split);
- insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
+ rtx m_split_pat = PATTERN (m_split_insn);
+ insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes);
if (insn_code_number >= 0)
- newpat = m_split;
+ newpat = m_split_pat;
}
- else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
+ else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
&& (next_nonnote_nondebug_insn (i2) == i3
- || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
+ || ! use_crosses_set_p (PATTERN (m_split_insn), DF_INSN_LUID (i2))))
{
rtx i2set, i3set;
- rtx newi3pat = PATTERN (NEXT_INSN (m_split));
- newi2pat = PATTERN (m_split);
+ rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
+ newi2pat = PATTERN (m_split_insn);
- i3set = single_set (NEXT_INSN (m_split));
- i2set = single_set (m_split);
+ i3set = single_set (NEXT_INSN (m_split_insn));
+ i2set = single_set (m_split_insn);
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
MEM_ADDR_SPACE (x)))
{
rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
- rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
- XEXP (x, 0)),
- subst_insn);
+ rtx_insn *seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
+ XEXP (x, 0)),
+ subst_insn);
/* This should have produced two insns, each of which sets our
placeholder. If the source of the second is a valid address,
label because GCC expects it at the beginning of the block. */
rtx unspec = SET_SRC (XVECEXP (PATTERN (insn), 0, 0));
rtx label_ref = XVECEXP (unspec, 0, 0);
- rtx label = XEXP (label_ref, 0);
- rtx branch;
+ rtx_insn *label = as_a <rtx_insn *> (XEXP (label_ref, 0));
+ rtx_insn *branch;
int offset = 0;
for (branch = NEXT_INSN (label);
!JUMP_P (branch) && !CALL_P (branch);
rtx t0_hi = gen_rtx_SUBREG (HImode, t0, 2);
rtx t1_hi = gen_rtx_SUBREG (HImode, t1, 2);
- rtx insn = emit_insn (gen_lshrsi3 (t0, operands[1], GEN_INT (16)));
+ rtx_insn *insn = emit_insn (gen_lshrsi3 (t0, operands[1], GEN_INT (16)));
emit_insn (gen_lshrsi3 (t1, operands[2], GEN_INT (16)));
emit_insn (gen_umulhisi3 (t2, op1_hi, op2_hi));
emit_insn (gen_mpyh_si (t3, operands[1], operands[2]));
rtx op2_hi = gen_rtx_SUBREG (HImode, operands[2], 2);
rtx t0_hi = gen_rtx_SUBREG (HImode, t0, 2);
- rtx insn = emit_insn (gen_rotlsi3 (t0, operands[2], GEN_INT (16)));
+ rtx_insn *insn = emit_insn (gen_rotlsi3 (t0, operands[2], GEN_INT (16)));
emit_insn (gen_umulhisi3 (t1, op1_hi, op2_hi));
emit_insn (gen_umulhisi3 (t2, op1_hi, t0_hi));
emit_insn (gen_mpyhhu_si (t3, operands[1], t0));
typedef struct
{
- rtx first, current;
+ rtx_insn *first, *current;
regset fixed_regs_live;
bool failure;
} note_add_store_info;
static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
- rtx insn;
+ rtx_insn *insn;
note_add_store_info *info = (note_add_store_info *) data;
int r, n;
rtx dest, rtx src, rtx srcoff, void *arg)
{
insn_info_t insn_info = (insn_info_t) arg;
- rtx insn = insn_info->insn, new_insn, cur;
+ rtx_insn *insn = insn_info->insn, *new_insn, *cur;
note_add_store_info info;
/* We can reuse all operands without copying, because we are about
end_sequence ();
}
else
- new_insn = gen_move_insn (dest, src);
+ new_insn = as_a <rtx_insn *> (gen_move_insn (dest, src));
info.first = new_insn;
info.fixed_regs_live = insn_info->fixed_regs_live;
info.failure = false;
GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
new_mode = GET_MODE_WIDER_MODE (new_mode))
{
- rtx target, new_reg, shift_seq, insn, new_lhs;
+ rtx target, new_reg, new_lhs;
+ rtx_insn *shift_seq, *insn;
int cost;
/* If a constant was stored into memory, try to simplify it here,
{
enum machine_mode store_mode = GET_MODE (store_info->mem);
enum machine_mode read_mode = GET_MODE (read_info->mem);
- rtx insns, this_insn, read_reg;
+ rtx_insn *insns, *this_insn;
+ rtx read_reg;
basic_block bb;
if (!dbg_cnt (dse))
HOST_WIDE_INT beg_delay_args_size, end_delay_args_size;
/* The first EH insn in the trace, where beg_delay_args_size must be set. */
- rtx eh_head;
+ rtx_insn *eh_head;
/* The following variables contain data used in interpreting frame related
expressions. These are not part of the "real" row state as defined by
data within the trace related to EH insns and args_size. */
static void
-notice_eh_throw (rtx insn)
+notice_eh_throw (rtx_insn *insn)
{
HOST_WIDE_INT args_size;
/* Return the insn before the first NOTE_INSN_CFI after START. */
-static rtx
-before_next_cfi_note (rtx start)
+static rtx_insn *
+before_next_cfi_note (rtx_insn *start)
{
- rtx prev = start;
+ rtx_insn *prev = start;
while (start)
{
if (NOTE_P (start) && NOTE_KIND (start) == NOTE_INSN_CFI)
if (dump_file && add_cfi_insn != ti->head)
{
- rtx note;
+ rtx_insn *note;
fprintf (dump_file, "Fixup between trace %u and %u:\n",
prev_ti->id, ti->id);
static void
reset_all_used_flags (void)
{
- rtx p;
+ rtx_insn *p;
for (p = get_insns (); p; p = NEXT_INSN (p))
if (INSN_P (p))
DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
- rtx p;
+ rtx_insn *p;
timevar_push (TV_VERIFY_RTL_SHARING);
Assumes the mark bits are cleared at entry. */
void
-unshare_all_rtl_in_chain (rtx insn)
+unshare_all_rtl_in_chain (rtx_insn *insn)
{
for (; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
rtx
get_first_nonnote_insn (void)
{
- rtx insn = get_insns ();
+ rtx_insn *insn = get_insns ();
if (insn)
{
{
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
}
rtx
get_last_nonnote_insn (void)
{
- rtx insn = get_last_insn ();
+ rtx_insn *insn = get_last_insn ();
if (insn)
{
continue;
else
{
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0,
- XVECLEN (PATTERN (insn), 0) - 1);
+ if (NONJUMP_INSN_P (insn))
+ if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
+ insn = seq->insn (seq->len () - 1);
}
}
of the sequence. */
rtx_insn *
-next_insn (rtx insn)
+next_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
if (insn)
{
insn = NEXT_INSN (insn);
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn. If it is a SEQUENCE, return the last insn
of the sequence. */
rtx_insn *
-previous_insn (rtx insn)
+previous_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
if (insn)
{
insn = PREV_INSN (insn);
- if (insn && NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
+ if (insn && NONJUMP_INSN_P (insn))
+ if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
+ insn = seq->insn (seq->len () - 1);
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a NOTE. This routine does not
look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_insn (rtx insn)
+next_nonnote_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a NOTE, but stop the
look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_insn_bb (rtx insn)
+next_nonnote_insn_bb (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
return NULL;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a NOTE. This routine does
not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_insn (rtx insn)
+prev_nonnote_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a NOTE, but stop
not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_insn_bb (rtx insn)
+prev_nonnote_insn_bb (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
return NULL;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a DEBUG_INSN. This
routine does not look inside SEQUENCEs. */
rtx_insn *
-next_nondebug_insn (rtx insn)
+next_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
-prev_nondebug_insn (rtx insn)
+prev_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_nondebug_insn (rtx insn)
+next_nonnote_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_nondebug_insn (rtx insn)
+prev_nonnote_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
SEQUENCEs. */
rtx_insn *
-next_real_insn (rtx insn)
+next_real_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
SEQUENCEs. */
rtx_insn *
-prev_real_insn (rtx insn)
+prev_real_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
}
rtx_insn *
-next_active_insn (rtx insn)
+next_active_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
/* Find the last insn before INSN that really does something. This routine
standalone USE and CLOBBER insn. */
rtx_insn *
-prev_active_insn (rtx insn)
+prev_active_insn (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
break;
}
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
\f
#ifdef HAVE_cc0
Return 0 if we can't find the insn. */
rtx_insn *
-next_cc0_user (rtx insn)
+next_cc0_user (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
if (note)
insn = next_nonnote_insn (insn);
if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
return 0;
}
note, it is the previous insn. */
rtx_insn *
-prev_cc0_setter (rtx insn)
+prev_cc0_setter (rtx uncast_insn)
{
+ rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
+
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
insn = prev_nonnote_insn (insn);
gcc_assert (sets_cc0_p (PATTERN (insn)));
- return safe_as_a <rtx_insn *> (insn);
+ return insn;
}
#endif
returns TRIAL. If the insn to be returned can be split, it will be. */
rtx_insn *
-try_split (rtx pat, rtx trial, int last)
+try_split (rtx pat, rtx uncast_trial, int last)
{
+ rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
rtx_insn *before = PREV_INSN (trial);
rtx_insn *after = NEXT_INSN (trial);
int has_barrier = 0;
- rtx note, seq, tem;
+ rtx note;
+ rtx_insn *seq, *tem;
int probability;
- rtx insn_last, insn;
+ rtx_insn *insn_last, *insn;
int njumps = 0;
rtx call_insn = NULL_RTX;
/* We're not good at redistributing frame information. */
if (RTX_FRAME_RELATED_P (trial))
- return as_a <rtx_insn *> (trial);
+ return trial;
if (any_condjump_p (trial)
&& (note = find_reg_note (trial, REG_BR_PROB, 0)))
split_branch_probability = XINT (note, 0);
probability = split_branch_probability;
- seq = split_insns (pat, trial);
+ seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
split_branch_probability = -1;
}
if (!seq)
- return as_a <rtx_insn *> (trial);
+ return trial;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
{
if (INSN_P (insn_last)
&& rtx_equal_p (PATTERN (insn_last), pat))
- return as_a <rtx_insn *> (trial);
+ return trial;
if (!NEXT_INSN (insn_last))
break;
insn_last = NEXT_INSN (insn_last);
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
if (CALL_P (insn))
{
- rtx next, *p;
+ rtx_insn *next;
+ rtx *p;
gcc_assert (call_insn == NULL_RTX);
call_insn = insn;
To really delete an insn and related DF information, use delete_insn. */
void
-remove_insn (rtx insn)
+remove_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
rtx_insn *next = NEXT_INSN (insn);
rtx_insn *prev = PREV_INSN (insn);
basic_block bb;
MAKE_RAW indicates how to turn PATTERN into a real insn. */
static rtx_insn *
-emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
+emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
rtx_insn *(*make_raw) (rtx))
{
+ rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
if (pattern == NULL_RTX || !loc)
any DEBUG_INSNs. */
static rtx_insn *
-emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
+emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
rtx_insn *(*make_raw) (rtx))
{
- rtx prev = after;
+ rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
+ rtx_insn *prev = after;
if (skip_debug_insns)
while (DEBUG_INSN_P (prev))
CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
+emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
rtx_insn *(*make_raw) (rtx))
{
- rtx first = PREV_INSN (before);
- rtx last = emit_pattern_before_noloc (pattern, before,
- insnp ? before : NULL_RTX,
- NULL, make_raw);
+ rtx_insn *before = as_a <rtx_insn *> (uncast_before);
+ rtx_insn *first = PREV_INSN (before);
+ rtx_insn *last = emit_pattern_before_noloc (pattern, before,
+ insnp ? before : NULL_RTX,
+ NULL, make_raw);
if (pattern == NULL_RTX || !loc)
- return safe_as_a <rtx_insn *> (last);
+ return last;
if (!first)
first = get_insns ();
break;
first = NEXT_INSN (first);
}
- return safe_as_a <rtx_insn *> (last);
+ return last;
}
/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
+emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
bool insnp, rtx_insn *(*make_raw) (rtx))
{
- rtx next = before;
+ rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
+ rtx_insn *next = before;
if (skip_debug_insns)
while (DEBUG_INSN_P (next))
to look for a note, or the note itself. */
void
-copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
+copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
- rtx insn, note = note_or_insn;
+ rtx_insn *insn;
+ rtx note = note_or_insn;
if (INSN_P (note_or_insn))
{
/* Likewise, but iterate backward. */
void
-copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
+copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
- rtx insn, note = note_or_insn;
+ rtx_insn *insn;
+ rtx note = note_or_insn;
if (INSN_P (note_or_insn))
{
}
int
-fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
+fixup_args_size_notes (rtx prev, rtx uncast_last, int end_args_size)
{
+ rtx_insn *last = safe_as_a <rtx_insn *> (uncast_last);
int args_size = end_args_size;
bool saw_unknown = false;
- rtx insn;
+ rtx_insn *insn;
for (insn = last; insn != prev; insn = PREV_INSN (insn))
{
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
-static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
+static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
/* Set the location of the insn chain starting at INSN to LOC. */
static void
-set_insn_locations (rtx insn, int loc)
+set_insn_locations (rtx_insn *insn, int loc)
{
- while (insn != NULL_RTX)
+ while (insn != NULL)
{
if (INSN_P (insn))
INSN_LOCATION (insn) = loc;
for the first time. */
static void
-record_insns (rtx insns, rtx end, htab_t *hashp)
+record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
{
- rtx tmp;
+ rtx_insn *tmp;
htab_t hash = *hashp;
if (hash == NULL)
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL. */
bool
-active_insn_between (rtx head, rtx tail)
+active_insn_between (rtx head, rtx uncast_tail)
{
+ rtx_insn *tail = safe_as_a <rtx_insn *> (uncast_tail);
while (tail)
{
if (active_insn_p (tail))
bitmap_head bb_flags;
#endif
rtx_insn *returnjump;
- rtx seq ATTRIBUTE_UNUSED;
rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
- rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
+ rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
edge_iterator ei;
rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
inserted = false;
- seq = NULL_RTX;
epilogue_end = NULL;
returnjump = NULL;
entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
orig_entry_edge = entry_edge;
- split_prologue_seq = NULL_RTX;
+ split_prologue_seq = NULL;
if (flag_split_stack
&& (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
== NULL))
#endif
}
- prologue_seq = NULL_RTX;
+ prologue_seq = NULL;
#ifdef HAVE_prologue
if (HAVE_prologue)
{
start_sequence ();
- seq = gen_prologue ();
+ rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
emit_insn (seq);
/* Insert an explicit USE for the frame pointer
{
start_sequence ();
epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
- seq = gen_epilogue ();
+ rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
if (seq)
emit_jump_insn (seq);
start_sequence ();
emit_note (NOTE_INSN_EPILOGUE_BEG);
emit_insn (ep_seq);
- seq = get_insns ();
+ rtx_insn *seq = get_insns ();
end_sequence ();
/* Retain a map of the epilogue insns. Used in life analysis to
static void
insert_insn_end_basic_block (struct expr *expr, basic_block bb)
{
- rtx insn = BB_END (bb);
+ rtx_insn *insn = BB_END (bb);
rtx_insn *new_insn;
rtx reg = expr->reaching_reg;
int regno = REGNO (reg);
if cc0 isn't set. */
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
- insn = XEXP (note, 0);
+ insn = safe_as_a <rtx_insn *> (XEXP (note, 0));
else
{
rtx_insn *maybe_cc0_setter = prev_nonnote_insn (insn);
initiate_bb_reg_pressure_info (basic_block bb)
{
unsigned int i ATTRIBUTE_UNUSED;
- rtx insn;
+ rtx_insn *insn;
if (current_nr_blocks > 1)
FOR_BB_INSNS (bb, insn)
this_priority = insn_cost (insn);
else
{
- rtx prev_first, twin;
+ rtx_insn *prev_first, *twin;
basic_block rec;
/* For recovery check instructions we calculate priority slightly
meaning in sched-int.h::_haifa_insn_data) for all current BB insns
after insn AFTER. */
static void
-setup_insn_max_reg_pressure (rtx after, bool update_p)
+setup_insn_max_reg_pressure (rtx_insn *after, bool update_p)
{
int i, p;
bool eq_p;
insns starting after insn AFTER. Set up also max register pressure
for all insns of the basic block. */
void
-sched_setup_bb_reg_pressure_info (basic_block bb, rtx after)
+sched_setup_bb_reg_pressure_info (basic_block bb, rtx_insn *after)
{
gcc_assert (sched_pressure == SCHED_PRESSURE_WEIGHTED);
initiate_bb_reg_pressure_info (bb);
/* Return nonzero if there are no real insns in the range [ HEAD, TAIL ]. */
int
-no_real_insns_p (const_rtx head, const_rtx tail)
+no_real_insns_p (const rtx_insn *head, const rtx_insn *tail)
{
while (head != NEXT_INSN (tail))
{
/* Head/tail info for this block. */
rtx_insn *prev_head = current_sched_info->prev_head;
- rtx next_tail = current_sched_info->next_tail;
+ rtx_insn *next_tail = current_sched_info->next_tail;
rtx_insn *head = NEXT_INSN (prev_head);
rtx_insn *tail = PREV_INSN (next_tail);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
-static rtx find_active_insn_before (basic_block, rtx);
-static rtx find_active_insn_after (basic_block, rtx);
+static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
+static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
-static int cond_exec_process_insns (ce_if_block *, rtx, rtx, rtx, int, int);
+static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
+ int);
static rtx cond_exec_get_condition (rtx);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
/* Return the active insn before INSN inside basic block CURR_BB. */
-static rtx
-find_active_insn_before (basic_block curr_bb, rtx insn)
+static rtx_insn *
+find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
if (!insn || insn == BB_HEAD (curr_bb))
- return NULL_RTX;
+ return NULL;
while ((insn = PREV_INSN (insn)) != NULL_RTX)
{
/* No other active insn all the way to the start of the basic block. */
if (insn == BB_HEAD (curr_bb))
- return NULL_RTX;
+ return NULL;
}
return insn;
/* Return the active insn after INSN inside basic block CURR_BB. */
-static rtx
-find_active_insn_after (basic_block curr_bb, rtx insn)
+static rtx_insn *
+find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
if (!insn || insn == BB_END (curr_bb))
- return NULL_RTX;
+ return NULL;
while ((insn = NEXT_INSN (insn)) != NULL_RTX)
{
/* No other active insn all the way to the end of the basic block. */
if (insn == BB_END (curr_bb))
- return NULL_RTX;
+ return NULL;
}
return insn;
static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
- /* if block information */rtx start,
+ /* if block information */rtx_insn *start,
/* first insn to look at */rtx end,
/* last insn to look at */rtx test,
/* conditional execution test */int prob_val,
/* probability of branch taken. */int mod_ok)
{
int must_be_last = FALSE;
- rtx insn;
+ rtx_insn *insn;
rtx xtest;
rtx pattern;
basic_block then_bb = ce_info->then_bb; /* THEN */
basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
- rtx then_start; /* first insn in THEN block */
- rtx then_end; /* last insn + 1 in THEN block */
- rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
- rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
+ rtx_insn *then_start; /* first insn in THEN block */
+ rtx_insn *then_end; /* last insn + 1 in THEN block */
+ rtx_insn *else_start = NULL; /* first insn in ELSE block or NULL */
+ rtx_insn *else_end = NULL; /* last insn + 1 in ELSE block */
int max; /* max # of insns to convert. */
int then_mod_ok; /* whether conditional mods are ok in THEN */
rtx true_expr; /* test for else block insns */
&then_first_tail, &else_first_tail,
NULL);
if (then_first_tail == BB_HEAD (then_bb))
- then_start = then_end = NULL_RTX;
+ then_start = then_end = NULL;
if (else_first_tail == BB_HEAD (else_bb))
- else_start = else_end = NULL_RTX;
+ else_start = else_end = NULL;
if (n_matching > 0)
{
if (n_matching > 0)
{
- rtx insn;
+ rtx_insn *insn;
/* We won't pass the insns in the head sequence to
cond_exec_process_insns, so we need to test them here
}
if (then_last_head == then_end)
- then_start = then_end = NULL_RTX;
+ then_start = then_end = NULL;
if (else_last_head == else_end)
- else_start = else_end = NULL_RTX;
+ else_start = else_end = NULL;
if (n_matching > 0)
{
do
{
- rtx start, end;
+ rtx_insn *start, *end;
rtx t, f;
enum rtx_code f_code;
that the remaining one is executed first for both branches. */
if (then_first_tail)
{
- rtx from = then_first_tail;
+ rtx_insn *from = then_first_tail;
if (!INSN_P (from))
from = find_active_insn_after (then_bb, from);
delete_insn_chain (from, BB_END (then_bb), false);
basic_block then_bb = if_info->then_bb; /* THEN */
basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
basic_block join_bb = if_info->join_bb; /* JOIN */
- rtx jump = if_info->jump;
+ rtx_insn *jump = if_info->jump;
rtx cond = if_info->cond;
rtx_insn *insn_a, *insn_b;
rtx set_a, set_b;
if (EDGE_COUNT (then_bb->succs) == 0
&& EDGE_COUNT (combo_bb->succs) > 1)
{
- rtx end = NEXT_INSN (BB_END (then_bb));
+ rtx_insn *end = NEXT_INSN (BB_END (then_bb));
while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
end = NEXT_INSN (end);
if (EDGE_COUNT (else_bb->succs) == 0
&& EDGE_COUNT (combo_bb->succs) > 1)
{
- rtx end = NEXT_INSN (BB_END (else_bb));
+ rtx_insn *end = NEXT_INSN (BB_END (else_bb));
while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
end = NEXT_INSN (end);
{
if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
- rtx last_insn = BB_END (then_bb);
+ rtx_insn *last_insn = BB_END (then_bb);
while (last_insn
&& NOTE_P (last_insn)
subsequent cfg_cleanup pass to delete unreachable code if needed. */
rtx_insn *
-delete_related_insns (rtx insn)
+delete_related_insns (rtx uncast_insn)
{
+ rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
int was_code_label = (LABEL_P (insn));
rtx note;
rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);
&& GET_CODE (PATTERN (insn)) == SEQUENCE
&& CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
{
- rtx p;
+ rtx_insn *p;
for (p = next && INSN_DELETED_P (next) ? NEXT_INSN (next) : next;
p && NOTE_P (p);
" Rejecting inheritance %d->%d "
"as it results in 2 or more insns:\n",
original_regno, REGNO (new_reg));
- dump_rtl_slim (lra_dump_file, new_insns, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
fprintf (lra_dump_file,
" >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
}
(lra_dump_file,
" Rejecting split %d->%d resulting in > 2 %s save insns:\n",
original_regno, REGNO (new_reg), call_save_p ? "call" : "");
- dump_rtl_slim (lra_dump_file, save, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
fprintf (lra_dump_file,
" ))))))))))))))))))))))))))))))))))))))))))))))))\n");
}
" Rejecting split %d->%d "
"resulting in > 2 %s restore insns:\n",
original_regno, REGNO (new_reg), call_save_p ? "call" : "");
- dump_rtl_slim (lra_dump_file, restore, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
fprintf (lra_dump_file,
" ))))))))))))))))))))))))))))))))))))))))))))))))\n");
}
if (before != NULL_RTX)
{
fprintf (lra_dump_file," %s before:\n", title);
- dump_rtl_slim (lra_dump_file, before, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
}
if (after != NULL_RTX)
{
fprintf (lra_dump_file, " %s after:\n", title);
- dump_rtl_slim (lra_dump_file, after, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
}
fprintf (lra_dump_file, "\n");
}
static int sms_order_nodes (ddg_ptr, int, int *, int *);
static void set_node_sched_params (ddg_ptr);
static partial_schedule_ptr sms_schedule_by_order (ddg_ptr, int, int, int *);
-static void permute_partial_schedule (partial_schedule_ptr, rtx);
+static void permute_partial_schedule (partial_schedule_ptr, rtx_insn *);
static void generate_prolog_epilog (partial_schedule_ptr, struct loop *,
rtx, rtx);
static int calculate_stage_count (partial_schedule_ptr, int);
row ii-1, and position them right before LAST. This schedules
the insns of the loop kernel. */
static void
-permute_partial_schedule (partial_schedule_ptr ps, rtx last)
+permute_partial_schedule (partial_schedule_ptr ps, rtx_insn *last)
{
int ii = ps->ii;
int row;
try again, ensuring that TARGET is not one of the operands. */
static int
-add_equal_note (rtx insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
+add_equal_note (rtx_insn *insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
{
- rtx last_insn, set;
+ rtx_insn *last_insn;
+ rtx set;
rtx note;
gcc_assert (insns && INSN_P (insns) && NEXT_INSN (insns));
/* If PAT is composed of more than one insn, try to add an appropriate
REG_EQUAL note to it. If we can't because TEMP conflicts with an
operand, call expand_binop again, this time without a target. */
- if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
- && ! add_equal_note (pat, ops[0].value, optab_to_code (binoptab),
+ if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ && ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
+ optab_to_code (binoptab),
ops[1].value, ops[2].value))
{
delete_insns_since (last);
pat = maybe_gen_insn (icode, 2, ops);
if (pat)
{
- if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
- && ! add_equal_note (pat, ops[0].value, optab_to_code (unoptab),
+ if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ && ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
+ optab_to_code (unoptab),
ops[1].value, NULL_RTX))
{
delete_insns_since (last);
if (!pat)
return false;
- if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX && code != UNKNOWN)
- add_equal_note (pat, ops[0].value, code, ops[1].value, NULL_RTX);
+ if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ && code != UNKNOWN)
+ add_equal_note (as_a <rtx_insn *> (pat), ops[0].value, code, ops[1].value,
+ NULL_RTX);
emit_insn (pat);
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
int i;
- rtx last, eh_note, as_note, before_try, x;
+ rtx_insn *last, *before_try, *x;
+ rtx eh_note, as_note;
rtx old_insn, new_insn;
bool was_call = false;
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
-static rtx_insn_list *optimize_skip (rtx);
+static rtx_insn_list *optimize_skip (rtx_insn *);
#endif
static int get_jump_flags (rtx, rtx);
static int mostly_true_jump (rtx);
of delay slots required. */
static rtx_insn_list *
-optimize_skip (rtx insn)
+optimize_skip (rtx_insn *insn)
{
rtx_insn *trial = next_nonnote_insn (insn);
- rtx next_trial = next_active_insn (trial);
+ rtx_insn *next_trial = next_active_insn (trial);
rtx_insn_list *delay_list = 0;
int flags;
\f
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
-static rtx next_insn_no_annul (rtx);
-static rtx find_dead_or_set_registers (rtx, struct resources*,
- rtx*, int, struct resources,
- struct resources);
+static rtx_insn *next_insn_no_annul (rtx_insn *);
+static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
+ rtx_insn **, int, struct resources,
+ struct resources);
\f
/* Utility function called from mark_target_live_regs via note_stores.
It deadens any CLOBBERed registers and livens any SET registers. */
/* Similar to next_insn, but ignores insns in the delay slots of
an annulled branch. */
-static rtx
-next_insn_no_annul (rtx insn)
+static rtx_insn *
+next_insn_no_annul (rtx_insn *insn)
{
if (insn)
{
insn = NEXT_INSN (insn);
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
return insn;
However, we may have moved some of the parameter loading insns
into the delay slot of this CALL. If so, the USE's for them
don't count and should be skipped. */
- rtx_insn *insn = PREV_INSN (x);
+ rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
rtx_sequence *sequence = 0;
int seq_size = 0;
int i;
Stop after passing a few conditional jumps, and/or a small
number of unconditional branches. */
-static rtx
-find_dead_or_set_registers (rtx target, struct resources *res,
- rtx *jump_target, int jump_count,
+static rtx_insn *
+find_dead_or_set_registers (rtx_insn *target, struct resources *res,
+ rtx_insn **jump_target, int jump_count,
struct resources set, struct resources needed)
{
HARD_REG_SET scratch;
- rtx insn, next;
- rtx jump_insn = 0;
+ rtx_insn *insn, *next;
+ rtx_insn *jump_insn = 0;
int i;
for (insn = target; insn; insn = next)
{
- rtx this_jump_insn = insn;
+ rtx_insn *this_jump_insn = insn;
next = NEXT_INSN (insn);
of a call, so search for a JUMP_INSN in any position. */
for (i = 0; i < seq->len (); i++)
{
- this_jump_insn = seq->element (i);
+ this_jump_insn = seq->insn (i);
if (JUMP_P (this_jump_insn))
break;
}
if (any_uncondjump_p (this_jump_insn)
|| ANY_RETURN_P (PATTERN (this_jump_insn)))
{
- next = JUMP_LABEL (this_jump_insn);
+ next = JUMP_LABEL_AS_INSN (this_jump_insn);
if (ANY_RETURN_P (next))
- next = NULL_RTX;
+ next = NULL;
if (jump_insn == 0)
{
jump_insn = insn;
if (jump_target)
- *jump_target = JUMP_LABEL (this_jump_insn);
+ *jump_target = JUMP_LABEL_AS_INSN (this_jump_insn);
}
}
else if (any_condjump_p (this_jump_insn))
AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
- find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
+ find_dead_or_set_registers (JUMP_LABEL_AS_INSN (this_jump_insn),
&target_res, 0, jump_count,
target_set, needed);
find_dead_or_set_registers (next,
init_resource_info () was invoked before we are called. */
void
-mark_target_live_regs (rtx insns, rtx target, struct resources *res)
+mark_target_live_regs (rtx_insn *insns, rtx_insn *target, struct resources *res)
{
int b = -1;
unsigned int i;
struct target_info *tinfo = NULL;
- rtx insn;
+ rtx_insn *insn;
rtx jump_insn = 0;
- rtx jump_target;
+ rtx_insn *jump_target;
HARD_REG_SET scratch;
struct resources set, needed;
if (b != -1)
{
regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
- rtx start_insn, stop_insn;
+ rtx_insn *start_insn, *stop_insn;
/* Compute hard regs live at start of block. */
REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
if (NONJUMP_INSN_P (start_insn)
&& GET_CODE (PATTERN (start_insn)) == SEQUENCE)
- start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
+ start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);
if (NONJUMP_INSN_P (stop_insn)
&& GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
if (jump_insn)
{
struct resources new_resources;
- rtx stop_insn = next_active_insn (jump_insn);
+ rtx_insn *stop_insn = next_active_insn (jump_insn);
if (!ANY_RETURN_P (jump_target))
jump_target = next_active_insn (jump_target);
MARK_SRC_DEST_CALL = 1
};
-extern void mark_target_live_regs (rtx, rtx, struct resources *);
+extern void mark_target_live_regs (rtx_insn *, rtx_insn *, struct resources *);
extern void mark_set_resources (rtx, struct resources *, int,
enum mark_resource_type);
extern void mark_referenced_resources (rtx, struct resources *, bool);
extern bool insn_could_throw_p (const_rtx);
extern bool insn_nothrow_p (const_rtx);
extern bool can_nonlocal_goto (const_rtx);
-extern void copy_reg_eh_region_note_forward (rtx, rtx, rtx);
-extern void copy_reg_eh_region_note_backward (rtx, rtx, rtx);
+extern void copy_reg_eh_region_note_forward (rtx, rtx_insn *, rtx);
+extern void copy_reg_eh_region_note_backward (rtx, rtx_insn *, rtx);
extern int inequality_comparisons_p (const_rtx);
extern rtx replace_rtx (rtx, rtx, rtx);
extern void replace_label (rtx *, rtx, rtx, bool);
extern void set_new_first_and_last_insn (rtx_insn *, rtx_insn *);
extern unsigned int unshare_all_rtl (void);
extern void unshare_all_rtl_again (rtx_insn *);
-extern void unshare_all_rtl_in_chain (rtx);
+extern void unshare_all_rtl_in_chain (rtx_insn *);
extern void verify_rtl_sharing (void);
extern void add_insn (rtx_insn *);
extern void add_insn_before (rtx, rtx, basic_block);
by the scheduler anymore but for all "slim" RTL dumping. */
extern void dump_value_slim (FILE *, const_rtx, int);
extern void dump_insn_slim (FILE *, const_rtx);
-extern void dump_rtl_slim (FILE *, const_rtx, const_rtx, int, int);
+extern void dump_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
+ int, int);
extern void print_value (pretty_printer *, const_rtx, int);
extern void print_pattern (pretty_printer *, const_rtx, int);
extern void print_insn (pretty_printer *, const_rtx, int);
/* Delete (RESOLVED_P) dependencies between HEAD and TAIL together with
deps_lists. */
void
-sched_free_deps (rtx head, rtx tail, bool resolved_p)
+sched_free_deps (rtx_insn *head, rtx_insn *tail, bool resolved_p)
{
- rtx insn;
- rtx next_tail = NEXT_INSN (tail);
+ rtx_insn *insn;
+ rtx_insn *next_tail = NEXT_INSN (tail);
/* We make two passes since some insns may be scheduled before their
dependencies are resolved. */
init_ready_list (void)
{
int n = 0;
- rtx prev_head = current_sched_info->prev_head;
- rtx next_tail = current_sched_info->next_tail;
+ rtx_insn *prev_head = current_sched_info->prev_head;
+ rtx_insn *next_tail = current_sched_info->next_tail;
rtx_insn *insn;
sched_rgn_n_insns = 0;
else
{
/* Create an empty unreachable block after the INSN. */
- rtx next = NEXT_INSN (insn);
+ rtx_insn *next = NEXT_INSN (insn);
if (next && BARRIER_P (next))
next = NEXT_INSN (next);
bb = create_basic_block (next, NULL_RTX, last_bb);
extern enum reg_note ds_to_dt (ds_t);
extern bool deps_pools_are_empty_p (void);
-extern void sched_free_deps (rtx, rtx, bool);
+extern void sched_free_deps (rtx_insn *, rtx_insn *, bool);
extern void extend_dependency_caches (int, bool);
extern void debug_ds (ds_t);
extern int haifa_classify_insn (const_rtx);
extern void get_ebb_head_tail (basic_block, basic_block,
rtx_insn **, rtx_insn **);
-extern int no_real_insns_p (const_rtx, const_rtx);
+extern int no_real_insns_p (const rtx_insn *, const rtx_insn *);
extern int insn_cost (rtx_insn *);
extern int dep_cost_1 (dep_t, dw_t);
extern int dep_cost (dep_t);
extern int set_priorities (rtx_insn *, rtx_insn *);
-extern void sched_setup_bb_reg_pressure_info (basic_block, rtx);
+extern void sched_setup_bb_reg_pressure_info (basic_block, rtx_insn *);
extern bool schedule_block (basic_block *, state_t);
extern int cycle_issued_insns;
static void
init_ready_list (void)
{
- rtx prev_head = current_sched_info->prev_head;
- rtx next_tail = current_sched_info->next_tail;
+ rtx_insn *prev_head = current_sched_info->prev_head;
+ rtx_insn *next_tail = current_sched_info->next_tail;
int bb_src;
rtx_insn *insn;
If COUNT < 0 it will stop only at LAST or NULL rtx. */
void
-dump_rtl_slim (FILE *f, const_rtx first, const_rtx last,
+dump_rtl_slim (FILE *f, const rtx_insn *first, const rtx_insn *last,
int count, int flags ATTRIBUTE_UNUSED)
{
- const_rtx insn, tail;
+ const rtx_insn *insn, *tail;
pretty_printer rtl_slim_pp;
rtl_slim_pp.buffer->stream = f;
- tail = last ? NEXT_INSN (last) : NULL_RTX;
+ tail = last ? NEXT_INSN (last) : NULL;
for (insn = first;
(insn != NULL) && (insn != tail) && (count != 0);
insn = NEXT_INSN (insn))
void
rtl_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
bool first = true;
/* TODO: inter-bb stuff. */
}
/* Same as above, but using dump_rtl_slim. */
-extern void debug_rtl_slim (FILE *, const_rtx, const_rtx, int, int);
+extern void debug_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
+ int, int);
DEBUG_FUNCTION void
-debug_rtl_slim (const_rtx first, const_rtx last, int count, int flags)
+debug_rtl_slim (const rtx_insn *first, const rtx_insn *last, int count,
+ int flags)
{
dump_rtl_slim (stderr, first, last, count, flags);
}
void
try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
- bitmap_head *bb_flags, rtx prologue_seq)
+ bitmap_head *bb_flags, rtx_insn *prologue_seq)
{
edge e;
edge_iterator ei;
bool nonempty_prologue = false;
unsigned max_grow_size;
- rtx seq;
+ rtx_insn *seq;
for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
{
HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
struct hard_reg_set_container set_up_by_prologue;
- rtx p_insn;
+ rtx_insn *p_insn;
vec<basic_block> vec;
basic_block bb;
bitmap_head bb_antic_flags;
void
convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
- bitmap_head bb_flags, rtx returnjump,
+ bitmap_head bb_flags, rtx_insn *returnjump,
vec<edge> unconverted_simple_returns)
{
edge e;
rtx_insn *before,
bitmap_head *need_prologue);
extern void try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
- bitmap_head *bb_flags, rtx prologue_seq);
+ bitmap_head *bb_flags, rtx_insn *prologue_seq);
extern edge get_unconverted_simple_return (edge, bitmap_head,
vec<edge> *, rtx_insn **);
extern void convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
- bitmap_head bb_flags, rtx returnjump,
+ bitmap_head bb_flags,
+ rtx_insn *returnjump,
vec<edge> unconverted_simple_returns);
#endif
of THIS_BASIC_BLOCK. */
void
-propagate_for_debug (rtx_insn *insn, rtx last, rtx dest, rtx src,
+propagate_for_debug (rtx_insn *insn, rtx_insn *last, rtx dest, rtx src,
basic_block this_basic_block)
{
rtx_insn *next, *end = NEXT_INSN (BB_END (this_basic_block));
unsigned int uregno, rtx insn,
enum debug_temp_where);
-extern void propagate_for_debug (rtx_insn *, rtx, rtx, rtx, basic_block);
+extern void propagate_for_debug (rtx_insn *, rtx_insn *, rtx, rtx, basic_block);
#endif /* GCC_VALTRACK_H */