+2016-09-22  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
+
+ * emit-rtl.c (next_active_insn): Change argument type to
+ rtx_insn *.
+ (prev_active_insn): Likewise.
+ (active_insn_p): Likewise.
+ * rtl.h: Adjust prototypes.
+ * cfgcleanup.c (merge_blocks_move_successor_nojumps): Adjust.
+ * config/arc/arc.md: Likewise.
+ * config/pa/pa.c (branch_to_delay_slot_p): Likewise.
+ (branch_needs_nop_p): Likewise.
+ (use_skip_p): Likewise.
+ * config/sh/sh.c (gen_block_redirect): Likewise.
+ (split_branches): Likewise.
+ * reorg.c (optimize_skip): Likewise.
+ (fill_simple_delay_slots): Likewise.
+ (fill_slots_from_thread): Likewise.
+ (relax_delay_slots): Likewise.
+ * resource.c (mark_target_live_regs): Likewise.
+
2016-09-22  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

 * config/cris/cris.c (cris_asm_output_case_end): Change argument
/* If there is a jump table following block B temporarily add the jump table
to block B so that it will also be moved to the correct location. */
if (tablejump_p (BB_END (b), &label, &table)
- && prev_active_insn (label) == BB_END (b))
+ && prev_active_insn (as_a<rtx_insn *> (label)) == BB_END (b))
{
BB_END (b) = table;
}
scan = as_a <rtx_insn *> (XEXP (SET_SRC (PATTERN (scan)), 0));
continue;
}
- if (JUMP_LABEL (scan)
- /* JUMP_LABEL might be simple_return instead if an insn. */
- && (!INSN_P (JUMP_LABEL (scan))
- || (!next_active_insn (JUMP_LABEL (scan))
- || (recog_memoized (next_active_insn (JUMP_LABEL (scan)))
- != CODE_FOR_doloop_begin_i)))
- && (!next_active_insn (NEXT_INSN (PREV_INSN (scan)))
- || (recog_memoized
- (next_active_insn (NEXT_INSN (PREV_INSN (scan))))
- != CODE_FOR_doloop_begin_i)))
+
+ rtx lab = JUMP_LABEL (scan);
+ if (!lab)
+ break;
+
+ rtx_insn *next_scan
+ = next_active_insn (NEXT_INSN (PREV_INSN (scan)));
+ if (next_scan
+ && recog_memoized (next_scan) != CODE_FOR_doloop_begin_i)
+ break;
+
+ /* JUMP_LABEL might be simple_return instead of an insn. */
+ if (!INSN_P (lab))
+ {
+ n_insns++;
+ break;
+ }
+
+ rtx_insn *next_lab = next_active_insn (as_a<rtx_insn *> (lab));
+ if (next_lab
+ && recog_memoized (next_lab) != CODE_FOR_doloop_begin_i)
+ break;
+
n_insns++;
}
break;
if (dbr_sequence_length ())
return FALSE;
- jump_insn = next_active_insn (JUMP_LABEL (insn));
+ jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
while (insn)
{
insn = next_active_insn (insn);
if (dbr_sequence_length ())
return FALSE;
- jump_insn = next_active_insn (JUMP_LABEL (insn));
+ jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
while (insn)
{
insn = next_active_insn (insn);
static bool
use_skip_p (rtx_insn *insn)
{
- rtx_insn *jump_insn = next_active_insn (JUMP_LABEL (insn));
+ rtx_insn *jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
while (insn)
{
else if (optimize && need_block >= 0)
{
- rtx_insn *next = next_active_insn (next_active_insn (dest));
+ rtx_insn *next = next_active_insn (as_a<rtx_insn *> (dest));
+ next = next_active_insn (next);
if (next && JUMP_P (next)
&& GET_CODE (PATTERN (next)) == SET
&& recog_memoized (next) == CODE_FOR_jump_compact)
/* We can't use JUMP_LABEL here because it might be undefined
when not optimizing. */
/* A syntax error might cause beyond to be NULL_RTX. */
- beyond
- = next_active_insn (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1),
- 0));
+ rtx temp = XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0);
+ beyond = next_active_insn (as_a<rtx_insn *> (temp));
if (beyond
&& (JUMP_P (beyond)
standalone USE and CLOBBER insn. */
int
-active_insn_p (const_rtx insn)
+active_insn_p (const rtx_insn *insn)
{
return (CALL_P (insn) || JUMP_P (insn)
|| JUMP_TABLE_DATA_P (insn) /* FIXME */
}
rtx_insn *
-next_active_insn (rtx uncast_insn)
+next_active_insn (rtx_insn *insn)
{
- rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
-
while (insn)
{
insn = NEXT_INSN (insn);
standalone USE and CLOBBER insn. */
rtx_insn *
-prev_active_insn (rtx uncast_insn)
+prev_active_insn (rtx_insn *insn)
{
- rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
-
while (insn)
{
insn = PREV_INSN (insn);
we have one insn followed by a branch to the same label we branch to.
In both of these cases, inverting the jump and annulling the delay
slot give the same effect in fewer insns. */
- if (next_trial == next_active_insn (JUMP_LABEL (insn))
+ if (next_trial == next_active_insn (JUMP_LABEL_AS_INSN (insn))
|| (next_trial != 0
&& simplejump_or_return_p (next_trial)
&& JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
&& trial
&& jump_to_label_p (trial)
&& simplejump_p (trial)
- && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
+ && (next_trial = next_active_insn (JUMP_LABEL_AS_INSN (trial))) != 0
&& ! (NONJUMP_INSN_P (next_trial)
&& GET_CODE (PATTERN (next_trial)) == SEQUENCE)
&& !JUMP_P (next_trial)
&& simplejump_p (jump_insn)
&& slots_filled != slots_to_fill)
fill_slots_from_thread (jump_insn, const_true_rtx,
- next_active_insn (JUMP_LABEL (insn)), NULL, 1,
- 1, own_thread_p (JUMP_LABEL (insn),
+ next_active_insn (JUMP_LABEL_AS_INSN (insn)),
+ NULL, 1, 1, own_thread_p (JUMP_LABEL (insn),
JUMP_LABEL (insn), 0),
slots_to_fill, &slots_filled, &delay_list);
to call update_block and delete_insn. */
fix_reg_dead_note (prior_insn, insn);
update_reg_unused_notes (prior_insn, new_thread);
- new_thread = next_active_insn (new_thread);
+ new_thread
+ = next_active_insn (as_a<rtx_insn *> (new_thread));
}
break;
}
}
static rtx_insn *
-label_before_next_insn (rtx x, rtx scan_limit)
+label_before_next_insn (rtx_insn *x, rtx scan_limit)
{
rtx_insn *insn = next_active_insn (x);
while (insn)
if (ANY_RETURN_P (target_label))
target_label = find_end_label (target_label);
- if (target_label && next_active_insn (target_label) == next
+ if (target_label
+ && next_active_insn (as_a<rtx_insn *> (target_label)) == next
&& ! condjump_in_parallel_p (jump_insn)
&& ! (next && switch_text_sections_between_p (jump_insn, next)))
{
if (next && simplejump_or_return_p (next)
&& any_condjump_p (jump_insn)
&& target_label
- && next_active_insn (target_label) == next_active_insn (next)
+ && (next_active_insn (as_a<rtx_insn *> (target_label))
+ == next_active_insn (next))
&& no_labels_between_p (jump_insn, next)
&& targetm.can_follow_jump (jump_insn, next))
{
{
/* Figure out where to emit the special USE insn so we don't
later incorrectly compute register live/death info. */
- rtx_insn *tmp = next_active_insn (trial);
+ rtx_insn *tmp = next_active_insn (as_a<rtx_insn *> (trial));
if (tmp == 0)
tmp = find_end_label (simple_return_rtx);
/* See if we have a simple (conditional) jump that is useless. */
if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
&& ! condjump_in_parallel_p (delay_jump_insn)
- && prev_active_insn (target_label) == insn
+ && prev_active_insn (as_a<rtx_insn *> (target_label)) == insn
&& ! BARRIER_P (prev_nonnote_insn (as_a<rtx_insn *> (target_label)))
/* If the last insn in the delay slot sets CC0 for some insn,
various code assumes that it is in a delay slot. We could
if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
&& any_condjump_p (delay_jump_insn)
&& next && simplejump_or_return_p (next)
- && next_active_insn (target_label) == next_active_insn (next)
+ && (next_active_insn (as_a<rtx_insn *> (target_label))
+ == next_active_insn (next))
&& no_labels_between_p (insn, next))
{
rtx label = JUMP_LABEL (next);
try_merge_delay_insns (insn, next);
else if (! INSN_FROM_TARGET_P (pat->insn (1))
&& own_thread_p (target_label, target_label, 0))
- try_merge_delay_insns (insn, next_active_insn (target_label));
+ try_merge_delay_insns (insn,
+ next_active_insn (as_a<rtx_insn *> (target_label)));
/* If we get here, we haven't deleted INSN. But we may have deleted
NEXT, so recompute it. */
rtx_insn *stop_insn = next_active_insn (jump_insn);
if (!ANY_RETURN_P (jump_target))
- jump_target = next_active_insn (jump_target);
+ jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
mark_target_live_regs (insns, jump_target, &new_resources);
CLEAR_RESOURCE (&set);
CLEAR_RESOURCE (&needed);
extern rtx_insn *next_nonnote_nondebug_insn (rtx_insn *);
extern rtx_insn *prev_real_insn (rtx_insn *);
extern rtx_insn *next_real_insn (rtx);
-extern rtx_insn *prev_active_insn (rtx);
-extern rtx_insn *next_active_insn (rtx);
-extern int active_insn_p (const_rtx);
+extern rtx_insn *prev_active_insn (rtx_insn *);
+extern rtx_insn *next_active_insn (rtx_insn *);
+extern int active_insn_p (const rtx_insn *);
extern rtx_insn *next_cc0_user (rtx);
extern rtx_insn *prev_cc0_setter (rtx_insn *);
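
For illustration only, not part of the patch: with the stricter prototypes, a call site that only has the plain rtx returned by JUMP_LABEL adapts in one of the two ways used throughout the hunks above, either a checked as_a <rtx_insn *> cast or JUMP_LABEL_AS_INSN. The function example_caller below is a hypothetical sketch, not code from the tree.

/* Hypothetical caller, sketching the two patterns this patch uses.  */
static rtx_insn *
example_caller (rtx_insn *jump)
{
  /* JUMP_LABEL yields a plain rtx; as_a asserts it really is an insn.  */
  rtx label = JUMP_LABEL (jump);
  rtx_insn *via_cast = next_active_insn (as_a <rtx_insn *> (label));

  /* JUMP_LABEL_AS_INSN performs the same conversion for us.  */
  rtx_insn *via_helper = next_active_insn (JUMP_LABEL_AS_INSN (jump));

  return via_cast ? via_cast : via_helper;
}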