+2014-09-05 David Malcolm <dmalcolm@redhat.com>
+
+ * config/arc/arc.c (arc_print_operand): Use insn method of
+ final_sequence for type-safety.
+ * config/bfin/bfin.c (length_for_loop): Strengthen param
+ "insn" from rtx to rtx_insn *.
+ * config/frv/frv.c (frv_print_operand_jump_hint): Likewise.
+ * config/h8300/h8300.c (same_cmp_preceding_p): Likewise for
+ locals "i1", "i2".  Use NULL rather than NULL_RTX in comparisons.
+ (same_cmp_following_p): Likewise for locals "i2", "i3".
+ * config/mn10300/mn10300.c (mn10300_scan_for_setlb_lcc):
+ Likewise for locals "branch", "label".
+ * config/sh/sh_optimize_sett_clrt.cc
+ (sh_optimize_sett_clrt::sh_cbranch_ccreg_value): Likewise for
+ param "cbranch_insn".
+ * function.c (convert_jumps_to_returns): Likewise for local "jump".
+ * ifcvt.c (cond_exec_get_condition): Likewise for param "jump".
+ * jump.c (simplejump_p): Strengthen param "insn" from const_rtx to
+ const rtx_insn *.
+ (condjump_p): Likewise.
+ (condjump_in_parallel_p): Likewise.
+ (pc_set): Likewise.
+ (any_uncondjump_p): Likewise.
+ (any_condjump_p): Likewise.
+ (condjump_label): Likewise.
+ (returnjump_p): Strengthen param "insn" from rtx to
+ const rtx_insn *.
+ (onlyjump_p): Strengthen param "insn" from const_rtx to
+ const rtx_insn *.
+ (jump_to_label_p): Likewise.
+ (invert_jump_1): Strengthen param "jump" from rtx to rtx_insn *.
+ (invert_jump): Likewise.
+ * reorg.c (simplejump_or_return_p): Add checked cast when calling
+ simplejump_p.
+ (get_jump_flags): Strengthen param "insn" from rtx to
+ const rtx_insn *.
+ (get_branch_condition): Likewise.
+ (condition_dominates_p): Likewise.
+ (make_return_insns): Move declaration of local "pat" earlier, to
+ after we've handled NONJUMP_INSN_P and non-sequences, using its
+ methods to simplify the code and for type-safety.
+ * rtl.h (find_constant_src): Strengthen param from const_rtx to
+ const rtx_insn *.
+ (jump_to_label_p): Strengthen param from rtx to const rtx_insn *.
+ (condjump_p): Strengthen param from const_rtx to
+ const rtx_insn *.
+ (any_condjump_p): Likewise.
+ (any_uncondjump_p): Likewise.
+ (pc_set): Likewise.
+ (condjump_label): Likewise.
+ (simplejump_p): Likewise.
+ (returnjump_p): Likewise.
+ (onlyjump_p): Likewise.
+ (invert_jump_1): Strengthen param 1 from rtx to rtx_insn *.
+ (invert_jump): Likewise.
+ (condjump_in_parallel_p): Strengthen param from const_rtx to
+ const rtx_insn *.
+ * rtlanal.c (find_constant_src): Strengthen param from const_rtx
+ to const rtx_insn *.
+ * sel-sched-ir.c (fallthru_bb_of_jump): Strengthen param from rtx
+ to const rtx_insn *.
+ * sel-sched-ir.h (fallthru_bb_of_jump): Likewise.
+
2014-09-05 David Malcolm <dmalcolm@redhat.com>
* reorg.c (relax_delay_slots): Move declaration of "trial_seq"
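
The common thread in the hunks that follow is replacing the catch-all
"rtx" with the insn-specific subclass "rtx_insn *", so that passing a
non-insn to a jump predicate becomes a compile-time error rather than
a latent runtime failure.  A minimal sketch of the idiom, assuming
GCC's is-a.h helpers (is_a/as_a); "process_jump" is a hypothetical
caller, not part of this patch:

/* Sketch only: "process_jump" is hypothetical; is_a/as_a and
   rtx_insn are the existing GCC facilities the patch builds on.  */
static void
process_jump (rtx x)
{
  if (is_a <rtx_insn *> (x))
    {
      /* as_a is a checked downcast: in checking builds it asserts
         that X really is an insn, then returns it with the stronger
         type that simplejump_p now requires.  */
      rtx_insn *insn = as_a <rtx_insn *> (x);
      if (simplejump_p (insn))
        ; /* ... handle the unconditional jump ...  */
    }
}
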
/* Is this insn in a delay slot sequence? */
if (!final_sequence || XVECLEN (final_sequence, 0) < 2
|| current_insn_predicate
- || CALL_P (XVECEXP (final_sequence, 0, 0))
- || simplejump_p (XVECEXP (final_sequence, 0, 0)))
+ || CALL_P (final_sequence->insn (0))
+ || simplejump_p (final_sequence->insn (0)))
{
/* This insn isn't in a delay slot sequence, or conditionalized
independently of its position in a delay slot. */
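
The final_sequence->insn (0) calls above replace raw XVECEXP element
accesses.  Such an accessor is, in essence, "XVECEXP plus a checked
cast"; a sketch of its shape (the real accessor is the
rtx_sequence::insn member in rtl.h, so this standalone helper is
illustrative only):

/* Illustrative only: fetch element IDX of SEQ's vector and return it
   already typed as an insn, roughly what rtx_sequence::insn does for
   the callers above.  */
static rtx_insn *
nth_insn_of_sequence (rtx_sequence *seq, int idx)
{
  return as_a <rtx_insn *> (XVECEXP (seq, 0, idx));
}
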
/* Estimate the length of INSN conservatively. */
static int
-length_for_loop (rtx insn)
+length_for_loop (rtx_insn *insn)
{
int length = 0;
if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
static void frv_print_operand_memory_reference_reg
(FILE *, rtx);
static void frv_print_operand_memory_reference (FILE *, rtx, int);
-static int frv_print_operand_jump_hint (rtx);
+static int frv_print_operand_jump_hint (rtx_insn *);
static const char *comparison_string (enum rtx_code, rtx);
static rtx frv_function_value (const_tree, const_tree,
bool);
#define FRV_JUMP_NOT_LIKELY 0
static int
-frv_print_operand_jump_hint (rtx insn)
+frv_print_operand_jump_hint (rtx_insn *insn)
{
rtx note;
rtx labelref;
int
same_cmp_preceding_p (rtx i3)
{
- rtx i1, i2;
+ rtx_insn *i1, *i2;
/* Make sure we have a sequence of three insns. */
i2 = prev_nonnote_insn (i3);
- if (i2 == NULL_RTX)
+ if (i2 == NULL)
return 0;
i1 = prev_nonnote_insn (i2);
- if (i1 == NULL_RTX)
+ if (i1 == NULL)
return 0;
return (INSN_P (i1) && rtx_equal_p (PATTERN (i1), PATTERN (i3))
int
same_cmp_following_p (rtx i1)
{
- rtx i2, i3;
+ rtx_insn *i2, *i3;
/* Make sure we have a sequence of three insns. */
i2 = next_nonnote_insn (i1);
- if (i2 == NULL_RTX)
+ if (i2 == NULL)
return 0;
i3 = next_nonnote_insn (i2);
- if (i3 == NULL_RTX)
+ if (i3 == NULL)
return 0;
return (INSN_P (i3) && rtx_equal_p (PATTERN (i1), PATTERN (i3))
reason = "it contains CALL insns";
else
{
- rtx branch = BB_END (loop->latch);
+ rtx_insn *branch = BB_END (loop->latch);
gcc_assert (JUMP_P (branch));
if (single_set (branch) == NULL_RTX || ! any_condjump_p (branch))
reason = "it is not a simple loop";
else
{
- rtx label;
+ rtx_insn *label;
if (dump_file)
flow_loop_dump (loop, dump_file, NULL, 0);
// Given a cbranch insn, its basic block and another basic block, determine
// the value to which the ccreg will be set after jumping/falling through to
// the specified target basic block.
- bool sh_cbranch_ccreg_value (rtx cbranch_insn,
+ bool sh_cbranch_ccreg_value (rtx_insn *cbranch_insn,
basic_block cbranch_insn_bb,
basic_block branch_target_bb) const;
bool
sh_optimize_sett_clrt
-::sh_cbranch_ccreg_value (rtx cbranch_insn, basic_block cbranch_insn_bb,
+::sh_cbranch_ccreg_value (rtx_insn *cbranch_insn, basic_block cbranch_insn_bb,
basic_block branch_target_bb) const
{
rtx pc_set_rtx = pc_set (cbranch_insn);
FOR_EACH_VEC_ELT (src_bbs, i, bb)
{
- rtx jump = BB_END (bb);
+ rtx_insn *jump = BB_END (bb);
if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
continue;
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
int);
-static rtx cond_exec_get_condition (rtx);
+static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
/* Return the condition for a jump. Do not do any special processing. */
static rtx
-cond_exec_get_condition (rtx jump)
+cond_exec_get_condition (rtx_insn *jump)
{
rtx test_if, cond;
/* Return 1 if INSN is an unconditional jump and nothing else. */
int
-simplejump_p (const_rtx insn)
+simplejump_p (const rtx_insn *insn)
{
return (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_p (const_rtx insn)
+condjump_p (const rtx_insn *insn)
{
const_rtx x = PATTERN (insn);
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_in_parallel_p (const_rtx insn)
+condjump_in_parallel_p (const rtx_insn *insn)
{
const_rtx x = PATTERN (insn);
/* Return set of PC, otherwise NULL. */
rtx
-pc_set (const_rtx insn)
+pc_set (const rtx_insn *insn)
{
rtx pat;
if (!JUMP_P (insn))
possibly bundled inside a PARALLEL. */
int
-any_uncondjump_p (const_rtx insn)
+any_uncondjump_p (const rtx_insn *insn)
{
const_rtx x = pc_set (insn);
if (!x)
Note that unlike condjump_p it returns false for unconditional jumps. */
int
-any_condjump_p (const_rtx insn)
+any_condjump_p (const rtx_insn *insn)
{
const_rtx x = pc_set (insn);
enum rtx_code a, b;
/* Return the label of a conditional jump. */
rtx
-condjump_label (const_rtx insn)
+condjump_label (const rtx_insn *insn)
{
rtx x = pc_set (insn);
/* Return TRUE if INSN is a return jump. */
int
-returnjump_p (rtx insn)
+returnjump_p (const rtx_insn *insn)
{
if (JUMP_P (insn))
{
nothing more. */
int
-onlyjump_p (const_rtx insn)
+onlyjump_p (const rtx_insn *insn)
{
rtx set;
/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
NULL or a return. */
bool
-jump_to_label_p (rtx insn)
+jump_to_label_p (const rtx_insn *insn)
{
return (JUMP_P (insn)
&& JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
inversion and redirection. */
int
-invert_jump_1 (rtx jump, rtx nlabel)
+invert_jump_1 (rtx_insn *jump, rtx nlabel)
{
rtx x = pc_set (jump);
int ochanges;
NLABEL instead of where it jumps now. Return true if successful. */
int
-invert_jump (rtx jump, rtx nlabel, int delete_unused)
+invert_jump (rtx_insn *jump, rtx nlabel, int delete_unused)
{
rtx olabel = JUMP_LABEL (jump);
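
With the whole jump.c predicate family now taking const rtx_insn *,
callers that walk the insn chain compose with it without casts, since
the chain itself yields rtx_insn *.  A hypothetical walker, purely as
a sketch (it is not part of this patch):

/* Hypothetical: count the conditional jumps in the current function,
   showing that insns taken from the chain typecheck directly against
   the strengthened predicates.  */
static int
count_cond_jumps (void)
{
  int n = 0;
  for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && any_condjump_p (insn))
      n++;
  return n;
}
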
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx_insn_list *optimize_skip (rtx_insn *);
#endif
-static int get_jump_flags (rtx, rtx);
+static int get_jump_flags (const rtx_insn *, rtx);
static int mostly_true_jump (rtx);
-static rtx get_branch_condition (rtx, rtx);
-static int condition_dominates_p (rtx, rtx);
+static rtx get_branch_condition (const rtx_insn *, rtx);
+static int condition_dominates_p (rtx, const rtx_insn *);
static int redirect_with_delay_slots_safe_p (rtx_insn *, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx_insn *, rtx, rtx_insn_list *);
static int check_annul_list_true_false (int, rtx);
simplejump_or_return_p (rtx insn)
{
return (JUMP_P (insn)
- && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
+ && (simplejump_p (as_a <rtx_insn *> (insn))
+ || ANY_RETURN_P (PATTERN (insn))));
}
\f
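
simplejump_or_return_p keeps its plain rtx parameter because callers
pass it elements of delay-slot SEQUENCEs; the as_a cast makes the
"this really is an insn" assumption explicit and, in checking builds,
asserted.  Conceptually the cast is "assert, then downcast"; a sketch
of that shape (the real as_a is a generic template in is-a.h):

/* Sketch of what the checked cast amounts to; the real
   implementation is the as_a template in is-a.h.  */
static inline rtx_insn *
insn_checked_cast (rtx x)
{
  gcc_checking_assert (is_a <rtx_insn *> (x));
  return (rtx_insn *) x;
}
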
/* Return TRUE if this insn should stop the search for insn to fill delay
are predicted as very likely taken. */
static int
-get_jump_flags (rtx insn, rtx label)
+get_jump_flags (const rtx_insn *insn, rtx label)
{
int flags;
type of jump, or it doesn't go to TARGET, return 0. */
static rtx
-get_branch_condition (rtx insn, rtx target)
+get_branch_condition (const rtx_insn *insn, rtx target)
{
rtx pat = PATTERN (insn);
rtx src;
INSN, i.e., if INSN will always branch if CONDITION is true. */
static int
-condition_dominates_p (rtx condition, rtx insn)
+condition_dominates_p (rtx condition, const rtx_insn *insn)
{
rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
enum rtx_code code = GET_CODE (condition);
/* Only look at filled JUMP_INSNs that go to the end of function
label. */
- if (!NONJUMP_INSN_P (insn)
- || GET_CODE (PATTERN (insn)) != SEQUENCE
- || !jump_to_label_p (XVECEXP (PATTERN (insn), 0, 0)))
+ if (!NONJUMP_INSN_P (insn))
continue;
- if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) == function_return_label)
+ if (GET_CODE (PATTERN (insn)) != SEQUENCE)
+ continue;
+
+ rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (insn));
+
+ if (!jump_to_label_p (pat->insn (0)))
+ continue;
+
+ if (JUMP_LABEL (pat->insn (0)) == function_return_label)
{
kind = ret_rtx;
real_label = real_return_label;
}
- else if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0))
- == function_simple_return_label)
+ else if (JUMP_LABEL (pat->insn (0)) == function_simple_return_label)
{
kind = simple_return_rtx;
real_label = real_simple_return_label;
else
continue;
- rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (insn));
jump_insn = pat->insn (0);
/* If we can't make the jump into a RETURN, try to redirect it to the best
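
The reordering in make_return_insns above is what makes its as_a cast
safe: the checked cast may only run once GET_CODE has confirmed a
SEQUENCE.  A sketch of the safe ordering in isolation (a hypothetical
helper, not part of the patch):

/* Hypothetical helper: return the first insn of INSN's delay-slot
   sequence, or NULL.  The as_a cast sits after the NONJUMP_INSN_P and
   SEQUENCE checks, mirroring the hunk above.  */
static rtx_insn *
first_insn_of_delay_sequence (rtx_insn *insn)
{
  if (!NONJUMP_INSN_P (insn))
    return NULL;
  if (GET_CODE (PATTERN (insn)) != SEQUENCE)
    return NULL;
  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
  return seq->insn (0);
}
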
extern rtx find_reg_note (const_rtx, enum reg_note, const_rtx);
extern rtx find_regno_note (const_rtx, enum reg_note, unsigned int);
extern rtx find_reg_equal_equiv_note (const_rtx);
-extern rtx find_constant_src (const_rtx);
+extern rtx find_constant_src (const rtx_insn *);
extern int find_reg_fusage (const_rtx, enum rtx_code, const_rtx);
extern int find_regno_fusage (const_rtx, enum rtx_code, unsigned int);
extern rtx alloc_reg_note (enum reg_note, rtx, rtx);
/* In jump.c */
extern int comparison_dominates_p (enum rtx_code, enum rtx_code);
-extern bool jump_to_label_p (rtx);
-extern int condjump_p (const_rtx);
-extern int any_condjump_p (const_rtx);
-extern int any_uncondjump_p (const_rtx);
-extern rtx pc_set (const_rtx);
-extern rtx condjump_label (const_rtx);
-extern int simplejump_p (const_rtx);
-extern int returnjump_p (rtx);
+extern bool jump_to_label_p (const rtx_insn *);
+extern int condjump_p (const rtx_insn *);
+extern int any_condjump_p (const rtx_insn *);
+extern int any_uncondjump_p (const rtx_insn *);
+extern rtx pc_set (const rtx_insn *);
+extern rtx condjump_label (const rtx_insn *);
+extern int simplejump_p (const rtx_insn *);
+extern int returnjump_p (const rtx_insn *);
extern int eh_returnjump_p (rtx_insn *);
-extern int onlyjump_p (const_rtx);
+extern int onlyjump_p (const rtx_insn *);
extern int only_sets_cc0_p (const_rtx);
extern int sets_cc0_p (const_rtx);
-extern int invert_jump_1 (rtx, rtx);
-extern int invert_jump (rtx, rtx, int);
+extern int invert_jump_1 (rtx_insn *, rtx);
+extern int invert_jump (rtx_insn *, rtx, int);
extern int rtx_renumbered_equal_p (const_rtx, const_rtx);
extern int true_regnum (const_rtx);
extern unsigned int reg_or_subregno (const_rtx);
extern enum rtx_code reversed_comparison_code_parts (enum rtx_code, const_rtx,
const_rtx, const_rtx);
extern void delete_for_peephole (rtx_insn *, rtx_insn *);
-extern int condjump_in_parallel_p (const_rtx);
+extern int condjump_in_parallel_p (const rtx_insn *);
/* In emit-rtl.c. */
extern int max_reg_num (void);
return null. */
rtx
-find_constant_src (const_rtx insn)
+find_constant_src (const rtx_insn *insn)
{
rtx note, set, x;
/* Return the block which is a fallthru bb of a conditional jump JUMP. */
basic_block
-fallthru_bb_of_jump (rtx jump)
+fallthru_bb_of_jump (const rtx_insn *jump)
{
if (!JUMP_P (jump))
return NULL;
extern bool sel_bb_empty_p (basic_block);
extern bool in_current_region_p (basic_block);
-extern basic_block fallthru_bb_of_jump (rtx);
+extern basic_block fallthru_bb_of_jump (const rtx_insn *);
extern void sel_init_bbs (bb_vec_t);
extern void sel_finish_bbs (void);