diff --git a/gcc/ChangeLog b/gcc/ChangeLog
+2016-09-22  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
+
+ * emit-rtl.c (next_nonnote_insn): Change argument type to
+ rtx_insn *.
+ (prev_nonnote_insn): Likewise.
+ * jump.c (reversed_comparison_code_parts): Likewise.
+ (reversed_comparison_code): Likewise.
+ (invert_exp_1): Likewise.
+ (reversed_comparison): Adjust.
+ * rtl.h: Adjust prototypes.
+ * config/arc/arc.md: Adjust.
+ * cse.c (find_comparison_args): Likewise.
+ * reorg.c (redundant_insn): Change return type to rtx_insn *.
+ (fix_reg_dead_note): Change argument type to rtx_insn *.
+ (delete_prior_computation): Likewise.
+ (delete_computation): Likewise.
+ (fill_slots_from_thread): Adjust.
+ (relax_delay_slots): Likewise.
+ * simplify-rtx.c (simplify_unary_operation_1): Likewise.
+ (simplify_relational_operation_1): Likewise.
+ (simplify_ternary_operation): Likewise.
+
2016-09-22 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* config/arc/arc-protos.h (arc_label_align): Change type of
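
The change is mechanical but worth spelling out: next_nonnote_insn and
prev_nonnote_insn used to accept any rtx and launder it through
safe_as_a <rtx_insn *> internally; now the parameter type itself
guarantees an insn, and the few callers that only hold a plain rtx
(such as the arc.md patterns below) do a checked as_a <rtx_insn *>
cast at the call site.  A minimal sketch of the two shapes, using
hypothetical stand-in types (node/insn_node), not GCC's real rtx
classes:

    struct node { bool is_insn; };
    struct insn_node : node { insn_node *next; };

    /* Old shape: accept the base type, downcast inside the walker
       (what next_nonnote_insn did with safe_as_a <rtx_insn *>).  */
    insn_node *next_walker_old (node *uncast)
    {
      insn_node *in = (uncast && uncast->is_insn)
		      ? static_cast<insn_node *> (uncast) : nullptr;
      return in ? in->next : nullptr;
    }

    /* New shape: the parameter type already guarantees an insn;
       callers holding only a base pointer cast once, at the call
       site.  */
    insn_node *next_walker_new (insn_node *in)
    {
      return in ? in->next : nullptr;
    }

Pushing the cast to the caller makes the one place that genuinely
needs a downcast visible, instead of hiding a checked cast inside
every walk.
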
""
"*
{
- rtx diff_vec = PATTERN (next_nonnote_insn (operands[3]));
+ rtx diff_vec = PATTERN (next_nonnote_insn (as_a <rtx_insn *> (operands[3])));
if (GET_CODE (diff_vec) != ADDR_DIFF_VEC)
{
[(set_attr "type" "load")
(set_attr_alternative "iscompact"
[(cond
- [(ne (symbol_ref "GET_MODE (PATTERN (next_nonnote_insn (operands[3])))")
+ [(ne (symbol_ref "GET_MODE (PATTERN (next_nonnote_insn
+ (as_a <rtx_insn *> (operands[3]))))")
(symbol_ref "QImode"))
(const_string "false")
- (match_test "!ADDR_DIFF_VEC_FLAGS (PATTERN (next_nonnote_insn (operands[3]))).offset_unsigned")
+ (match_test "!ADDR_DIFF_VEC_FLAGS (PATTERN (next_nonnote_insn
+ (as_a <rtx_insn *> (operands[3])))).offset_unsigned")
(const_string "false")]
(const_string "true"))
(const_string "false")
"TARGET_COMPACT_CASESI"
"*
{
- rtx diff_vec = PATTERN (next_nonnote_insn (operands[1]));
+ rtx diff_vec = PATTERN (next_nonnote_insn (as_a <rtx_insn *> (operands[1])));
int unalign = arc_get_unalign ();
rtx xop[3];
const char *s;
diff --git a/gcc/cse.c b/gcc/cse.c
with floating-point operands. */
if (reverse_code)
{
- enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
+ enum rtx_code reversed = reversed_comparison_code (x, NULL);
if (reversed == UNKNOWN)
break;
else
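
The NULL_RTX-to-NULL changes here and in jump.c and simplify-rtx.c are
forced by the new signature, not cosmetic: rtl.h defines NULL_RTX as a
null of static type rtx, i.e. a pointer to the base rtx_def, and a
base pointer does not implicitly convert to the derived rtx_insn *.
A compilable sketch with simplified stand-in definitions:

    #include <cstddef>

    struct rtx_def {};
    struct rtx_insn : rtx_def {};   /* rtx_insn derives from rtx_def */
    typedef rtx_def *rtx;
    #define NULL_RTX (rtx) 0        /* as in rtl.h, modulo formatting */

    enum rtx_code { UNKNOWN };
    rtx_code reversed_code (const rtx_insn *) { return UNKNOWN; }

    void caller ()
    {
      reversed_code (NULL);  /* OK: a null pointer constant converts */
      /* reversed_code (NULL_RTX);  would not compile: rtx points to
	 the base class and never implicitly converts down to
	 const rtx_insn *.  */
    }
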
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_insn (rtx uncast_insn)
+next_nonnote_insn (rtx_insn *insn)
{
- rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_insn (rtx uncast_insn)
+prev_nonnote_insn (rtx_insn *insn)
{
- rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
-
while (insn)
{
insn = PREV_INSN (insn);
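
The deleted bodies above relied on safe_as_a <rtx_insn *>, while the
new arc.md call sites use as_a <rtx_insn *>; both come from
gcc/is-a.h.  as_a is a downcast that asserts (in checking builds) that
the object really has the target kind; safe_as_a additionally lets a
null pointer pass through, which mattered when the walkers accepted a
possibly-null rtx.  A simplified, self-contained model of the idiom;
the real templates dispatch through is_a_helper:

    #include <cassert>
    #include <cstddef>

    struct base { int kind; };
    struct derived : base {};
    const int DERIVED_KIND = 1;

    /* Kind test; one specialization per target type.  */
    template <typename T> bool is_a (base *p);
    template <> bool is_a<derived *> (base *p)
    { return p->kind == DERIVED_KIND; }

    /* Checked downcast: asserts P really is of kind T.  */
    template <typename T>
    T as_a (base *p)
    {
      assert (is_a<T> (p));
      return static_cast<T> (p);
    }

    /* Like as_a, but a null pointer passes through unchanged.  */
    template <typename T>
    T safe_as_a (base *p)
    {
      return p ? as_a<T> (p) : nullptr;
    }
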
diff --git a/gcc/jump.c b/gcc/jump.c
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
-static int invert_exp_1 (rtx, rtx);
+static int invert_exp_1 (rtx, rtx_insn *);
\f
/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain. */
static void
to help this function avoid overhead in these cases. */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
- const_rtx arg1, const_rtx insn)
+ const_rtx arg1, const rtx_insn *insn)
{
machine_mode mode;
- /* These CONST_CAST's are okay because prev_nonnote_insn just
- returns its argument and we assign it to a const_rtx
- variable. */
- for (rtx_insn *prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
+ /* The const_cast is okay because prev_nonnote_insn does not modify
+ its argument and we only read the insns the walk returns. */
+ for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
prev != 0 && !LABEL_P (prev);
prev = prev_nonnote_insn (prev))
{
/* A wrapper around the previous function to take COMPARISON as rtx
expression. This simplifies many callers. */
enum rtx_code
-reversed_comparison_code (const_rtx comparison, const_rtx insn)
+reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
{
if (!COMPARISON_P (comparison))
return UNKNOWN;
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
- enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
+ enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
if (reversed_code == UNKNOWN)
return NULL_RTX;
else
/* Invert the jump condition X contained in jump insn INSN. Accrue the
modifications into the change group. Return nonzero for success. */
static int
-invert_exp_1 (rtx x, rtx insn)
+invert_exp_1 (rtx x, rtx_insn *insn)
{
RTX_CODE code = GET_CODE (x);
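
One subtlety in the jump.c hunk above: reversed_comparison_code_parts
now takes a const rtx_insn *, but prev_nonnote_insn takes and returns
non-const pointers, hence the const_cast at the boundary.  That is
sound only because the walk never writes through the pointer.  A
compilable sketch of the same shape, with hypothetical names (insn,
prev_nonnote, reaches_head):

    struct insn { insn *prev; bool note_p; };

    /* Walk backwards, skipping "note" entries; mirrors the shape of
       prev_nonnote_insn.  Takes and returns non-const pointers.  */
    insn *prev_nonnote (insn *i)
    {
      while (i)
	{
	  i = i->prev;
	  if (i == nullptr || !i->note_p)
	    break;
	}
      return i;
    }

    /* Read-only analysis: the const_cast is safe because
       prev_nonnote never modifies the chain and we only read
       through P.  */
    bool reaches_head (const insn *i)
    {
      for (insn *p = prev_nonnote (const_cast<insn *> (i)); p;
	   p = prev_nonnote (p))
	if (p->prev == nullptr)
	  return true;
      return false;
    }
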
diff --git a/gcc/reorg.c b/gcc/reorg.c
struct resources *,
int, int *, int *);
static void try_merge_delay_insns (rtx_insn *, rtx_insn *);
-static rtx redundant_insn (rtx, rtx_insn *, const vec<rtx_insn *> &);
+static rtx_insn *redundant_insn (rtx, rtx_insn *, const vec<rtx_insn *> &);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx_insn *, rtx);
static int reorg_redirect_jump (rtx_jump_insn *, rtx);
static void update_reg_dead_notes (rtx_insn *, rtx_insn *);
-static void fix_reg_dead_note (rtx, rtx);
+static void fix_reg_dead_note (rtx_insn *, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static void fill_slots_from_thread (rtx_jump_insn *, rtx, rtx, rtx,
redundant insn, but the cost of splitting seems greater than the possible
gain in rare cases. */
-static rtx
+static rtx_insn *
redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list)
{
rtx target_main = target;
resource requirements as we go. */
for (i = seq->len () - 1; i > 0; i--)
{
- rtx candidate = seq->element (i);
+ rtx_insn *candidate = seq->insn (i);
/* If an insn will be annulled if the branch is false, it isn't
considered as a possible duplicate insn. */
confused into thinking the register is dead. */
static void
-fix_reg_dead_note (rtx start_insn, rtx stop_insn)
+fix_reg_dead_note (rtx_insn *start_insn, rtx stop_insn)
{
rtx link, next;
rtx_insn *p;
&& (! own_thread || ! sets_cc0_p (pat)))))
&& ! can_throw_internal (trial))
{
- rtx prior_insn;
+ rtx_insn *prior_insn;
/* If TRIAL is redundant with some insn before INSN, we don't
actually need to add it to the delay list; we can merely pretend
}
}
\f
-static void delete_computation (rtx insn);
+static void delete_computation (rtx_insn *insn);
/* Recursively delete prior insns that compute the value (used only by INSN
which the caller is deleting) stored in the register mentioned by NOTE
which is a REG_DEAD note associated with INSN. */
static void
-delete_prior_computation (rtx note, rtx insn)
+delete_prior_computation (rtx note, rtx_insn *insn)
{
- rtx our_prev;
+ rtx_insn *our_prev;
rtx reg = XEXP (note, 0);
for (our_prev = prev_nonnote_insn (insn);
delete the insn that set it. */
static void
-delete_computation (rtx insn)
+delete_computation (rtx_insn *insn)
{
rtx note, next;
if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
&& ! condjump_in_parallel_p (delay_jump_insn)
&& prev_active_insn (target_label) == insn
- && ! BARRIER_P (prev_nonnote_insn (target_label))
+ && ! BARRIER_P (prev_nonnote_insn (as_a <rtx_insn *> (target_label)))
/* If the last insn in the delay slot sets CC0 for some insn,
various code assumes that it is in a delay slot. We could
put it back where it belonged and delete the register notes,
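
The reorg.c hunks apply the same discipline to a return type:
redundant_insn can only ever produce an insn (or null), and once that
invariant is encoded in the signature, fill_slots_from_thread can
declare prior_insn as rtx_insn * without a cast.  In miniature, with
stand-in names (base/insn_t, find_redundant_*):

    struct base {};
    struct insn_t : base {};

    /* Before: the result had the base type, so every consumer that
       needed insn operations had to downcast.  */
    base *find_redundant_old (insn_t *) { return nullptr; }

    /* After: "always an insn or null" lives in the signature, and
       the casts at the consumers disappear.  */
    insn_t *find_redundant_new (insn_t *) { return nullptr; }

    void consumer (insn_t *trial)
    {
      insn_t *prior_insn = find_redundant_new (trial);  /* no cast */
      (void) prior_insn;
    }
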
diff --git a/gcc/rtl.h b/gcc/rtl.h
extern rtx_call_insn *last_call_insn (void);
extern rtx_insn *previous_insn (rtx_insn *);
extern rtx_insn *next_insn (rtx_insn *);
-extern rtx_insn *prev_nonnote_insn (rtx);
+extern rtx_insn *prev_nonnote_insn (rtx_insn *);
extern rtx_insn *prev_nonnote_insn_bb (rtx);
-extern rtx_insn *next_nonnote_insn (rtx);
+extern rtx_insn *next_nonnote_insn (rtx_insn *);
extern rtx_insn *next_nonnote_insn_bb (rtx_insn *);
extern rtx_insn *prev_nondebug_insn (rtx);
extern rtx_insn *next_nondebug_insn (rtx);
extern void rebuild_jump_labels (rtx_insn *);
extern void rebuild_jump_labels_chain (rtx_insn *);
extern rtx reversed_comparison (const_rtx, machine_mode);
-extern enum rtx_code reversed_comparison_code (const_rtx, const_rtx);
+extern enum rtx_code reversed_comparison_code (const_rtx, const rtx_insn *);
extern enum rtx_code reversed_comparison_code_parts (enum rtx_code, const_rtx,
- const_rtx, const_rtx);
+ const_rtx, const rtx_insn *);
extern void delete_for_peephole (rtx_insn *, rtx_insn *);
extern int condjump_in_parallel_p (const rtx_insn *);
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
comparison is all ones. */
if (COMPARISON_P (op)
&& (mode == BImode || STORE_FLAG_VALUE == -1)
- && ((reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN))
+ && ((reversed = reversed_comparison_code (op, NULL)) != UNKNOWN))
return simplify_gen_relational (reversed, mode, VOIDmode,
XEXP (op, 0), XEXP (op, 1));
|| (GET_CODE (false_rtx) == NEG
&& rtx_equal_p (XEXP (false_rtx, 0), true_rtx)))
{
- if (reversed_comparison_code (cond, NULL_RTX) != UNKNOWN)
+ if (reversed_comparison_code (cond, NULL) != UNKNOWN)
temp = reversed_comparison (cond, mode);
else
{
}
else if (code == EQ)
{
- enum rtx_code new_code = reversed_comparison_code (op0, NULL_RTX);
+ enum rtx_code new_code = reversed_comparison_code (op0, NULL);
if (new_code != UNKNOWN)
return simplify_gen_relational (new_code, mode, VOIDmode,
XEXP (op0, 0), XEXP (op0, 1));
else if (t == 0 && f == STORE_FLAG_VALUE)
{
enum rtx_code tmp;
- tmp = reversed_comparison_code (op0, NULL_RTX);
+ tmp = reversed_comparison_code (op0, NULL);
if (tmp == UNKNOWN)
break;
code = tmp;