+2016-09-22 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
+
+ * config/arc/arc-protos.h (arc_label_align): Change type of
+ variables from rtx to rtx_insn *.
+ * config/arc/arc.c (arc_label_align): Likewise.
+ * config/arm/arm.c (any_sibcall_could_use_r3): Likewise.
+ * config/bfin/bfin.c (workaround_speculation): Likewise.
+ * config/c6x/c6x.c (find_next_cycle_insn): Likewise.
+ (find_last_same_clock): Likewise.
+ (reorg_split_calls): Likewise.
+ * config/cris/cris-protos.h (cris_cc0_user_requires_cmp): Likewise.
+ * config/cris/cris.c (cris_cc0_user_requires_cmp): Likewise.
+ * config/h8300/h8300-protos.h (same_cmp_preceding_p): Likewise.
+ (same_cmp_following_p): Likewise.
+ * config/h8300/h8300.c (same_cmp_preceding_p): Likewise.
+ (same_cmp_following_p): Likewise.
+ * config/m32r/m32r.c (m32r_expand_epilogue): Likewise.
+ * config/nds32/nds32-protos.h (nds32_target_alignment): Likewise.
+ * config/nds32/nds32.c (nds32_target_alignment): Likewise.
+ * config/rl78/rl78.c (rl78_alloc_physical_registers_op2):
+ Likewise.
+ (rl78_alloc_physical_registers_cmp): Likewise.
+ (rl78_alloc_physical_registers_umul): Likewise.
+ (rl78_calculate_death_notes): Likewise.
+ * config/s390/s390-protos.h (s390_label_align): Likewise.
+ * config/s390/s390.c (s390_label_align): Likewise.
+ * config/sh/sh.c (barrier_align): Likewise.
+ * config/sparc/sparc-protos.h (emit_cbcond_nop): Likewise.
+ * config/sparc/sparc.c (sparc_asm_function_epilogue): Likewise.
+ (emit_cbcond_nop): Likewise.
+
2016-09-22 Martin Liska <mliska@suse.cz>
PR ipa/77653
extern int arc_attr_type (rtx_insn *);
extern bool arc_scheduling_not_expected (void);
extern bool arc_sets_cc_p (rtx_insn *insn);
-extern int arc_label_align (rtx label);
+extern int arc_label_align (rtx_insn *label);
extern bool arc_need_delay (rtx_insn *insn);
extern bool arc_text_label (rtx_insn *insn);
long.) */
int
-arc_label_align (rtx label)
+arc_label_align (rtx_insn *label)
{
int loop_align = LOOP_ALIGN (LABEL);
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
if (e->flags & EDGE_SIBCALL)
{
- rtx call = BB_END (e->src);
+ rtx_insn *call = BB_END (e->src);
if (!CALL_P (call))
call = prev_nonnote_nondebug_insn (call);
gcc_assert (CALL_P (call) && SIBLING_CALL_P (call));
|| cbranch_predicted_taken_p (insn)))
{
rtx_insn *target = JUMP_LABEL_AS_INSN (insn);
- rtx label = target;
+ rtx_insn *label = target;
rtx_insn *next_tgt;
cycles_since_jump = 0;
/* Scan forwards from INSN until we find the next insn that has mode TImode
   (indicating it starts a new cycle), and occurs in cycle CLOCK.
   Return it if we find such an insn, NULL otherwise. */
-static rtx
-find_next_cycle_insn (rtx insn, int clock)
+static rtx_insn *
+find_next_cycle_insn (rtx_insn *insn, int clock)
{
- rtx t = insn;
+ rtx_insn *t = insn;
if (GET_MODE (t) == TImode)
t = next_real_insn (t);
while (t && GET_MODE (t) != TImode)
if (t && insn_get_clock (t) == clock)
return t;
- return NULL_RTX;
+ return NULL;
}
/* If COND_INSN has a COND_EXEC condition, wrap the same condition
/* Walk forward from INSN to find the last insn that issues in the same clock
cycle. */
-static rtx
-find_last_same_clock (rtx insn)
+static rtx_insn *
+find_last_same_clock (rtx_insn *insn)
{
- rtx retval = insn;
+ rtx_insn *retval = insn;
rtx_insn *t = next_real_insn (insn);
while (t && GET_MODE (t) != TImode)
no insn setting/using B3 is scheduled in the delay slots of
a call. */
int this_clock = insn_get_clock (insn);
- rtx last_same_clock;
- rtx after1;
+ rtx_insn *after1;
call_labels[INSN_UID (insn)] = label;
- last_same_clock = find_last_same_clock (insn);
+ rtx_insn *last_same_clock = find_last_same_clock (insn);
if (can_use_callp (insn))
{
extern bool cris_reload_address_legitimized (rtx, machine_mode, int, int, int);
extern int cris_side_effect_mode_ok (enum rtx_code, rtx *, int, int,
int, int, int);
-extern bool cris_cc0_user_requires_cmp (rtx);
+extern bool cris_cc0_user_requires_cmp (rtx_insn *);
extern rtx cris_return_addr_rtx (int, rtx);
extern rtx cris_split_movdx (rtx *);
extern int cris_legitimate_pic_operand (rtx);
insn for other reasons. */
bool
-cris_cc0_user_requires_cmp (rtx insn)
+cris_cc0_user_requires_cmp (rtx_insn *insn)
{
rtx_insn *cc0_user = NULL;
rtx body;
extern int h8300_eightbit_constant_address_p (rtx);
extern int h8300_tiny_constant_address_p (rtx);
extern int byte_accesses_mergeable_p (rtx, rtx);
-extern int same_cmp_preceding_p (rtx);
-extern int same_cmp_following_p (rtx);
+extern int same_cmp_preceding_p (rtx_insn *);
+extern int same_cmp_following_p (rtx_insn *);
/* Used in builtins.c */
extern rtx h8300_return_addr_rtx (int, rtx);
before I3. I3 is assumed to be a comparison insn. */
int
-same_cmp_preceding_p (rtx i3)
+same_cmp_preceding_p (rtx_insn *i3)
{
rtx_insn *i1, *i2;
after I1. I1 is assumed to be a comparison insn. */
int
-same_cmp_following_p (rtx i1)
+same_cmp_following_p (rtx_insn *i1)
{
rtx_insn *i2, *i3;
if (total_size == 0)
{
- rtx insn = get_last_insn ();
+ rtx_insn *insn = get_last_insn ();
/* If the last insn was a BARRIER, we don't have to write any code
because a jump (aka return) was put there. */
/* Auxiliary functions to decide output alignment or not. */
-extern int nds32_target_alignment (rtx);
+extern int nds32_target_alignment (rtx_insn *);
/* Auxiliary functions to expand builtin functions. */
/* Return align 2 (log base 2) if the next instruction of LABEL is 4 byte. */
int
-nds32_target_alignment (rtx label)
+nds32_target_alignment (rtx_insn *label)
{
rtx_insn *insn;
static void
rl78_alloc_physical_registers_op2 (rtx_insn * insn)
{
- rtx prev;
- rtx first;
+ rtx_insn *prev;
+ rtx_insn *first;
bool hl_used;
int tmp_id;
rtx saved_op1;
int tmp_id;
rtx saved_op1;
rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
- rtx first;
+ rtx_insn *first;
OP (1) = transcode_memory_rtx (OP (1), DE, insn);
OP (2) = transcode_memory_rtx (OP (2), HL, insn);
rl78_alloc_physical_registers_umul (rtx_insn * insn)
{
rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
- rtx first;
+ rtx_insn *first;
int tmp_id;
rtx saved_op1;
rl78_calculate_death_notes (void)
{
char dead[FIRST_PSEUDO_REGISTER];
- rtx insn, p, s, d;
+ rtx p, s, d;
+rtx_insn *insn;
int i;
memset (dead, 0, sizeof (dead));
extern void print_operand_address (FILE *, rtx);
extern void print_operand (FILE *, rtx, int);
extern void s390_output_pool_entry (rtx, machine_mode, unsigned int);
-extern int s390_label_align (rtx);
+extern int s390_label_align (rtx_insn *);
extern int s390_agen_dep_p (rtx_insn *, rtx_insn *);
extern rtx_insn *s390_load_got (void);
extern rtx s390_get_thread_pointer (void);
/* Return the alignment for LABEL. We default to the -falign-labels
value except for the literal pool base label. */
int
-s390_label_align (rtx label)
+s390_label_align (rtx_insn *label)
{
rtx_insn *prev_insn = prev_active_insn (label);
rtx set, src;
? 1 : align_jumps_log);
}
- rtx next = next_active_insn (barrier_or_label);
+ rtx_insn *next = next_active_insn (barrier_or_label);
if (! next)
return 0;
extern int mems_ok_for_ldd_peep (rtx, rtx, rtx);
extern rtx widen_mem_for_ldd_peep (rtx, rtx, machine_mode);
extern int empty_delay_slot (rtx_insn *);
-extern int emit_cbcond_nop (rtx);
+extern int emit_cbcond_nop (rtx_insn *);
extern int eligible_for_call_delay (rtx_insn *);
extern int eligible_for_return_delay (rtx_insn *);
extern int eligible_for_sibcall_delay (rtx_insn *);
situation. */
int
-emit_cbcond_nop (rtx insn)
+emit_cbcond_nop (rtx_insn *insn)
{
rtx next = next_active_insn (insn);
backtraces in such cases. This is pointless for sibling calls since
the return address is explicitly adjusted. */
- rtx insn, last_real_insn;
+ rtx_insn *insn = get_last_insn ();
- insn = get_last_insn ();
-
- last_real_insn = prev_real_insn (insn);
+ rtx last_real_insn = prev_real_insn (insn);
if (last_real_insn
&& NONJUMP_INSN_P (last_real_insn)
&& GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)