+2014-08-25 David Malcolm <dmalcolm@redhat.com>
+
+ * config/mips/mips-protos.h (mips_emit_move): Strengthen return
+ type from rtx to rtx_insn *.
+ (mips_expand_call): Likewise.
+ (mips_adjust_insn_length): Likewise for first param.
+ (mips_output_conditional_branch): Likewise.
+ (mips_output_order_conditional_branch): Likewise.
+ (mips_final_prescan_insn): Likewise.
+
+ * config/mips/mips.c (SEQ_BEGIN): For now, add checked cast to
+ rtx_insn * for the SEQUENCE case.
+ (SEQ_END): Likewise.
+ (mips_emit_move): Strengthen return type from rtx to rtx_insn *.
+ (mips_emit_call_insn): Likewise, also for local "insn".
+ (mips16_gp_pseudo_reg): Likewise for local "scan".
+ (mips16_build_call_stub): Likewise for return type and for local
+ "insn". Introduce a new local "pattern" so that "insn" can indeed
+ be an insn.
+ (mips_expand_call): Strengthen return type and local "insn" from
+ rtx to rtx_insn *.
+ (mips_block_move_loop): Strengthen local "label" from rtx to
+ rtx_code_label *.
+ (mips_expand_synci_loop): Likewise for locals "label",
+ "end_label".
+ (mips_set_frame_expr): Strengthen local "insn" from rtx to
+ rtx_insn *.
+ (mips16e_collect_argument_saves): Likewise for locals "insn",
+ "next".
+ (mips_find_gp_ref): Likewise for the param of the "pred" callback,
+ and for local "insn".
+ (mips_insn_has_inflexible_gp_ref_p): Likewise for param "insn".
+ (mips_insn_has_flexible_gp_ref_p): Likewise.
+ (mips_epilogue_emit_cfa_restores): Likewise for return type and
+ local "insn".
+ (mips_epilogue_set_cfa): Likewise for local "insn".
+ (mips_expand_epilogue): Likewise.
+ (mips_adjust_insn_length): Likewise for param "insn".
+ (mips_output_conditional_branch): Likewise.
+ (mips_output_order_conditional_branch): Likewise.
+ (struct mips_ls2): Likewise for fields "alu1_turn_enabled_insn",
+ "alu2_turn_enabled_insn", "falu1_turn_enabled_insn",
+ "falu2_turn_enabled_insn".
+ (mips_builtin_branch_and_move): Strengthen locals "true_label",
+ "done_label" from rtx to rtx_code_label *.
+ (struct mips16_constant): Likewise for field "label".
+ (mips16_add_constant): Likewise for return type.
+ (mips16_emit_constants_1): Strengthen return type and param "insn"
+ from rtx to rtx_insn *.
+ (mips16_emit_constants): Likewise for param "insn".
+ (mips16_insn_length): Likewise.
+ (mips16_rewrite_pool_constant): Strengthen local "label" from rtx
+ to rtx_code_label *.
+ (struct mips16_rewrite_pool_refs_info): Strengthen field "insn"
+ from rtx to rtx_insn *.
+ (mips16_lay_out_constants): Likewise for locals "insn", "barrier",
+ "jump". Strengthen local "label" from rtx to rtx_code_label *.
+ (r10k_simplify_address): Strengthen param "insn" and local
+ "def_insn" from rtx to rtx_insn *.
+ (r10k_safe_address_p): Strengthen param "insn" from rtx to
+ rtx_insn *.
+ (r10k_needs_protection_p_1): Update target type of cast of "data"
+ from rtx to rtx_insn *.
+ (r10k_needs_protection_p_store): Strengthen local "insn_ptr" from
+ rtx * to rtx_insn **.
+ (r10k_needs_protection_p): Strengthen param "insn" from rtx to
+ rtx_insn *.
+ (r10k_insert_cache_barriers): Likewise for locals "insn", "end".
+ (mips_call_expr_from_insn): Likewise for param "insn".
+ (mips_pic_call_symbol_from_set): Likewise for local "def_insn".
+ (mips_find_pic_call_symbol): Likewise for param "insn".
+ (mips_annotate_pic_calls): Likewise for local "insn".
+ (mips_sim_insn): Likewise for this variable.
+ (struct mips_sim): Likewise for field "insn" within elements of
+ last_set array.
+ (mips_sim_wait_reg): Likewise for param "insn".
+ (mips_sim_wait_regs): Likewise.
+ (mips_sim_wait_units): Likewise.
+ (mips_sim_wait_insn): Likewise.
+ (mips_sim_issue_insn): Likewise.
+ (mips_sim_finish_insn): Likewise.
+ (mips_seq_time): Likewise for param "seq" and local "insn".
+ (vr4130_avoid_branch_rt_conflict): Likewise for param "insn" and
+ locals "first", "second".
+ (vr4130_align_insns): Likewise for locals "insn", "subinsn",
+ "last", "last2", "next".
+ (mips_avoid_hazard): Likewise for params "after", "insn".
+ (mips_reorg_process_insns): Likewise for locals "insn",
+ "last_insn", "subinsn", "next_insn".
+ (mips_has_long_branch_p): Likewise for locals "insn", "subinsn".
+ (mips16_split_long_branches): Likewise for locals "insn", "jump",
+ "jump_sequence".
+ (mips_output_mi_thunk): Likewise for local "insn".
+ (mips_final_prescan_insn): Likewise for param "insn".
+
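(Aside, not part of the patch: the entries above all follow one idiom.  rtx_insn is a subclass of GCC's rtx_def covering the insn-chain codes, so a declaration can be narrowed to the more specific pointer type, plain NULL replaces NULL_RTX where no insn is produced, and a checked cast recovers the stronger type at the few places where only a plain rtx is on hand.  A minimal standalone sketch of that idiom follows; every type and name in it is a made-up stand-in, not a GCC identifier.)

  /* Sketch only: "node" plays the role of rtx, "insn_node" the role of
     rtx_insn, and "as_insn" imitates the as_a <rtx_insn *> checked cast.  */
  #include <cassert>
  #include <cstddef>

  struct node { int code; };
  struct insn_node : node { insn_node *next; };

  enum { NOT_AN_INSN, AN_INSN };

  /* Checked downcast: assert the dynamic kind, then narrow the pointer.  */
  static insn_node *
  as_insn (node *x)
  {
    assert (x != NULL && x->code == AN_INSN);
    return static_cast <insn_node *> (x);
  }

  /* A "strengthened" signature: callers get the specific type back, and
     plain NULL (not a null rtx) when nothing qualifies.  */
  static insn_node *
  first_insn_sketch (node *x)
  {
    if (x == NULL || x->code != AN_INSN)
      return NULL;
    return as_insn (x);
  }

  int
  main ()
  {
    insn_node i;
    i.code = AN_INSN;
    i.next = NULL;
    return first_insn_sketch (&i) == &i ? 0 : 1;
  }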
2014-08-25 David Malcolm <dmalcolm@redhat.com>
* config/microblaze/microblaze.c (microblaze_call_tls_get_addr):
in the sequence, otherwise return INSN itself. */
#define SEQ_BEGIN(INSN) \
(INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
- ? XVECEXP (PATTERN (INSN), 0, 0) \
+ ? as_a <rtx_insn *> (XVECEXP (PATTERN (INSN), 0, 0)) \
: (INSN))
/* Likewise for the last instruction in a delayed branch sequence. */
#define SEQ_END(INSN) \
(INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
- ? XVECEXP (PATTERN (INSN), 0, XVECLEN (PATTERN (INSN), 0) - 1) \
+ ? as_a <rtx_insn *> (XVECEXP (PATTERN (INSN), \
+ 0, \
+ XVECLEN (PATTERN (INSN), 0) - 1)) \
: (INSN))
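(Aside: the as_a <rtx_insn *> casts above are needed because XVECEXP hands back a plain rtx; the checked cast asserts at run time that the SEQUENCE element really is an insn before returning the stronger type.  With that in place a caller can keep its iterator strongly typed.  A hedged sketch of such a walk over a delay-slot sequence -- it mirrors mips.c's FOR_EACH_SUBINSN macro, whose comment appears just below, and "process_subinsn" is a placeholder rather than a real function:)

  rtx_insn *subinsn;
  for (subinsn = SEQ_BEGIN (insn);
       subinsn != NEXT_INSN (SEQ_END (insn));
       subinsn = NEXT_INSN (subinsn))
    process_subinsn (subinsn);	/* placeholder for per-subinsn work */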
/* Execute the following loop body with SUBINSN set to each instruction
}
};
\f
-static rtx mips_find_pic_call_symbol (rtx, rtx, bool);
+static rtx mips_find_pic_call_symbol (rtx_insn *, rtx, bool);
static int mips_register_move_cost (enum machine_mode, reg_class_t,
reg_class_t);
static unsigned int mips_function_arg_boundary (enum machine_mode, const_tree);
how to force Pmode objects into the constant pool even when the
constant pool address is not itself legitimate. */
-rtx
+rtx_insn *
mips_emit_move (rtx dest, rtx src)
{
return (can_create_pseudo_p ()
ADDR is the legitimized form, and LAZY_P is true if the call
address is lazily-bound. */
-static rtx
+static rtx_insn *
mips_emit_call_insn (rtx pattern, rtx orig_addr, rtx addr, bool lazy_p)
{
- rtx insn, reg;
+ rtx_insn *insn;
+ rtx reg;
insn = emit_call_insn (pattern);
{
if (cfun->machine->mips16_gp_pseudo_rtx == NULL_RTX)
{
- rtx insn, scan;
+ rtx insn;
+ rtx_insn *scan;
cfun->machine->mips16_gp_pseudo_rtx = gen_reg_rtx (Pmode);
automatically redirects the JAL to the stub, otherwise the JAL
continues to call FN directly. */
-static rtx
+static rtx_insn *
mips16_build_call_stub (rtx retval, rtx *fn_ptr, rtx args_size, int fp_code)
{
const char *fnname;
bool fp_ret_p;
struct mips16_stub *l;
- rtx insn, fn;
+ rtx_insn *insn;
+ rtx pattern, fn;
/* We don't need to do anything if we aren't in MIPS16 mode, or if
we were invoked with the -msoft-float option. */
if (!TARGET_MIPS16 || TARGET_SOFT_FLOAT_ABI)
- return NULL_RTX;
+ return NULL;
/* Figure out whether the value might come back in a floating-point
register. */
arguments and the value will not be returned in a floating-point
register. */
if (fp_code == 0 && !fp_ret_p)
- return NULL_RTX;
+ return NULL;
/* We don't need to do anything if this is a call to a special
MIPS16 support function. */
fn = *fn_ptr;
if (mips16_stub_function_p (fn))
- return NULL_RTX;
+ return NULL;
/* If we're calling a locally-defined MIPS16 function, we know that
it will return values in both the "soft-float" and "hard-float"
registers. There is no need to use a stub to move the latter
to the former. */
if (fp_code == 0 && mips16_local_function_p (fn))
- return NULL_RTX;
+ return NULL;
/* This code will only work for o32 and o64 abis. The other ABI's
require more sophisticated support. */
|| !call_insn_operand (fn, VOIDmode))
{
char buf[30];
- rtx stub_fn, insn, addr;
+ rtx stub_fn, addr;
+ rtx_insn *insn;
bool lazy_p;
/* If this is a locally-defined and locally-binding function,
if (mips16_local_function_p (fn))
{
*fn_ptr = mips16_local_alias (fn);
- return NULL_RTX;
+ return NULL;
}
/* Create a SYMBOL_REF for the libgcc.a function. */
error ("cannot handle inconsistent calls to %qs", fnname);
if (retval == NULL_RTX)
- insn = gen_call_internal_direct (fn, args_size);
+ pattern = gen_call_internal_direct (fn, args_size);
else
- insn = gen_call_value_internal_direct (retval, fn, args_size);
- insn = mips_emit_call_insn (insn, fn, fn, false);
+ pattern = gen_call_value_internal_direct (retval, fn, args_size);
+ insn = mips_emit_call_insn (pattern, fn, fn, false);
/* If we are calling a stub which handles a floating-point return
value, we need to arrange to save $18 in the prologue. We do this
Return the call itself. */
-rtx
+rtx_insn *
mips_expand_call (enum mips_call_type type, rtx result, rtx addr,
rtx args_size, rtx aux, bool lazy_p)
{
- rtx orig_addr, pattern, insn;
+ rtx orig_addr, pattern;
+ rtx_insn *insn;
int fp_code;
fp_code = aux == 0 ? 0 : (int) GET_MODE (aux);
mips_block_move_loop (rtx dest, rtx src, HOST_WIDE_INT length,
HOST_WIDE_INT bytes_per_iter)
{
- rtx label, src_reg, dest_reg, final_src, test;
+ rtx_code_label *label;
+ rtx src_reg, dest_reg, final_src, test;
HOST_WIDE_INT leftover;
leftover = length % bytes_per_iter;
void
mips_expand_synci_loop (rtx begin, rtx end)
{
- rtx inc, label, end_label, cmp_result, mask, length;
+ rtx inc, cmp_result, mask, length;
+ rtx_code_label *label, *end_label;
/* Create end_label. */
end_label = gen_label_rtx ();
static void
mips_set_frame_expr (rtx frame_pattern)
{
- rtx insn;
+ rtx_insn *insn;
insn = get_last_insn ();
RTX_FRAME_RELATED_P (insn) = 1;
mips16e_collect_argument_saves (void)
{
rtx reg_values[FIRST_PSEUDO_REGISTER];
- rtx insn, next, set, dest, src;
+ rtx_insn *insn, *next;
+ rtx set, dest, src;
unsigned int nargs, regno;
push_topmost_sequence ();
if *CACHE is already true. */
static bool
-mips_find_gp_ref (bool *cache, bool (*pred) (rtx))
+mips_find_gp_ref (bool *cache, bool (*pred) (rtx_insn *))
{
- rtx insn;
+ rtx_insn *insn;
if (!*cache)
{
See mips_cfun_has_inflexible_gp_ref_p for details. */
static bool
-mips_insn_has_inflexible_gp_ref_p (rtx insn)
+mips_insn_has_inflexible_gp_ref_p (rtx_insn *insn)
{
/* Uses of pic_offset_table_rtx in CALL_INSN_FUNCTION_USAGE
indicate that the target could be a traditional MIPS
See mips_cfun_has_flexible_gp_ref_p for details. */
static bool
-mips_insn_has_flexible_gp_ref_p (rtx insn)
+mips_insn_has_flexible_gp_ref_p (rtx_insn *insn)
{
return (get_attr_got (insn) != GOT_UNSET
|| mips_small_data_pattern_p (PATTERN (insn))
/* Attach all pending register saves to the previous instruction.
Return that instruction. */
-static rtx
+static rtx_insn *
mips_epilogue_emit_cfa_restores (void)
{
- rtx insn;
+ rtx_insn *insn;
insn = get_last_insn ();
gcc_assert (insn && !REG_NOTES (insn));
static void
mips_epilogue_set_cfa (rtx reg, HOST_WIDE_INT offset)
{
- rtx insn;
+ rtx_insn *insn;
insn = mips_epilogue_emit_cfa_restores ();
if (reg != mips_epilogue.cfa_reg || offset != mips_epilogue.cfa_offset)
{
const struct mips_frame_info *frame;
HOST_WIDE_INT step1, step2;
- rtx base, adjust, insn;
+ rtx base, adjust;
+ rtx_insn *insn;
bool use_jraddiusp_p = false;
if (!sibcall_p && mips_can_use_return_insn ())
attributes in the machine-description file. */
int
-mips_adjust_insn_length (rtx insn, int length)
+mips_adjust_insn_length (rtx_insn *insn, int length)
{
/* mips.md uses MAX_PIC_BRANCH_LENGTH as a placeholder for the length
of a PIC long-branch sequence. Substitute the correct value. */
version of BRANCH_IF_TRUE. */
const char *
-mips_output_conditional_branch (rtx insn, rtx *operands,
+mips_output_conditional_branch (rtx_insn *insn, rtx *operands,
const char *branch_if_true,
const char *branch_if_false)
{
its second is always zero. */
const char *
-mips_output_order_conditional_branch (rtx insn, rtx *operands, bool inverted_p)
+mips_output_order_conditional_branch (rtx_insn *insn, rtx *operands,
+                                      bool inverted_p)
{
const char *branch[2];
DFA state.
E.g., when alu1_turn_enabled_insn is issued it makes next ALU1/2
instruction to go ALU1. */
- rtx alu1_turn_enabled_insn;
- rtx alu2_turn_enabled_insn;
- rtx falu1_turn_enabled_insn;
- rtx falu2_turn_enabled_insn;
+ rtx_insn *alu1_turn_enabled_insn;
+ rtx_insn *alu2_turn_enabled_insn;
+ rtx_insn *falu1_turn_enabled_insn;
+ rtx_insn *falu2_turn_enabled_insn;
} mips_ls2;
/* Implement TARGET_SCHED_ADJUST_COST. We assume that anti and output
mips_builtin_branch_and_move (rtx condition, rtx target,
rtx value_if_true, rtx value_if_false)
{
- rtx true_label, done_label;
+ rtx_code_label *true_label, *done_label;
true_label = gen_label_rtx ();
done_label = gen_label_rtx ();
struct mips16_constant {
struct mips16_constant *next;
rtx value;
- rtx label;
+ rtx_code_label *label;
enum machine_mode mode;
};
/* Add constant VALUE to POOL and return its label. MODE is the
value's mode (used for CONST_INTs, etc.). */
-static rtx
+static rtx_code_label *
mips16_add_constant (struct mips16_constant_pool *pool,
rtx value, enum machine_mode mode)
{
/* Output constant VALUE after instruction INSN and return the last
instruction emitted. MODE is the mode of the constant. */
-static rtx
-mips16_emit_constants_1 (enum machine_mode mode, rtx value, rtx insn)
+static rtx_insn *
+mips16_emit_constants_1 (enum machine_mode mode, rtx value, rtx_insn *insn)
{
if (SCALAR_INT_MODE_P (mode) || ALL_SCALAR_FIXED_POINT_MODE_P (mode))
{
/* Dump out the constants in CONSTANTS after INSN. */
static void
-mips16_emit_constants (struct mips16_constant *constants, rtx insn)
+mips16_emit_constants (struct mips16_constant *constants, rtx_insn *insn)
{
struct mips16_constant *c, *next;
int align;
/* Return the length of instruction INSN. */
static int
-mips16_insn_length (rtx insn)
+mips16_insn_length (rtx_insn *insn)
{
if (JUMP_TABLE_DATA_P (insn))
{
static void
mips16_rewrite_pool_constant (struct mips16_constant_pool *pool, rtx *x)
{
- rtx base, offset, label;
+ rtx base, offset;
+ rtx_code_label *label;
split_const (*x, &base, &offset);
if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
INSN is the instruction we're rewriting and POOL points to the current
constant pool. */
struct mips16_rewrite_pool_refs_info {
- rtx insn;
+ rtx_insn *insn;
struct mips16_constant_pool *pool;
};
{
struct mips16_constant_pool pool;
struct mips16_rewrite_pool_refs_info info;
- rtx insn, barrier;
+ rtx_insn *insn, *barrier;
if (!TARGET_MIPS16_PCREL_LOADS)
return;
do it immediately before INSN. */
if (barrier == 0 && pool.insn_address + 4 > pool.highest_address)
{
- rtx label, jump;
+ rtx_code_label *label;
+ rtx_insn *jump;
label = gen_label_rtx ();
virtual_incoming_args_rtx (which should never occur in X otherwise). */
static rtx
-r10k_simplify_address (rtx x, rtx insn)
+r10k_simplify_address (rtx x, rtx_insn *insn)
{
- rtx newx, op0, op1, set, def_insn, note;
+ rtx newx, op0, op1, set, note;
+ rtx_insn *def_insn;
df_ref use, def;
struct df_link *defs;
expression; it might not be a legitimate address. */
static bool
-r10k_safe_address_p (rtx x, rtx insn)
+r10k_safe_address_p (rtx x, rtx_insn *insn)
{
rtx base, offset;
HOST_WIDE_INT offset_val;
&& r10k_safe_mem_expr_p (MEM_EXPR (mem), MEM_OFFSET (mem)))
return -1;
- if (r10k_safe_address_p (XEXP (mem, 0), (rtx) data))
+ if (r10k_safe_address_p (XEXP (mem, 0), (rtx_insn *) data))
return -1;
return 1;
r10k_needs_protection_p_store (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
void *data)
{
- rtx *insn_ptr;
+ rtx_insn **insn_ptr;
- insn_ptr = (rtx *) data;
+ insn_ptr = (rtx_insn **) data;
if (*insn_ptr && for_each_rtx (&x, r10k_needs_protection_p_1, *insn_ptr))
- *insn_ptr = NULL_RTX;
+ *insn_ptr = NULL;
}
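(Aside: note_stores passes user data through a void * parameter, which erases the pointer's type; that is why the callback above must cast "data" back to rtx_insn ** before it can clear the strengthened pointer.  A hedged sketch of a caller, assuming GCC's note_stores interface; "stores_all_safe_p" is a hypothetical wrapper, not a function in mips.c:)

  static bool
  stores_all_safe_p (rtx_insn *insn)
  {
    rtx_insn *marker = insn;

    /* The callback clears *insn_ptr when it meets a store whose address it
       cannot prove safe, so a surviving non-null marker means every store
       checked out.  */
    note_stores (PATTERN (insn), r10k_needs_protection_p_store, &marker);
    return marker != NULL;
  }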
/* A for_each_rtx callback that iterates over the pattern of a CALL_INSN.
cache barrier. */
static bool
-r10k_needs_protection_p (rtx insn)
+r10k_needs_protection_p (rtx_insn *insn)
{
if (CALL_P (insn))
return for_each_rtx (&PATTERN (insn), r10k_needs_protection_p_call, NULL);
unsigned int i, n;
basic_block bb;
sbitmap protected_bbs;
- rtx insn, end, unprotected_region;
+ rtx_insn *insn, *end;
+ rtx unprotected_region;
if (TARGET_MIPS16)
{
SECOND_CALL. */
static rtx
-mips_call_expr_from_insn (rtx insn, rtx *second_call)
+mips_call_expr_from_insn (rtx_insn *insn, rtx *second_call)
{
rtx x;
rtx x2;
static rtx
mips_pic_call_symbol_from_set (df_ref def, rtx reg, bool recurse_p)
{
- rtx def_insn, set;
+ rtx_insn *def_insn;
+ rtx set;
if (DF_REF_IS_ARTIFICIAL (def))
return NULL_RTX;
mips_pic_call_symbol_from_set. */
static rtx
-mips_find_pic_call_symbol (rtx insn, rtx reg, bool recurse_p)
+mips_find_pic_call_symbol (rtx_insn *insn, rtx reg, bool recurse_p)
{
df_ref use;
struct df_link *defs;
mips_annotate_pic_calls (void)
{
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
FOR_EACH_BB_FN (bb, cfun)
FOR_BB_INSNS (bb, insn)
}
\f
/* A temporary variable used by for_each_rtx callbacks, etc. */
-static rtx mips_sim_insn;
+static rtx_insn *mips_sim_insn;
/* A structure representing the state of the processor pipeline.
Used by the mips_sim_* family of functions. */
LAST_SET[X].TIME is the time at which that instruction was issued.
INSN is null if no instruction has yet set register X. */
struct {
- rtx insn;
+ rtx_insn *insn;
unsigned int time;
} last_set[FIRST_PSEUDO_REGISTER];
register REG. */
static void
-mips_sim_wait_reg (struct mips_sim *state, rtx insn, rtx reg)
+mips_sim_wait_reg (struct mips_sim *state, rtx_insn *insn, rtx reg)
{
unsigned int regno, end_regno;
dependencies are satisfied. */
static void
-mips_sim_wait_regs (struct mips_sim *state, rtx insn)
+mips_sim_wait_regs (struct mips_sim *state, rtx_insn *insn)
{
mips_sim_insn = insn;
note_uses (&PATTERN (insn), mips_sim_wait_regs_1, state);
instruction INSN are available. */
static void
-mips_sim_wait_units (struct mips_sim *state, rtx insn)
+mips_sim_wait_units (struct mips_sim *state, rtx_insn *insn)
{
state_t tmp_state;
/* Advance simulation state STATE until INSN is ready to issue. */
static void
-mips_sim_wait_insn (struct mips_sim *state, rtx insn)
+mips_sim_wait_insn (struct mips_sim *state, rtx_insn *insn)
{
mips_sim_wait_regs (state, insn);
mips_sim_wait_units (state, insn);
been called). */
static void
-mips_sim_issue_insn (struct mips_sim *state, rtx insn)
+mips_sim_issue_insn (struct mips_sim *state, rtx_insn *insn)
{
curr_state = state->dfa_state;
SEQUENCE. */
static void
-mips_sim_finish_insn (struct mips_sim *state, rtx insn)
+mips_sim_finish_insn (struct mips_sim *state, rtx_insn *insn)
{
/* If INSN is a jump with an implicit delay slot, simulate a nop. */
if (JUMP_P (insn))
instruction sequence SEQ. */
static unsigned int
-mips_seq_time (struct mips_sim *state, rtx seq)
+mips_seq_time (struct mips_sim *state, rtx_insn *seq)
{
mips_sim_reset (state);
- for (rtx insn = seq; insn; insn = NEXT_INSN (insn))
+ for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
{
mips_sim_wait_insn (state, insn);
mips_sim_issue_insn (state, insn);
try to avoid it by swapping rs and rt. */
static void
-vr4130_avoid_branch_rt_conflict (rtx insn)
+vr4130_avoid_branch_rt_conflict (rtx_insn *insn)
{
- rtx first, second;
+ rtx_insn *first, *second;
first = SEQ_BEGIN (insn);
second = SEQ_END (insn);
vr4130_align_insns (void)
{
struct mips_sim state;
- rtx insn, subinsn, last, last2, next;
+ rtx_insn *insn, *subinsn, *last, *last2, *next;
bool aligned_p;
dfa_start ();
LO_REG is an rtx for the LO register, used in dependence checking. */
static void
-mips_avoid_hazard (rtx after, rtx insn, int *hilo_delay,
+mips_avoid_hazard (rtx_insn *after, rtx_insn *insn, int *hilo_delay,
rtx *delayed_reg, rtx lo_reg)
{
rtx pattern, set;
static void
mips_reorg_process_insns (void)
{
- rtx insn, last_insn, subinsn, next_insn, lo_reg, delayed_reg;
+ rtx_insn *insn, *last_insn, *subinsn, *next_insn;
+ rtx lo_reg, delayed_reg;
int hilo_delay;
/* Force all instructions to be split into their final form. */
static bool
mips_has_long_branch_p (void)
{
- rtx insn, subinsn;
+ rtx_insn *insn, *subinsn;
int normal_length;
/* We need up-to-date instruction lengths. */
/* Loop until the alignments for all targets are sufficient. */
do
{
- rtx insn;
+ rtx_insn *insn;
shorten_branches (get_insns ());
something_changed = false;
&& (any_condjump_p (insn) || any_uncondjump_p (insn)))
{
rtx old_label, new_label, temp, saved_temp;
- rtx target, jump, jump_sequence;
+ rtx target;
+ rtx_insn *jump, *jump_sequence;
start_sequence ();
HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
tree function)
{
- rtx this_rtx, temp1, temp2, insn, fnaddr;
+ rtx this_rtx, temp1, temp2, fnaddr;
+ rtx_insn *insn;
bool use_sibcall_p;
/* Pretend to be a post-reload pass while generating rtl. */
/* Implement FINAL_PRESCAN_INSN. */
void
-mips_final_prescan_insn (rtx insn, rtx *opvec, int noperands)
+mips_final_prescan_insn (rtx_insn *insn, rtx *opvec, int noperands)
{
if (mips_need_noat_wrapper_p (insn, opvec, noperands))
mips_push_asm_switch (&mips_noat);