/* Return the cost of forcibly inserting a barrier after INSN. */
static int
-arm_barrier_cost (rtx insn)
+arm_barrier_cost (rtx_insn *insn)
{
  /* Basing the location of the pool on the loop depth is preferable,
     but at the moment, the basic block information seems to be
     corrupt by this stage of the compilation. */
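The conversion repeated throughout these hunks is the same: parameters that always hold instructions are narrowed from the generic rtx to rtx_insn *, turning a class of latent type confusions into compile-time errors. A minimal sketch of the asymmetry this relies on, assuming GCC's is-a.h helpers; barrier_cost_from_rtx is a hypothetical caller, not part of the patch:

static int
barrier_cost_from_rtx (rtx x)
{
  /* An rtx_insn * converts implicitly to rtx, but recovering an
     rtx_insn * from a plain rtx takes an explicit, checked downcast. */
  rtx_insn *insn = as_a <rtx_insn *> (x);
  return arm_barrier_cost (insn);
}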
static void
-ix86_add_cfa_restore_note (rtx insn, rtx reg, HOST_WIDE_INT cfa_offset)
+ix86_add_cfa_restore_note (rtx_insn *insn, rtx reg, HOST_WIDE_INT cfa_offset)
{
  if (!crtl->shrink_wrapped
      && cfa_offset <= cfun->machine->fs.red_zone_offset)
    m->fs.drap_valid = true;
}
  else
-    ix86_add_cfa_restore_note (NULL_RTX, reg, cfa_offset);
+    ix86_add_cfa_restore_note (NULL, reg, cfa_offset);
  cfa_offset -= UNITS_PER_WORD;
}
  set_mem_align (mem, 128);
  emit_move_insn (reg, mem);
-  ix86_add_cfa_restore_note (NULL_RTX, reg, cfa_offset);
+  ix86_add_cfa_restore_note (NULL, reg, cfa_offset);
  cfa_offset -= 16;
}
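The NULL_RTX to NULL changes above fall out of the new signature: rtl.h defines NULL_RTX as (rtx) 0, and a base rtx pointer does not implicitly convert to the derived rtx_insn *, so the plain null pointer constant is passed instead. A sketch of the distinction (standalone declarations for illustration only):

rtx r = NULL_RTX;    /* OK: NULL_RTX expands to (rtx) 0 */
/* rtx_insn *i = NULL_RTX;  would not compile: rtx does not implicitly
   convert to the more-derived rtx_insn * */
rtx_insn *i = NULL;  /* OK: a plain null matches any pointer type */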
static bool
insn_defines_reg (unsigned int regno1, unsigned int regno2,
-                  rtx insn)
+                  rtx_insn *insn)
{
  df_ref def;
/* Return true if insn is a branch instruction. */
static bool
-is_branch (rtx insn)
+is_branch (rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn));
}
/* Return true if insn is a prefetch instruction. */
static bool
-is_prefetch (rtx insn)
+is_prefetch (rtx_insn *insn)
{
  return NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == PREFETCH;
}
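Predicates such as is_branch and is_prefetch only inspect the insn they are handed, so the tighter parameter type costs nothing at call sites, which already walk chains of rtx_insn *. A hedged sketch of such a walk; count_prefetches is hypothetical, and it assumes NEXT_INSN returns rtx_insn * after this conversion:

static int
count_prefetches (rtx_insn *start)
{
  int n = 0;

  /* Walk forward through the insn chain, counting prefetch insns. */
  for (rtx_insn *insn = start; insn != NULL; insn = NEXT_INSN (insn))
    if (is_prefetch (insn))
      n++;
  return n;
}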
bit immediates. */
static int
-get_num_immediates (rtx insn, int *imm, int *imm32, int *imm64)
+get_num_immediates (rtx_insn *insn, int *imm, int *imm32, int *imm64)
{
  imm_info imm_values = {0, 0, 0};
immediate. */
static bool
-has_immediate (rtx insn)
+has_immediate (rtx_insn *insn)
{
  int num_imm_operand;
  int num_imm32_operand;