+2014-10-30  Jeff Law  <law@redhat.com>
+
+ * config/pa/pa-protos.h (pa_output_arg_descriptor): Strengthen
+ argument from rtx to rtx_insn *.
+ (compute_movmem_length, compute_clrmem_length): Likewise.
+ (copy_fp_args, length_fp_args): Likewise.
+ * config/pa/pa.c (legitimize_pic_address): Promote local variable
+ "insn" from rtx to rtx_insn *.
+ (legitimize_tls_address, pa_emit_move_sequence): Likewise.
+ (pa_output_block_move, store_reg, store_reg_modify): Likewise.
+ (set_reg_plus_d, pa_expand_prologue, hppa_profile_hook): Likewise.
+ (branch_to_delay_slot_p, branch_needs_nop_p, use_skip_p): Likewise.
+ (pa_output_arg_descriptor): Strengthen argument from rtx to rtx_insn *.
+ (compute_movmem_length, compute_clrmem_length): Likewise.
+ (copy_fp_args, length_fp_args): Likewise.
+
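
Note on the pattern (not part of the patch): each change above narrows a
parameter or local from the catch-all rtx to the more specific rtx_insn *,
so that insn-only accesses such as PATTERN are enforced by the C++ type
system instead of being checked (or not) at runtime. The sketch below is a
minimal, self-contained analogy; the member layout, the compute_length
helper and the driver in main are invented here purely for illustration and
are not GCC code.

    // Hypothetical stand-ins sketching the base/derived relationship;
    // in GCC, rtx is a pointer to rtx_def and rtx_insn derives from it.
    #include <cassert>

    struct rtx_def            { int code; };          // any RTL object
    struct rtx_insn : rtx_def { rtx_def *pattern; };  // only real insns

    typedef rtx_def *rtx;

    // Before: a plain rtx parameter accepted any RTL object, so passing a
    // non-insn compiled but misbehaved.  After narrowing, the signature
    // itself documents and enforces the requirement.
    static int
    compute_length (rtx_insn *insn)
    {
      return insn->pattern->code;   // insn-only access, now known valid
    }

    int
    main ()
    {
      rtx_insn insn {};
      rtx_def body {42};
      insn.pattern = &body;
      assert (compute_length (&insn) == 42);  // OK: argument is an insn
      // compute_length (&body);              // would no longer compile
      return 0;
    }
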
2014-10-29  Ramana Radhakrishnan  <ramana.radhakrishnan@arm.com>

* config/arm/arm.h (MACHMODE): Treat machine_mode as a
static bool forward_branch_p (rtx_insn *);
static void compute_zdepwi_operands (unsigned HOST_WIDE_INT, unsigned *);
static void compute_zdepdi_operands (unsigned HOST_WIDE_INT, unsigned *);
-static int compute_movmem_length (rtx);
-static int compute_clrmem_length (rtx);
+static int compute_movmem_length (rtx_insn *);
+static int compute_clrmem_length (rtx_insn *);
static bool pa_assemble_integer (rtx, unsigned int, int);
static void remove_useless_addtr_insns (int);
static void store_reg (int, HOST_WIDE_INT, int);
static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
static bool pa_scalar_mode_supported_p (machine_mode);
static bool pa_commutative_p (const_rtx x, int outer_code);
-static void copy_fp_args (rtx) ATTRIBUTE_UNUSED;
-static int length_fp_args (rtx) ATTRIBUTE_UNUSED;
+static void copy_fp_args (rtx_insn *) ATTRIBUTE_UNUSED;
+static int length_fp_args (rtx_insn *) ATTRIBUTE_UNUSED;
static rtx hppa_legitimize_address (rtx, rtx, machine_mode);
static inline void pa_file_start_level (void) ATTRIBUTE_UNUSED;
static inline void pa_file_start_space (int) ATTRIBUTE_UNUSED;
/* Labels need special handling. */
if (pic_label_operand (orig, mode))
{
- rtx insn;
+ rtx_insn *insn;
/* We do not want to go through the movXX expanders here since that
would create recursion.
}
if (GET_CODE (orig) == SYMBOL_REF)
{
- rtx insn, tmp_reg;
+ rtx_insn *insn;
+ rtx tmp_reg;
gcc_assert (reg);
static rtx
legitimize_tls_address (rtx addr)
{
- rtx ret, insn, tmp, t1, t2, tp;
+ rtx ret, tmp, t1, t2, tp;
+ rtx_insn *insn;
/* Currently, we can't handle anything but a SYMBOL_REF. */
if (GET_CODE (addr) != SYMBOL_REF)
else if (GET_CODE (operand1) != CONST_INT
|| !pa_cint_ok_for_move (INTVAL (operand1)))
{
- rtx insn, temp;
+ rtx temp;
+ rtx_insn *insn;
rtx op1 = operand1;
HOST_WIDE_INT value = 0;
HOST_WIDE_INT insv = 0;
count insns rather than emit them. */
static int
-compute_movmem_length (rtx insn)
+compute_movmem_length (rtx_insn *insn)
{
rtx pat = PATTERN (insn);
unsigned int align = INTVAL (XEXP (XVECEXP (pat, 0, 7), 0));
count insns rather than emit them. */
static int
-compute_clrmem_length (rtx insn)
+compute_clrmem_length (rtx_insn *insn)
{
rtx pat = PATTERN (insn);
unsigned int align = INTVAL (XEXP (XVECEXP (pat, 0, 4), 0));
static void
store_reg (int reg, HOST_WIDE_INT disp, int base)
{
- rtx insn, dest, src, basereg;
+ rtx dest, src, basereg;
+ rtx_insn *insn;
src = gen_rtx_REG (word_mode, reg);
basereg = gen_rtx_REG (Pmode, base);
static void
store_reg_modify (int base, int reg, HOST_WIDE_INT mod)
{
- rtx insn, basereg, srcreg, delta;
+ rtx basereg, srcreg, delta;
+ rtx_insn *insn;
gcc_assert (VAL_14_BITS_P (mod));
static void
set_reg_plus_d (int reg, int base, HOST_WIDE_INT disp, int note)
{
- rtx insn;
+ rtx_insn *insn;
if (VAL_14_BITS_P (disp))
{
HOST_WIDE_INT size = get_frame_size ();
HOST_WIDE_INT offset;
int i;
- rtx insn, tmpreg;
+ rtx tmpreg;
+ rtx_insn *insn;
gr_saved = 0;
fr_saved = 0;
if (df_regs_ever_live_p (i)
|| (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
{
- rtx addr, insn, reg;
+ rtx addr, reg;
+ rtx_insn *insn;
addr = gen_rtx_MEM (DFmode,
gen_rtx_POST_INC (word_mode, tmpreg));
reg = gen_rtx_REG (DFmode, i);
lcla2 and load_offset_label_address insn patterns. */
rtx reg = gen_reg_rtx (SImode);
rtx_code_label *label_rtx = gen_label_rtx ();
- rtx begin_label_rtx, call_insn;
+ rtx begin_label_rtx;
+ rtx_insn *call_insn;
char begin_label_name[16];
ASM_GENERATE_INTERNAL_LABEL (begin_label_name, FUNC_BEGIN_PROLOG_LABEL,
}
void
-pa_output_arg_descriptor (rtx call_insn)
+pa_output_arg_descriptor (rtx_insn *call_insn)
{
const char *arg_regs[4];
machine_mode arg_mode;
static bool
branch_to_delay_slot_p (rtx_insn *insn)
{
- rtx jump_insn;
+ rtx_insn *jump_insn;
if (dbr_sequence_length ())
return FALSE;
static bool
branch_needs_nop_p (rtx_insn *insn)
{
- rtx jump_insn;
+ rtx_insn *jump_insn;
if (dbr_sequence_length ())
return FALSE;
static bool
use_skip_p (rtx_insn *insn)
{
- rtx jump_insn = next_active_insn (JUMP_LABEL (insn));
+ rtx_insn *jump_insn = next_active_insn (JUMP_LABEL (insn));
while (insn)
{
/* Copy any FP arguments in INSN into integer registers. */
static void
-copy_fp_args (rtx insn)
+copy_fp_args (rtx_insn *insn)
{
rtx link;
rtx xoperands[2];
/* Compute length of the FP argument copy sequence for INSN. */
static int
-length_fp_args (rtx insn)
+length_fp_args (rtx_insn *insn)
{
int length = 0;
rtx link;