+2014-08-25 David Malcolm <dmalcolm@redhat.com>
+
+ * config/sh/sh-protos.h (output_ieee_ccmpeq): Strengthen param 1
+ from rtx to rtx_insn *.
+ (output_branchy_insn): Likewise for param 3.
+ (output_far_jump): Likewise for param 1.
+ (final_prescan_insn): Likewise.
+ (sh_insn_length_adjustment): Likewise for sole param.
+
+ * config/sh/sh.c (expand_cbranchsi4): Likewise for local "jump".
+ (expand_cbranchdi4): Strengthen local "skip_label" from rtx to
+ rtx_code_label *.
+ (sh_emit_compare_and_set): Likewise for local "lab".
+ (output_far_jump): Strengthen param "insn" and local "prev" from
+ rtx to rtx_insn *.
+ (output_branchy_insn): Likewise for param "insn" and local
+ "next_insn".
+ (output_ieee_ccmpeq): Likewise for param "insn".
+ (struct label_ref_list_d): Strengthen field "label" from rtx to
+ rtx_code_label *.
+ (pool_node): Likewise.
+ (pool_window_label): Likewise for this global.
+ (add_constant): Likewise for return type and locals "lab", "new_rtx".
+ (dump_table): Strengthen params "start", "barrier" and local
+ "scan" from rtx to rtx_insn *.
+ (broken_move): Likewise for param "insn".
+ (untangle_mova): Likewise for param "new_mova".  Strengthen param
+ "first_mova" from rtx * to rtx_insn **.
+ (mova_p): Likewise for param "insn".
+ (fixup_mova): Likewise for param "mova".
+ (find_barrier): Likewise for return type, params "mova" and
+ "from", and locals "barrier_before_mova", "found_barrier",
+ "good_barrier", "orig", "last_symoff", "next". Strengthen local
+ "label" from rtx to rtx_code_label *.
+ (sh_reorg): Strengthen locals "first", "insn", "mova" from rtx to
+ rtx_insn *.  Likewise for locals "link", "scan", "barrier".
+ (split_branches): Likewise for param "first" and local "insn".
+ (final_prescan_insn): Likewise for param "insn".
+ (sequence_insn_p): Likewise for locals "prev", "next".
+ (sh_insn_length_adjustment): Likewise for param "insn".
+ (sh_can_redirect_branch): Likewise for local "insn".
+ (find_r0_life_regions): Likewise for locals "end", "insn".
+ (sh_output_mi_thunk): Likewise for local "insns".
+
2014-08-25 David Malcolm <dmalcolm@redhat.com>
* config/score/score.c (score_output_mi_thunk): Strengthen local
/* Declare functions defined in sh.c and used in templates. */
extern const char *output_branch (int, rtx, rtx *);
-extern const char *output_ieee_ccmpeq (rtx, rtx *);
-extern const char *output_branchy_insn (enum rtx_code, const char *, rtx, rtx *);
+extern const char *output_ieee_ccmpeq (rtx_insn *, rtx *);
+extern const char *output_branchy_insn (enum rtx_code, const char *,
+ rtx_insn *, rtx *);
extern const char *output_movedouble (rtx, rtx[], enum machine_mode);
extern const char *output_movepcrel (rtx, rtx[], enum machine_mode);
-extern const char *output_far_jump (rtx, rtx);
+extern const char *output_far_jump (rtx_insn *, rtx);
extern rtx sfunc_uses_reg (rtx);
extern int barrier_align (rtx);
extern int regs_used (rtx, int);
extern void fixup_addr_diff_vecs (rtx);
extern int get_dest_uid (rtx, int);
-extern void final_prescan_insn (rtx, rtx *, int);
+extern void final_prescan_insn (rtx_insn *, rtx *, int);
extern enum tls_model tls_symbolic_operand (rtx, enum machine_mode);
extern bool system_reg_operand (rtx, enum machine_mode);
extern bool reg_unused_after (rtx, rtx);
extern void expand_sf_binop (rtx (*)(rtx, rtx, rtx, rtx), rtx *);
extern void expand_df_unop (rtx (*)(rtx, rtx, rtx), rtx *);
extern void expand_df_binop (rtx (*)(rtx, rtx, rtx, rtx), rtx *);
-extern int sh_insn_length_adjustment (rtx);
+extern int sh_insn_length_adjustment (rtx_insn *);
extern bool sh_can_redirect_branch (rtx, rtx);
extern void sh_expand_unop_v2sf (enum rtx_code, rtx, rtx);
extern void sh_expand_binop_v2sf (enum rtx_code, rtx, rtx, rtx);
static bool shmedia_space_reserved_for_target_registers;
-static void split_branches (rtx);
+static void split_branches (rtx_insn *);
static int branch_dest (rtx);
static void print_slot (rtx);
-static rtx add_constant (rtx, enum machine_mode, rtx);
-static void dump_table (rtx, rtx);
-static bool broken_move (rtx);
-static bool mova_p (rtx);
-static rtx find_barrier (int, rtx, rtx);
+static rtx_code_label *add_constant (rtx, enum machine_mode, rtx);
+static void dump_table (rtx_insn *, rtx_insn *);
+static bool broken_move (rtx_insn *);
+static bool mova_p (rtx_insn *);
+static rtx_insn *find_barrier (int, rtx_insn *, rtx_insn *);
static bool noncall_uses_reg (rtx, rtx, rtx *);
static rtx gen_block_redirect (rtx, int, int);
static void sh_reorg (void);
emit_insn (gen_rtx_SET (VOIDmode, get_t_reg_rtx (),
gen_rtx_fmt_ee (comparison, SImode,
operands[1], operands[2])));
- rtx jump = emit_jump_insn (branch_expander (operands[3]));
+ rtx_insn *jump = emit_jump_insn (branch_expander (operands[3]));
if (probability >= 0)
add_int_reg_note (jump, REG_BR_PROB, probability);
}
expand_cbranchdi4 (rtx *operands, enum rtx_code comparison)
{
enum rtx_code msw_taken, msw_skip, lsw_taken;
- rtx skip_label = NULL_RTX;
+ rtx_code_label *skip_label = NULL;
rtx op1h, op1l, op2h, op2l;
int num_branches;
int prob, rev_prob;
enum rtx_code code = GET_CODE (operands[1]);
rtx op0 = operands[2];
rtx op1 = operands[3];
- rtx lab = NULL_RTX;
+ rtx_code_label *lab = NULL;
bool invert = false;
rtx tem;
}
const char *
-output_far_jump (rtx insn, rtx op)
+output_far_jump (rtx_insn *insn, rtx op)
{
struct { rtx lab, reg, op; } this_jmp;
rtx braf_base_lab = NULL_RTX;
const char *jump;
int far;
int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
- rtx prev;
+ rtx_insn *prev;
this_jmp.lab = gen_label_rtx ();
follow jmp and bt, if the address is in range. */
const char *
output_branchy_insn (enum rtx_code code, const char *templ,
- rtx insn, rtx *operands)
+ rtx_insn *insn, rtx *operands)
{
- rtx next_insn = NEXT_INSN (insn);
+ rtx_insn *next_insn = NEXT_INSN (insn);
if (next_insn && JUMP_P (next_insn) && condjump_p (next_insn))
{
}
const char *
-output_ieee_ccmpeq (rtx insn, rtx *operands)
+output_ieee_ccmpeq (rtx_insn *insn, rtx *operands)
{
return output_branchy_insn (NE, "bt %l9" "\n"
" fcmp/eq %1,%0",
typedef struct label_ref_list_d
{
- rtx label;
+ rtx_code_label *label;
struct label_ref_list_d *next;
} *label_ref_list_t;
typedef struct
{
rtx value; /* Value in table. */
- rtx label; /* Label of value. */
+ rtx_code_label *label; /* Label of value. */
label_ref_list_t wend; /* End of window. */
enum machine_mode mode; /* Mode of value. */
#define MAX_POOL_SIZE 372
static pool_node pool_vector[MAX_POOL_SIZE];
static int pool_size;
-static rtx pool_window_label;
+static rtx_code_label *pool_window_label;
static int pool_window_last;
static int max_labelno_before_reorg;
necessary. */
/* Add a constant to the pool and return its label. */
-static rtx
+static rtx_code_label *
add_constant (rtx x, enum machine_mode mode, rtx last_value)
{
int i;
- rtx lab, new_rtx;
+ rtx_code_label *lab, *new_rtx;
label_ref_list_t ref, newref;
/* First see if we've already got it. */
these insns at a 4-byte aligned position. BARRIER is the barrier
after which we are to place the table. */
static void
-dump_table (rtx start, rtx barrier)
+dump_table (rtx_insn *start, rtx_insn *barrier)
{
- rtx scan = barrier;
+ rtx_insn *scan = barrier;
int i;
bool need_align = true;
rtx lab;
scan = emit_insn_after (gen_consttable_end (), scan);
scan = emit_barrier_after (scan);
pool_size = 0;
- pool_window_label = NULL_RTX;
+ pool_window_label = NULL;
pool_window_last = 0;
}
CONST_DOUBLE input value is CONST_OK_FOR_I08. For a SFmode move, we don't
need to fix it if the input value is CONST_OK_FOR_I08. */
static bool
-broken_move (rtx insn)
+broken_move (rtx_insn *insn)
{
if (NONJUMP_INSN_P (insn))
{
/* Return true if the specified insn is a mova insn. */
static bool
-mova_p (rtx insn)
+mova_p (rtx_insn *insn)
{
return (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
/* Fix up a mova from a switch that went out of range. */
static void
-fixup_mova (rtx mova)
+fixup_mova (rtx_insn *mova)
{
PUT_MODE (XEXP (MOVA_LABELREF (mova), 0), QImode);
if (! flag_pic)
return 0 if *first_mova was replaced, 1 if new_mova was replaced,
2 if new_mova has been assigned to *first_mova, -1 otherwise.. */
static int
-untangle_mova (int *num_mova, rtx *first_mova, rtx new_mova)
+untangle_mova (int *num_mova, rtx_insn **first_mova, rtx_insn *new_mova)
{
int n_addr = 0; /* Initialization to shut up spurious warning. */
int f_target, n_target = 0; /* Likewise. */
/* Find the last barrier from insn FROM which is close enough to hold the
constant pool. If we can't find one, then create one near the end of
the range. */
-static rtx
-find_barrier (int num_mova, rtx mova, rtx from)
+static rtx_insn *
+find_barrier (int num_mova, rtx_insn *mova, rtx_insn *from)
{
int count_si = 0;
int count_hi = 0;
int hi_align = 2;
int si_align = 2;
int leading_mova = num_mova;
- rtx barrier_before_mova = NULL_RTX;
- rtx found_barrier = NULL_RTX;
- rtx good_barrier = NULL_RTX;
+ rtx_insn *barrier_before_mova = NULL;
+ rtx_insn *found_barrier = NULL;
+ rtx_insn *good_barrier = NULL;
int si_limit;
int hi_limit;
- rtx orig = from;
+ rtx_insn *orig = from;
rtx last_got = NULL_RTX;
- rtx last_symoff = NULL_RTX;
+ rtx_insn *last_symoff = NULL;
/* For HImode: range is 510, add 4 because pc counts from address of
second instruction after this one, subtract 2 for the jump instruction
if (BARRIER_P (from))
{
- rtx next;
+ rtx_insn *next;
found_barrier = from;
{
/* We didn't find a barrier in time to dump our stuff,
so we'll make one. */
- rtx label = gen_label_rtx ();
+ rtx_code_label *label = gen_label_rtx ();
/* Don't emit a constant table in the middle of insns for
casesi_worker_2. This is a bit overkill but is enough
CALL_ARG_LOCATION note. */
if (CALL_P (from))
{
- rtx next = NEXT_INSN (from);
+ rtx_insn *next = NEXT_INSN (from);
if (next && NOTE_P (next)
&& NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
from = next;
static void
sh_reorg (void)
{
- rtx first, insn, mova = NULL_RTX;
+ rtx_insn *first, *insn, *mova = NULL;
int num_mova;
rtx r0_rtx = gen_rtx_REG (Pmode, 0);
rtx r0_inc_rtx = gen_rtx_POST_INC (Pmode, r0_rtx);
for (insn = first; insn; insn = NEXT_INSN (insn))
{
- rtx pattern, reg, link, set, scan, dies, label;
+ rtx pattern, reg, set, dies, label;
+ rtx_insn *link, *scan;
int rescan = 0, foundinsn = 0;
if (CALL_P (insn))
|| (NONJUMP_INSN_P (insn)
&& recog_memoized (insn) == CODE_FOR_casesi_worker_2))
{
- rtx scan;
+ rtx_insn *scan;
/* Scan ahead looking for a barrier to stick the constant table
behind. */
- rtx barrier = find_barrier (num_mova, mova, insn);
+ rtx_insn *barrier = find_barrier (num_mova, mova, insn);
rtx last_float_move = NULL_RTX, last_float = 0, *last_float_addr = NULL;
int need_aligned_label = 0;
newly created instructions into account. It also allows us to
find branches with common targets more easily. */
static void
-split_branches (rtx first)
+split_branches (rtx_insn *first)
{
- rtx insn;
+ rtx_insn *insn;
struct far_branch **uid_branch, *far_branch_list = 0;
int max_uid = get_max_uid ();
int ok;
variable length. This is because the second pass of shorten_branches
does not bother to update them. */
void
-final_prescan_insn (rtx insn, rtx *opvec ATTRIBUTE_UNUSED,
+final_prescan_insn (rtx_insn *insn, rtx *opvec ATTRIBUTE_UNUSED,
int noperands ATTRIBUTE_UNUSED)
{
if (TARGET_DUMPISIZE)
static bool
sequence_insn_p (rtx insn)
{
- rtx prev, next;
+ rtx_insn *prev, *next;
prev = PREV_INSN (insn);
if (prev == NULL)
}
int
-sh_insn_length_adjustment (rtx insn)
+sh_insn_length_adjustment (rtx_insn *insn)
{
/* Instructions with unfilled delay slots take up an extra two bytes for
the nop in the delay slot. */
if (flag_expensive_optimizations && simplejump_p (branch2))
{
rtx dest = XEXP (SET_SRC (single_set (branch2)), 0);
- rtx insn;
+ rtx_insn *insn;
int distance;
for (distance = 0, insn = NEXT_INSN (branch1);
static int
find_r0_life_regions (basic_block b)
{
- rtx end, insn;
+ rtx_insn *end, *insn;
rtx pset;
rtx r0_reg;
int live;
{
CUMULATIVE_ARGS cum;
int structure_value_byref = 0;
- rtx this_rtx, this_value, sibcall, insns, funexp;
+ rtx this_rtx, this_value, sibcall, funexp;
+ rtx_insn *insns;
tree funtype = TREE_TYPE (function);
int simple_add = CONST_OK_FOR_ADD (delta);
int did_load = 0;