+2014-08-25 David Malcolm <dmalcolm@redhat.com>
+
+ * config/tilegx/tilegx-protos.h (tilegx_output_cbranch_with_opcode):
+ Strengthen param 1 from rtx to rtx_insn *.
+ (tilegx_output_cbranch): Likewise.
+ (tilegx_adjust_insn_length): Likewise.
+ (tilegx_final_prescan_insn): Likewise for sole param.
+
+ * config/tilegx/tilegx.c (tilegx_legitimize_tls_address): Likewise
+	for local "last".
+ (cbranch_predicted_p): Likewise for param "insn".
+ (tilegx_output_simple_cbranch_with_opcode): Likewise.
+ (tilegx_output_cbranch_with_opcode): Likewise.
+ (tilegx_output_cbranch): Likewise.
+ (frame_emit_load): Likewise for return type.
+ (set_frame_related_p): Likewise for locals "seq", "insn".
+ (emit_sp_adjust): Likewise for return type, and for local "insn".
+ Introduce local "pat" for use in place of "insn" where the latter
+ isn't an instruction.
+ (tilegx_expand_epilogue): Strengthen locals "last_insn", "insn"
+ from rtx to rtx_insn *.
+ (tilegx_adjust_insn_length): Likewise for param "insn".
+ (next_insn_to_bundle): Likewise for return type and params "r" and
+ "end".
+ (tilegx_gen_bundles): Likewise for locals "insn", "next", "prev",
+ "end".
+ (replace_insns): Likewise for params "old_insn", "new_insns".
+ (replace_mov_pcrel_step1): Likewise for param "insn" and local
+ "new_insns".
+ (replace_mov_pcrel_step2): Likewise.
+ (replace_mov_pcrel_step3): Likewise.
+ (tilegx_fixup_pcrel_references): Likewise for locals "insn",
+ "next_insn".
+ (reorder_var_tracking_notes): Likewise for locals "insn", "next",
+ "queue", "next_queue", "prev".
+ (tilegx_output_mi_thunk): Likewise for local "insn".
+ (tilegx_final_prescan_insn): Likewise for param "insn".
+
2014-08-25 David Malcolm <dmalcolm@redhat.com>
* config/spu/spu.c (frame_emit_store): Strengthen return type from
}
case TLS_MODEL_INITIAL_EXEC:
{
- rtx temp, temp2, temp3, got, last;
+ rtx temp, temp2, temp3, got;
+ rtx_insn *last;
ret = gen_reg_rtx (Pmode);
temp = gen_reg_rtx (Pmode);
}
case TLS_MODEL_LOCAL_EXEC:
{
- rtx temp, temp2, last;
+ rtx temp, temp2;
+ rtx_insn *last;
ret = gen_reg_rtx (Pmode);
temp = gen_reg_rtx (Pmode);
/* Return true if INSN is annotated with a REG_BR_PROB note that
indicates it's a branch that's predicted taken. */
static bool
-cbranch_predicted_p (rtx insn)
+cbranch_predicted_p (rtx_insn *insn)
{
rtx x = find_reg_note (insn, REG_BR_PROB, 0);
/* Output assembly code for a specific branch instruction, appending
the branch prediction flag to the opcode if appropriate. */
static const char *
-tilegx_output_simple_cbranch_with_opcode (rtx insn, const char *opcode,
+tilegx_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
int regop, bool reverse_predicted)
{
static char buf[64];
/* Output assembly code for a specific branch instruction, appending
the branch prediction flag to the opcode if appropriate. */
const char *
-tilegx_output_cbranch_with_opcode (rtx insn, rtx *operands,
+tilegx_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
const char *opcode,
const char *rev_opcode, int regop)
{
/* Output assembly code for a conditional branch instruction. */
const char *
-tilegx_output_cbranch (rtx insn, rtx *operands, bool reversed)
+tilegx_output_cbranch (rtx_insn *insn, rtx *operands, bool reversed)
{
enum rtx_code code = GET_CODE (operands[1]);
const char *opcode;
/* Emit a load in the stack frame to load REGNO from address ADDR.
Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
non-null. Return the emitted insn. */
-static rtx
+static rtx_insn *
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
rtx reg = gen_rtx_REG (DImode, regno);
static rtx
set_frame_related_p (void)
{
- rtx seq = get_insns ();
- rtx insn;
+ rtx_insn *seq = get_insns ();
+ rtx_insn *insn;
end_sequence ();
large register and using 'add'.
This happens after reload, so we need to expand it ourselves. */
-static rtx
+static rtx_insn *
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
rtx reg_notes)
{
rtx to_add;
rtx imm_rtx = GEN_INT (offset);
+ rtx pat;
+ rtx_insn *insn;
- rtx insn;
if (satisfies_constraint_J (imm_rtx))
{
/* We can add this using a single immediate add. */
/* Actually adjust the stack pointer. */
if (TARGET_32BIT)
- insn = gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx, to_add);
+ pat = gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx, to_add);
else
- insn = gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx, to_add);
+ pat = gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx, to_add);
- insn = emit_insn (insn);
+ insn = emit_insn (pat);
REG_NOTES (insn) = reg_notes;
/* Describe what just happened in a way that dwarf understands. */
rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
};
- rtx last_insn, insn;
+ rtx_insn *last_insn, *insn;
unsigned int which_scratch;
int offset, start_offset, regno;
rtx cfa_restores = NULL_RTX;
by attributes in the machine-description file. This is where we
account for bundles. */
int
-tilegx_adjust_insn_length (rtx insn, int length)
+tilegx_adjust_insn_length (rtx_insn *insn, int length)
{
enum machine_mode mode = GET_MODE (insn);
/* Skip over irrelevant NOTEs and such and look for the next insn we
would consider bundling. */
-static rtx
-next_insn_to_bundle (rtx r, rtx end)
+static rtx_insn *
+next_insn_to_bundle (rtx_insn *r, rtx_insn *end)
{
for (; r != end; r = NEXT_INSN (r))
{
return r;
}
- return NULL_RTX;
+ return NULL;
}
basic_block bb;
FOR_EACH_BB_FN (bb, cfun)
{
- rtx insn, next, prev;
- rtx end = NEXT_INSN (BB_END (bb));
+ rtx_insn *insn, *next, *prev;
+ rtx_insn *end = NEXT_INSN (BB_END (bb));
- prev = NULL_RTX;
+ prev = NULL;
for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn;
prev = insn, insn = next)
{
/* Replace OLD_INSN with NEW_INSN. */
static void
-replace_insns (rtx old_insn, rtx new_insns)
+replace_insns (rtx_insn *old_insn, rtx_insn *new_insns)
{
if (new_insns)
emit_insn_before (new_insns, old_insn);
/* Do the first replacement step in tilegx_fixup_pcrel_references. */
static void
-replace_mov_pcrel_step1 (rtx insn)
+replace_mov_pcrel_step1 (rtx_insn *insn)
{
rtx pattern = PATTERN (insn);
rtx unspec;
rtx opnds[2];
- rtx new_insns;
+ rtx_insn *new_insns;
gcc_assert (GET_CODE (pattern) == SET);
opnds[0] = SET_DEST (pattern);
/* Do the second replacement step in tilegx_fixup_pcrel_references. */
static void
-replace_mov_pcrel_step2 (rtx insn)
+replace_mov_pcrel_step2 (rtx_insn *insn)
{
rtx pattern = PATTERN (insn);
rtx unspec;
rtx addr;
rtx opnds[3];
- rtx new_insns;
+ rtx_insn *new_insns;
rtx got_rtx = tilegx_got_rtx ();
gcc_assert (GET_CODE (pattern) == SET);
/* Do the third replacement step in tilegx_fixup_pcrel_references. */
static void
-replace_mov_pcrel_step3 (rtx insn)
+replace_mov_pcrel_step3 (rtx_insn *insn)
{
rtx pattern = PATTERN (insn);
rtx unspec;
rtx opnds[4];
- rtx new_insns;
+ rtx_insn *new_insns;
rtx got_rtx = tilegx_got_rtx ();
rtx text_label_rtx = tilegx_text_label_rtx ();
static void
tilegx_fixup_pcrel_references (void)
{
- rtx insn, next_insn;
+ rtx_insn *insn, *next_insn;
bool same_section_as_entry = true;
for (insn = get_insns (); insn; insn = next_insn)
basic_block bb;
FOR_EACH_BB_FN (bb, cfun)
{
- rtx insn, next;
- rtx queue = NULL_RTX;
+ rtx_insn *insn, *next;
+ rtx_insn *queue = NULL;
bool in_bundle = false;
for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
{
while (queue)
{
- rtx next_queue = PREV_INSN (queue);
+ rtx_insn *next_queue = PREV_INSN (queue);
SET_PREV_INSN (NEXT_INSN (insn)) = queue;
SET_NEXT_INSN (queue) = NEXT_INSN (insn);
SET_NEXT_INSN (insn) = queue;
{
if (in_bundle)
{
- rtx prev = PREV_INSN (insn);
+ rtx_insn *prev = PREV_INSN (insn);
SET_PREV_INSN (next) = prev;
SET_NEXT_INSN (prev) = next;
HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
tree function)
{
- rtx this_rtx, insn, funexp, addend;
+ rtx this_rtx, funexp, addend;
+ rtx_insn *insn;
/* Pretend to be a post-reload pass while generating rtl. */
reload_completed = 1;
/* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
void
-tilegx_final_prescan_insn (rtx insn)
+tilegx_final_prescan_insn (rtx_insn *insn)
{
/* Record this for tilegx_asm_output_opcode to examine. */
insn_mode = GET_MODE (insn);