+2016-11-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
+
+ * config/aarch64/aarch64.c (aarch64_emit_unlikely_jump): Split
+ up variables to make some rtx_insn *.
+ * config/alpha/alpha.c (emit_unlikely_jump): Likewise.
+ * config/arc/arc.c: Likewise.
+ * config/arm/arm.c: Likewise.
+ * config/mn10300/mn10300.c (mn10300_legitimize_pic_address):
+ Likewise.
+ * config/rs6000/rs6000.c (rs6000_expand_split_stack_prologue):
+ Likewise.
+ * config/spu/spu.c (spu_emit_branch_hint): Likewise.
+
2016-11-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* config/arm/arm.c (legitimize_pic_address): Change to use
{
int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
- insn = emit_jump_insn (insn);
- add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
+ rtx_insn *jump = emit_jump_insn (insn);
+ add_int_reg_note (jump, REG_BR_PROB, very_unlikely);
}
/* Expand a compare and swap pattern. */
emit_unlikely_jump (rtx cond, rtx label)
{
int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
- rtx x;
-
- x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
- x = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
- add_int_reg_note (x, REG_BR_PROB, very_unlikely);
+ rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
+ rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
+ add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
}
/* A subroutine of the atomic operation splitters. Emit a load-locked
{
int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
- insn = emit_jump_insn (insn);
- add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
+ rtx_insn *jump = emit_jump_insn (insn);
+ add_int_reg_note (jump, REG_BR_PROB, very_unlikely);
}
/* Expand code to perform a 8 or 16-bit compare and swap by doing
{
int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
- insn = emit_jump_insn (insn);
- add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
+ rtx_insn *jump = emit_jump_insn (insn);
+ add_int_reg_note (jump, REG_BR_PROB, very_unlikely);
}
/* Expand a compare and swap pattern. */
mn10300_legitimize_pic_address (rtx orig, rtx reg)
{
rtx x;
+ rtx_insn *insn;
if (GET_CODE (orig) == LABEL_REF
|| (GET_CODE (orig) == SYMBOL_REF
x = gen_rtx_CONST (SImode, x);
emit_move_insn (reg, x);
- x = emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
+ insn = emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
}
else if (GET_CODE (orig) == SYMBOL_REF)
{
x = gen_rtx_PLUS (SImode, pic_offset_table_rtx, x);
x = gen_const_mem (SImode, x);
- x = emit_move_insn (reg, x);
+ insn = emit_move_insn (reg, x);
}
else
return orig;
- set_unique_reg_note (x, REG_EQUAL, orig);
+ set_unique_reg_note (insn, REG_EQUAL, orig);
return reg;
}
Insert a SETLB insn just before LABEL. */
static void
-mn10300_insert_setlb_lcc (rtx_insn *label, rtx branch)
+mn10300_insert_setlb_lcc (rtx_insn *label, rtx_insn *branch)
{
rtx lcc, comparison, cmp_reg;
emit_unlikely_jump (rtx cond, rtx label)
{
int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
- rtx x;
-
- x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
- x = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
- add_int_reg_note (x, REG_BR_PROB, very_unlikely);
+ rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
+ rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
+ add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
}
/* A subroutine of the atomic operation splitters. Emit a load-locked
gen_rtx_GEU (VOIDmode, compare, const0_rtx),
gen_rtx_LABEL_REF (VOIDmode, ok_label),
pc_rtx);
- jump = emit_jump_insn (gen_rtx_SET (pc_rtx, jump));
- JUMP_LABEL (jump) = ok_label;
+ insn = emit_jump_insn (gen_rtx_SET (pc_rtx, jump));
+ JUMP_LABEL (insn) = ok_label;
/* Mark the jump as very likely to be taken. */
- add_int_reg_note (jump, REG_BR_PROB,
+ add_int_reg_note (insn, REG_BR_PROB,
REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100);
lr = gen_rtx_REG (Pmode, LR_REGNO);
spu_emit_branch_hint (rtx_insn *before, rtx_insn *branch, rtx target,
int distance, sbitmap blocks)
{
- rtx branch_label = 0;
rtx_insn *hint;
rtx_insn *insn;
rtx_jump_table_data *table;
if (NOTE_INSN_BASIC_BLOCK_P (before))
before = NEXT_INSN (before);
- branch_label = gen_label_rtx ();
+ rtx_code_label *branch_label = gen_label_rtx ();
LABEL_NUSES (branch_label)++;
LABEL_PRESERVE_P (branch_label) = 1;
insn = emit_label_before (branch_label, branch);
- branch_label = gen_rtx_LABEL_REF (VOIDmode, branch_label);
+ rtx branch_label_ref = gen_rtx_LABEL_REF (VOIDmode, branch_label);
bitmap_set_bit (blocks, BLOCK_FOR_INSN (branch)->index);
- hint = emit_insn_before (gen_hbr (branch_label, target), before);
+ hint = emit_insn_before (gen_hbr (branch_label_ref, target), before);
recog_memoized (hint);
INSN_LOCATION (hint) = INSN_LOCATION (branch);
HINTED_P (branch) = 1;