+2017-07-16 Jan Hubicka <hubicka@ucw.cz>
+
+ * profile-count.h (profile_probability::from_reg_br_prob_note,
+ profile_probability::to_reg_br_prob_note): New functions.
+ * doc/rtl.texi (REG_BR_PROB): Update documentation.
+ * reg-notes.def (REG_BR_PROB, REG_BR_PRED): Update docs.
+ * predict.c (probability_reliable_p): Remove.
+ (edge_probability_reliable_p): Update.
+ (br_prob_note_reliable_p): Update.
+ (invert_br_probabilities): Update.
+ (add_reg_br_prob_note): New function.
+ (combine_predictions_for_insn): Update.
+ * asan.c (asan_clear_shadow): Update.
+ * cfgbuild.c (compute_outgoing_frequencies): Update.
+ * cfgrtl.c (force_nonfallthru_and_redirect): Update.
+ (update_br_prob_note): Update.
+ (rtl_verify_edges): Update.
+ (purge_dead_edges): Update.
+ (fixup_reorder_chain): Update.
+ * emit-rtl.c (try_split): Update.
+ * ifcvt.c (cond_exec_process_insns): Update.
+ (cond_exec_process_if_block): Update.
+ (dead_or_predicable): Update.
+ * internal-fn.c (expand_addsub_overflow): Update.
+ (expand_neg_overflow): Update.
+ (expand_mul_overflow): Update.
+ * loop-doloop.c (doloop_modify): Update.
+ * loop-unroll.c (compare_and_jump_seq): Update.
+ * optabs.c (emit_cmp_and_jump_insn_1): Update.
+ * predict.h: Update.
+ * reorg.c (mostly_true_jump): Update.
+ * rtl.h: Update.
+ * config/aarch64/aarch64.c (aarch64_emit_unlikely_jump): Update.
+ * config/alpha/alpha.c (emit_unlikely_jump): Update.
+ * config/arc/arc.c (emit_unlikely_jump): Update.
+ * config/arm/arm.c (emit_unlikely_jump): Update.
+ * config/bfin/bfin.c (cbranch_predicted_taken_p): Update.
+ * config/frv/frv.c (frv_print_operand_jump_hint): Update.
+ * config/i386/i386.c (ix86_expand_split_stack_prologue): Update.
+ (ix86_print_operand): Update.
+ (ix86_split_fp_branch): Update.
+ (predict_jump): Update.
+ * config/ia64/ia64.c (ia64_print_operand): Update.
+ * config/mmix/mmix.c (mmix_print_operand): Update.
+ * config/powerpcspe/powerpcspe.c (output_cbranch): Update.
+ (rs6000_expand_split_stack_prologue): Update.
+ * config/rs6000/rs6000.c: Update.
+ * config/s390/s390.c (s390_expand_vec_strlen): Update.
+ (s390_expand_vec_movstr): Update.
+ (s390_expand_cs_tdsi): Update.
+ (s390_expand_split_stack_prologue): Update.
+ * config/sh/sh.c (sh_print_operand): Update.
+ (expand_cbranchsi4): Update.
+ (expand_cbranchdi4): Update.
+ * config/sparc/sparc.c (output_v9branch): Update.
+ * config/spu/spu.c (get_branch_target): Update.
+ (ea_load_store_inline): Update.
+ * config/tilegx/tilegx.c (cbranch_predicted_p): Update.
+ * config/tilepro/tilepro.c: Update.
+
2017-07-16 Eric Botcazou <ebotcazou@adacore.com>
* gimplify.c (mostly_copy_tree_r): Revert latest change.
emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
jump = get_last_insn ();
gcc_assert (JUMP_P (jump));
- add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
+ add_reg_br_prob_note (jump,
+ profile_probability::guessed_always ()
+ .apply_scale (80, 100));
}
void
probability = XINT (note, 0);
e = BRANCH_EDGE (b);
e->probability
- = profile_probability::from_reg_br_prob_base (probability);
- e->count = b->count.apply_probability (probability);
+ = profile_probability::from_reg_br_prob_note (probability);
+ e->count = b->count.apply_probability (e->probability);
f = FALLTHRU_EDGE (b);
- f->probability
- = profile_probability::from_reg_br_prob_base (REG_BR_PROB_BASE
- - probability);
+ f->probability = e->probability.invert ();
f->count = b->count - e->count;
return;
}
{
int prob = XINT (note, 0);
- b->probability = profile_probability::from_reg_br_prob_base (prob);
- b->count = e->count.apply_probability (prob);
+ b->probability = profile_probability::from_reg_br_prob_note (prob);
+ b->count = e->count.apply_probability (b->probability);
e->probability -= e->probability;
e->count -= b->count;
}
return;
note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
if (!note
- || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_base ())
+ || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
return;
- XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_base ();
+ XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
}
/* Get the last insn associated with block BB (that includes barriers and
}
}
else if (XINT (note, 0)
- != BRANCH_EDGE (bb)->probability.to_reg_br_prob_base ()
+ != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
&& profile_status_for_fn (cfun) != PROFILE_ABSENT)
{
error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
XINT (note, 0),
- BRANCH_EDGE (bb)->probability.to_reg_br_prob_base ());
+ BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
err = 1;
}
}
b = BRANCH_EDGE (bb);
f = FALLTHRU_EDGE (bb);
- b->probability = profile_probability::from_reg_br_prob_base
+ b->probability = profile_probability::from_reg_br_prob_note
(XINT (note, 0));
- f->probability = profile_probability::always () - b->probability;
+ f->probability = b->probability.invert ();
b->count = bb->count.apply_probability (b->probability);
f->count = bb->count.apply_probability (f->probability);
}
rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
if (note
- && XINT (note, 0) < REG_BR_PROB_BASE / 2
+ && profile_probability::from_reg_br_prob_note
+ (XINT (note, 0)) < profile_probability::even ()
&& invert_jump (bb_end_jump,
(e_fall->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun)
static void
aarch64_emit_unlikely_jump (rtx insn)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
-
rtx_insn *jump = emit_jump_insn (insn);
- add_int_reg_note (jump, REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
}
/* Expand a compare and swap pattern. */
static void
emit_unlikely_jump (rtx cond, rtx label)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
- add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (insn, profile_probability::very_unlikely ());
}
/* A subroutine of the atomic operation splitters. Emit a load-locked
static void
emit_unlikely_jump (rtx insn)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
-
rtx_insn *jump = emit_jump_insn (insn);
- add_int_reg_note (jump, REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
}
/* Expand code to perform a 8 or 16-bit compare and swap by doing
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
rtx_insn *jump = emit_jump_insn (insn);
- add_int_reg_note (jump, REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
}
/* Expand a compare and swap pattern. */
if (x)
{
- int pred_val = XINT (x, 0);
-
- return pred_val >= REG_BR_PROB_BASE / 2;
+ return profile_probability::from_reg_br_prob_note (XINT (x, 0))
+ >= profile_probability::even ();
}
return 0;
rtx note;
rtx labelref;
int ret;
- int prob = -1;
enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
gcc_assert (JUMP_P (insn));
else
{
- prob = XINT (note, 0);
- ret = ((prob >= (REG_BR_PROB_BASE / 2))
+ ret = ((profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ >= profile_probability::even ())
? FRV_JUMP_LIKELY
: FRV_JUMP_NOT_LIKELY);
}
JUMP_LABEL (jump_insn) = label;
/* Mark the jump as very likely to be taken. */
- add_int_reg_note (jump_insn, REG_BR_PROB,
- REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100);
+ add_reg_br_prob_note (jump_insn, profile_probability::very_likely ());
if (split_stack_fn == NULL_RTX)
{
x = find_reg_note (current_output_insn, REG_BR_PROB, 0);
if (x)
{
- int pred_val = XINT (x, 0);
+ int pred_val = profile_probability::from_reg_br_prob_note
+ (XINT (x, 0)).to_reg_br_prob_base ();
if (pred_val < REG_BR_PROB_BASE * 45 / 100
|| pred_val > REG_BR_PROB_BASE * 55 / 100)
(pc_rtx,
gen_rtx_IF_THEN_ELSE (VOIDmode,
condition, target1, target2)));
- if (split_branch_probability >= 0)
- add_int_reg_note (i, REG_BR_PROB, split_branch_probability);
+ if (split_branch_probability.initialized_p ())
+ add_reg_br_prob_note (i, split_branch_probability);
}
void
{
rtx_insn *insn = get_last_insn ();
gcc_assert (JUMP_P (insn));
- add_int_reg_note (insn, REG_BR_PROB, prob);
+ add_reg_br_prob_note (insn, profile_probability::from_reg_br_prob_base (prob));
}
/* Helper function for the string operations below. Dest VARIABLE whether
x = find_reg_note (current_output_insn, REG_BR_PROB, 0);
if (x)
{
- int pred_val = XINT (x, 0);
+ int pred_val = profile_probability::from_reg_br_prob_note
+ (XINT (x, 0)).to_reg_br_prob_base ();
/* Guess top and bottom 10% statically predicted. */
if (pred_val < REG_BR_PROB_BASE / 50
if (TARGET_BRANCH_PREDICT)
{
x = find_reg_note (current_output_insn, REG_BR_PROB, 0);
- if (x && XINT (x, 0) > REG_BR_PROB_BASE / 2)
+ if (x && profile_probability::from_reg_br_prob_note (XINT (x, 0))
+ > profile_probability::even ())
putc ('P', stream);
}
return;
if (note != NULL_RTX)
{
/* PROB is the difference from 50%. */
- int prob = XINT (note, 0) - REG_BR_PROB_BASE / 2;
+ int prob = profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ .to_reg_br_prob_base () - REG_BR_PROB_BASE / 2;
/* Only hint for highly probable/improbable branches on newer cpus when
we have real profile data, as static prediction overrides processor
static void
emit_unlikely_jump (rtx cond, rtx label)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
- add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (insn, profile_probability::very_unlikely ());
}
/* A subroutine of the atomic operation splitters. Emit a load-locked
insn = emit_jump_insn (gen_rtx_SET (pc_rtx, jump));
JUMP_LABEL (insn) = ok_label;
/* Mark the jump as very likely to be taken. */
- add_int_reg_note (insn, REG_BR_PROB,
- REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100);
+ add_reg_br_prob_note (insn, profile_probability::very_likely ());
lr = gen_rtx_REG (Pmode, LR_REGNO);
insn = emit_move_insn (r0, lr);
if (note != NULL_RTX)
{
/* PROB is the difference from 50%. */
- int prob = XINT (note, 0) - REG_BR_PROB_BASE / 2;
+ int prob = profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ .to_reg_br_prob_base () - REG_BR_PROB_BASE / 2;
/* Only hint for highly probable/improbable branches on newer cpus when
we have real profile data, as static prediction overrides processor
static void
emit_unlikely_jump (rtx cond, rtx label)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
- add_int_reg_note (insn, REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (insn, profile_probability::very_unlikely ());
}
/* A subroutine of the atomic operation splitters. Emit a load-locked
insn = emit_jump_insn (gen_rtx_SET (pc_rtx, jump));
JUMP_LABEL (insn) = ok_label;
/* Mark the jump as very likely to be taken. */
- add_int_reg_note (insn, REG_BR_PROB,
- REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100);
+ add_reg_br_prob_note (insn, profile_probability::very_likely ());
lr = gen_rtx_REG (Pmode, LR_REGNO);
insn = emit_move_insn (r0, lr);
void
s390_expand_vec_strlen (rtx target, rtx string, rtx alignment)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
- int very_likely = REG_BR_PROB_BASE - 1;
rtx highest_index_to_load_reg = gen_reg_rtx (Pmode);
rtx str_reg = gen_reg_rtx (V16QImode);
rtx str_addr_base_reg = gen_reg_rtx (Pmode);
GEN_INT (VSTRING_FLAG_ZS | VSTRING_FLAG_CS)));
add_int_reg_note (s390_emit_ccraw_jump (8, NE, loop_start_label),
- REG_BR_PROB, very_likely);
+ REG_BR_PROB,
+ profile_probability::very_likely ().to_reg_br_prob_note ());
emit_insn (gen_vec_extractv16qi (len, result_reg, GEN_INT (7)));
/* If the string pointer wasn't aligned we have loaded less then 16
emit_insn (gen_movsicc (str_idx_reg, cond,
highest_index_to_load_reg, str_idx_reg));
- add_int_reg_note (s390_emit_jump (is_aligned_label, cond), REG_BR_PROB,
- very_unlikely);
+ add_reg_br_prob_note (s390_emit_jump (is_aligned_label, cond),
+ profile_probability::very_unlikely ());
expand_binop (Pmode, add_optab, str_idx_reg,
GEN_INT (-16), str_idx_reg, 1, OPTAB_DIRECT);
void
s390_expand_vec_movstr (rtx result, rtx dst, rtx src)
{
- int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
rtx temp = gen_reg_rtx (Pmode);
rtx src_addr = XEXP (src, 0);
rtx dst_addr = XEXP (dst, 0);
emit_insn (gen_vec_vfenesv16qi (vpos, vsrc, vsrc,
GEN_INT (VSTRING_FLAG_ZS | VSTRING_FLAG_CS)));
add_int_reg_note (s390_emit_ccraw_jump (8, EQ, done_label),
- REG_BR_PROB, very_unlikely);
+ REG_BR_PROB, profile_probability::very_unlikely ()
+ .to_reg_br_prob_note ());
emit_move_insn (gen_rtx_MEM (V16QImode,
gen_rtx_PLUS (Pmode, dst_addr_reg, offset)),
if (do_const_opt)
{
- const int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
rtx cc = gen_rtx_REG (CCZmode, CC_REGNUM);
skip_cs_label = gen_label_rtx ();
emit_insn (gen_rtx_SET (cc, gen_rtx_COMPARE (CCZmode, output, cmp)));
}
s390_emit_jump (skip_cs_label, gen_rtx_NE (VOIDmode, cc, const0_rtx));
- add_int_reg_note (get_last_insn (), REG_BR_PROB, very_unlikely);
+ add_reg_br_prob_note (get_last_insn (),
+ profile_probability::very_unlikely ());
/* If the jump is not taken, OUTPUT is the expected value. */
cmp = output;
/* Reload newval to a register manually, *after* the compare and jump
LABEL_NUSES (call_done)++;
/* Mark the jump as very unlikely to be taken. */
- add_int_reg_note (insn, REG_BR_PROB, REG_BR_PROB_BASE / 100);
+ add_reg_br_prob_note (insn,
+ profile_probability::very_unlikely ());
if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
{
extern void sh_expand_setmem (rtx *);
extern enum rtx_code prepare_cbranch_operands (rtx *, machine_mode mode,
enum rtx_code comparison);
-extern void expand_cbranchsi4 (rtx *operands, enum rtx_code comparison, int);
+extern void expand_cbranchsi4 (rtx *operands, enum rtx_code comparison);
extern bool expand_cbranchdi4 (rtx *operands, enum rtx_code comparison);
extern void sh_emit_scc_to_t (enum rtx_code, rtx, rtx);
extern void sh_emit_compare_and_branch (rtx *, machine_mode);
{
rtx note = find_reg_note (current_output_insn, REG_BR_PROB, 0);
- if (note && XINT (note, 0) * 2 < REG_BR_PROB_BASE)
+ if (note
+ && profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ < profile_probability::even ())
fputs ("/u", stream);
break;
}
return comparison;
}
-void
-expand_cbranchsi4 (rtx *operands, enum rtx_code comparison, int probability)
+static void
+expand_cbranchsi4 (rtx *operands, enum rtx_code comparison,
+ profile_probability probability)
{
rtx (*branch_expander) (rtx) = gen_branch_true;
comparison = prepare_cbranch_operands (operands, SImode, comparison);
gen_rtx_fmt_ee (comparison, SImode,
operands[1], operands[2])));
rtx_insn *jump = emit_jump_insn (branch_expander (operands[3]));
- if (probability >= 0)
- add_int_reg_note (jump, REG_BR_PROB, probability);
+ if (probability.initialized_p ())
+ add_reg_br_prob_note (jump, probability);
+}
+
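+/* Overload used when no branch probability is known (e.g. from the
+   cbranchsi4 expander); it simply forwards to the worker above with an
+   uninitialized probability.  */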
+void
+expand_cbranchsi4 (rtx *operands, enum rtx_code comparison)
+{
+ expand_cbranchsi4 (operands, comparison,
+ profile_probability::uninitialized ());
}
/* ??? How should we distribute probabilities when more than one branch
rtx_code_label *skip_label = NULL;
rtx op1h, op1l, op2h, op2l;
int num_branches;
- int prob, rev_prob;
- int msw_taken_prob = -1, msw_skip_prob = -1, lsw_taken_prob = -1;
+ profile_probability prob, rev_prob;
+ profile_probability msw_taken_prob = profile_probability::uninitialized (),
+ msw_skip_prob = profile_probability::uninitialized (),
+ lsw_taken_prob = profile_probability::uninitialized ();
comparison = prepare_cbranch_operands (operands, DImode, comparison);
op1h = gen_highpart_mode (SImode, DImode, operands[1]);
op2l = gen_lowpart (SImode, operands[2]);
msw_taken = msw_skip = lsw_taken = LAST_AND_UNUSED_RTX_CODE;
prob = split_branch_probability;
- rev_prob = REG_BR_PROB_BASE - prob;
+ rev_prob = prob.invert ();
switch (comparison)
{
case EQ:
msw_skip = NE;
lsw_taken = EQ;
- if (prob >= 0)
+ if (prob.initialized_p ())
{
- // If we had more precision, we'd use rev_prob - (rev_prob >> 32) .
+ /* FIXME: This is not optimal.  We do not really know the probability
+ that the values differ by the MSW only, but we should probably
+ distribute the probabilities more evenly.  */
msw_skip_prob = rev_prob;
- if (REG_BR_PROB_BASE <= 65535)
- lsw_taken_prob = prob ? REG_BR_PROB_BASE : 0;
- else
- {
- lsw_taken_prob
- = (prob
- ? (REG_BR_PROB_BASE
- - ((gcov_type) REG_BR_PROB_BASE * rev_prob
- / ((gcov_type) prob << 32)))
- : 0);
- }
+ lsw_taken_prob = prob > profile_probability::never ()
+ ? profile_probability::guessed_always ()
+ : profile_probability::guessed_never ();
}
break;
case NE:
msw_taken = NE;
msw_taken_prob = prob;
lsw_taken = NE;
- lsw_taken_prob = 0;
+ lsw_taken_prob = profile_probability::guessed_never ();
break;
case GTU: case GT:
msw_taken = comparison;
if (comparison != EQ && comparison != NE && num_branches > 1)
{
if (!CONSTANT_P (operands[2])
- && prob >= (int) (REG_BR_PROB_BASE * 3 / 8U)
- && prob <= (int) (REG_BR_PROB_BASE * 5 / 8U))
- {
- msw_taken_prob = prob / 2U;
- msw_skip_prob
- = REG_BR_PROB_BASE * rev_prob / (REG_BR_PROB_BASE + rev_prob);
+ && prob.initialized_p ()
+ && prob.to_reg_br_prob_base () >= (int) (REG_BR_PROB_BASE * 3 / 8U)
+ && prob.to_reg_br_prob_base () <= (int) (REG_BR_PROB_BASE * 5 / 8U))
+ {
+ msw_taken_prob = prob.apply_scale (1, 2);
+ msw_skip_prob = rev_prob.apply_scale (REG_BR_PROB_BASE,
+ rev_prob.to_reg_br_prob_base ()
+ + REG_BR_PROB_BASE);
lsw_taken_prob = prob;
}
else
{
msw_taken_prob = prob;
- msw_skip_prob = REG_BR_PROB_BASE;
+ msw_skip_prob = profile_probability::guessed_always ();
/* ??? If we have a constant op2h, should we use that when
calculating lsw_taken_prob? */
lsw_taken_prob = prob;
(clobber (reg:SI T_REG))]
"can_create_pseudo_p ()"
{
- expand_cbranchsi4 (operands, LAST_AND_UNUSED_RTX_CODE, -1);
+ expand_cbranchsi4 (operands, LAST_AND_UNUSED_RTX_CODE);
DONE;
})
if (*labelno && insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
{
strcpy (p,
- ((XINT (note, 0) >= REG_BR_PROB_BASE / 2) ^ far)
+ ((profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ >= profile_probability::even ()) ^ far)
? ",pt" : ",pn");
p += 3;
spaces -= 3;
if (insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
{
strcpy (p,
- ((XINT (note, 0) >= REG_BR_PROB_BASE / 2) ^ far)
+ ((profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ >= profile_probability::even ()) ^ far)
? ",pt" : ",pn");
p += 3;
}
{
/* If the more probable case is not a fall through, then
try a branch hint. */
- int prob = XINT (note, 0);
+ int prob = profile_probability::from_reg_br_prob_note
+ (XINT (note, 0)).to_reg_br_prob_base ();
if (prob > (REG_BR_PROB_BASE * 6 / 10)
&& GET_CODE (XEXP (src, 1)) != PC)
lab = XEXP (src, 1);
gen_rtx_IF_THEN_ELSE (VOIDmode, bcomp,
hit_ref, pc_rtx)));
/* Say that this branch is very likely to happen. */
- v = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100 - 1;
- add_int_reg_note (insn, REG_BR_PROB, v);
+ add_reg_br_prob_note (insn, profile_probability::very_likely ());
ea_load_store (mem, is_store, ea_addr, data_addr);
cont_label = gen_label_rtx ();
if (x)
{
- int pred_val = XINT (x, 0);
-
- return pred_val >= REG_BR_PROB_BASE / 2;
+ return profile_probability::from_reg_br_prob_note (XINT (x, 0))
+ >= profile_probability::even ();
}
return false;
if (x)
{
- int pred_val = XINT (x, 0);
-
- return pred_val >= REG_BR_PROB_BASE / 2;
+ return profile_probability::from_reg_br_prob_note (XINT (x, 0))
+ >= profile_probability::even ();
}
return false;
@item REG_BR_PROB
This is used to specify the ratio of branches to non-branches of a
branch insn according to the profile data. The note is represented
-as an @code{int_list} expression whose integer value is between 0 and
-REG_BR_PROB_BASE. Larger values indicate a higher probability that
-the branch will be taken.
+as an @code{int_list} expression whose integer value is an encoding of
+the @code{profile_probability} type.  @code{profile_probability} provides
+the member functions @code{from_reg_br_prob_note} and
+@code{to_reg_br_prob_note} to extract the probability from and store it
+into the RTL encoding.
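+
+For example, one way for a pass to read the probability stored on a jump
+insn @var{insn} back and copy it to the taken edge @var{e} is:
+
+@smallexample
+rtx note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
+if (note)
+  e->probability
+    = profile_probability::from_reg_br_prob_note (XINT (note, 0));
+@end smallexample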
@findex REG_BR_PRED
@item REG_BR_PRED
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
+#include "predict.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
-/* Probability of the conditional branch currently proceeded by try_split.
- Set to -1 otherwise. */
-int split_branch_probability = -1;
+/* Probability of the conditional branch currently processed by try_split.
+   Set to profile_probability::uninitialized () otherwise.  */
+profile_probability split_branch_probability;
\f
/* Returns a hash code for X (which is a really a CONST_INT). */
rtx_insn *before, *after;
rtx note;
rtx_insn *seq, *tem;
- int probability;
+ profile_probability probability;
rtx_insn *insn_last, *insn;
int njumps = 0;
rtx_insn *call_insn = NULL;
if (any_condjump_p (trial)
&& (note = find_reg_note (trial, REG_BR_PROB, 0)))
- split_branch_probability = XINT (note, 0);
+ split_branch_probability
+ = profile_probability::from_reg_br_prob_note (XINT (note, 0));
+ else
+ split_branch_probability = profile_probability::uninitialized ();
+
probability = split_branch_probability;
seq = split_insns (pat, trial);
- split_branch_probability = -1;
+ split_branch_probability = profile_probability::uninitialized ();
if (!seq)
return trial;
CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
mark_jump_label (PATTERN (insn), insn, 0);
njumps++;
- if (probability != -1
+ if (probability.initialized_p ()
&& any_condjump_p (insn)
&& !find_reg_note (insn, REG_BR_PROB, 0))
{
is responsible for this step using
split_branch_probability variable. */
gcc_assert (njumps == 1);
- add_int_reg_note (insn, REG_BR_PROB, probability);
+ add_reg_br_prob_note (insn, probability);
}
}
}
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
-static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
- int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
/* if block information */rtx_insn *start,
/* first insn to look at */rtx end,
/* last insn to look at */rtx test,
- /* conditional execution test */int prob_val,
+ /* conditional execution test */profile_probability
+ prob_val,
/* probability of branch taken. */int mod_ok)
{
int must_be_last = FALSE;
validate_change (insn, &PATTERN (insn), pattern, 1);
- if (CALL_P (insn) && prob_val >= 0)
+ if (CALL_P (insn) && prob_val.initialized_p ())
validate_change (insn, &REG_NOTES (insn),
gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
- prob_val, REG_NOTES (insn)), 1);
+ prob_val.to_reg_br_prob_note (),
+ REG_NOTES (insn)), 1);
insn_done:
if (insn == end)
int then_mod_ok; /* whether conditional mods are ok in THEN */
rtx true_expr; /* test for else block insns */
rtx false_expr; /* test for then block insns */
- int true_prob_val; /* probability of else block */
- int false_prob_val; /* probability of then block */
+ profile_probability true_prob_val;/* probability of else block */
+ profile_probability false_prob_val;/* probability of then block */
rtx_insn *then_last_head = NULL; /* Last match at the head of THEN */
rtx_insn *else_last_head = NULL; /* Last match at the head of ELSE */
rtx_insn *then_first_tail = NULL; /* First match at the tail of THEN */
note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
if (note)
{
- true_prob_val = XINT (note, 0);
- false_prob_val = REG_BR_PROB_BASE - true_prob_val;
+ true_prob_val = profile_probability::from_reg_br_prob_note (XINT (note, 0));
+ false_prob_val = true_prob_val.invert ();
}
else
{
- true_prob_val = -1;
- false_prob_val = -1;
+ true_prob_val = profile_probability::uninitialized ();
+ false_prob_val = profile_probability::uninitialized ();
}
/* If we have && or || tests, do them here. These tests are in the adjacent
return FALSE;
rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
- int prob_val = (note ? XINT (note, 0) : -1);
+ profile_probability prob_val
+ = (note ? profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ : profile_probability::uninitialized ());
if (reversep)
{
return FALSE;
cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
XEXP (cond, 1));
- if (prob_val >= 0)
- prob_val = REG_BR_PROB_BASE - prob_val;
+ prob_val = prob_val.invert ();
}
if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
&& JUMP_P (last)
&& any_condjump_p (last)
&& !find_reg_note (last, REG_BR_PROB, 0))
- add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
+ add_reg_br_prob_note (last,
+ profile_probability::very_unlikely ());
emit_jump (done_label);
goto do_error_label;
}
&& JUMP_P (last)
&& any_condjump_p (last)
&& !find_reg_note (last, REG_BR_PROB, 0))
- add_int_reg_note (last, REG_BR_PROB, PROB_UNLIKELY);
+ add_reg_br_prob_note (last,
+ profile_probability::very_unlikely ());
emit_jump (done_label);
goto do_error_label;
}
&& JUMP_P (last)
&& any_condjump_p (last)
&& !find_reg_note (last, REG_BR_PROB, 0))
- add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
+ add_reg_br_prob_note (last,
+ profile_probability::very_unlikely ());
emit_jump (done_label);
}
else
&& JUMP_P (last)
&& any_condjump_p (last)
&& !find_reg_note (last, REG_BR_PROB, 0))
- add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
+ add_reg_br_prob_note (last,
+ profile_probability::very_unlikely ());
emit_jump (done_label);
}
else
bool increment_count;
basic_block loop_end = desc->out_edge->src;
machine_mode mode;
- rtx true_prob_val;
widest_int iterations;
jump_insn = BB_END (loop_end);
fputs (" iterations).\n", dump_file);
}
- /* Get the probability of the original branch. If it exists we would
- need to update REG_BR_PROB of the new jump_insn. */
- true_prob_val = find_reg_note (jump_insn, REG_BR_PROB, NULL_RTX);
-
/* Discard original jump to continue loop. The original compare
result may still be live, so it cannot be discarded explicitly. */
delete_insn (jump_insn);
add_reg_note (jump_insn, REG_NONNEG, NULL_RTX);
/* Update the REG_BR_PROB note. */
- if (true_prob_val && desc->in_edge->probability.initialized_p ())
- {
- /* Seems safer to use the branch probability. */
- add_int_reg_note (jump_insn, REG_BR_PROB,
- desc->in_edge->probability.to_reg_br_prob_base ());
- }
+ if (desc->in_edge->probability.initialized_p ())
+ add_reg_br_prob_note (jump_insn, desc->in_edge->probability);
}
/* Called through note_stores. */
LABEL_NUSES (label)++;
}
if (prob.initialized_p ())
- add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_base ());
+ add_reg_br_prob_note (jump, prob);
seq = get_insns ();
end_sequence ();
&& JUMP_P (insn)
&& any_condjump_p (insn)
&& !find_reg_note (insn, REG_BR_PROB, 0))
- add_int_reg_note (insn, REG_BR_PROB, prob.to_reg_br_prob_base ());
+ add_reg_br_prob_note (insn, prob);
}
/* Generate code to compare X with Y so that the condition codes are
return false;
}
-/* Return true when the probability of edge is reliable.
-
- The profile guessing code is good at predicting branch outcome (ie.
- taken/not taken), that is predicted right slightly over 75% of time.
- It is however notoriously poor on predicting the probability itself.
- In general the profile appear a lot flatter (with probabilities closer
- to 50%) than the reality so it is bad idea to use it to drive optimization
- such as those disabling dynamic branch prediction for well predictable
- branches.
-
- There are two exceptions - edges leading to noreturn edges and edges
- predicted by number of iterations heuristics are predicted well. This macro
- should be able to distinguish those, but at the moment it simply check for
- noreturn heuristic that is only one giving probability over 99% or bellow
- 1%. In future we might want to propagate reliability information across the
- CFG if we find this information useful on multiple places. */
-static bool
-probability_reliable_p (int prob)
-{
- return (profile_status_for_fn (cfun) == PROFILE_READ
- || (profile_status_for_fn (cfun) == PROFILE_GUESSED
- && (prob <= HITRATE (1) || prob >= HITRATE (99))));
-}
-
/* Same predicate as above, working on edges. */
bool
edge_probability_reliable_p (const_edge e)
{
- return e->probability.reliable_p ();
+ return e->probability.probably_reliable_p ();
}
/* Same predicate as edge_probability_reliable_p, working on notes. */
br_prob_note_reliable_p (const_rtx note)
{
gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
- return probability_reliable_p (XINT (note, 0));
+ return profile_probability::from_reg_br_prob_note
+ (XINT (note, 0)).probably_reliable_p ();
}
static void
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_BR_PROB)
- XINT (note, 0) = REG_BR_PROB_BASE - XINT (note, 0);
+ XINT (note, 0) = profile_probability::from_reg_br_prob_note
+ (XINT (note, 0)).invert ().to_reg_br_prob_note ();
else if (REG_NOTE_KIND (note) == REG_BR_PRED)
XEXP (XEXP (note, 0), 1)
= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
e->probability = profile_probability::never ();
}
+/* Add REG_BR_PROB note to JUMP with PROB. */
+
+void
+add_reg_br_prob_note (rtx_insn *jump, profile_probability prob)
+{
+ gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0));
+ add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ());
+}
+
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
note if not already present. Remove now useless REG_BR_PRED notes. */
if (!prob_note)
{
- add_int_reg_note (insn, REG_BR_PROB, combined_probability);
+ profile_probability p
+ = profile_probability::from_reg_br_prob_base (combined_probability);
+ add_reg_br_prob_note (insn, p);
/* Save the prediction into CFG in case we are seeing non-degenerated
conditional jump. */
if (!single_succ_p (bb))
{
- BRANCH_EDGE (bb)->probability
- = profile_probability::from_reg_br_prob_base (combined_probability);
+ BRANCH_EDGE (bb)->probability = p;
FALLTHRU_EDGE (bb)->probability
= BRANCH_EDGE (bb)->probability.invert ();
}
}
else if (!single_succ_p (bb))
{
- int prob = XINT (prob_note, 0);
+ profile_probability prob = profile_probability::from_reg_br_prob_note
+ (XINT (prob_note, 0));
- BRANCH_EDGE (bb)->probability
- = profile_probability::from_reg_br_prob_base (prob);
- FALLTHRU_EDGE (bb)->probability
- = BRANCH_EDGE (bb)->probability.invert ();
+ BRANCH_EDGE (bb)->probability = prob;
+ FALLTHRU_EDGE (bb)->probability = prob.invert ();
}
else
single_succ_edge (bb)->probability = profile_probability::always ();
TAKEN
};
+/* In emit-rtl.c. */
+extern profile_probability split_branch_probability;
+
extern gcov_type get_hot_bb_threshold (void);
extern void set_hot_bb_threshold (gcov_type);
extern bool maybe_hot_count_p (struct function *, profile_count);
extern void report_predictor_hitrates (void);
extern void force_edge_cold (edge, bool);
+extern void add_reg_br_prob_note (rtx_insn *, profile_probability);
+
#endif /* GCC_PREDICT_H */
return RDIV (m_val * REG_BR_PROB_BASE, max_probability);
}
+ /* Conversion to and from RTL representation of profile probabilities. */
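+
+ /* Decode the value V of a REG_BR_PROB note.  The two low-order bits of V
+    hold the quality and the remaining bits hold the probability value.  */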
+ static profile_probability from_reg_br_prob_note (int v)
+ {
+ profile_probability ret;
+ ret.m_val = ((unsigned int)v) / 4;
+ ret.m_quality = (enum profile_quality)(v & 3);
+ return ret;
+ }
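+
+ /* Encode this probability as the integer stored in a REG_BR_PROB note;
+    the encoding round-trips exactly through from_reg_br_prob_note.  */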
+ int to_reg_br_prob_note () const
+ {
+ gcc_checking_assert (initialized_p ());
+ int ret = m_val * 4 + m_quality;
+ gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret)
+ == *this);
+ return ret;
+ }
+
/* Return VAL1/VAL2. */
static profile_probability probability_in_gcov_type
(gcov_type val1, gcov_type val2)
REG_NOTE (DEP_ANTI)
REG_NOTE (DEP_CONTROL)
-/* REG_BR_PROB is attached to JUMP_INSNs and CALL_INSNs. It has an
+/* REG_BR_PROB is attached to JUMP_INSNs. It has an
integer value (in an INT_LIST). For jumps, it is the probability
- that this is a taken branch. For calls, it is the probability that
- this call won't return. */
+ that this is a taken branch.  The integer encodes a value of the
+ profile_probability type; use from_reg_br_prob_note to decode it and
+ to_reg_br_prob_note to encode it.  */
REG_NOTE (BR_PROB)
/* Attached to a call insn; indicates that the call is malloc-like and
that the pointer returned cannot alias anything else. */
REG_NOTE (NOALIAS)
-/* REG_BR_PRED is attached to JUMP_INSNs and CALL_INSNSs. It contains
+/* REG_BR_PRED is attached to JUMP_INSNs. It contains
CONCAT of two integer value. First specifies the branch predictor
that added the note, second specifies the predicted hitrate of
- branch in the same format as REG_BR_PROB note uses. */
+ branch in fixed-point arithmetic based on REG_BR_PROB_BASE.  */
REG_NOTE (BR_PRED)
/* Attached to insns that are RTX_FRAME_RELATED_P, but are too complex
rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
if (note)
{
- int prob = XINT (note, 0);
+ int prob = profile_probability::from_reg_br_prob_note (XINT (note, 0))
+ .to_reg_br_prob_base ();
if (prob >= REG_BR_PROB_BASE * 9 / 10)
return 2;
/* In emit-rtl.c */
extern rtx_insn *try_split (rtx, rtx_insn *, int);
-extern int split_branch_probability;
/* In insn-recog.c (generated by genrecog). */
extern rtx_insn *split_insns (rtx, rtx_insn *);
+2017-07-16 Jan Hubicka <hubicka@ucw.cz>
+
+ * gcc.dg/predict-8.c: Update.
+
2017-07-16 Volker Reichelt <v.reichelt@netcologne.de>
* g++.dg/cpp1z/direct-enum-init1.C: Revert special enum handling.
return 2;
}
-/* { dg-final { scan-rtl-dump-times "REG_BR_PROB 100" 1 "expand"} } */
+/* { dg-final { scan-rtl-dump-times "REG_BR_PROB 400 " 1 "expand"} } */