+2017-06-08 Jan Hubicka <hubicka@ucw.cz>
+
+ * cgraph.c (cgraph_edge::maybe_hot_p): Do not check
+ flag_branch_probabilities.
+ * ipa-inline.c (edge_badness): Likewise.
+ * ipa-profile.c (ipa_propagate_frequency_1): Likewise.
+ * postreload-gcse.c (eliminate_partially_redundant_load): Likewise.
+ * predict.c (maybe_hot_frequency_p): Likewise.
+ (probably_never_executed): Likewise.
+ * sched-ebb.c (schedule_ebbs): Likewise.
+ * sched-rgn.c (find_single_block_region): Likewise.
+ * tracer.c (tail_duplicate): Likewise.
+
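The hunks below all apply the same substitution: guards of the form "profile_info && flag_branch_probabilities" (or the per-function opt_for_fn (..., flag_branch_probabilities)) give way to queries of the profile itself, either profile_status_for_fn () == PROFILE_READ or count.initialized_p (). A minimal, self-contained sketch of that before/after pattern follows; the types and helpers are simplified stand-ins, not the real GCC declarations.

/* Hypothetical, simplified stand-ins; the real declarations live in
   basic-block.h (profile_status_d) and profile-count.h (profile_count).  */
enum profile_status_d { PROFILE_ABSENT, PROFILE_GUESSED, PROFILE_READ };

struct function_stub
{
  profile_status_d profile_status; /* what kind of profile the CFG carries */
  bool count_initialized;          /* stand-in for count.initialized_p ()  */
};

/* Old-style guard: profile data is only trusted when the global
   -fbranch-probabilities flag was given.  */
static bool
has_feedback_old (const function_stub &fun, bool flag_branch_probabilities)
{
  return flag_branch_probabilities && fun.count_initialized;
}

/* New-style guard: ask the function itself whether a real profile was
   read, so the decision no longer depends on the command-line flag.  */
static bool
has_feedback_new (const function_stub &fun)
{
  return fun.profile_status == PROFILE_READ;
}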
2017-06-08 Jan Hubicka <hubicka@ucw.cz>

 * opts.c (finish_options): x_flag_reorder_blocks_and_partition no
bool
cgraph_edge::maybe_hot_p (void)
{
- /* TODO: Export profile_status from cfun->cfg to cgraph_node. */
- if (profile_info
- && opt_for_fn (caller->decl, flag_branch_probabilities)
- && !maybe_hot_count_p (NULL, count))
+ if (!maybe_hot_count_p (NULL, count))
return false;
if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
|| (callee
numerator = ((sreal) 1 >> 8);
if (caller->count > profile_count::zero ())
numerator *= caller->count.to_gcov_type ();
- else if (opt_for_fn (caller->decl, flag_branch_probabilities))
+ else if (caller->count.initialized_p ())
numerator = numerator >> 11;
denominator = growth;
it is executed by the train run. Transfer the function only if all
callers are unlikely executed. */
if (profile_info
- && opt_for_fn (d->function_symbol->decl, flag_branch_probabilities)
+ && edge->callee->count.initialized_p ()
/* Thunks are not profiled. This is more or less implementation
bug. */
&& !d->function_symbol->thunk.thunk_p
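The ipa-profile.c comment above states the rule this hunk guards: a function may only be treated as NODE_FREQUENCY_UNLIKELY_EXECUTED when every caller that carries a real profile is itself unlikely executed. A rough stand-alone sketch of that all-callers check, using a hypothetical summary type rather than the real cgraph edge walk:

#include <vector>

/* Hypothetical per-caller summary; ipa_propagate_frequency_1 derives the
   same facts while walking cgraph_edge callers.  */
struct caller_info
{
  bool count_initialized;  /* caller has a real (read) profile count */
  bool unlikely_executed;  /* caller is known to (almost) never run   */
};

/* Allow the demotion only if every profiled caller agrees; a caller
   without a usable profile conservatively blocks it in this sketch.  */
static bool
all_callers_unlikely (const std::vector<caller_info> &callers)
{
  for (const caller_info &c : callers)
    if (!c.count_initialized || !c.unlikely_executed)
      return false;
  return true;
}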
|| (optimize_bb_for_size_p (bb) && npred_ok > 1)
/* If we don't have profile information we cannot tell if splitting
a critical edge is profitable or not so don't do it. */
- || ((! profile_info || ! flag_branch_probabilities
+ || ((! profile_info || profile_status_for_fn (cfun) != PROFILE_READ
|| targetm.cannot_modify_jumps_p ())
&& critical_edge_split))
goto cleanup;
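The postreload-gcse.c comment spells out the bail-out condition this hunk rewrites: when the transformation would have to split a critical edge, it is only attempted if a real profile (now checked via PROFILE_READ rather than the flag) is available and the target allows its jumps to be rewritten. A compact sketch of that decision, with hypothetical parameter names:

/* Hypothetical helper mirroring the condition guarding "goto cleanup"
   in eliminate_partially_redundant_load.  */
static bool
skip_partial_redundancy (bool have_read_profile, bool can_modify_jumps,
                         bool needs_critical_edge_split)
{
  /* Without trustworthy profile data, or on targets whose jumps cannot
     be modified, a required critical-edge split is not worth the cost.  */
  return needs_critical_edge_split
         && (!have_read_profile || !can_modify_jumps);
}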
maybe_hot_frequency_p (struct function *fun, int freq)
{
struct cgraph_node *node = cgraph_node::get (fun->decl);
- if (!profile_info
- || !opt_for_fn (fun->decl, flag_branch_probabilities))
+ if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
{
if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
return false;
return false;
return true;
}
- if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
+ if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
&& (cgraph_node::get (fun->decl)->frequency
== NODE_FREQUENCY_UNLIKELY_EXECUTED))
return true;
if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
return;
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
if (ebbs_p) {
int probability_cutoff;
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
bitmap_clear (bb_seen);
initialize_original_copy_tables ();
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
weighted_insns += n * bb->frequency;
}
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE_FEEDBACK);
else
cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE);
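The sched-ebb.c, sched-rgn.c and tracer.c hunks all pick their thresholds the same way: when a real profile was read, the *_FEEDBACK parameter applies, otherwise the default tuned for guessed profiles. A simplified stand-alone version of that selection; the numeric defaults here are assumptions, the real values come from params.def:

/* Assumed defaults standing in for the TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK
   and TRACER_MIN_BRANCH_PROBABILITY parameters.  */
static const int cutoff_with_feedback = 80;     /* percent, assumed */
static const int cutoff_without_feedback = 50;  /* percent, assumed */

/* Measured probabilities from a read profile can be trusted, so a
   different cutoff applies than for statically guessed ones.  */
static int
pick_probability_cutoff (bool profile_was_read)
{
  return profile_was_read ? cutoff_with_feedback : cutoff_without_feedback;
}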