cgraph.c (cgraph_edge::maybe_hot_p): Do not check flag_branch_probabilities.
author Jan Hubicka <hubicka@ucw.cz>
Thu, 8 Jun 2017 15:15:51 +0000 (17:15 +0200)
committer Jan Hubicka <hubicka@gcc.gnu.org>
Thu, 8 Jun 2017 15:15:51 +0000 (15:15 +0000)
* cgraph.c (cgraph_edge::maybe_hot_p): Do not check
flag_branch_probabilities.
* ipa-inline.c (edge_badness): Likewise.
* ipa-profile.c (ipa_propagate_frequency_1): Likewise.
* postreload-gcse.c (eliminate_partially_redundant_load): Likewise.
* predict.c (maybe_hot_frequency_p): Likewise.
(probably_never_executed): Likewise.
* sched-ebb.c (schedule_ebbs): Likewise.
* sched-rgn.c (find_single_block_region): Likewise.
* tracer.c (tail_duplicate): Likewise.

From-SVN: r249020
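
The patch replaces two flavors of the old flag_branch_probabilities test: per-function consumers now ask profile_status_for_fn (fun) == PROFILE_READ, and callgraph consumers ask whether a profile_count was ever initialized. As a hedged illustration of the first idiom, here is a minimal self-contained sketch; the enum values mirror GCC's profile_status_d, but the types are simplified stand-ins, not GCC's real headers.

#include <cstdio>

/* Simplified stand-ins for GCC internals (illustrative only).  */
enum profile_status_d { PROFILE_ABSENT, PROFILE_GUESSED, PROFILE_READ };

struct function
{
  profile_status_d profile_status;
};

/* After the patch the gate is "was a profile read for this function?",
   not "was -fbranch-probabilities given on the command line?".  */
static bool
profile_read_p (const function *fun)
{
  return fun->profile_status == PROFILE_READ;
}

int
main ()
{
  function trained = { PROFILE_READ };
  function guessed = { PROFILE_GUESSED };

  /* Mirrors the tracer.c/sched-ebb.c/sched-rgn.c hunks: pick the
     *_FEEDBACK cutoff only when real profile data was read.  */
  std::printf ("trained: %s cutoff\n",
	       profile_read_p (&trained) ? "FEEDBACK" : "static");
  std::printf ("guessed: %s cutoff\n",
	       profile_read_p (&guessed) ? "FEEDBACK" : "static");
  return 0;
}

As far as the diff shows, the intent is that a function without usable feedback data falls back to the static heuristics even when -fbranch-probabilities appears on the command line.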

gcc/ChangeLog
gcc/cgraph.c
gcc/ipa-inline.c
gcc/ipa-profile.c
gcc/postreload-gcse.c
gcc/predict.c
gcc/sched-ebb.c
gcc/sched-rgn.c
gcc/tracer.c

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 8bff6313c910d5201089da55ce6464a15340247d..65a12395a86f119b3ac699e540fe10dad3969d2f 100644
@@ -1,3 +1,16 @@
+2017-06-08  Jan Hubicka  <hubicka@ucw.cz>
+
+       * cgraph.c (cgraph_edge::maybe_hot_p): Do not check
+       flag_branch_probabilities.
+       * ipa-inline.c (edge_badness): Likewise.
+       * ipa-profile.c (ipa_propagate_frequency_1): Likewise.
+       * postreload-gcse.c (eliminate_partially_redundant_load): Likewise.
+       * predict.c (maybe_hot_frequency_p): Likewise.
+       (probably_never_executed): Likewise.
+       * sched-ebb.c (schedule_ebbs): Likewise.
+       * sched-rgn.c (find_single_block_region): Likewise.
+       * tracer.c (tail_duplicate): Likewise.
+
 2017-06-08  Jan Hubicka  <hubicka@ucw.cz>
 
        * opts.c (finish_options): x_flag_reorder_blocks_and_partition no
diff --git a/gcc/cgraph.c b/gcc/cgraph.c
index 2cbacc774d39bdf9daf1cfbcf359e59e188c2573..213587e7e2fc782a3303791ab149fc032f4bc708 100644
@@ -2729,10 +2729,7 @@ cgraph_edge::cannot_lead_to_return_p (void)
 bool
 cgraph_edge::maybe_hot_p (void)
 {
-  /* TODO: Export profile_status from cfun->cfg to cgraph_node.  */
-  if (profile_info
-      && opt_for_fn (caller->decl, flag_branch_probabilities)
-      && !maybe_hot_count_p (NULL, count))
+  if (!maybe_hot_count_p (NULL, count))
     return false;
   if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
       || (callee
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index 64c9ebd6692f54c253847e8b7a38eadee0818acc..8924f7eb15f004ac45a206be458b65d71f3cd633 100644
@@ -1078,7 +1078,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
        numerator = ((sreal) 1 >> 8);
       if (caller->count > profile_count::zero ())
        numerator *= caller->count.to_gcov_type ();
-      else if (opt_for_fn (caller->decl, flag_branch_probabilities))
+      else if (caller->count.initialized_p ())
        numerator = numerator >> 11;
       denominator = growth;
 
diff --git a/gcc/ipa-profile.c b/gcc/ipa-profile.c
index 05cad3f85b177429d90edb6870c0eb552885c4a4..f149d0196fa578a67ac44a0eefc6718d7e500f93 100644
@@ -330,7 +330,7 @@ ipa_propagate_frequency_1 (struct cgraph_node *node, void *data)
         it is executed by the train run.  Transfer the function only if all
         callers are unlikely executed.  */
       if (profile_info
-         && opt_for_fn (d->function_symbol->decl, flag_branch_probabilities)
+         && edge->callee->count.initialized_p ()
          /* Thunks are not profiled.  This is more or less implementation
             bug.  */
          && !d->function_symbol->thunk.thunk_p
diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index e3632a26f66ffd3366c5f392c290a6372c99e2ef..a1dcac2600c7a3c86495f65f4d43c1ff51a2fcc6 100644
@@ -1158,7 +1158,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
       || (optimize_bb_for_size_p (bb) && npred_ok > 1)
       /* If we don't have profile information we cannot tell if splitting
          a critical edge is profitable or not so don't do it.  */
-      || ((! profile_info || ! flag_branch_probabilities
+      || ((! profile_info || profile_status_for_fn (cfun) != PROFILE_READ
           || targetm.cannot_modify_jumps_p ())
          && critical_edge_split))
     goto cleanup;
diff --git a/gcc/predict.c b/gcc/predict.c
index 7c7a35d4de6a49324c413a031377d5018fab3959..b460a6f26ee6e024ea188d2c9532745283c514d5 100644
@@ -123,8 +123,7 @@ static inline bool
 maybe_hot_frequency_p (struct function *fun, int freq)
 {
   struct cgraph_node *node = cgraph_node::get (fun->decl);
-  if (!profile_info
-      || !opt_for_fn (fun->decl, flag_branch_probabilities))
+  if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
     {
       if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
         return false;
@@ -222,7 +221,7 @@ probably_never_executed (struct function *fun,
        return false;
       return true;
     }
-  if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
+  if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
       && (cgraph_node::get (fun->decl)->frequency
          == NODE_FREQUENCY_UNLIKELY_EXECUTED))
     return true;
diff --git a/gcc/sched-ebb.c b/gcc/sched-ebb.c
index 592fc1f3e887b7388477e9f0df1aa6afb57154c0..9123343fa0f489c6b0e9b8ee039f694dd722b52c 100644
@@ -622,7 +622,7 @@ schedule_ebbs (void)
   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
     return;
 
-  if (profile_info && flag_branch_probabilities)
+  if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
     probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
   else
     probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
diff --git a/gcc/sched-rgn.c b/gcc/sched-rgn.c
index a09fc5d1066ac17a5cd2215d3f4552c51abc5dbe..3bb9356693e741af01f8bebf88258eb8f9ed005f 100644
@@ -477,7 +477,7 @@ find_single_block_region (bool ebbs_p)
 
   if (ebbs_p) {
     int probability_cutoff;
-    if (profile_info && flag_branch_probabilities)
+    if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
       probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
     else
       probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
diff --git a/gcc/tracer.c b/gcc/tracer.c
index bb44673142057107e047a07a2e2cf4f46159771f..0b7f4da0d81efd81463bf404abfd90345f966adb 100644
@@ -270,7 +270,7 @@ tail_duplicate (void)
   bitmap_clear (bb_seen);
   initialize_original_copy_tables ();
 
-  if (profile_info && flag_branch_probabilities)
+  if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
     probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
   else
     probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
@@ -290,7 +290,7 @@ tail_duplicate (void)
       weighted_insns += n * bb->frequency;
     }
 
-  if (profile_info && flag_branch_probabilities)
+  if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
     cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE_FEEDBACK);
   else
     cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE);
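
The ipa-inline.c and ipa-profile.c hunks rely on the second idiom: a profile_count carries its own initialized_p () state. A hedged stand-in sketch of how such a check behaves follows; this models the shape of the API (initialized_p, from_gcov_type, to_gcov_type are real profile_count member names), but the class body here is invented for illustration, not GCC's real implementation.

#include <cstdio>

/* Simplified stand-in for GCC's profile_count: an uninitialized count
   means "no profile data", which replaces the old flag check.  */
class profile_count
{
  long m_val;
  bool m_initialized;

public:
  profile_count () : m_val (0), m_initialized (false) {}

  static profile_count
  from_gcov_type (long v)
  {
    profile_count c;
    c.m_val = v;
    c.m_initialized = true;
    return c;
  }

  bool initialized_p () const { return m_initialized; }
  long to_gcov_type () const { return m_val; }
};

int
main ()
{
  profile_count measured = profile_count::from_gcov_type (1000);
  profile_count unknown;

  /* Mirrors edge_badness: scale the badness by the real count when one
     exists, otherwise fall back to the static penalty.  */
  std::printf ("measured: initialized=%d count=%ld\n",
	       measured.initialized_p (), measured.to_gcov_type ());
  std::printf ("unknown:  initialized=%d\n", unknown.initialized_p ());
  return 0;
}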