Reduce SCCs in IPA postorder.
author: Martin Liska <mliska@suse.cz>
Mon, 10 Jun 2019 11:07:24 +0000 (13:07 +0200)
committer: Martin Liska <marxin@gcc.gnu.org>
Mon, 10 Jun 2019 11:07:24 +0000 (11:07 +0000)
2019-06-10  Martin Liska  <mliska@suse.cz>

* ipa-cp.c (ignore_edge_p): New function.
(build_toporder_info): Use it.
* ipa-inline.c (ignore_edge_p): New function.
(inline_small_functions): Use it.
* ipa-pure-const.c (ignore_edge_for_nothrow):
Verify opt_for_fn for caller and callee.
(ignore_edge_for_pure_const): Likewise.
* ipa-reference.c (ignore_edge_p): Extend to check
for opt_for_fn.
* ipa-utils.c (searchc): Refactor.
* ipa-utils.h: Fix coding style.

From-SVN: r272115

gcc/ChangeLog
gcc/ipa-cp.c
gcc/ipa-inline.c
gcc/ipa-pure-const.c
gcc/ipa-reference.c
gcc/ipa-utils.c
gcc/ipa-utils.h

index d8fbb00c3bf0189d56dbf430989b0d424853c641..e93db0a1b7bb8bbdf2bdf8cec7e2289295e6c3f5 100644 (file)
@@ -1,3 +1,17 @@
+2019-06-10  Martin Liska  <mliska@suse.cz>
+
+       * ipa-cp.c (ignore_edge_p): New function.
+       (build_toporder_info): Use it.
+       * ipa-inline.c (ignore_edge_p): New function.
+       (inline_small_functions): Use it.
+       * ipa-pure-const.c (ignore_edge_for_nothrow):
+       Verify opt_for_fn for caller and callee.
+       (ignore_edge_for_pure_const): Likewise.
+       * ipa-reference.c (ignore_edge_p): Extend to check
+       for opt_for_fn.
+       * ipa-utils.c (searchc): Refactor.
+       * ipa-utils.h: Fix coding style.
+
 2019-06-10  Claudiu Zissulescu  <claziss@synopsys.com>
 
        * config/arc/arc.c (arc_rtx_costs): Update costs.
index 59b15fa7362c87de3e063f5b727d1279456b6cd6..a0f6f23829b6300d546b8f4e9d398134b8833220 100644 (file)
@@ -806,6 +806,21 @@ public:
   {}
 };
 
+/* Skip edges from and to nodes without ipa_cp enabled.
+   Ignore not available symbols.  */
+
+static bool
+ignore_edge_p (cgraph_edge *e)
+{
+  enum availability avail;
+  cgraph_node *ultimate_target
+    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+
+  return (avail <= AVAIL_INTERPOSABLE
+         || !opt_for_fn (e->caller->decl, flag_ipa_cp)
+         || !opt_for_fn (ultimate_target->decl, flag_ipa_cp));
+}
+
 /* Allocate the arrays in TOPO and topologically sort the nodes into order.  */
 
 static void
@@ -815,7 +830,8 @@ build_toporder_info (struct ipa_topo_info *topo)
   topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
 
   gcc_checking_assert (topo->stack_top == 0);
-  topo->nnodes = ipa_reduced_postorder (topo->order, true, NULL);
+  topo->nnodes = ipa_reduced_postorder (topo->order, true,
+                                       ignore_edge_p);
 }
 
 /* Free information about strongly connected components and the arrays in
index 745bdf3002a65382d20fc3fe91cf708bf6bc587c..3475258b92251a6691e6c4ecae44ecd808aacf70 100644 (file)
@@ -1752,6 +1752,16 @@ sum_callers (struct cgraph_node *node, void *data)
   return false;
 }
 
+/* We only propagate across edges with non-interposable callee.  */
+
+inline bool
+ignore_edge_p (struct cgraph_edge *e)
+{
+  enum availability avail;
+  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+  return (avail <= AVAIL_INTERPOSABLE);
+}
+
 /* We use greedy algorithm for inlining of small functions:
    All inline candidates are put into prioritized heap ordered in
    increasing badness.
@@ -1779,7 +1789,7 @@ inline_small_functions (void)
      metrics.  */
 
   max_count = profile_count::uninitialized ();
-  ipa_reduced_postorder (order, true, NULL);
+  ipa_reduced_postorder (order, true, ignore_edge_p);
   free (order);
 
   FOR_EACH_DEFINED_FUNCTION (node)
index bb561d00853f5d148d37b5cecd30a5f2e8db38ef..f5e53967df117ad76b3998ee463a29e37aa338ce 100644 (file)
@@ -1361,12 +1361,14 @@ ignore_edge_for_nothrow (struct cgraph_edge *e)
     return true;
 
   enum availability avail;
-  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
-                                                               e->caller);
-  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
+  cgraph_node *ultimate_target
+    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
     return true;
-  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
-        && !e->callee->binds_to_current_def_p (e->caller);
+  return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
+          && !e->callee->binds_to_current_def_p (e->caller))
+         || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
+         || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
 }
 
 /* Return true if NODE is self recursive function.
@@ -1396,16 +1398,21 @@ cdtor_p (cgraph_node *n, void *)
   return false;
 }
 
-/* We only propagate across edges with non-interposable callee.  */
+/* Skip edges from and to nodes without ipa_pure_const enabled.
+   Ignore not available symbols.  */
 
 static bool
 ignore_edge_for_pure_const (struct cgraph_edge *e)
 {
   enum availability avail;
-  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
-  return (avail <= AVAIL_INTERPOSABLE);
-}
+  cgraph_node *ultimate_target
+    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
 
+  return (avail <= AVAIL_INTERPOSABLE
+         || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
+         || !opt_for_fn (ultimate_target->decl,
+                         flag_ipa_pure_const));
+}
 
 /* Produce transitive closure over the callgraph and compute pure/const
    attributes.  */
index 9ef03c2505bfccde8138806c8660843ea4e4fa84..7b2614ffa28a3778dfc939c75f1816c384663dae 100644 (file)
@@ -676,16 +676,23 @@ get_read_write_all_from_node (struct cgraph_node *node,
       }
 }
 
-/* Skip edges from and to nodes without ipa_reference enables.  This leave
-   them out of strongy connected coponents and makes them easyto skip in the
+/* Skip edges from and to nodes without ipa_reference enabled.
+   Ignore not available symbols.  This leaves
+   them out of strongly connected components and makes them easy to skip in the
    propagation loop bellow.  */
 
 static bool
 ignore_edge_p (cgraph_edge *e)
 {
-  return (!opt_for_fn (e->caller->decl, flag_ipa_reference)
-          || !opt_for_fn (e->callee->function_symbol ()->decl,
-                         flag_ipa_reference));
+  enum availability avail;
+  cgraph_node *ultimate_target
+    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+
+  return (avail < AVAIL_INTERPOSABLE
+         || (avail == AVAIL_INTERPOSABLE
+             && !(flags_from_decl_or_type (e->callee->decl) & ECF_LEAF))
+         || !opt_for_fn (e->caller->decl, flag_ipa_reference)
+          || !opt_for_fn (ultimate_target->decl, flag_ipa_reference));
 }
 
 /* Produce the global information by preforming a transitive closure
index 79b250c394397b8d7026dec4c415f95fa94dae36..25c2e2cf78916c212ab16e0e4d17bdccabccb8bd 100644 (file)
@@ -103,8 +103,7 @@ searchc (struct searchc_env* env, struct cgraph_node *v,
         continue;
 
       if (w->aux
-         && (avail > AVAIL_INTERPOSABLE
-             || avail == AVAIL_INTERPOSABLE))
+         && (avail >= AVAIL_INTERPOSABLE))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
index 4a39d4ad74e0c467b727cd23e929dbdc7653daad..5903da5840dc5fa1ba85fa0c6ed9fb9e34f19406 100644 (file)
@@ -244,5 +244,3 @@ odr_type_p (const_tree t)
 }
 
 #endif  /* GCC_IPA_UTILS_H  */
-
-