+2019-06-10 Martin Liska <mliska@suse.cz>
+
+ * ipa-cp.c (ignore_edge_p): New function.
+ (build_toporder_info): Use it.
+ * ipa-inline.c (ignore_edge_p): New function.
+ (inline_small_functions): Use it.
+ * ipa-pure-const.c (ignore_edge_for_nothrow):
+ Verify opt_for_fn for caller and callee.
+ (ignore_edge_for_pure_const): Likewise.
+ * ipa-reference.c (ignore_edge_p): Extend to check
+ for opt_for_fn.
+ * ipa-utils.c (searchc): Refactor.
+ * ipa-utils.h: Fix coding style.
+
2019-06-10 Claudiu Zissulescu <claziss@synopsys.com>
* config/arc/arc.c (arc_rtx_costs): Update costs.
{}
};
+/* Skip edges from and to nodes without ipa_cp enabled.
+ Ignore not available symbols. */
+
+static bool
+ignore_edge_p (cgraph_edge *e)
+{
+ enum availability avail;
+ cgraph_node *ultimate_target
+ = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+
+ return (avail <= AVAIL_INTERPOSABLE
+ || !opt_for_fn (e->caller->decl, flag_ipa_cp)
+ || !opt_for_fn (ultimate_target->decl, flag_ipa_cp));
+}
+
/* Allocate the arrays in TOPO and topologically sort the nodes into order. */
static void
topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
gcc_checking_assert (topo->stack_top == 0);
- topo->nnodes = ipa_reduced_postorder (topo->order, true, NULL);
+ topo->nnodes = ipa_reduced_postorder (topo->order, true,
+ ignore_edge_p);
}
/* Free information about strongly connected components and the arrays in
return false;
}
+/* We only propagate across edges with non-interposable callee. */
+
+inline bool
+ignore_edge_p (struct cgraph_edge *e)
+{
+ enum availability avail;
+ e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+ return (avail <= AVAIL_INTERPOSABLE);
+}
+
/* We use greedy algorithm for inlining of small functions:
All inline candidates are put into prioritized heap ordered in
increasing badness.
metrics. */
max_count = profile_count::uninitialized ();
- ipa_reduced_postorder (order, true, NULL);
+ ipa_reduced_postorder (order, true, ignore_edge_p);
free (order);
FOR_EACH_DEFINED_FUNCTION (node)
return true;
enum availability avail;
- cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
- e->caller);
- if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
+ cgraph_node *ultimate_target
+ = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+ if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
return true;
- return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
- && !e->callee->binds_to_current_def_p (e->caller);
+ return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
+ && !e->callee->binds_to_current_def_p (e->caller))
+ || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
+ || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
}
/* Return true if NODE is self recursive function.
return false;
}
-/* We only propagate across edges with non-interposable callee. */
+/* Skip edges from and to nodes without ipa_pure_const enabled.
+ Ignore not available symbols. */
static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
enum availability avail;
- e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
- return (avail <= AVAIL_INTERPOSABLE);
-}
+ cgraph_node *ultimate_target
+ = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+ return (avail <= AVAIL_INTERPOSABLE
+ || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
+ || !opt_for_fn (ultimate_target->decl,
+ flag_ipa_pure_const));
+}
/* Produce transitive closure over the callgraph and compute pure/const
attributes. */
}
}
-/* Skip edges from and to nodes without ipa_reference enables. This leave
- them out of strongy connected coponents and makes them easyto skip in the
+/* Skip edges from and to nodes without ipa_reference enabled.
+ Ignore not available symbols. This leaves
+ them out of strongly connected components and makes them easy to skip in the
propagation loop bellow. */
static bool
ignore_edge_p (cgraph_edge *e)
{
- return (!opt_for_fn (e->caller->decl, flag_ipa_reference)
- || !opt_for_fn (e->callee->function_symbol ()->decl,
- flag_ipa_reference));
+ enum availability avail;
+ cgraph_node *ultimate_target
+ = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+
+ return (avail < AVAIL_INTERPOSABLE
+ || (avail == AVAIL_INTERPOSABLE
+ && !(flags_from_decl_or_type (e->callee->decl) & ECF_LEAF))
+ || !opt_for_fn (e->caller->decl, flag_ipa_reference)
+ || !opt_for_fn (ultimate_target->decl, flag_ipa_reference));
}
/* Produce the global information by preforming a transitive closure
continue;
if (w->aux
- && (avail > AVAIL_INTERPOSABLE
- || avail == AVAIL_INTERPOSABLE))
+ && (avail >= AVAIL_INTERPOSABLE))
{
w_info = (struct ipa_dfs_info *) w->aux;
if (w_info->new_node)
}
#endif /* GCC_IPA_UTILS_H */
-
-