/* Do not create new values when propagating within an SCC because if there
are arithmetic functions with circular dependencies, there is infinite
- number of them and we would just make lattices bottom. */
+ number of them and we would just make lattices bottom. If this condition
+ is ever relaxed we have to detect self-feeding recursive calls in
+ cgraph_edge_brings_value_p in a smarter way. */
if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
&& ipa_edge_within_scc (cs))
ret = dest_lat->set_contains_variable ();
return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
}
-/* Return true if edge CS does bring about the value described by SRC to node
- DEST or its clone for all contexts. */
+/* Return true if edge CS does bring about the value described by SRC to
+ DEST_VAL of node DEST or its clone for all contexts. */
static bool
cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
- cgraph_node *dest)
+ cgraph_node *dest, ipcp_value<tree> *dest_val)
{
struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
enum availability availability;
|| availability <= AVAIL_INTERPOSABLE
|| caller_info->node_dead)
return false;
- if (!src->val)
+ /* At the moment we do not propagate over arithmetic jump functions in SCCs,
+ so it is safe to detect self-feeding recursive calls in this way. */
+ if (!src->val || src->val == dest_val)
return true;
if (caller_info->ipcp_orig_node)
}
}
-/* Return true if edge CS does bring about the value described by SRC to node
- DEST or its clone for all contexts. */
+/* Return true if edge CS does bring about the value described by SRC to
+ DEST_VAL of node DEST or its clone for all contexts. */
static bool
cgraph_edge_brings_value_p (cgraph_edge *cs,
ipcp_value_source<ipa_polymorphic_call_context> *src,
- cgraph_node *dest)
+ cgraph_node *dest,
+ ipcp_value<ipa_polymorphic_call_context> *)
{
struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
cgraph_node *real_dest = cs->callee->function_symbol ();
return next_edge_clone[cs->uid];
}
-/* Given VAL that is intended for DEST, iterate over all its sources and if
- they still hold, add their edge frequency and their number into *FREQUENCY
- and *CALLER_COUNT respectively. */
+/* Given VAL that is intended for DEST, iterate over all its sources and if any
+ of them is viable and hot, return true. In that case, for those that still
+ hold, add their edge frequency and their number into *FREQUENCY and
+ *CALLER_COUNT respectively. */
template <typename valtype>
static bool
int freq = 0, count = 0;
profile_count cnt = profile_count::zero ();
bool hot = false;
+ bool non_self_recursive = false;
for (src = val->sources; src; src = src->next)
{
struct cgraph_edge *cs = src->cs;
while (cs)
{
- if (cgraph_edge_brings_value_p (cs, src, dest))
+ if (cgraph_edge_brings_value_p (cs, src, dest, val))
{
count++;
freq += cs->frequency ();
if (cs->count.ipa ().initialized_p ())
cnt += cs->count.ipa ();
hot |= cs->maybe_hot_p ();
+ if (cs->caller != dest)
+ non_self_recursive = true;
}
cs = get_next_cgraph_edge_clone (cs);
}
}
+ /* If the only edges bringing a value are self-recursive ones, do not bother
+ evaluating it. */
+ if (!non_self_recursive)
+ return false;
+
*freq_sum = freq;
*count_sum = cnt;
*caller_count = count;
struct cgraph_edge *cs = src->cs;
while (cs)
{
- if (cgraph_edge_brings_value_p (cs, src, dest))
+ if (cgraph_edge_brings_value_p (cs, src, dest, val))
ret.quick_push (cs);
cs = get_next_cgraph_edge_clone (cs);
}
vec_safe_push (replace_trees, replace_map);
}
}
+ auto_vec<cgraph_edge *, 2> self_recursive_calls;
+ for (i = callers.length () - 1; i >= 0; i--)
+ {
+ cgraph_edge *cs = callers[i];
+ if (cs->caller == node)
+ {
+ self_recursive_calls.safe_push (cs);
+ callers.unordered_remove (i);
+ }
+ }
new_node = node->create_virtual_clone (callers, replace_trees,
args_to_skip, "constprop");
+
+ for (unsigned j = 0; j < self_recursive_calls.length (); j++)
+ {
+ cgraph_edge *cs = next_edge_clone[self_recursive_calls[j]->uid];
+ gcc_checking_assert (cs);
+ gcc_assert (cs->caller == new_node);
+ cs->redirect_callee_duplicating_thunks (new_node);
+ }
+
ipa_set_node_agg_value_chain (new_node, aggvals);
for (av = aggvals; av; av = av->next)
new_node->maybe_create_reference (av->value, NULL);
return new_node;
}
+/* Return true, if JFUNC, which describes the i-th parameter of call CS, is a
+ simple no-operation pass-through function to itself, i.e. the recursive call
+ passes its own i-th argument unchanged into the same parameter slot. */
+
+static bool
+self_recursive_pass_through_p (cgraph_edge *cs, ipa_jump_func *jfunc, int i)
+{
+ enum availability availability;
+ /* The caller must provably be the same function as the ultimate callee
+ (resolving through aliases/thunks via function_symbol) and must not be
+ interposable, otherwise the "recursion" could bind elsewhere at link or
+ run time and the jump function would not describe a true self-cycle. */
+ if (cs->caller == cs->callee->function_symbol (&availability)
+ && availability > AVAIL_INTERPOSABLE
+ && jfunc->type == IPA_JF_PASS_THROUGH
+ && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR
+ && ipa_get_jf_pass_through_formal_id (jfunc) == i)
+ return true;
+ return false;
+}
+
/* Given a NODE, and a subset of its CALLERS, try to populate blanks slots in
KNOWN_CSTS with constants that are also known for all of the CALLERS. */
struct ipa_jump_func *jump_func;
tree t;
+ if (IPA_NODE_REF (cs->caller)->node_dead)
+ continue;
+
if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
|| (i == 0
&& call_passes_through_thunk_p (cs))
break;
}
jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
+ if (self_recursive_pass_through_p (cs, jump_func, i))
+ continue;
+
t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func, type);
if (!t
|| (newval
FOR_EACH_VEC_ELT (callers, j, cs)
{
+ struct ipa_jump_func *jfunc
+ = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
+ if (self_recursive_pass_through_p (cs, jfunc, i))
+ continue;
inter = intersect_aggregates_with_edge (cs, i, inter);
if (!inter.exists ())
return res;
}
-/* Turn KNOWN_AGGS into a list of aggregate replacement values. */
-
-static struct ipa_agg_replacement_value *
-known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
-{
- struct ipa_agg_replacement_value *res;
- struct ipa_agg_replacement_value **tail = &res;
- struct ipa_agg_jump_function *aggjf;
- struct ipa_agg_jf_item *item;
- int i, j;
-
- FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
- FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
- {
- struct ipa_agg_replacement_value *v;
- v = ggc_alloc<ipa_agg_replacement_value> ();
- v->index = i;
- v->offset = item->offset;
- v->value = item->value;
- v->by_ref = aggjf->by_ref;
- *tail = v;
- tail = &v->next;
- }
- *tail = NULL;
- return res;
-}
-
/* Determine whether CS also brings all scalar values that the NODE is
specialized for. */
struct cgraph_edge *cs = src->cs;
while (cs)
{
- if (cgraph_edge_brings_value_p (cs, src, node)
+ if (cgraph_edge_brings_value_p (cs, src, node, val)
&& cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
&& cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
{
"for all known contexts.\n", node->dump_name ());
callers = node->collect_callers ();
+ find_more_scalar_values_for_callers_subset (node, known_csts, callers);
+ find_more_contexts_for_caller_subset (node, &known_contexts, callers);
+ ipa_agg_replacement_value *aggvals
+ = find_aggregate_values_for_callers_subset (node, callers);
if (!known_contexts_useful_p (known_contexts))
{
known_contexts = vNULL;
}
clone = create_specialized_node (node, known_csts, known_contexts,
- known_aggs_to_agg_replacement_list (known_aggs),
- callers);
+ aggvals, callers);
info = IPA_NODE_REF (node);
info->do_clone_for_all_contexts = false;
IPA_NODE_REF (clone)->is_all_contexts_clone = true;