bitmap_iterator bi;
stmt_ann_t s_ann = stmt_ann (stmt);
bitmap not_read_b, not_written_b;
-
+ tree call = get_call_expr_in (stmt);
+
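+ /* Pure and const calls never clobber memory; the caller routes
+ them to add_call_read_ops instead. */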
+ gcc_assert (!(call_expr_flags (call) & (ECF_PURE | ECF_CONST)));
+
/* If we created .GLOBAL_VAR earlier, just use it. */
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
return;
}

/* Get info for local and module level statics. There is a bit
set for each static if the call being processed does not read
or write that variable. */
not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
-
/* Add a VDEF operand for every call clobbered variable. */
EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
{
tree var = referenced_var_lookup (u);
- unsigned int escape_mask = var_ann (var)->escape_mask;
tree real_var = var;
bool not_read;
bool not_written;
/* See if this variable is really clobbered by this function. */
- /* Trivial case: Things escaping only to pure/const are not
- clobbered by non-pure-const, and only read by pure/const. */
- if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
- {
- tree call = get_call_expr_in (stmt);
- if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
- {
- add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
- clobber_stats.unescapable_clobbers_avoided++;
- continue;
- }
- else
- {
- clobber_stats.unescapable_clobbers_avoided++;
- continue;
- }
- }
-
not_read = not_read_b
? bitmap_bit_p (not_read_b, DECL_UID (real_var)) : false;
not_written = not_written_b
? bitmap_bit_p (not_written_b, DECL_UID (real_var)) : false;

if (not_written)
{
clobber_stats.static_write_clobbers_avoided++;
bitmap_iterator bi;
stmt_ann_t s_ann = stmt_ann (stmt);
bitmap not_read_b;
+ tree call = get_call_expr_in (stmt);
+
+ /* Const functions do not reference memory. */
+ if (call_expr_flags (call) & ECF_CONST)
+ return;
- /* if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
- for the heuristic used to decide whether to create .GLOBAL_VAR. */
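+ /* A bit set in NOT_READ_B means the IPA reference pass determined
+ that the callee does not read that global. */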
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+
+ /* For pure functions we compute non-escaped uses separately. */
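+ /* gimple_call_used_vars is filled from the points-to solution
+ of CALLUSED by compute_call_used_vars. */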
+ if (call_expr_flags (call) & ECF_PURE)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var_lookup (u);
+ tree real_var = var;
+ bool not_read;
+
+ if (unmodifiable_var_p (var))
+ continue;
+
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ clobber_stats.readonly_clobbers++;
+
+ /* See if this variable is really used by this function. */
+ if (!not_read)
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ else
+ clobber_stats.static_readonly_clobbers_avoided++;
+ }
+
+ /* Add a VUSE for .GLOBAL_VAR if it has been created. See
+ add_referenced_var for the heuristic used to decide whether to
+ create .GLOBAL_VAR. */
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
return;
}
-
- not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
/* Add a VUSE for each call-clobbered variable. */
EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
/* Static IDs for the special variables. */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
- escaped_id = 3, nonlocal_id = 4, integer_id = 5 };
+ escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
/* Variable that represents the unknown pointer. */
static varinfo_t var_anything;
static varinfo_t var_nonlocal;
static tree nonlocal_tree;
+/* Variable that represents call-used memory, i.e. memory that may be
+ read by a pure callee. */
+static varinfo_t var_callused;
+static tree callused_tree;
+
/* Variable that represents integers. This is used for when people do things
like &0->a.b. */
static varinfo_t var_integer;
if (get_varinfo (t)->is_special_var)
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
/* Merging the solution from ESCAPED needlessly increases
- the set. Use ESCAPED as representative instead. */
- else if (get_varinfo (t)->id == escaped_id
+ the set. Use ESCAPED as representative instead; consumers of
+ the points-to solution treat that bit conservatively (see
+ find_what_p_points_to). Same for CALLUSED. */
+ else if ((get_varinfo (t)->id == escaped_id
+ || get_varinfo (t)->id == callused_id)
&& !bitmap_bit_p (sol, get_varinfo (t)->id))
{
- bitmap_set_bit (sol, escaped_id);
+ bitmap_set_bit (sol, get_varinfo (t)->id);
flag = true;
}
else if (add_graph_edge (graph, lhs, t))
solution_empty = bitmap_empty_p (solution);
if (!solution_empty
- /* Do not propagate the ESCAPED solution. */
- && i != escaped_id)
+ /* Do not propagate the ESCAPED/CALLUSED solutions. */
+ && i != escaped_id
+ && i != callused_id)
{
bitmap_iterator bi;
VEC_free (ce_s, heap, lhsc);
}
+/* For non-IPA mode, generate constraints necessary for a call to a
+ pure function in statement STMT. */
+
+static void
+handle_pure_call (tree stmt)
+{
+ tree call = get_call_expr_in (stmt);
+ tree arg;
+ call_expr_arg_iterator iter;
+
+ /* Memory reached from pointer arguments is call-used. */
+ FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
+ if (could_have_pointers (arg))
+ make_constraint_to (callused_id, arg);
+
+ /* The static chain is used as well; through it a nested function
+ can reach memory of its enclosing frames. */
+ if (CALL_EXPR_STATIC_CHAIN (call))
+ make_constraint_to (callused_id, CALL_EXPR_STATIC_CHAIN (call));
+
+ /* If the call returns a pointer it may point to memory reachable
+ from the arguments. */
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && could_have_pointers (GIMPLE_STMT_OPERAND (stmt, 0)))
+ {
+ tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ VEC(ce_s, heap) *lhsc = NULL;
+ struct constraint_expr rhsc;
+ struct constraint_expr *lhsp;
+ unsigned j;
+
+ get_constraint_for (lhs, &lhsc);
+
+ /* If this is a nested function then it can return anything. */
+ if (CALL_EXPR_STATIC_CHAIN (call))
+ {
+ rhsc.var = anything_id;
+ rhsc.offset = 0;
+ rhsc.type = ADDRESSOF;
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ process_constraint_1 (new_constraint (*lhsp, rhsc), true);
+ VEC_free (ce_s, heap, lhsc);
+ return;
+ }
+
+ /* Else just add the call-used memory here. Escaped variables
+ and globals will be dealt with in handle_lhs_call. */
+ rhsc.var = callused_id;
+ rhsc.offset = 0;
+ rhsc.type = ADDRESSOF;
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ process_constraint_1 (new_constraint (*lhsp, rhsc), true);
+ VEC_free (ce_s, heap, lhsc);
+ }
+}
+
/* Walk statement T setting up aliasing constraints according to the
references found in T. This function is the main part of the
constraint builder. AI points to auxiliary alias information used
&& could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
handle_const_call (t);
}
+ else if (flags & ECF_PURE)
+ {
+ handle_pure_call (t);
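+ /* The result may additionally point to escaped and nonlocal
+ memory; handle_lhs_call deals with that. */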
+ if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
+ && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
+ handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
+ }
/* Pure functions can return addresses in and of memory
reachable from their arguments, but they are not an escape
point for reachable memory of their arguments. But as we
pi->pt_null = 1;
else if (vi->id == anything_id
|| vi->id == nonlocal_id
- || vi->id == escaped_id)
+ || vi->id == escaped_id
+ || vi->id == callused_id)
was_pt_anything = 1;
else if (vi->id == readonly_id)
was_pt_anything = 1;
variable for escaped_id. */
vi = get_varinfo (find (escaped_id));
+ /* If call-used memory escapes we need to include it in the
+ set of escaped variables. This can happen if a pure
+ function returns a pointer and this pointer escapes. */
+ if (bitmap_bit_p (vi->solution, callused_id))
+ {
+ varinfo_t cu_vi = get_varinfo (find (callused_id));
+ bitmap_ior_into (vi->solution, cu_vi->solution);
+ }
+
/* Mark variables in the solution call-clobbered. */
EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
{
return true;
}
+/* Compute the call-used variables. */
+
+void
+compute_call_used_vars (void)
+{
+ varinfo_t vi;
+ unsigned int i;
+ bitmap_iterator bi;
+ bool has_anything_id = false;
+
+ if (!have_alias_info)
+ return;
+
+ /* This variable may have been collapsed, let's get the real
+ variable for callused_id. */
+ vi = get_varinfo (find (callused_id));
+
+ /* Mark variables in the solution call-used. */
+ EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
+ {
+ varinfo_t vi = get_varinfo (i);
+
+ if (vi->is_artificial_var)
+ {
+ /* For anything_id and integer_id we need to make
+ all local addressable vars call-used. */
+ if (vi->id == anything_id
+ || vi->id == integer_id)
+ has_anything_id = true;
+ }
+
+ /* Only artificial heap vars are of further interest here. */
+ if (vi->is_artificial_var && !vi->is_heap_var)
+ continue;
+
+ if ((TREE_CODE (vi->decl) == VAR_DECL
+ || TREE_CODE (vi->decl) == PARM_DECL
+ || TREE_CODE (vi->decl) == RESULT_DECL)
+ && !unmodifiable_var_p (vi->decl))
+ bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
+ }
+
+ /* If anything is call-used, add all addressable locals to the set. */
+ if (has_anything_id)
+ bitmap_ior_into (gimple_call_used_vars (cfun),
+ gimple_addressable_vars (cfun));
+}
+
/* Dump points-to information to OUTFILE. */
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
+ /* Create the CALLUSED variable, used to represent the set of call-used
+ memory. */
+ callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
+ var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
+ insert_vi_for_tree (callused_tree, var_callused);
+ var_callused->is_artificial_var = 1;
+ var_callused->offset = 0;
+ var_callused->size = ~0;
+ var_callused->fullsize = ~0;
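+ /* Special variables have their solution merged directly into
+ points-to sets; like ESCAPED, CALLUSED is used as a representative
+ instead, so it must not be special. */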
+ var_callused->is_special_var = 0;
+ VEC_safe_push (varinfo_t, heap, varmap, var_callused);
+
+ /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
+ lhs.type = SCALAR;
+ lhs.var = callused_id;
+ lhs.offset = 0;
+ rhs.type = DEREF;
+ rhs.var = callused_id;
+ rhs.offset = 0;
+ process_constraint_1 (new_constraint (lhs, rhs), true);
+
/* Create the INTEGER variable, used to represent that a variable points
to an INTEGER. */
integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");