/* Tree based points-to analysis
- Copyright (C) 2005-2013 Free Software Foundation, Inc.
+ Copyright (C) 2005-2014 Free Software Foundation, Inc.
Contributed by Daniel Berlin <dberlin@dberlin.org>
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "ggc.h"
#include "obstack.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "flags.h"
+#include "predict.h"
+#include "vec.h"
+#include "hashtab.h"
+#include "hash-set.h"
+#include "machmode.h"
+#include "hard-reg-set.h"
+#include "input.h"
+#include "function.h"
+#include "dominance.h"
+#include "cfg.h"
#include "basic-block.h"
#include "tree.h"
+#include "stor-layout.h"
+#include "stmt.h"
+#include "hash-table.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-expr.h"
+#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
+#include "hash-map.h"
+#include "plugin-api.h"
+#include "ipa-ref.h"
#include "cgraph.h"
+#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
+#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "diagnostic-core.h"
-#include "hash-table.h"
-#include "function.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
#include "alias.h"
-#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
program, then solve the resulting constraints in order to generate the
static alloc_pool variable_info_pool;
/* Map varinfo to final pt_solution. */
-static pointer_map_t *final_solutions;
+static hash_map<varinfo_t, pt_solution *> *final_solutions;
struct obstack final_solutions_obstack;
/* Table of variable info structures for constraint variables.
/* Static IDs for the special variables. Variable ID zero is unused
and used as terminator for the sub-variable chain. */
-enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
+enum { nothing_id = 1, anything_id = 2, string_id = 3,
escaped_id = 4, nonlocal_id = 5,
storedanything_id = 6, integer_id = 7 };
/* A map mapping call statements to per-stmt variables for uses
and clobbers specific to the call. */
-static struct pointer_map_t *call_stmt_vars;
+static hash_map<gimple, varinfo_t> *call_stmt_vars;
/* Lookup or create the variable for the call statement CALL. */
static varinfo_t
get_call_vi (gimple call)
{
- void **slot_p;
varinfo_t vi, vi2;
- slot_p = pointer_map_insert (call_stmt_vars, call);
- if (*slot_p)
- return (varinfo_t) *slot_p;
+ bool existed;
+ varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
+ if (existed)
+ return *slot_p;
vi = new_var_info (NULL_TREE, "CALLUSED");
vi->offset = 0;
vi->next = vi2->id;
- *slot_p = (void *) vi;
+ *slot_p = vi;
return vi;
}
static varinfo_t
lookup_call_use_vi (gimple call)
{
- void **slot_p;
-
- slot_p = pointer_map_contains (call_stmt_vars, call);
+ varinfo_t *slot_p = call_stmt_vars->get (call);
if (slot_p)
- return (varinfo_t) *slot_p;
+ return *slot_p;
return NULL;
}
return found;
}
-/* Union two constraint vectors, TO and FROM. Put the result in TO. */
+/* Union two constraint vectors, TO and FROM.  Put the result in TO.
+   Returns true if the TO set is changed.  */
-static void
+static bool
constraint_set_union (vec<constraint_t> *to,
vec<constraint_t> *from)
{
int i;
constraint_t c;
+ bool any_change = false;
FOR_EACH_VEC_ELT (*from, i, c)
{
{
unsigned int place = to->lower_bound (c, constraint_less);
to->safe_insert (place, c);
+ any_change = true;
}
}
+ return any_change;
}
/* Expands the solution in SET to all sub-fields of variables included. */
-static void
-solution_set_expand (bitmap set)
+static bitmap
+solution_set_expand (bitmap set, bitmap *expanded)
{
bitmap_iterator bi;
unsigned j;
+ if (*expanded)
+ return *expanded;
+
+ *expanded = BITMAP_ALLOC (&iteration_obstack);
+
/* In a first pass expand to the head of the variables we need to
add all sub-fields off. This avoids quadratic behavior. */
EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
if (v->is_artificial_var
|| v->is_full_var)
continue;
- bitmap_set_bit (set, v->head);
+ bitmap_set_bit (*expanded, v->head);
}
/* In the second pass now expand all head variables with subfields. */
- EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
+ EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
{
varinfo_t v = get_varinfo (j);
- if (v->is_artificial_var
- || v->is_full_var
- || v->head != j)
+ if (v->head != j)
continue;
for (v = vi_next (v); v != NULL; v = vi_next (v))
- bitmap_set_bit (set, v->id);
+ bitmap_set_bit (*expanded, v->id);
}
+
+ /* And finally set the rest of the bits from SET. */
+ bitmap_ior_into (*expanded, set);
+
+ return *expanded;
}
-/* Union solution sets TO and FROM, and add INC to each member of FROM in the
+/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
process. */
static bool
-set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
+set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
+ bitmap *expanded_delta)
{
bool changed = false;
bitmap_iterator bi;
unsigned int i;
- /* If the solution of FROM contains anything it is good enough to transfer
+ /* If the solution of DELTA contains anything it is good enough to transfer
this to TO. */
- if (bitmap_bit_p (from, anything_id))
+ if (bitmap_bit_p (delta, anything_id))
return bitmap_set_bit (to, anything_id);
- /* For zero offset simply union the solution into the destination. */
- if (inc == 0)
- return bitmap_ior_into (to, from);
-
/* If the offset is unknown we have to expand the solution to
all subfields. */
if (inc == UNKNOWN_OFFSET)
{
- bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
- bitmap_copy (tmp, from);
- solution_set_expand (tmp);
- changed |= bitmap_ior_into (to, tmp);
- BITMAP_FREE (tmp);
+ delta = solution_set_expand (delta, expanded_delta);
+ changed |= bitmap_ior_into (to, delta);
return changed;
}
/* For non-zero offset union the offsetted solution into the destination. */
- EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
{
varinfo_t vi = get_varinfo (i);
changed |= bitmap_set_bit (to, i);
else
{
- unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;
+ HOST_WIDE_INT fieldoffset = vi->offset + inc;
+ unsigned HOST_WIDE_INT size = vi->size;
/* If the offset makes the pointer point to before the
variable use offset zero for the field lookup. */
- if (inc < 0
- && fieldoffset > vi->offset)
- fieldoffset = 0;
-
- vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
-
- changed |= bitmap_set_bit (to, vi->id);
- /* If the result is not exactly at fieldoffset include the next
- field as well. See get_constraint_for_ptr_offset for more
- rationale. */
- if (vi->offset != fieldoffset
- && vi->next != 0)
- changed |= bitmap_set_bit (to, vi->next);
+ if (fieldoffset < 0)
+ vi = get_varinfo (vi->head);
+ else
+ vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
+
+ do
+ {
+ changed |= bitmap_set_bit (to, vi->id);
+ if (vi->is_full_var
+ || vi->next == 0)
+ break;
+
+ /* We have to include all fields that overlap the current field
+ shifted by inc. */
+ vi = vi_next (vi);
+ }
+ while (vi->offset < fieldoffset + size);
}
}
/* Condense two variable nodes into a single variable node, by moving
- all associated info from SRC to TO. */
+ all associated info from FROM to TO. Returns true if TO node's
+ constraint set changes after the merge. */
-static void
+static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
unsigned int from)
{
unsigned int i;
constraint_t c;
+ bool any_change = false;
gcc_checking_assert (find (from) == to);
/* Move all complex constraints from src node into to node */
FOR_EACH_VEC_ELT (graph->complex[from], i, c)
{
- /* In complex constraints for node src, we may have either
- a = *src, and *src = a, or an offseted constraint which are
+ /* In complex constraints for node FROM, we may have either
+ a = *FROM, and *FROM = a, or an offseted constraint which are
always added to the rhs node's constraints. */
if (c->rhs.type == DEREF)
c->lhs.var = to;
else
c->rhs.var = to;
+
}
- constraint_set_union (&graph->complex[to], &graph->complex[from]);
+ any_change = constraint_set_union (&graph->complex[to],
+ &graph->complex[from]);
graph->complex[from].release ();
+ return any_change;
}
stats.unified_vars_static++;
merge_graph_nodes (graph, to, from);
- merge_node_constraints (graph, to, from);
+ if (merge_node_constraints (graph, to, from))
+ {
+ if (update_changed)
+ bitmap_set_bit (changed, to);
+ }
/* Mark TO as changed if FROM was changed. If TO was already marked
as changed, decrease the changed count. */
static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
- bitmap delta)
+ bitmap delta, bitmap *expanded_delta)
{
unsigned int lhs = c->lhs.var;
bool flag = false;
dereferenced at all valid offsets. */
if (roffset == UNKNOWN_OFFSET)
{
- solution_set_expand (delta);
+ delta = solution_set_expand (delta, expanded_delta);
/* No further offset processing is necessary. */
roffset = 0;
}
{
varinfo_t v = get_varinfo (j);
HOST_WIDE_INT fieldoffset = v->offset + roffset;
+ unsigned HOST_WIDE_INT size = v->size;
unsigned int t;
if (v->is_full_var)
- fieldoffset = v->offset;
+ ;
else if (roffset != 0)
- v = first_vi_for_offset (v, fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
+ {
+ if (fieldoffset < 0)
+ v = get_varinfo (v->head);
+ else
+ v = first_or_preceding_vi_for_offset (v, fieldoffset);
+ }
+ /* We have to include all fields that overlap the current field
+ shifted by roffset. */
do
{
t = find (v->id);
&& add_graph_edge (graph, lhs, t))
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
- /* If the variable is not exactly at the requested offset
- we have to include the next one. */
- if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
+ if (v->is_full_var
|| v->next == 0)
break;
v = vi_next (v);
- fieldoffset = v->offset;
}
- while (1);
+ while (v->offset < fieldoffset + size);
}
done:
as the starting solution for x. */
static void
-do_ds_constraint (constraint_t c, bitmap delta)
+do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
unsigned int rhs = c->rhs.var;
bitmap sol = get_varinfo (rhs)->solution;
dereferenced at all valid offsets. */
if (loff == UNKNOWN_OFFSET)
{
- solution_set_expand (delta);
+ delta = solution_set_expand (delta, expanded_delta);
loff = 0;
}
varinfo_t v = get_varinfo (j);
unsigned int t;
HOST_WIDE_INT fieldoffset = v->offset + loff;
+ unsigned HOST_WIDE_INT size = v->size;
if (v->is_full_var)
- fieldoffset = v->offset;
+ ;
else if (loff != 0)
- v = first_vi_for_offset (v, fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
+ {
+ if (fieldoffset < 0)
+ v = get_varinfo (v->head);
+ else
+ v = first_or_preceding_vi_for_offset (v, fieldoffset);
+ }
+ /* We have to include all fields that overlap the current field
+ shifted by loff. */
do
{
if (v->may_have_pointers)
bitmap_set_bit (changed, t);
}
- /* If the variable is not exactly at the requested offset
- we have to include the next one. */
- if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
+ if (v->is_full_var
|| v->next == 0)
break;
v = vi_next (v);
- fieldoffset = v->offset;
}
- while (1);
+ while (v->offset < fieldoffset + size);
}
}
constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
static void
-do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
+do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
+ bitmap *expanded_delta)
{
if (c->lhs.type == DEREF)
{
else
{
/* *x = y */
- do_ds_constraint (c, delta);
+ do_ds_constraint (c, delta, expanded_delta);
}
}
else if (c->rhs.type == DEREF)
{
/* x = *y */
if (!(get_varinfo (c->lhs.var)->is_special_var))
- do_sd_constraint (graph, c, delta);
+ do_sd_constraint (graph, c, delta, expanded_delta);
}
else
{
bitmap tmp;
- bitmap solution;
bool flag = false;
- gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
- solution = get_varinfo (c->rhs.var)->solution;
+ gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
+ && c->rhs.offset != 0 && c->lhs.offset == 0);
tmp = get_varinfo (c->lhs.var)->solution;
- flag = set_union_with_increment (tmp, solution, c->rhs.offset);
+ flag = set_union_with_increment (tmp, delta, c->rhs.offset,
+ expanded_delta);
if (flag)
bitmap_set_bit (changed, c->lhs.var);
/* A hashtable for mapping a bitmap of labels->pointer equivalence
classes. */
-static hash_table <equiv_class_hasher> pointer_equiv_class_table;
+static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
/* A hashtable for mapping a bitmap of labels->location equivalence
classes. */
-static hash_table <equiv_class_hasher> location_equiv_class_table;
+static hash_table<equiv_class_hasher> *location_equiv_class_table;
/* Lookup a equivalence class in TABLE by the bitmap of LABELS with
hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
is equivalent to. */
static equiv_class_label *
-equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
+equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
+ bitmap labels)
{
equiv_class_label **slot;
equiv_class_label ecl;
ecl.labels = labels;
ecl.hashcode = bitmap_hash (labels);
- slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
+ slot = table->find_slot (&ecl, INSERT);
if (!*slot)
{
*slot = XNEW (struct equiv_class_label);
struct scc_info *si = init_scc_info (size);
bitmap_obstack_initialize (&iteration_obstack);
- pointer_equiv_class_table.create (511);
- location_equiv_class_table.create (511);
+ pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
+ location_equiv_class_table
+ = new hash_table<equiv_class_hasher> (511);
pointer_equiv_class = 1;
location_equiv_class = 1;
free (graph->points_to);
free (graph->eq_rep);
sbitmap_free (graph->direct_nodes);
- pointer_equiv_class_table.dispose ();
- location_equiv_class_table.dispose ();
+ delete pointer_equiv_class_table;
+ pointer_equiv_class_table = NULL;
+ delete location_equiv_class_table;
+ location_equiv_class_table = NULL;
bitmap_obstack_release (&iteration_obstack);
}
&& !bitmap_empty_p (get_varinfo (node)->solution))
{
unsigned int i;
- vec<unsigned> queue = vNULL;
+ auto_vec<unsigned> queue;
int queuepos;
unsigned int to = find (graph->indirect_cycles[node]);
bitmap_iterator bi;
{
unify_nodes (graph, to, i, true);
}
- queue.release ();
return true;
}
return false;
solution_empty = bitmap_empty_p (solution);
/* Process the complex constraints */
+ bitmap expanded_pts = NULL;
FOR_EACH_VEC_ELT (complex, j, c)
{
/* XXX: This is going to unsort the constraints in
is a constraint where the lhs side is receiving
some set from elsewhere. */
if (!solution_empty || c->lhs.type != DEREF)
- do_complex_constraint (graph, c, pts);
+ do_complex_constraint (graph, c, pts, &expanded_pts);
}
+ BITMAP_FREE (expanded_pts);
solution_empty = bitmap_empty_p (solution);
}
/* Map from trees to variable infos. */
-static struct pointer_map_t *vi_for_tree;
+static hash_map<tree, varinfo_t> *vi_for_tree;
/* Insert ID as the variable id for tree T in the vi_for_tree map. */
static void
insert_vi_for_tree (tree t, varinfo_t vi)
{
- void **slot = pointer_map_insert (vi_for_tree, t);
gcc_assert (vi);
- gcc_assert (*slot == NULL);
- *slot = vi;
+ gcc_assert (!vi_for_tree->put (t, vi));
}
/* Find the variable info for tree T in VI_FOR_TREE. If T does not
static varinfo_t
lookup_vi_for_tree (tree t)
{
- void **slot = pointer_map_contains (vi_for_tree, t);
+ varinfo_t *slot = vi_for_tree->get (t);
if (slot == NULL)
return NULL;
- return (varinfo_t) *slot;
+ return *slot;
}
/* Return a printable name for DECL */
static varinfo_t
get_vi_for_tree (tree t)
{
- void **slot = pointer_map_contains (vi_for_tree, t);
+ varinfo_t *slot = vi_for_tree->get (t);
if (slot == NULL)
return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
- return (varinfo_t) *slot;
+ return *slot;
}
/* Get a scalar constraint expression for a new temporary variable. */
if (TREE_CODE (t) == VAR_DECL
&& (TREE_STATIC (t) || DECL_EXTERNAL (t)))
{
- struct varpool_node *node = varpool_get_node (t);
+ varpool_node *node = varpool_node::get (t);
if (node && node->alias && node->analyzed)
{
- node = varpool_variable_node (node, NULL);
+ node = node->ultimate_alias_target ();
t = node->decl;
}
}
cexpr.var = vi->id;
cexpr.type = SCALAR;
cexpr.offset = 0;
- /* If we determine the result is "anything", and we know this is readonly,
- say it points to readonly memory instead. */
- if (cexpr.var == anything_id && TREE_READONLY (t))
- {
- gcc_unreachable ();
- cexpr.type = ADDRESSOF;
- cexpr.var = readonly_id;
- }
/* If we are not taking the address of the constraint expr, add all
sub-fiels of the variable as well. */
|| !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
return -1;
- return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
- + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
+ return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
+ + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
}
else
{
/* Sign-extend the offset. */
- double_int soffset = tree_to_double_int (offset)
- .sext (TYPE_PRECISION (TREE_TYPE (offset)));
- if (!soffset.fits_shwi ())
+ offset_int soffset = offset_int::from (offset, SIGNED);
+ if (!wi::fits_shwi_p (soffset))
rhsoffset = UNKNOWN_OFFSET;
else
{
/* Make sure the bit-offset also fits. */
- HOST_WIDE_INT rhsunitoffset = soffset.low;
+ HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
rhsoffset = rhsunitoffset * BITS_PER_UNIT;
if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
rhsoffset = UNKNOWN_OFFSET;
if (c.type == ADDRESSOF
/* If this varinfo represents a full variable just use it. */
&& curr->is_full_var)
- c.offset = 0;
+ ;
else if (c.type == ADDRESSOF
/* If we do not know the offset add all subfields. */
&& rhsoffset == UNKNOWN_OFFSET)
varinfo_t temp;
unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
- /* Search the sub-field which overlaps with the
- pointed-to offset. If the result is outside of the variable
- we have to provide a conservative result, as the variable is
- still reachable from the resulting pointer (even though it
- technically cannot point to anything). The last and first
- sub-fields are such conservative results.
- ??? If we always had a sub-field for &object + 1 then
- we could represent this in a more precise way. */
+ /* If curr->offset + rhsoffset is less than zero adjust it. */
if (rhsoffset < 0
&& curr->offset < offset)
offset = 0;
- temp = first_or_preceding_vi_for_offset (curr, offset);
- /* If the found variable is not exactly at the pointed to
- result, we have to include the next variable in the
- solution as well. Otherwise two increments by offset / 2
- do not result in the same or a conservative superset
- solution. */
- if (temp->offset != offset
- && temp->next != 0)
+ /* We have to include all fields that overlap the current
+ field shifted by rhsoffset. And we include at least
+ the last or the first field of the variable to represent
+ reachability of off-bound addresses, in particular &object + 1,
+ conservatively correct. */
+ temp = first_or_preceding_vi_for_offset (curr, offset);
+ c.var = temp->id;
+ c.offset = 0;
+ temp = vi_next (temp);
+ while (temp
+ && temp->offset < offset + curr->size)
{
struct constraint_expr c2;
- c2.var = temp->next;
+ c2.var = temp->id;
c2.type = ADDRESSOF;
c2.offset = 0;
results->safe_push (c2);
+ temp = vi_next (temp);
}
- c.var = temp->id;
- c.offset = 0;
+ }
+ else if (c.type == SCALAR)
+ {
+ gcc_assert (c.offset == 0);
+ c.offset = rhsoffset;
}
else
- c.offset = rhsoffset;
+ /* We shouldn't get any DEREFs here. */
+ gcc_unreachable ();
(*results)[j] = c;
}
return;
}
- /* Handle type-punning through unions. If we are extracting a pointer
- from a union via a possibly type-punning access that pointer
- points to anything, similar to a conversion of an integer to
- a pointer. */
- if (!lhs_p)
- {
- tree u;
- for (u = t;
- TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
- u = TREE_OPERAND (u, 0))
- if (TREE_CODE (u) == COMPONENT_REF
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
- {
- struct constraint_expr temp;
-
- temp.offset = 0;
- temp.var = anything_id;
- temp.type = ADDRESSOF;
- results->safe_push (temp);
- return;
- }
- }
-
t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
/* Pretend to take the address of the base, we'll take care of
return;
}
- /* String constants are read-only. */
+ /* String constants are read-only, ideally we'd have a CONST_DECL
+ for those. */
if (TREE_CODE (t) == STRING_CST)
{
- temp.var = readonly_id;
+ temp.var = string_id;
temp.type = SCALAR;
temp.offset = 0;
results->safe_push (temp);
{
unsigned HOST_WIDE_INT size;
if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
- size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
+ size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
else
size = -1;
for (; curr; curr = vi_next (curr))
{
unsigned int i;
tree val;
- vec<ce_s> tmp = vNULL;
+ auto_vec<ce_s> tmp;
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
{
struct constraint_expr *rhsp;
results->safe_push (*rhsp);
tmp.truncate (0);
}
- tmp.release ();
/* We do not know whether the constructor was complete,
so technically we have to add &NOTHING or &ANYTHING
like we do for an empty constructor as well. */
do_structure_copy (tree lhsop, tree rhsop)
{
struct constraint_expr *lhsp, *rhsp;
- vec<ce_s> lhsc = vNULL;
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s> lhsc;
+ auto_vec<ce_s> rhsc;
unsigned j;
get_constraint_for (lhsop, &lhsc);
}
else
gcc_unreachable ();
-
- lhsc.release ();
- rhsc.release ();
}
/* Create constraints ID = { rhsc }. */
static void
make_constraint_to (unsigned id, tree op)
{
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s> rhsc;
get_constraint_for_rhs (op, &rhsc);
make_constraints_to (id, rhsc);
- rhsc.release ();
}
/* Create a constraint ID = &FROM. */
lhs.offset = 0;
rhs.type = DEREF;
rhs.var = vi->id;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
-
- /* VAR = VAR + UNKNOWN; */
- lhs.type = SCALAR;
- lhs.var = vi->id;
- lhs.offset = 0;
- rhs.type = SCALAR;
- rhs.var = vi->id;
rhs.offset = UNKNOWN_OFFSET;
process_constraint (new_constraint (lhs, rhs));
}
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
- vec<ce_s> tmpc = vNULL;
+ auto_vec<ce_s> tmpc;
struct constraint_expr lhsc, *c;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
lhsc.var = escaped_id;
lhsc.type = SCALAR;
FOR_EACH_VEC_ELT (tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
- tmpc.release ();
}
/* Regular functions return nonlocal memory. */
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
tree fndecl)
{
- vec<ce_s> lhsc = vNULL;
+ auto_vec<ce_s> lhsc;
get_constraint_for (lhs, &lhsc);
/* If the store is to a global decl make sure to
/* If the call returns an argument unmodified override the rhs
constraints. */
- flags = gimple_call_return_flags (stmt);
if (flags & ERF_RETURNS_ARG
&& (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
{
struct constraint_expr tmpc;
rhsc.create (0);
vi = make_heapvar ("HEAP");
- /* We marking allocated storage local, we deal with it becoming
+ /* We are marking allocated storage local, we deal with it becoming
global by escaping and setting of vars_contains_escaped_heap. */
DECL_EXTERNAL (vi->decl) = 0;
vi->is_global_var = 0;
}
else
process_all_all_constraints (lhsc, rhsc);
-
- lhsc.release ();
}
/* For non-IPA mode, generate constraints necessary for a call of a
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
tree arg = gimple_call_arg (stmt, k);
- vec<ce_s> argc = vNULL;
+ auto_vec<ce_s> argc;
unsigned i;
struct constraint_expr *argp;
get_constraint_for_rhs (arg, &argc);
FOR_EACH_VEC_ELT (argc, i, argp)
results->safe_push (*argp);
- argc.release ();
}
/* May return addresses of globals. */
was handled, otherwise false. */
static bool
-find_func_aliases_for_builtin_call (gimple t)
+find_func_aliases_for_builtin_call (struct function *fn, gimple t)
{
tree fndecl = gimple_call_fndecl (t);
- vec<ce_s> lhsc = vNULL;
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s, 2> lhsc;
+ auto_vec<ce_s, 4> rhsc;
varinfo_t fi;
if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
else
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
+ lhsc.truncate (0);
+ rhsc.truncate (0);
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
return true;
}
case BUILT_IN_MEMSET:
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
+ lhsc.truncate (0);
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
do_deref (&lhsc);
ac.offset = 0;
FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, ac));
- lhsc.release ();
+ return true;
+ }
+ case BUILT_IN_POSIX_MEMALIGN:
+ {
+ tree ptrptr = gimple_call_arg (t, 0);
+ get_constraint_for (ptrptr, &lhsc);
+ do_deref (&lhsc);
+ varinfo_t vi = make_heapvar ("HEAP");
+ /* We are marking allocated storage local, we deal with it becoming
+ global by escaping and setting of vars_contains_escaped_heap. */
+ DECL_EXTERNAL (vi->decl) = 0;
+ vi->is_global_var = 0;
+ struct constraint_expr tmpc;
+ tmpc.var = vi->id;
+ tmpc.offset = 0;
+ tmpc.type = ADDRESSOF;
+ rhsc.safe_push (tmpc);
+ process_all_all_constraints (lhsc, rhsc);
return true;
}
case BUILT_IN_ASSUME_ALIGNED:
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
}
return true;
}
return true;
case BUILT_IN_STRDUP:
case BUILT_IN_STRNDUP:
+ case BUILT_IN_REALLOC:
if (gimple_call_lhs (t))
{
- handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
+ handle_lhs_call (t, gimple_call_lhs (t),
+ gimple_call_return_flags (t) | ERF_NOALIAS,
vNULL, fndecl);
get_constraint_for_ptr_offset (gimple_call_lhs (t),
NULL_TREE, &lhsc);
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
+ lhsc.truncate (0);
+ rhsc.truncate (0);
+ /* For realloc the resulting pointer can be equal to the
+ argument as well. But only doing this wouldn't be
+ correct because with ptr == 0 realloc behaves like malloc. */
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
+ {
+ get_constraint_for (gimple_call_lhs (t), &lhsc);
+ get_constraint_for (gimple_call_arg (t, 0), &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ }
return true;
}
break;
rhsc.safe_push (nul);
get_constraint_for (gimple_call_lhs (t), &lhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
}
return true;
/* Trampolines are special - they set up passing the static
lhs = get_function_part_constraint (nfi, fi_static_chain);
get_constraint_for (frame, &rhsc);
FOR_EACH_VEC_ELT (rhsc, i, rhsp)
- process_constraint (new_constraint (lhs, *rhsp));
- rhsc.release ();
+ process_constraint (new_constraint (lhs, *rhsp));
+ rhsc.truncate (0);
/* Make the frame point to the function for
the trampoline adjustment call. */
do_deref (&lhsc);
get_constraint_for (nfunc, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- rhsc.release ();
- lhsc.release ();
return true;
}
get_constraint_for (tramp, &rhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- rhsc.release ();
- lhsc.release ();
}
return true;
}
do_deref (&lhsc);
get_constraint_for (src, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
return true;
}
CASE_BUILT_IN_TM_LOAD (1):
get_constraint_for (addr, &rhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- lhsc.release ();
- rhsc.release ();
return true;
}
/* Variadic argument handling needs to be handled in IPA
and otherwise are just all nonlocal variables. */
if (in_ipa_mode)
{
- fi = lookup_vi_for_tree (cfun->decl);
+ fi = lookup_vi_for_tree (fn->decl);
rhs = get_function_part_constraint (fi, ~0);
rhs.type = ADDRESSOF;
}
}
FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
- lhsc.release ();
/* va_list is clobbered. */
make_constraint_to (get_call_clobber_vi (t)->id, valist);
return true;
{
fi = NULL;
if (!in_ipa_mode
- || !(fi = get_vi_for_tree (cfun->decl)))
+ || !(fi = get_vi_for_tree (fn->decl)))
make_constraint_from (get_varinfo (escaped_id), anything_id);
else if (in_ipa_mode
&& fi != NULL)
/* Create constraints for the call T. */
static void
-find_func_aliases_for_call (gimple t)
+find_func_aliases_for_call (struct function *fn, gimple t)
{
tree fndecl = gimple_call_fndecl (t);
- vec<ce_s> lhsc = vNULL;
- vec<ce_s> rhsc = vNULL;
varinfo_t fi;
if (fndecl != NULL_TREE
&& DECL_BUILT_IN (fndecl)
- && find_func_aliases_for_builtin_call (t))
+ && find_func_aliases_for_builtin_call (fn, t))
return;
fi = get_fi_for_callee (t);
if (!in_ipa_mode
|| (fndecl && !fi->is_fn_info))
{
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s, 16> rhsc;
int flags = gimple_call_flags (t);
/* Const functions can return their arguments and addresses
else
handle_rhs_call (t, &rhsc);
if (gimple_call_lhs (t))
- handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
- rhsc.release ();
+ handle_lhs_call (t, gimple_call_lhs (t),
+ gimple_call_return_flags (t), rhsc, fndecl);
}
else
{
+ auto_vec<ce_s, 2> rhsc;
tree lhsop;
unsigned j;
lhsop = gimple_call_lhs (t);
if (lhsop)
{
+ auto_vec<ce_s, 2> lhsc;
struct constraint_expr rhs;
struct constraint_expr *lhsp;
&& DECL_RESULT (fndecl)
&& DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
{
- vec<ce_s> tem = vNULL;
- tem.safe_push (rhs);
+ auto_vec<ce_s, 2> tem;
+ tem.quick_push (rhs);
do_deref (&tem);
+ gcc_checking_assert (tem.length () == 1);
rhs = tem[0];
- tem.release ();
}
FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
lhs = get_function_part_constraint (fi, fi_result);
FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- rhsc.release ();
+ rhsc.truncate (0);
}
/* If we use a static chain, pass it along. */
when building alias sets and computing alias grouping heuristics. */
static void
-find_func_aliases (gimple origt)
+find_func_aliases (struct function *fn, gimple origt)
{
gimple t = origt;
- vec<ce_s> lhsc = vNULL;
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s, 16> lhsc;
+ auto_vec<ce_s, 16> rhsc;
struct constraint_expr *c;
varinfo_t fi;
In non-ipa mode, we need to generate constraints for each
pointer passed by address. */
else if (is_gimple_call (t))
- find_func_aliases_for_call (t);
+ find_func_aliases_for_call (fn, t);
/* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape
else if (code == COND_EXPR)
{
/* The result is a merge of both COND_EXPR arms. */
- vec<ce_s> tmp = vNULL;
+ auto_vec<ce_s, 2> tmp;
struct constraint_expr *rhsp;
unsigned i;
get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
FOR_EACH_VEC_ELT (tmp, i, rhsp)
rhsc.safe_push (*rhsp);
- tmp.release ();
}
else if (truth_value_p (code))
/* Truth value results are not pointer (parts). Or at least
else
{
/* All other operations are merges. */
- vec<ce_s> tmp = vNULL;
+ auto_vec<ce_s, 4> tmp;
struct constraint_expr *rhsp;
unsigned i, j;
get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
rhsc.safe_push (*rhsp);
tmp.truncate (0);
}
- tmp.release ();
}
process_all_all_constraints (lhsc, rhsc);
}
{
fi = NULL;
if (!in_ipa_mode
- || !(fi = get_vi_for_tree (cfun->decl)))
+ || !(fi = get_vi_for_tree (fn->decl)))
make_escape_constraint (gimple_return_retval (t));
else if (in_ipa_mode
&& fi != NULL)
any global memory. */
if (op)
{
- vec<ce_s> lhsc = vNULL;
+ auto_vec<ce_s, 2> lhsc;
struct constraint_expr rhsc, *lhsp;
unsigned j;
get_constraint_for (op, &lhsc);
rhsc.type = SCALAR;
FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhsc));
- lhsc.release ();
}
}
for (i = 0; i < gimple_asm_ninputs (t); ++i)
make_escape_constraint (op);
}
}
-
- rhsc.release ();
- lhsc.release ();
}
IPA constraint builder. */
static void
-find_func_clobbers (gimple origt)
+find_func_clobbers (struct function *fn, gimple origt)
{
gimple t = origt;
- vec<ce_s> lhsc = vNULL;
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s, 16> lhsc;
+ auto_vec<ce_s, 16> rhsc;
varinfo_t fi;
/* Add constraints for clobbered/used in IPA mode.
return;
/* We'd better have function information for the current function. */
- fi = lookup_vi_for_tree (cfun->decl);
+ fi = lookup_vi_for_tree (fn->decl);
gcc_assert (fi != NULL);
/* Account for stores in assignments and calls. */
while (handled_component_p (tem))
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
- && !auto_var_in_fn_p (tem, cfun->decl))
+ && !auto_var_in_fn_p (tem, fn->decl))
|| INDIRECT_REF_P (tem)
|| (TREE_CODE (tem) == MEM_REF
&& !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
&& auto_var_in_fn_p
- (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
{
struct constraint_expr lhsc, *rhsp;
unsigned i;
get_constraint_for_address_of (lhs, &rhsc);
FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhsc, *rhsp));
- rhsc.release ();
+ rhsc.truncate (0);
}
}
while (handled_component_p (tem))
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
- && !auto_var_in_fn_p (tem, cfun->decl))
+ && !auto_var_in_fn_p (tem, fn->decl))
|| INDIRECT_REF_P (tem)
|| (TREE_CODE (tem) == MEM_REF
&& !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
&& auto_var_in_fn_p
- (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
{
struct constraint_expr lhs, *rhsp;
unsigned i;
get_constraint_for_address_of (rhs, &rhsc);
FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- rhsc.release ();
+ rhsc.truncate (0);
}
}
lhs = get_function_part_constraint (fi, fi_clobbers);
FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- lhsc.release ();
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
lhs = get_function_part_constraint (fi, fi_uses);
FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- rhsc.release ();
return;
}
/* The following functions clobber memory pointed to by
their first argument. */
case BUILT_IN_MEMSET:
case BUILT_IN_MEMSET_CHK:
+ case BUILT_IN_POSIX_MEMALIGN:
{
tree dest = gimple_call_arg (t, 0);
unsigned i;
lhs = get_function_part_constraint (fi, fi_clobbers);
FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- lhsc.release ();
return;
}
/* The following functions clobber their second and third
get_constraint_for_address_of (arg, &rhsc);
FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- rhsc.release ();
+ rhsc.truncate (0);
}
/* Build constraints for propagating clobbers/uses along the
make_constraint_from (first_vi_for_offset (fi, fi_uses),
anything_id);
}
-
- rhsc.release ();
}
&& !pair->has_unknown_size
&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
{
- pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
+ pair->size += tree_to_uhwi (DECL_SIZE (field));
}
else
{
e.offset = offset + foff;
e.has_unknown_size = has_unknown_size;
if (!has_unknown_size)
- e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
+ e.size = tree_to_uhwi (DECL_SIZE (field));
else
e.size = -1;
e.must_have_pointers = must_have_pointers_p;
varinfo_t vi, newvi;
tree decl_type = TREE_TYPE (decl);
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
- vec<fieldoff_s> fieldstack = vNULL;
+ auto_vec<fieldoff_s> fieldstack;
fieldoff_s *fo;
unsigned int i;
+ varpool_node *vnode;
if (!declsize
|| !tree_fits_uhwi_p (declsize))
in IPA mode. Else we'd have to parse arbitrary initializers. */
&& !(in_ipa_mode
&& is_global_var (decl)
- && DECL_INITIAL (decl)))
+ && (vnode = varpool_node::get (decl))
+ && vnode->get_constructor ()))
{
fieldoff_s *fo = NULL;
bool notokay = false;
vi = new_var_info (decl, name);
vi->offset = 0;
vi->may_have_pointers = true;
- vi->fullsize = TREE_INT_CST_LOW (declsize);
+ vi->fullsize = tree_to_uhwi (declsize);
vi->size = vi->fullsize;
vi->is_full_var = true;
fieldstack.release ();
}
vi = new_var_info (decl, name);
- vi->fullsize = TREE_INT_CST_LOW (declsize);
+ vi->fullsize = tree_to_uhwi (declsize);
for (i = 0, newvi = vi;
fieldstack.iterate (i, &fo);
++i, newvi = vi_next (newvi))
}
}
- fieldstack.release ();
-
return vi;
}
for it. */
else
{
- struct varpool_node *vnode = varpool_get_node (decl);
+ varpool_node *vnode = varpool_node::get (decl);
/* For escaped variables initialize them from nonlocal. */
- if (!varpool_all_refs_explicit_p (vnode))
+ if (!vnode->all_refs_explicit_p ())
make_copy_constraint (vi, nonlocal_id);
/* If this is a global variable with an initializer and we are in
IPA mode generate constraints for it. */
- if (DECL_INITIAL (decl)
+ if (vnode->get_constructor ()
&& vnode->definition)
{
- vec<ce_s> rhsc = vNULL;
+ auto_vec<ce_s> rhsc;
struct constraint_expr lhs, *rhsp;
unsigned i;
- get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
+ get_constraint_for_rhs (vnode->get_constructor (), &rhsc);
lhs.var = vi->id;
lhs.offset = 0;
lhs.type = SCALAR;
process_constraint (new_constraint (lhs, *rhsp));
/* If this is a variable that escapes from the unit
the initializer escapes as well. */
- if (!varpool_all_refs_explicit_p (vnode))
+ if (!vnode->all_refs_explicit_p ())
{
lhs.var = escaped_id;
lhs.offset = 0;
FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
- rhsc.release ();
}
}
}
fprintf (file, "\n");
}
-/* Print the points-to solution for VAR to stdout. */
+/* Print the points-to solution for VAR to stderr. */
DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
- dump_solution_for_var (stdout, var);
+ dump_solution_for_var (stderr, var);
}
/* Create varinfo structures for all of the variables in the
function for intraprocedural mode. */
static void
-intra_create_variable_infos (void)
+intra_create_variable_infos (struct function *fn)
{
tree t;
/* For each incoming pointer argument arg, create the constraint ARG
= NONLOCAL or a dummy variable if it is a restrict qualified
passed-by-reference argument. */
- for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
+ for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
{
varinfo_t p = get_vi_for_tree (t);
}
/* Add a constraint for a result decl that is passed by reference. */
- if (DECL_RESULT (cfun->decl)
- && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
+ if (DECL_RESULT (fn->decl)
+ && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
{
- varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
+ varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
for (p = result_vi; p; p = vi_next (p))
make_constraint_from (p, nonlocal_id);
}
/* Add a constraint for the incoming static chain parameter. */
- if (cfun->static_chain_decl != NULL_TREE)
+ if (fn->static_chain_decl != NULL_TREE)
{
- varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
+ varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
for (p = chain_vi; p; p = vi_next (p))
make_constraint_from (p, nonlocal_id);
/* Shared_bitmap hashtable. */
-static hash_table <shared_bitmap_hasher> shared_bitmap_table;
+static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
/* Look up a bitmap in the shared bitmap hashtable, and return an already
existing instance if there is one, NULL otherwise. */
sbi.pt_vars = pt_vars;
sbi.hashcode = bitmap_hash (pt_vars);
- slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
- NO_INSERT);
+ slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
if (!slot)
return NULL;
else
sbi->pt_vars = pt_vars;
sbi->hashcode = bitmap_hash (pt_vars);
- slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
+ slot = shared_bitmap_table->find_slot (sbi, INSERT);
gcc_assert (!*slot);
*slot = sbi;
}
bitmap finished_solution;
bitmap result;
varinfo_t vi;
- void **slot;
struct pt_solution *pt;
/* This variable may have been collapsed, let's get the real
vi = get_varinfo (find (orig_vi->id));
/* See if we have already computed the solution and return it. */
- slot = pointer_map_insert (final_solutions, vi);
+ pt_solution **slot = &final_solutions->get_or_insert (vi);
if (*slot != NULL)
- return *(struct pt_solution *)*slot;
+ return **slot;
*slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
memset (pt, 0, sizeof (struct pt_solution));
pt->ipa_escaped = 1;
else
pt->escaped = 1;
+ /* Expand some special vars of ESCAPED in-place here. */
+ varinfo_t evi = get_varinfo (find (escaped_id));
+ if (bitmap_bit_p (evi->solution, nonlocal_id))
+ pt->nonlocal = 1;
}
else if (vi->id == nonlocal_id)
pt->nonlocal = 1;
else if (vi->is_heap_var)
/* We represent heapvars in the points-to set properly. */
;
- else if (vi->id == readonly_id)
- /* Nobody cares. */
+ else if (vi->id == string_id)
+ /* Nobody cares - STRING_CSTs are read-only entities. */
;
else if (vi->id == anything_id
|| vi->id == integer_id)
struct constraint_expr lhs, rhs;
varinfo_t var_anything;
varinfo_t var_nothing;
- varinfo_t var_readonly;
+ varinfo_t var_string;
varinfo_t var_escaped;
varinfo_t var_nonlocal;
varinfo_t var_storedanything;
but this one are redundant. */
constraints.safe_push (new_constraint (lhs, rhs));
- /* Create the READONLY variable, used to represent that a variable
- points to readonly memory. */
- var_readonly = new_var_info (NULL_TREE, "READONLY");
- gcc_assert (var_readonly->id == readonly_id);
- var_readonly->is_artificial_var = 1;
- var_readonly->offset = 0;
- var_readonly->size = ~0;
- var_readonly->fullsize = ~0;
- var_readonly->is_special_var = 1;
-
- /* readonly memory points to anything, in order to make deref
- easier. In reality, it points to anything the particular
- readonly variable can point to, but we don't track this
- separately. */
- lhs.type = SCALAR;
- lhs.var = readonly_id;
- lhs.offset = 0;
- rhs.type = ADDRESSOF;
- rhs.var = readonly_id; /* FIXME */
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
+ /* Create the STRING variable, used to represent that a variable
+ points to a string literal. String literals don't contain
+ pointers so STRING doesn't point to anything. */
+ var_string = new_var_info (NULL_TREE, "STRING");
+ gcc_assert (var_string->id == string_id);
+ var_string->is_artificial_var = 1;
+ var_string->offset = 0;
+ var_string->size = ~0;
+ var_string->fullsize = ~0;
+ var_string->is_special_var = 1;
+ var_string->may_have_pointers = 0;
/* Create the ESCAPED variable, used to represent the set of escaped
memory. */
sizeof (struct variable_info), 30);
constraints.create (8);
varmap.create (8);
- vi_for_tree = pointer_map_create ();
- call_stmt_vars = pointer_map_create ();
+ vi_for_tree = new hash_map<tree, varinfo_t>;
+ call_stmt_vars = new hash_map<gimple, varinfo_t>;
memset (&stats, 0, sizeof (stats));
- shared_bitmap_table.create (511);
+ shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
init_base_vars ();
gcc_obstack_init (&fake_var_decl_obstack);
- final_solutions = pointer_map_create ();
+ final_solutions = new hash_map<varinfo_t, pt_solution *>;
gcc_obstack_init (&final_solutions_obstack);
}
init_alias_vars ();
- intra_create_variable_infos ();
+ intra_create_variable_infos (cfun);
/* Now walk all statements and build the constraint set. */
- FOR_EACH_BB (bb)
+ FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi;
gimple phi = gsi_stmt (gsi);
if (! virtual_operand_p (gimple_phi_result (phi)))
- find_func_aliases (phi);
+ find_func_aliases (cfun, phi);
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- find_func_aliases (stmt);
+ find_func_aliases (cfun, stmt);
}
}
}
/* Compute the call-used/clobbered sets. */
- FOR_EACH_BB (bb)
+ FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi;
{
unsigned int i;
- shared_bitmap_table.dispose ();
+ delete shared_bitmap_table;
+ shared_bitmap_table = NULL;
if (dump_file && (dump_flags & TDF_STATS))
fprintf (dump_file, "Points to sets created:%d\n",
stats.points_to_sets_created);
- pointer_map_destroy (vi_for_tree);
- pointer_map_destroy (call_stmt_vars);
+ delete vi_for_tree;
+ delete call_stmt_vars;
bitmap_obstack_release (&pta_obstack);
constraints.release ();
obstack_free (&fake_var_decl_obstack, NULL);
- pointer_map_destroy (final_solutions);
+ delete final_solutions;
obstack_free (&final_solutions_obstack, NULL);
}
return 0;
}
-static bool
-gate_tree_pta (void)
-{
- return flag_tree_pta;
-}
-
/* A dummy pass to cause points-to information to be computed via
TODO_rebuild_alias. */
GIMPLE_PASS, /* type */
"alias", /* name */
OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- false, /* has_execute */
TV_NONE, /* tv_id */
( PROP_cfg | PROP_ssa ), /* properties_required */
0, /* properties_provided */
{}
/* opt_pass methods: */
- bool gate () { return gate_tree_pta (); }
+ virtual bool gate (function *) { return flag_tree_pta; }
}; // class pass_build_alias
GIMPLE_PASS, /* type */
"ealias", /* name */
OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- false, /* has_execute */
TV_NONE, /* tv_id */
( PROP_cfg | PROP_ssa ), /* properties_required */
0, /* properties_provided */
{}
/* opt_pass methods: */
- bool gate () { return gate_tree_pta (); }
+ virtual bool gate (function *) { return flag_tree_pta; }
}; // class pass_build_ealias
}
-/* Return true if we should execute IPA PTA. */
-static bool
-gate_ipa_pta (void)
-{
- return (optimize
- && flag_ipa_pta
- /* Don't bother doing anything if the program has errors. */
- && !seen_error ());
-}
-
/* IPA PTA solutions for ESCAPED. */
struct pt_solution ipa_escaped_pt
= { true, false, false, false, false, false, false, false, NULL };
ipa_pta_execute (void)
{
struct cgraph_node *node;
- struct varpool_node *var;
+ varpool_node *var;
int from;
in_ipa_mode = 1;
if (dump_file && (dump_flags & TDF_DETAILS))
{
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
fprintf (dump_file, "\n");
}
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
- cgraph_get_body (node);
+ node->get_body ();
gcc_assert (!node->clone_of);
vi = create_function_info_for (node->decl,
alias_get_name (node->decl));
- cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
+ node->call_for_symbol_thunks_and_aliases
+ (associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
basic_block bb;
/* Nodes without a body are not interesting. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
if (dump_file)
}
func = DECL_STRUCT_FUNCTION (node->decl);
- push_cfun (func);
+ gcc_assert (cfun == NULL);
/* For externally visible or attribute used annotated functions use
local constraints for their arguments.
|| node->externally_visible
|| node->force_output)
{
- intra_create_variable_infos ();
+ intra_create_variable_infos (func);
/* We also need to make function return values escape. Nothing
escapes by returning from main though. */
gimple phi = gsi_stmt (gsi);
if (! virtual_operand_p (gimple_phi_result (phi)))
- find_func_aliases (phi);
+ find_func_aliases (func, phi);
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- find_func_aliases (stmt);
- find_func_clobbers (stmt);
+ find_func_aliases (func, stmt);
+ find_func_clobbers (func, stmt);
}
}
- pop_cfun ();
-
if (dump_file)
{
fprintf (dump_file, "\n");
tree ptr;
struct function *fn;
unsigned i;
- varinfo_t fi;
basic_block bb;
- struct pt_solution uses, clobbers;
- struct cgraph_edge *e;
/* Nodes without a body are not interesting. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
fn = DECL_STRUCT_FUNCTION (node->decl);
find_what_p_points_to (ptr);
}
- /* Compute the call-use and call-clobber sets for all direct calls. */
- fi = lookup_vi_for_tree (node->decl);
- gcc_assert (fi->is_fn_info);
- clobbers
- = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
- uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
- for (e = node->callers; e; e = e->next_caller)
- {
- if (!e->call_stmt)
- continue;
-
- *gimple_call_clobber_set (e->call_stmt) = clobbers;
- *gimple_call_use_set (e->call_stmt) = uses;
- }
-
/* Compute the call-use and call-clobber sets for indirect calls
and calls to external functions. */
FOR_EACH_BB_FN (bb, fn)
{
gimple stmt = gsi_stmt (gsi);
struct pt_solution *pt;
- varinfo_t vi;
+ varinfo_t vi, fi;
tree decl;
if (!is_gimple_call (stmt))
continue;
- /* Handle direct calls to external functions. */
+ /* Handle direct calls to functions with body. */
decl = gimple_call_fndecl (stmt);
if (decl
- && (!(fi = lookup_vi_for_tree (decl))
- || !fi->is_fn_info))
+ && (fi = lookup_vi_for_tree (decl))
+ && fi->is_fn_info)
+ {
+ *gimple_call_clobber_set (stmt)
+ = find_what_var_points_to
+ (first_vi_for_offset (fi, fi_clobbers));
+ *gimple_call_use_set (stmt)
+ = find_what_var_points_to
+ (first_vi_for_offset (fi, fi_uses));
+ }
+ /* Handle direct calls to external functions. */
+ else if (decl)
{
pt = gimple_call_use_set (stmt);
if (gimple_call_flags (stmt) & ECF_CONST)
pt->nonlocal = 1;
}
}
-
/* Handle indirect calls. */
- if (!decl
- && (fi = get_fi_for_callee (stmt)))
+ else if (!decl
+ && (fi = get_fi_for_callee (stmt)))
{
/* We need to accumulate all clobbers/uses of all possible
callees. */
SIMPLE_IPA_PASS, /* type */
"pta", /* name */
OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- true, /* has_execute */
TV_IPA_PTA, /* tv_id */
0, /* properties_required */
0, /* properties_provided */
{}
/* opt_pass methods: */
- bool gate () { return gate_ipa_pta (); }
- unsigned int execute () { return ipa_pta_execute (); }
+ virtual bool gate (function *)
+ {
+ return (optimize
+ && flag_ipa_pta
+ /* Don't bother doing anything if the program has errors. */
+ && !seen_error ());
+ }
+
+ virtual unsigned int execute (function *) { return ipa_pta_execute (); }
}; // class pass_ipa_pta