1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "basic-block.h"
32 #include "tree-flow.h"
33 #include "tree-inline.h"
34 #include "diagnostic-core.h"
39 #include "tree-pass.h"
40 #include "alloc-pool.h"
41 #include "splay-tree.h"
45 #include "pointer-set.h"
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
84 Each variable for a structure field has
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106 In order to solve the system of set constraints, the following is
109 1. Each constraint variable x has a solution set associated with it,
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
137 8. The process of walking the graph is iterated until no solution
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
157 /* IPA-PTA optimizations possible.
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode. This is
167 also needed to fix the pt_solution_includes_global predicate
168 (and thus ptr_deref_may_alias_global_p).
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
194 We never put function decls in points-to sets so we do not
195 keep the set of called functions for indirect calls.
197 And probably more. */
/* Whether structure fields are modeled as separate constraint
   variables (field-sensitive analysis, per the Pearce/Kelly/Hankin
   approach cited in the file header).  */
199 static bool use_field_sensitive
= true;
/* Nonzero when running the interprocedural (IPA-PTA) variant of the
   analysis described in the file-header notes.  */
200 static int in_ipa_mode
= 0;
202 /* Used for predecessor bitmaps. */
203 static bitmap_obstack predbitmap_obstack
;
205 /* Used for points-to sets. */
206 static bitmap_obstack pta_obstack
;
208 /* Used for oldsolution members of variables. */
209 static bitmap_obstack oldpta_obstack
;
211 /* Used for per-solver-iteration bitmaps. */
212 static bitmap_obstack iteration_obstack
;
214 static unsigned int create_variable_info_for (tree
, const char *);
215 typedef struct constraint_graph
*constraint_graph_t
;
216 static void unify_nodes (constraint_graph_t
, unsigned int, unsigned int, bool);
219 typedef struct constraint
*constraint_t
;
222 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
224 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Counters gathered while building and solving the constraint
   system (used for statistics dumps).  */
226 static struct constraint_stats
/* Total constraint variables created.  */
228 unsigned int total_vars
;
/* Variables found to be non-pointers.  */
229 unsigned int nonpointer_vars
;
/* Variables unified statically (before solving).  */
230 unsigned int unified_vars_static
;
/* Variables unified dynamically (during solving).  */
231 unsigned int unified_vars_dynamic
;
/* Solver iterations performed.  */
232 unsigned int iterations
;
/* Explicit graph edges added.  */
233 unsigned int num_edges
;
/* Implicit predecessor edges added (bumped in
   add_implicit_graph_edge).  */
234 unsigned int num_implicit_edges
;
/* Points-to sets created.  */
235 unsigned int points_to_sets_created
;
240 /* ID of this variable */
243 /* True if this is a variable created by the constraint analysis, such as
244 heap variables and constraints we had to break up. */
245 unsigned int is_artificial_var
: 1;
247 /* True if this is a special variable whose solution set should not be
249 unsigned int is_special_var
: 1;
251 /* True for variables whose size is not known or variable. */
252 unsigned int is_unknown_size_var
: 1;
254 /* True for (sub-)fields that represent a whole variable. */
255 unsigned int is_full_var
: 1;
257 /* True if this is a heap variable. */
258 unsigned int is_heap_var
: 1;
260 /* True if this field may contain pointers. */
261 unsigned int may_have_pointers
: 1;
263 /* True if this field has only restrict qualified pointers. */
264 unsigned int only_restrict_pointers
: 1;
266 /* True if this represents a global variable. */
267 unsigned int is_global_var
: 1;
269 /* True if this represents a IPA function info. */
270 unsigned int is_fn_info
: 1;
272 /* A link to the variable for the next field in this structure. */
273 struct variable_info
*next
;
275 /* Offset of this variable, in bits, from the base variable */
276 unsigned HOST_WIDE_INT offset
;
278 /* Size of the variable, in bits. */
279 unsigned HOST_WIDE_INT size
;
281 /* Full size of the base variable, in bits. */
282 unsigned HOST_WIDE_INT fullsize
;
284 /* Name of this variable */
287 /* Tree that this variable is associated with. */
290 /* Points-to set for this variable. */
293 /* Old points-to set for this variable. */
296 typedef struct variable_info
*varinfo_t
;
298 static varinfo_t
first_vi_for_offset (varinfo_t
, unsigned HOST_WIDE_INT
);
299 static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t
,
300 unsigned HOST_WIDE_INT
);
301 static varinfo_t
lookup_vi_for_tree (tree
);
302 static inline bool type_can_have_subvars (const_tree
);
304 /* Pool of variable info structures. */
305 static alloc_pool variable_info_pool
;
309 /* Table of variable info structures for constraint variables.
310 Indexed directly by variable info id. */
311 static vec
<varinfo_t
> varmap
;
313 /* Return the varmap element N */
315 static inline varinfo_t
316 get_varinfo (unsigned int n
)
321 /* Static IDs for the special variables. */
322 enum { nothing_id
= 0, anything_id
= 1, readonly_id
= 2,
323 escaped_id
= 3, nonlocal_id
= 4,
324 storedanything_id
= 5, integer_id
= 6 };
326 /* Return a new variable info structure consisting for a variable
327 named NAME, and using constraint graph node NODE. Append it
328 to the vector of variable info structures. */
331 new_var_info (tree t
, const char *name
)
333 unsigned index
= varmap
.length ();
334 varinfo_t ret
= (varinfo_t
) pool_alloc (variable_info_pool
);
339 /* Vars without decl are artificial and do not have sub-variables. */
340 ret
->is_artificial_var
= (t
== NULL_TREE
);
341 ret
->is_special_var
= false;
342 ret
->is_unknown_size_var
= false;
343 ret
->is_full_var
= (t
== NULL_TREE
);
344 ret
->is_heap_var
= false;
345 ret
->may_have_pointers
= true;
346 ret
->only_restrict_pointers
= false;
347 ret
->is_global_var
= (t
== NULL_TREE
);
348 ret
->is_fn_info
= false;
350 ret
->is_global_var
= (is_global_var (t
)
351 /* We have to treat even local register variables
353 || (TREE_CODE (t
) == VAR_DECL
354 && DECL_HARD_REGISTER (t
)));
355 ret
->solution
= BITMAP_ALLOC (&pta_obstack
);
356 ret
->oldsolution
= NULL
;
361 varmap
.safe_push (ret
);
367 /* A map mapping call statements to per-stmt variables for uses
368 and clobbers specific to the call. */
369 struct pointer_map_t
*call_stmt_vars
;
371 /* Lookup or create the variable for the call statement CALL. */
374 get_call_vi (gimple call
)
379 slot_p
= pointer_map_insert (call_stmt_vars
, call
);
381 return (varinfo_t
) *slot_p
;
383 vi
= new_var_info (NULL_TREE
, "CALLUSED");
387 vi
->is_full_var
= true;
389 vi
->next
= vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED");
393 vi2
->is_full_var
= true;
395 *slot_p
= (void *) vi
;
399 /* Lookup the variable for the call statement CALL representing
400 the uses. Returns NULL if there is nothing special about this call. */
403 lookup_call_use_vi (gimple call
)
407 slot_p
= pointer_map_contains (call_stmt_vars
, call
);
409 return (varinfo_t
) *slot_p
;
414 /* Lookup the variable for the call statement CALL representing
415 the clobbers. Returns NULL if there is nothing special about this call. */
418 lookup_call_clobber_vi (gimple call
)
420 varinfo_t uses
= lookup_call_use_vi (call
);
427 /* Lookup or create the variable for the call statement CALL representing
431 get_call_use_vi (gimple call
)
433 return get_call_vi (call
);
436 /* Lookup or create the variable for the call statement CALL representing
439 static varinfo_t ATTRIBUTE_UNUSED
440 get_call_clobber_vi (gimple call
)
442 return get_call_vi (call
)->next
;
446 typedef enum {SCALAR
, DEREF
, ADDRESSOF
} constraint_expr_type
;
448 /* An expression that appears in a constraint. */
450 struct constraint_expr
452 /* Constraint type. */
453 constraint_expr_type type
;
455 /* Variable we are referring to in the constraint. */
458 /* Offset, in bits, of this constraint from the beginning of
459 variables it ends up referring to.
461 IOW, in a deref constraint, we would deref, get the result set,
462 then add OFFSET to each member. */
463 HOST_WIDE_INT offset
;
466 /* Use 0x8000... as special unknown offset. */
/* NOTE(review): left-shifting a negative value is undefined behavior
   in ISO C; this relies on GCC's implementation-defined semantics.
   Consider expressing the minimum HOST_WIDE_INT value without the
   shift — TODO confirm against host configuration.  */
467 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
/* Element type shorthand for vectors of constraint expressions.  */
469 typedef struct constraint_expr ce_s
;
470 static void get_constraint_for_1 (tree
, vec
<ce_s
> *, bool, bool);
471 static void get_constraint_for (tree
, vec
<ce_s
> *);
472 static void get_constraint_for_rhs (tree
, vec
<ce_s
> *);
473 static void do_deref (vec
<ce_s
> *);
475 /* Our set constraints are made up of two constraint expressions, one
478 As described in the introduction, our set constraints each represent an
479 operation between set valued variables.
483 struct constraint_expr lhs
;
484 struct constraint_expr rhs
;
487 /* List of constraints that we use to build the constraint graph from. */
489 static vec
<constraint_t
> constraints
;
490 static alloc_pool constraint_pool
;
492 /* The constraint graph is represented as an array of bitmaps
493 containing successor nodes. */
495 struct constraint_graph
497 /* Size of this graph, which may be different than the number of
498 nodes in the variable map. */
501 /* Explicit successors of each node. */
504 /* Implicit predecessors of each node (Used for variable
506 bitmap
*implicit_preds
;
508 /* Explicit predecessors of each node (Used for variable substitution). */
511 /* Indirect cycle representatives, or -1 if the node has no indirect
513 int *indirect_cycles
;
515 /* Representative node for a node. rep[a] == a unless the node has
519 /* Equivalence class representative for a label. This is used for
520 variable substitution. */
523 /* Pointer equivalence label for a node. All nodes with the same
524 pointer equivalence label can be unified together at some point
525 (either during constraint optimization or after the constraint
529 /* Pointer equivalence representative for a label. This is used to
530 handle nodes that are pointer equivalent but not location
531 equivalent. We can unite these once the addressof constraints
532 are transformed into initial points-to sets. */
535 /* Pointer equivalence label for each node, used during variable
537 unsigned int *pointer_label
;
539 /* Location equivalence label for each node, used during location
540 equivalence finding. */
541 unsigned int *loc_label
;
543 /* Pointed-by set for each node, used during location equivalence
544 finding. This is pointed-by rather than pointed-to, because it
545 is constructed using the predecessor graph. */
548 /* Points to sets for pointer equivalence. This is *not* the actual
549 points-to sets for nodes. */
552 /* Bitmap of nodes where the bit is set if the node is a direct
553 node. Used for variable substitution. */
554 sbitmap direct_nodes
;
556 /* Bitmap of nodes where the bit is set if the node is address
557 taken. Used for variable substitution. */
558 bitmap address_taken
;
560 /* Vector of complex constraints for each graph node. Complex
561 constraints are those involving dereferences or offsets that are
563 vec
<constraint_t
> *complex;
566 static constraint_graph_t graph
;
568 /* During variable substitution and the offline version of indirect
569 cycle finding, we create nodes to represent dereferences and
570 address taken constraints. These represent where these start and
572 #define FIRST_REF_NODE (varmap).length ()
573 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
575 /* Return the representative node for NODE, if NODE has been unioned
577 This function performs path compression along the way to finding
578 the representative. */
581 find (unsigned int node
)
583 gcc_assert (node
< graph
->size
);
584 if (graph
->rep
[node
] != node
)
585 return graph
->rep
[node
] = find (graph
->rep
[node
]);
589 /* Union the TO and FROM nodes to the TO nodes.
590 Note that at some point in the future, we may want to do
591 union-by-rank, in which case we are going to have to return the
592 node we unified to. */
595 unite (unsigned int to
, unsigned int from
)
597 gcc_assert (to
< graph
->size
&& from
< graph
->size
);
598 if (to
!= from
&& graph
->rep
[from
] != to
)
600 graph
->rep
[from
] = to
;
606 /* Create a new constraint consisting of LHS and RHS expressions. */
609 new_constraint (const struct constraint_expr lhs
,
610 const struct constraint_expr rhs
)
612 constraint_t ret
= (constraint_t
) pool_alloc (constraint_pool
);
618 /* Print out constraint C to FILE. */
621 dump_constraint (FILE *file
, constraint_t c
)
623 if (c
->lhs
.type
== ADDRESSOF
)
625 else if (c
->lhs
.type
== DEREF
)
627 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
628 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
629 fprintf (file
, " + UNKNOWN");
630 else if (c
->lhs
.offset
!= 0)
631 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
632 fprintf (file
, " = ");
633 if (c
->rhs
.type
== ADDRESSOF
)
635 else if (c
->rhs
.type
== DEREF
)
637 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
638 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
639 fprintf (file
, " + UNKNOWN");
640 else if (c
->rhs
.offset
!= 0)
641 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
645 void debug_constraint (constraint_t
);
646 void debug_constraints (void);
647 void debug_constraint_graph (void);
648 void debug_solution_for_var (unsigned int);
649 void debug_sa_points_to_info (void);
651 /* Print out constraint C to stderr. */
654 debug_constraint (constraint_t c
)
656 dump_constraint (stderr
, c
);
657 fprintf (stderr
, "\n");
660 /* Print out all constraints to FILE */
663 dump_constraints (FILE *file
, int from
)
667 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
670 dump_constraint (file
, c
);
671 fprintf (file
, "\n");
675 /* Print out all constraints to stderr. */
678 debug_constraints (void)
680 dump_constraints (stderr
, 0);
683 /* Print the constraint graph in dot format. */
686 dump_constraint_graph (FILE *file
)
690 /* Only print the graph if it has already been initialized: */
694 /* Prints the header of the dot file: */
695 fprintf (file
, "strict digraph {\n");
696 fprintf (file
, " node [\n shape = box\n ]\n");
697 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
698 fprintf (file
, "\n // List of nodes and complex constraints in "
699 "the constraint graph:\n");
701 /* The next lines print the nodes in the graph together with the
702 complex constraints attached to them. */
703 for (i
= 0; i
< graph
->size
; i
++)
707 if (i
< FIRST_REF_NODE
)
708 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
710 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
711 if (graph
->complex[i
].exists ())
715 fprintf (file
, " [label=\"\\N\\n");
716 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
718 dump_constraint (file
, c
);
719 fprintf (file
, "\\l");
721 fprintf (file
, "\"]");
723 fprintf (file
, ";\n");
726 /* Go over the edges. */
727 fprintf (file
, "\n // Edges in the constraint graph:\n");
728 for (i
= 0; i
< graph
->size
; i
++)
734 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
736 unsigned to
= find (j
);
739 if (i
< FIRST_REF_NODE
)
740 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
742 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
743 fprintf (file
, " -> ");
744 if (to
< FIRST_REF_NODE
)
745 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
747 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
748 fprintf (file
, ";\n");
752 /* Prints the tail of the dot file. */
753 fprintf (file
, "}\n");
756 /* Print out the constraint graph to stderr. */
759 debug_constraint_graph (void)
761 dump_constraint_graph (stderr
);
766 The solver is a simple worklist solver, that works on the following
769 sbitmap changed_nodes = all zeroes;
771 For each node that is not already collapsed:
773 set bit in changed nodes
775 while (changed_count > 0)
777 compute topological ordering for constraint graph
779 find and collapse cycles in the constraint graph (updating
780 changed if necessary)
782 for each node (n) in the graph in topological order:
785 Process each complex constraint associated with the node,
786 updating changed if necessary.
788 For each outgoing edge from n, propagate the solution from n to
789 the destination of the edge, updating changed as necessary.
793 /* Return true if two constraint expressions A and B are equal. */
796 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
798 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
801 /* Return true if constraint expression A is less than constraint expression
802 B. This is just arbitrary, but consistent, in order to give them an
806 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
808 if (a
.type
== b
.type
)
811 return a
.offset
< b
.offset
;
813 return a
.var
< b
.var
;
816 return a
.type
< b
.type
;
819 /* Return true if constraint A is less than constraint B. This is just
820 arbitrary, but consistent, in order to give them an ordering. */
823 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
825 if (constraint_expr_less (a
->lhs
, b
->lhs
))
827 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
830 return constraint_expr_less (a
->rhs
, b
->rhs
);
833 /* Return true if two constraints A and B are equal. */
836 constraint_equal (struct constraint a
, struct constraint b
)
838 return constraint_expr_equal (a
.lhs
, b
.lhs
)
839 && constraint_expr_equal (a
.rhs
, b
.rhs
);
843 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
846 constraint_vec_find (vec
<constraint_t
> vec
,
847 struct constraint lookfor
)
855 place
= vec
.lower_bound (&lookfor
, constraint_less
);
856 if (place
>= vec
.length ())
859 if (!constraint_equal (*found
, lookfor
))
864 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
867 constraint_set_union (vec
<constraint_t
> *to
,
868 vec
<constraint_t
> *from
)
873 FOR_EACH_VEC_ELT (*from
, i
, c
)
875 if (constraint_vec_find (*to
, *c
) == NULL
)
877 unsigned int place
= to
->lower_bound (c
, constraint_less
);
878 to
->safe_insert (place
, c
);
883 /* Expands the solution in SET to all sub-fields of variables included.
884 Union the expanded result into RESULT. */
887 solution_set_expand (bitmap result
, bitmap set
)
893 /* In a first pass record all variables we need to add all
894 sub-fields off. This avoids quadratic behavior. */
895 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
897 varinfo_t v
= get_varinfo (j
);
898 if (v
->is_artificial_var
901 v
= lookup_vi_for_tree (v
->decl
);
903 vars
= BITMAP_ALLOC (NULL
);
904 bitmap_set_bit (vars
, v
->id
);
907 /* In the second pass now do the addition to the solution and
908 to speed up solving add it to the delta as well. */
911 EXECUTE_IF_SET_IN_BITMAP (vars
, 0, j
, bi
)
913 varinfo_t v
= get_varinfo (j
);
914 for (; v
!= NULL
; v
= v
->next
)
915 bitmap_set_bit (result
, v
->id
);
921 /* Take a solution set SET, add OFFSET to each member of the set, and
922 overwrite SET with the result when done. */
925 solution_set_add (bitmap set
, HOST_WIDE_INT offset
)
927 bitmap result
= BITMAP_ALLOC (&iteration_obstack
);
931 /* If the offset is unknown we have to expand the solution to
933 if (offset
== UNKNOWN_OFFSET
)
935 solution_set_expand (set
, set
);
939 EXECUTE_IF_SET_IN_BITMAP (set
, 0, i
, bi
)
941 varinfo_t vi
= get_varinfo (i
);
943 /* If this is a variable with just one field just set its bit
945 if (vi
->is_artificial_var
946 || vi
->is_unknown_size_var
948 bitmap_set_bit (result
, i
);
951 unsigned HOST_WIDE_INT fieldoffset
= vi
->offset
+ offset
;
953 /* If the offset makes the pointer point to before the
954 variable use offset zero for the field lookup. */
956 && fieldoffset
> vi
->offset
)
960 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
962 bitmap_set_bit (result
, vi
->id
);
963 /* If the result is not exactly at fieldoffset include the next
964 field as well. See get_constraint_for_ptr_offset for more
966 if (vi
->offset
!= fieldoffset
968 bitmap_set_bit (result
, vi
->next
->id
);
972 bitmap_copy (set
, result
);
973 BITMAP_FREE (result
);
976 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
980 set_union_with_increment (bitmap to
, bitmap from
, HOST_WIDE_INT inc
)
983 return bitmap_ior_into (to
, from
);
989 tmp
= BITMAP_ALLOC (&iteration_obstack
);
990 bitmap_copy (tmp
, from
);
991 solution_set_add (tmp
, inc
);
992 res
= bitmap_ior_into (to
, tmp
);
998 /* Insert constraint C into the list of complex constraints for graph
1002 insert_into_complex (constraint_graph_t graph
,
1003 unsigned int var
, constraint_t c
)
1005 vec
<constraint_t
> complex = graph
->complex[var
];
1006 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1008 /* Only insert constraints that do not already exist. */
1009 if (place
>= complex.length ()
1010 || !constraint_equal (*c
, *complex[place
]))
1011 graph
->complex[var
].safe_insert (place
, c
);
1015 /* Condense two variable nodes into a single variable node, by moving
1016 all associated info from SRC to TO. */
1019 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1025 gcc_assert (find (from
) == to
);
1027 /* Move all complex constraints from src node into to node */
1028 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
1030 /* In complex constraints for node src, we may have either
1031 a = *src, and *src = a, or an offseted constraint which are
1032 always added to the rhs node's constraints. */
1034 if (c
->rhs
.type
== DEREF
)
1036 else if (c
->lhs
.type
== DEREF
)
1041 constraint_set_union (&graph
->complex[to
], &graph
->complex[from
]);
1042 graph
->complex[from
].release ();
1046 /* Remove edges involving NODE from GRAPH. */
1049 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1051 if (graph
->succs
[node
])
1052 BITMAP_FREE (graph
->succs
[node
]);
1055 /* Merge GRAPH nodes FROM and TO into node TO. */
1058 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1061 if (graph
->indirect_cycles
[from
] != -1)
1063 /* If we have indirect cycles with the from node, and we have
1064 none on the to node, the to node has indirect cycles from the
1065 from node now that they are unified.
1066 If indirect cycles exist on both, unify the nodes that they
1067 are in a cycle with, since we know they are in a cycle with
1069 if (graph
->indirect_cycles
[to
] == -1)
1070 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1073 /* Merge all the successor edges. */
1074 if (graph
->succs
[from
])
1076 if (!graph
->succs
[to
])
1077 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1078 bitmap_ior_into (graph
->succs
[to
],
1079 graph
->succs
[from
]);
1082 clear_edges_for_node (graph
, from
);
1086 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1087 it doesn't exist in the graph already. */
1090 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
1096 if (!graph
->implicit_preds
[to
])
1097 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1099 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1100 stats
.num_implicit_edges
++;
1103 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1104 it doesn't exist in the graph already.
1105 Return false if the edge already existed, true otherwise. */
1108 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
1111 if (!graph
->preds
[to
])
1112 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1113 bitmap_set_bit (graph
->preds
[to
], from
);
1116 /* Add a graph edge to GRAPH, going from FROM to TO if
1117 it doesn't exist in the graph already.
1118 Return false if the edge already existed, true otherwise. */
1121 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1132 if (!graph
->succs
[from
])
1133 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1134 if (bitmap_set_bit (graph
->succs
[from
], to
))
1137 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1145 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1148 valid_graph_edge (constraint_graph_t graph
, unsigned int src
,
1151 return (graph
->succs
[dest
]
1152 && bitmap_bit_p (graph
->succs
[dest
], src
));
1155 /* Initialize the constraint graph structure to contain SIZE nodes. */
1158 init_graph (unsigned int size
)
1162 graph
= XCNEW (struct constraint_graph
);
1164 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1165 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1166 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1167 /* ??? Macros do not support template types with multiple arguments,
1168 so we use a typedef to work around it. */
1169 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1170 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1171 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1172 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1174 for (j
= 0; j
< graph
->size
; j
++)
1177 graph
->pe_rep
[j
] = -1;
1178 graph
->indirect_cycles
[j
] = -1;
/* NOTE(review): mangled extraction — statements are split across physical
   lines and some original source lines (braces, declarations) are missing.
   Code is left byte-identical; only comments were added.  */
1182 /* Build the constraint graph, adding only predecessor edges right now. */
1185 build_pred_graph (void)
/* Allocate the auxiliary per-node structures used only by the offline
   variable-substitution phase (preds, labels, points_to, eq_rep, ...).  */
1191 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1192 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1193 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1194 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1195 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1196 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1197 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1198 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1199 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1200 bitmap_clear (graph
->direct_nodes
);
/* All non-special nodes start out as direct; constraints below demote
   address-taken / dereferenced variables.  */
1202 for (j
= 0; j
< FIRST_REF_NODE
; j
++)
1204 if (!get_varinfo (j
)->is_special_var
)
1205 bitmap_set_bit (graph
->direct_nodes
, j
);
1208 for (j
= 0; j
< graph
->size
; j
++)
1209 graph
->eq_rep
[j
] = -1;
1211 for (j
= 0; j
< varmap
.length (); j
++)
1212 graph
->indirect_cycles
[j
] = -1;
/* Translate each constraint into predecessor (and implicit) edges.  */
1214 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1216 struct constraint_expr lhs
= c
->lhs
;
1217 struct constraint_expr rhs
= c
->rhs
;
1218 unsigned int lhsvar
= lhs
.var
;
1219 unsigned int rhsvar
= rhs
.var
;
/* *x = y: edge from the REF node of x.  */
1221 if (lhs
.type
== DEREF
)
1224 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1225 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
/* x = *y: edge to the REF node of y; x is no longer direct.  */
1227 else if (rhs
.type
== DEREF
)
1230 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1231 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1233 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
/* x = &y: record the points-to / pointed-by facts directly.  */
1235 else if (rhs
.type
== ADDRESSOF
)
1240 if (graph
->points_to
[lhsvar
] == NULL
)
1241 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1242 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1244 if (graph
->pointed_by
[rhsvar
] == NULL
)
1245 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1246 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1248 /* Implicitly, *x = y */
1249 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1251 /* All related variables are no longer direct nodes. */
1252 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
1253 v
= get_varinfo (rhsvar
);
/* For a sub-field, demote every field of the underlying decl.
   NOTE(review): the loop structure around this was lost in extraction.  */
1254 if (!v
->is_full_var
)
1256 v
= lookup_vi_for_tree (v
->decl
);
1259 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1264 bitmap_set_bit (graph
->address_taken
, rhsvar
);
/* Plain copy x = y (ignoring copies from ANYTHING and self-copies).  */
1266 else if (lhsvar
> anything_id
1267 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1270 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1271 /* Implicitly, *x = *y */
1272 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1273 FIRST_REF_NODE
+ rhsvar
);
/* Offsetted copies make the offsetted side indirect.  */
1275 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1277 if (rhs
.offset
!= 0)
1278 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1279 else if (lhs
.offset
!= 0)
1280 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
/* NOTE(review): mangled extraction — statements are split across physical
   lines and some original source lines are missing (e.g. the assignments
   that initialize LHS/RHS from *c before lines 1305-1306).  Code is left
   byte-identical; only comments were added.  */
1285 /* Build the constraint graph, adding successor edges. */
1288 build_succ_graph (void)
1293 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1295 struct constraint_expr lhs
;
1296 struct constraint_expr rhs
;
1297 unsigned int lhsvar
;
1298 unsigned int rhsvar
;
/* Work on representatives, since unification may already have run.  */
1305 lhsvar
= find (lhs
.var
);
1306 rhsvar
= find (rhs
.var
);
1308 if (lhs
.type
== DEREF
)
1310 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1311 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1313 else if (rhs
.type
== DEREF
)
1315 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1316 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
/* x = &y seeds the initial solution of x rather than adding an edge.  */
1318 else if (rhs
.type
== ADDRESSOF
)
1321 gcc_assert (find (rhs
.var
) == rhs
.var
);
1322 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1324 else if (lhsvar
> anything_id
1325 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1327 add_graph_edge (graph
, lhsvar
, rhsvar
);
1331 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1332 receive pointers. */
1333 t
= find (storedanything_id
);
1334 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1336 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1337 && get_varinfo (i
)->may_have_pointers
)
1338 add_graph_edge (graph
, find (i
), t
);
1341 /* Everything stored to ANYTHING also potentially escapes. */
1342 add_graph_edge (graph
, find (escaped_id
), t
);
/* NOTE(review): the lines below are a fragment — `node_mapping' and
   `scc_stack' appear to be members of `struct scc_info', whose other
   members (visited, deleted, dfs, current_index — referenced by the
   functions below) were dropped by the extraction.  TODO confirm against
   the full source.  */
1346 /* Changed variables on the last iteration. */
1347 static bitmap changed
;
1349 /* Strongly Connected Component visitation info. */
1356 unsigned int *node_mapping
;
1358 vec
<unsigned> scc_stack
;
/* NOTE(review): mangled extraction — statements are split across physical
   lines and some original source lines (braces, the assignment of W) are
   missing.  Code is left byte-identical; only comments were touched.  */
1362 /* Recursive routine to find strongly connected components in GRAPH.
1363 SI is the SCC info to store the information in, and N is the id of current
1364 graph node we are processing.
1366 This is Tarjan's strongly connected component finding algorithm, as
1367 modified by Nuutila to keep only non-root nodes on the stack.
1368 The algorithm can be found in "On finding the strongly connected
1369 components in a directed graph" by Esko Nuutila and Eljas
1370 Soisalon-Soininen, in Information Processing Letters volume 49,
1371 number 1, pages 9-14. */
1374 scc_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1378 unsigned int my_dfs
;
1380 bitmap_set_bit (si
->visited
, n
);
1381 si
->dfs
[n
] = si
->current_index
++;
1382 my_dfs
= si
->dfs
[n
];
1384 /* Visit all the successors. */
1385 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
/* NOTE(review): the line assigning W (presumably w = find (i)) was lost
   in extraction.  */
1389 if (i
> LAST_REF_NODE
)
1393 if (bitmap_bit_p (si
->deleted
, w
))
1396 if (!bitmap_bit_p (si
->visited
, w
))
1397 scc_visit (graph
, si
, w
);
/* Propagate the lowest reachable DFS number back to N (lowlink).  */
1399 unsigned int t
= find (w
);
1400 unsigned int nnode
= find (n
);
1401 gcc_assert (nnode
== n
);
1403 if (si
->dfs
[t
] < si
->dfs
[nnode
])
1404 si
->dfs
[n
] = si
->dfs
[t
];
1408 /* See if any components have been identified. */
1409 if (si
->dfs
[n
] == my_dfs
)
1411 if (si
->scc_stack
.length () > 0
1412 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1414 bitmap scc
= BITMAP_ALLOC (NULL
);
1415 unsigned int lowest_node
;
1418 bitmap_set_bit (scc
, n
);
/* Pop every stack entry belonging to this component.  */
1420 while (si
->scc_stack
.length () != 0
1421 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1423 unsigned int w
= si
->scc_stack
.pop ();
1425 bitmap_set_bit (scc
, w
);
/* The lowest-numbered non-REF member becomes the representative.  */
1428 lowest_node
= bitmap_first_set_bit (scc
);
1429 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1431 /* Collapse the SCC nodes into a single node, and mark the
1433 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1435 if (i
< FIRST_REF_NODE
)
1437 if (unite (lowest_node
, i
))
1438 unify_nodes (graph
, lowest_node
, i
, false);
/* REF-node members record the cycle for later online elimination.  */
1442 unite (lowest_node
, i
);
1443 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1447 bitmap_set_bit (si
->deleted
, n
);
1450 si
->scc_stack
.safe_push (n
);
/* NOTE(review): mangled extraction — statements are split across physical
   lines and some original source lines (braces, the `if (update_changed)'
   guards around the stats/changed updates) are missing.  Code is left
   byte-identical; only comments were added.  */
1453 /* Unify node FROM into node TO, updating the changed count if
1454 necessary when UPDATE_CHANGED is true. */
1457 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1458 bool update_changed
)
1461 gcc_assert (to
!= from
&& find (to
) == to
);
1462 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1463 fprintf (dump_file
, "Unifying %s to %s\n",
1464 get_varinfo (from
)->name
,
1465 get_varinfo (to
)->name
);
1468 stats
.unified_vars_dynamic
++;
1470 stats
.unified_vars_static
++;
/* Merge graph edges and complex constraints of FROM into TO.  */
1472 merge_graph_nodes (graph
, to
, from
);
1473 merge_node_constraints (graph
, to
, from
);
1475 /* Mark TO as changed if FROM was changed. If TO was already marked
1476 as changed, decrease the changed count. */
1479 && bitmap_bit_p (changed
, from
))
1481 bitmap_clear_bit (changed
, from
);
1482 bitmap_set_bit (changed
, to
);
1484 if (get_varinfo (from
)->solution
)
1486 /* If the solution changes because of the merging, we need to mark
1487 the variable as changed. */
1488 if (bitmap_ior_into (get_varinfo (to
)->solution
,
1489 get_varinfo (from
)->solution
))
1492 bitmap_set_bit (changed
, to
);
/* FROM's solution bitmaps are dead after the merge.  */
1495 BITMAP_FREE (get_varinfo (from
)->solution
);
1496 if (get_varinfo (from
)->oldsolution
)
1497 BITMAP_FREE (get_varinfo (from
)->oldsolution
);
1499 if (stats
.iterations
> 0
1500 && get_varinfo (to
)->oldsolution
)
1501 BITMAP_FREE (get_varinfo (to
)->oldsolution
);
/* Remove the self-edge a unification may have produced.  */
1503 if (valid_graph_edge (graph
, to
, to
))
1505 if (graph
->succs
[to
])
1506 bitmap_clear_bit (graph
->succs
[to
], to
);
/* NOTE(review): fragment of `struct topo_info' — the struct header, the
   `visited' member declaration and the closing brace were dropped by the
   extraction.  TODO confirm against the full source.  */
1510 /* Information needed to compute the topological ordering of a graph. */
1514 /* sbitmap of visited nodes. */
1516 /* Array that stores the topological order of the graph, *in
1518 vec
<unsigned> topo_order
;
/* NOTE(review): mangled extraction — braces and the final `return ti;'
   were dropped.  Code is left byte-identical; only comments were added.  */
1522 /* Initialize and return a topological info structure. */
1524 static struct topo_info
*
1525 init_topo_info (void)
1527 size_t size
= graph
->size
;
1528 struct topo_info
*ti
= XNEW (struct topo_info
);
1529 ti
->visited
= sbitmap_alloc (size
);
1530 bitmap_clear (ti
->visited
);
1531 ti
->topo_order
.create (1);
/* NOTE(review): mangled extraction — braces and presumably the final
   `free (ti);' (original line 1543 is absent) were dropped.  Code is left
   byte-identical; only comments were added.  */
1536 /* Free the topological sort info pointed to by TI. */
1539 free_topo_info (struct topo_info
*ti
)
1541 sbitmap_free (ti
->visited
);
1542 ti
->topo_order
.release ();
/* NOTE(review): mangled extraction — the rest of the parameter list
   (presumably `unsigned int n') and braces were dropped.  Code is left
   byte-identical; only comments were added.  */
1546 /* Visit the graph in topological order, and store the order in the
1547 topo_info structure. */
1550 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1556 bitmap_set_bit (ti
->visited
, n
);
/* Post-order DFS over successor edges; N is pushed after its succs.  */
1558 if (graph
->succs
[n
])
1559 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1561 if (!bitmap_bit_p (ti
->visited
, j
))
1562 topo_visit (graph
, ti
, j
);
1565 ti
->topo_order
.safe_push (n
);
/* NOTE(review): mangled extraction — statements split across lines; some
   original lines (the `t = find (v->id)' assignment, loop braces, the
   `done:' label the comments imply) are missing.  Code is left
   byte-identical; only comments were touched.  */
1568 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1569 starting solution for y. */
1572 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1575 unsigned int lhs
= c
->lhs
.var
;
1577 bitmap sol
= get_varinfo (lhs
)->solution
;
1580 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1582 /* Our IL does not allow this. */
1583 gcc_assert (c
->lhs
.offset
== 0);
1585 /* If the solution of Y contains anything it is good enough to transfer
1587 if (bitmap_bit_p (delta
, anything_id
))
1589 flag
|= bitmap_set_bit (sol
, anything_id
);
1593 /* If we do not know at which offset the rhs is dereferenced compute
1594 the reachability set of DELTA, conservatively assuming it is
1595 dereferenced at all valid offsets. */
1596 if (roffset
== UNKNOWN_OFFSET
)
1598 solution_set_expand (delta
, delta
);
1599 /* No further offset processing is necessary. */
1603 /* For each variable j in delta (Sol(y)), add
1604 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1605 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1607 varinfo_t v
= get_varinfo (j
);
1608 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
1612 fieldoffset
= v
->offset
;
1613 else if (roffset
!= 0)
1614 v
= first_vi_for_offset (v
, fieldoffset
);
1615 /* If the access is outside of the variable we can ignore it. */
1623 /* Adding edges from the special vars is pointless.
1624 They don't have sets that can change. */
1625 if (get_varinfo (t
)->is_special_var
)
1626 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
)
;
1627 /* Merging the solution from ESCAPED needlessly increases
1628 the set. Use ESCAPED as representative instead. */
1629 else if (v
->id
== escaped_id
)
1630 flag
|= bitmap_set_bit (sol
, escaped_id
);
1631 else if (v
->may_have_pointers
1632 && add_graph_edge (graph
, lhs
, t
))
1633 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1635 /* If the variable is not exactly at the requested offset
1636 we have to include the next one. */
1637 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1642 fieldoffset
= v
->offset
;
1648 /* If the LHS solution changed, mark the var as changed. */
1651 get_varinfo (lhs
)->solution
= sol
;
1652 bitmap_set_bit (changed
, lhs
);
/* NOTE(review): mangled extraction — statements split across lines; some
   original lines (loop braces, the escaped_p updates, the `t = find
   (v->id)' assignment) are missing.  Code is left byte-identical; only
   comments were touched.  */
1656 /* Process a constraint C that represents *(x + off) = y using DELTA
1657 as the starting solution for x. */
1660 do_ds_constraint (constraint_t c
, bitmap delta
)
1662 unsigned int rhs
= c
->rhs
.var
;
1663 bitmap sol
= get_varinfo (rhs
)->solution
;
1666 HOST_WIDE_INT loff
= c
->lhs
.offset
;
1667 bool escaped_p
= false;
1669 /* Our IL does not allow this. */
1670 gcc_assert (c
->rhs
.offset
== 0);
1672 /* If the solution of y contains ANYTHING simply use the ANYTHING
1673 solution. This avoids needlessly increasing the points-to sets. */
1674 if (bitmap_bit_p (sol
, anything_id
))
1675 sol
= get_varinfo (find (anything_id
))->solution
;
1677 /* If the solution for x contains ANYTHING we have to merge the
1678 solution of y into all pointer variables which we do via
1680 if (bitmap_bit_p (delta
, anything_id
))
1682 unsigned t
= find (storedanything_id
);
1683 if (add_graph_edge (graph
, t
, rhs
))
1685 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1686 bitmap_set_bit (changed
, t
);
1691 /* If we do not know at which offset the rhs is dereferenced compute
1692 the reachability set of DELTA, conservatively assuming it is
1693 dereferenced at all valid offsets. */
1694 if (loff
== UNKNOWN_OFFSET
)
1696 solution_set_expand (delta
, delta
);
1700 /* For each member j of delta (Sol(x)), add an edge from y to j and
1701 union Sol(y) into Sol(j) */
1702 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1704 varinfo_t v
= get_varinfo (j
);
1706 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
1709 fieldoffset
= v
->offset
;
1711 v
= first_vi_for_offset (v
, fieldoffset
);
1712 /* If the access is outside of the variable we can ignore it. */
1718 if (v
->may_have_pointers
)
1720 /* If v is a global variable then this is an escape point. */
1721 if (v
->is_global_var
1724 t
= find (escaped_id
);
1725 if (add_graph_edge (graph
, t
, rhs
)
1726 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1727 bitmap_set_bit (changed
, t
);
1728 /* Enough to let rhs escape once. */
1732 if (v
->is_special_var
)
1736 if (add_graph_edge (graph
, t
, rhs
)
1737 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1738 bitmap_set_bit (changed
, t
);
1741 /* If the variable is not exactly at the requested offset
1742 we have to include the next one. */
1743 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1748 fieldoffset
= v
->offset
;
/* NOTE(review): mangled extraction — braces and the declarations of
   `solution', `tmp' and `flag' are missing.  Code is left byte-identical;
   only comments were added.  */
1754 /* Handle a non-simple (simple meaning requires no iteration),
1755 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1758 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
)
/* Dispatch on the constraint shape: store (*x = ...), load (x = *y),
   or offsetted scalar copy.  */
1760 if (c
->lhs
.type
== DEREF
)
1762 if (c
->rhs
.type
== ADDRESSOF
)
1769 do_ds_constraint (c
, delta
);
1772 else if (c
->rhs
.type
== DEREF
)
1775 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1776 do_sd_constraint (graph
, c
, delta
);
/* Remaining case: x = y + off with both sides scalar.  */
1784 gcc_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
);
1785 solution
= get_varinfo (c
->rhs
.var
)->solution
;
1786 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1788 flag
= set_union_with_increment (tmp
, solution
, c
->rhs
.offset
);
1792 get_varinfo (c
->lhs
.var
)->solution
= tmp
;
1793 bitmap_set_bit (changed
, c
->lhs
.var
);
/* NOTE(review): mangled extraction — braces and the final `return si;'
   are missing.  Code is left byte-identical; only comments were added.  */
1798 /* Initialize and return a new SCC info structure. */
1800 static struct scc_info
*
1801 init_scc_info (size_t size
)
1803 struct scc_info
*si
= XNEW (struct scc_info
);
1806 si
->current_index
= 0;
1807 si
->visited
= sbitmap_alloc (size
);
1808 bitmap_clear (si
->visited
);
1809 si
->deleted
= sbitmap_alloc (size
);
1810 bitmap_clear (si
->deleted
);
1811 si
->node_mapping
= XNEWVEC (unsigned int, size
);
1812 si
->dfs
= XCNEWVEC (unsigned int, size
);
/* Every node starts as its own representative in the mapping.  */
1814 for (i
= 0; i
< size
; i
++)
1815 si
->node_mapping
[i
] = i
;
1817 si
->scc_stack
.create (1);
/* NOTE(review): mangled extraction — braces, `free (si->dfs)' and
   `free (si)' appear to have been dropped (original lines 1829/1831 are
   absent).  Code is left byte-identical; only comments were added.  */
1821 /* Free an SCC info structure pointed to by SI */
1824 free_scc_info (struct scc_info
*si
)
1826 sbitmap_free (si
->visited
);
1827 sbitmap_free (si
->deleted
);
1828 free (si
->node_mapping
);
1830 si
->scc_stack
.release ();
/* NOTE(review): mangled extraction — braces and the trailing
   free_scc_info call are missing.  Code is left byte-identical; only
   comments were added.  */
1835 /* Find indirect cycles in GRAPH that occur, using strongly connected
1836 components, and note them in the indirect cycles map.
1838 This technique comes from Ben Hardekopf and Calvin Lin,
1839 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1840 Lines of Code", submitted to PLDI 2007. */
1843 find_indirect_cycles (constraint_graph_t graph
)
1846 unsigned int size
= graph
->size
;
1847 struct scc_info
*si
= init_scc_info (size
);
/* Only visit unvisited representative nodes below the REF range.  */
1849 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1850 if (!bitmap_bit_p (si
->visited
, i
) && find (i
) == i
)
1851 scc_visit (graph
, si
, i
);
/* NOTE(review): mangled extraction — braces missing.  Code is left
   byte-identical; only comments were added.  */
1856 /* Compute a topological ordering for GRAPH, and store the result in the
1857 topo_info structure TI. */
1860 compute_topo_order (constraint_graph_t graph
,
1861 struct topo_info
*ti
)
1864 unsigned int size
= graph
->size
;
/* DFS from every unvisited representative node.  */
1866 for (i
= 0; i
!= size
; ++i
)
1867 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1868 topo_visit (graph
, ti
, i
);
/* NOTE(review): fragment — the `hashcode' and `labels' members referenced
   by the functions below were dropped by the extraction.  */
1871 /* Structure used for hash value numbering of pointer equivalence
1874 typedef struct equiv_class_label
1877 unsigned int equivalence_class
;
1879 } *equiv_class_label_t
;
1880 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1882 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1884 static htab_t pointer_equiv_class_table
;
1886 /* A hashtable for mapping a bitmap of labels->location equivalence
1888 static htab_t location_equiv_class_table
;
/* NOTE(review): mangled extraction — return type and braces missing.
   Returns the cached hash of the label bitmap.  */
1890 /* Hash function for an equiv_class_label_t */
1893 equiv_class_label_hash (const void *p
)
1895 const_equiv_class_label_t
const ecl
= (const_equiv_class_label_t
) p
;
1896 return ecl
->hashcode
;
/* NOTE(review): mangled extraction — return type and braces missing.
   Two entries are equal iff hashes match and the label bitmaps are
   bit-for-bit equal.  */
1899 /* Equality function for two equiv_class_label_t's. */
1902 equiv_class_label_eq (const void *p1
, const void *p2
)
1904 const_equiv_class_label_t
const eql1
= (const_equiv_class_label_t
) p1
;
1905 const_equiv_class_label_t
const eql2
= (const_equiv_class_label_t
) p2
;
1906 return (eql1
->hashcode
== eql2
->hashcode
1907 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
/* NOTE(review): mangled extraction — return type, the `slot' declaration,
   braces and the not-found `return 0;' path are missing.  Code is left
   byte-identical; only comments were touched.  */
1910 /* Lookup an equivalence class in TABLE by the bitmap of LABELS it
1914 equiv_class_lookup (htab_t table
, bitmap labels
)
1917 struct equiv_class_label ecl
;
1919 ecl
.labels
= labels
;
1920 ecl
.hashcode
= bitmap_hash (labels
);
/* NO_INSERT: pure lookup, the table is not modified.  */
1922 slot
= htab_find_slot_with_hash (table
, &ecl
,
1923 ecl
.hashcode
, NO_INSERT
);
1927 return ((equiv_class_label_t
) *slot
)->equivalence_class
;
/* NOTE(review): mangled extraction — the `bitmap labels' parameter line,
   the `slot' declaration and braces are missing.  Code is left
   byte-identical; only comments were added.  */
1931 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1935 equiv_class_add (htab_t table
, unsigned int equivalence_class
,
1939 equiv_class_label_t ecl
= XNEW (struct equiv_class_label
);
1941 ecl
->labels
= labels
;
1942 ecl
->equivalence_class
= equivalence_class
;
1943 ecl
->hashcode
= bitmap_hash (labels
);
/* INSERT: the slot must be empty — duplicates are a caller bug.  */
1945 slot
= htab_find_slot_with_hash (table
, ecl
,
1946 ecl
->hashcode
, INSERT
);
1947 gcc_assert (!*slot
);
1948 *slot
= (void *) ecl
;
1951 /* Perform offline variable substitution.
1953 This is a worst case quadratic time way of identifying variables
1954 that must have equivalent points-to sets, including those caused by
1955 static cycles, and single entry subgraphs, in the constraint graph.
1957 The technique is described in "Exploiting Pointer and Location
1958 Equivalence to Optimize Pointer Analysis. In the 14th International
1959 Static Analysis Symposium (SAS), August 2007." It is known as the
1960 "HU" algorithm, and is equivalent to value numbering the collapsed
1961 constraint graph including evaluating unions.
1963 The general method of finding equivalence classes is as follows:
1964 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1965 Initialize all non-REF nodes to be direct nodes.
1966 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1968 For each constraint containing the dereference, we also do the same
1971 We then compute SCC's in the graph and unify nodes in the same SCC,
1974 For each non-collapsed node x:
1975 Visit all unvisited explicit incoming edges.
1976 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1978 Lookup the equivalence class for pts(x).
1979 If we found one, equivalence_class(x) = found class.
1980 Otherwise, equivalence_class(x) = new class, and new_class is
1981 added to the lookup table.
1983 All direct nodes with the same equivalence class can be replaced
1984 with a single representative node.
1985 All unlabeled nodes (label == 0) are not pointers and all edges
1986 involving them can be eliminated.
1987 We perform these optimizations during rewrite_constraints
1989 In addition to pointer equivalence class finding, we also perform
1990 location equivalence class finding. This is the set of variables
1991 that always appear together in points-to sets. We use this to
1992 compress the size of the points-to sets. */
/* Both counters start at 1 in perform_var_substitution; class id 0 is
   reserved to mean "not a pointer".  */
1994 /* Current maximum pointer equivalence class id. */
1995 static int pointer_equiv_class
;
1997 /* Current maximum location equivalence class id. */
1998 static int location_equiv_class
;
/* NOTE(review): mangled extraction — statements split across lines and
   some original lines (braces, parts of conditions) are missing.  Code
   is left byte-identical; only comments were touched.  */
2000 /* Recursive routine to find strongly connected components in GRAPH,
2001 and label its nodes with DFS numbers. */
2004 condense_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2008 unsigned int my_dfs
;
2010 gcc_assert (si
->node_mapping
[n
] == n
);
2011 bitmap_set_bit (si
->visited
, n
);
2012 si
->dfs
[n
] = si
->current_index
++;
2013 my_dfs
= si
->dfs
[n
];
2015 /* Visit all the successors. */
/* NOTE(review): despite the comment above, this walks graph->preds —
   the condensation operates on the predecessor graph.  */
2016 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2018 unsigned int w
= si
->node_mapping
[i
];
2020 if (bitmap_bit_p (si
->deleted
, w
))
2023 if (!bitmap_bit_p (si
->visited
, w
))
2024 condense_visit (graph
, si
, w
);
2026 unsigned int t
= si
->node_mapping
[w
];
2027 unsigned int nnode
= si
->node_mapping
[n
];
2028 gcc_assert (nnode
== n
);
/* Lowlink propagation, as in scc_visit.  */
2030 if (si
->dfs
[t
] < si
->dfs
[nnode
])
2031 si
->dfs
[n
] = si
->dfs
[t
];
2035 /* Visit all the implicit predecessors. */
2036 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2038 unsigned int w
= si
->node_mapping
[i
];
2040 if (bitmap_bit_p (si
->deleted
, w
))
2043 if (!bitmap_bit_p (si
->visited
, w
))
2044 condense_visit (graph
, si
, w
);
2046 unsigned int t
= si
->node_mapping
[w
];
2047 unsigned int nnode
= si
->node_mapping
[n
];
2048 gcc_assert (nnode
== n
);
2050 if (si
->dfs
[t
] < si
->dfs
[nnode
])
2051 si
->dfs
[n
] = si
->dfs
[t
];
2055 /* See if any components have been identified. */
2056 if (si
->dfs
[n
] == my_dfs
)
/* N is an SCC root: fold every stack member of the component into N.  */
2058 while (si
->scc_stack
.length () != 0
2059 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2061 unsigned int w
= si
->scc_stack
.pop ();
2062 si
->node_mapping
[w
] = n
;
/* One indirect member makes the whole component indirect.  */
2064 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2065 bitmap_clear_bit (graph
->direct_nodes
, n
);
2067 /* Unify our nodes. */
2068 if (graph
->preds
[w
])
2070 if (!graph
->preds
[n
])
2071 graph
->preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2072 bitmap_ior_into (graph
->preds
[n
], graph
->preds
[w
]);
2074 if (graph
->implicit_preds
[w
])
2076 if (!graph
->implicit_preds
[n
])
2077 graph
->implicit_preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2078 bitmap_ior_into (graph
->implicit_preds
[n
],
2079 graph
->implicit_preds
[w
]);
2081 if (graph
->points_to
[w
])
2083 if (!graph
->points_to
[n
])
2084 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2085 bitmap_ior_into (graph
->points_to
[n
],
2086 graph
->points_to
[w
]);
2089 bitmap_set_bit (si
->deleted
, n
);
2092 si
->scc_stack
.safe_push (n
);
/* NOTE(review): mangled extraction — braces and the "label == 0" check
   before allocating a fresh class are missing.  Code is left
   byte-identical; only comments were added.  */
2095 /* Label pointer equivalences. */
2098 label_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2102 bitmap_set_bit (si
->visited
, n
);
2104 if (!graph
->points_to
[n
])
2105 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2107 /* Label and union our incoming edges's points to sets. */
2108 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2110 unsigned int w
= si
->node_mapping
[i
];
2111 if (!bitmap_bit_p (si
->visited
, w
))
2112 label_visit (graph
, si
, w
);
2114 /* Skip unused edges */
2115 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2118 if (graph
->points_to
[w
])
2119 bitmap_ior_into(graph
->points_to
[n
], graph
->points_to
[w
]);
2121 /* Indirect nodes get fresh variables. */
2122 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2123 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
/* Hash-cons the points-to set into an equivalence class id; an empty
   set keeps label 0 (= not a pointer).  */
2125 if (!bitmap_empty_p (graph
->points_to
[n
]))
2127 unsigned int label
= equiv_class_lookup (pointer_equiv_class_table
,
2128 graph
->points_to
[n
]);
2131 label
= pointer_equiv_class
++;
2132 equiv_class_add (pointer_equiv_class_table
,
2133 label
, graph
->points_to
[n
]);
2135 graph
->pointer_label
[n
] = label
;
/* NOTE(review): mangled extraction — statements split across lines; some
   original lines (braces, the `pointed_by'/`label' declarations, dump
   fprintf headers, the final `return si;') are missing.  Code is left
   byte-identical; only comments were touched.  */
2139 /* Perform offline variable substitution, discovering equivalence
2140 classes, and eliminating non-pointer variables. */
2142 static struct scc_info
*
2143 perform_var_substitution (constraint_graph_t graph
)
2146 unsigned int size
= graph
->size
;
2147 struct scc_info
*si
= init_scc_info (size
);
2149 bitmap_obstack_initialize (&iteration_obstack
);
/* Hash-consing tables mapping label bitmaps -> class ids; entries are
   freed with `free' on htab_delete.  */
2150 pointer_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
2151 equiv_class_label_eq
, free
);
2152 location_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
2153 equiv_class_label_eq
, free
);
/* Class id 0 is reserved for "not a pointer".  */
2154 pointer_equiv_class
= 1;
2155 location_equiv_class
= 1;
2157 /* Condense the nodes, which means to find SCC's, count incoming
2158 predecessors, and unite nodes in SCC's. */
2159 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2160 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2161 condense_visit (graph
, si
, si
->node_mapping
[i
]);
2163 bitmap_clear (si
->visited
);
2164 /* Actually label the nodes for pointer equivalences */
2165 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2166 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2167 label_visit (graph
, si
, si
->node_mapping
[i
]);
2169 /* Calculate location equivalence labels. */
2170 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2177 if (!graph
->pointed_by
[i
])
2179 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
2181 /* Translate the pointed-by mapping for pointer equivalence
2183 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
2185 bitmap_set_bit (pointed_by
,
2186 graph
->pointer_label
[si
->node_mapping
[j
]]);
2188 /* The original pointed_by is now dead. */
2189 BITMAP_FREE (graph
->pointed_by
[i
]);
2191 /* Look up the location equivalence label if one exists, or make
2193 label
= equiv_class_lookup (location_equiv_class_table
,
2197 label
= location_equiv_class
++;
2198 equiv_class_add (location_equiv_class_table
,
2203 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2204 fprintf (dump_file
, "Found location equivalence for node %s\n",
2205 get_varinfo (i
)->name
);
2206 BITMAP_FREE (pointed_by
);
2208 graph
->loc_label
[i
] = label
;
/* Optional detailed dump of the computed classes.  */
2212 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2213 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2215 bool direct_node
= bitmap_bit_p (graph
->direct_nodes
, i
);
2217 "Equivalence classes for %s node id %d:%s are pointer: %d"
2219 direct_node
? "Direct node" : "Indirect node", i
,
2220 get_varinfo (i
)->name
,
2221 graph
->pointer_label
[si
->node_mapping
[i
]],
2222 graph
->loc_label
[si
->node_mapping
[i
]]);
2225 /* Quickly eliminate our non-pointer variables. */
2227 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2229 unsigned int node
= si
->node_mapping
[i
];
/* Label 0 means the node can never point to anything.  */
2231 if (graph
->pointer_label
[node
] == 0)
2233 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2235 "%s is a non-pointer variable, eliminating edges.\n",
2236 get_varinfo (node
)->name
);
2237 stats
.nonpointer_vars
++;
2238 clear_edges_for_node (graph
, node
);
/* NOTE(review): mangled extraction — braces and the scc_info disposal
   the truncated comment implies are missing.  Code is left
   byte-identical; only comments were added.  */
2245 /* Free information that was only necessary for variable
2249 free_var_substitution_info (struct scc_info
*si
)
/* Release every structure allocated solely for the substitution phase.  */
2252 free (graph
->pointer_label
);
2253 free (graph
->loc_label
);
2254 free (graph
->pointed_by
);
2255 free (graph
->points_to
);
2256 free (graph
->eq_rep
);
2257 sbitmap_free (graph
->direct_nodes
);
2258 htab_delete (pointer_equiv_class_table
);
2259 htab_delete (location_equiv_class_table
);
2260 bitmap_obstack_release (&iteration_obstack
);
/* NOTE(review): mangled extraction — return type, braces and the
   `return node;' paths are missing.  Code is left byte-identical; only
   comments were added.  */
2263 /* Return an existing node that is equivalent to NODE, which has
2264 equivalence class LABEL, if one exists. Return NODE otherwise. */
2267 find_equivalent_node (constraint_graph_t graph
,
2268 unsigned int node
, unsigned int label
)
2270 /* If the address version of this variable is unused, we can
2271 substitute it for anything else with the same label.
2272 Otherwise, we know the pointers are equivalent, but not the
2273 locations, and we can unite them later. */
2275 if (!bitmap_bit_p (graph
->address_taken
, node
))
2277 gcc_assert (label
< graph
->size
);
2279 if (graph
->eq_rep
[label
] != -1)
2281 /* Unify the two variables since we know they are equivalent. */
2282 if (unite (graph
->eq_rep
[label
], node
))
2283 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2284 return graph
->eq_rep
[label
];
/* First node seen with this label becomes its representative.  */
2288 graph
->eq_rep
[label
] = node
;
2289 graph
->pe_rep
[label
] = node
;
/* Address-taken case: record the pointer-equivalence label only; the
   actual uniting happens later in unite_pointer_equivalences.  */
2294 gcc_assert (label
< graph
->size
);
2295 graph
->pe
[node
] = label
;
2296 if (graph
->pe_rep
[label
] == -1)
2297 graph
->pe_rep
[label
] = node
;
/* NOTE(review): mangled extraction — braces and the "label != 0" guard
   are missing.  Code is left byte-identical; only comments were added.  */
2303 /* Unite pointer equivalent but not location equivalent nodes in
2304 GRAPH. This may only be performed once variable substitution is
2308 unite_pointer_equivalences (constraint_graph_t graph
)
2312 /* Go through the pointer equivalences and unite them to their
2313 representative, if they aren't already. */
2314 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2316 unsigned int label
= graph
->pe
[i
];
2319 int label_rep
= graph
->pe_rep
[label
];
/* -1 means the label never got a representative node.  */
2321 if (label_rep
== -1)
2324 label_rep
= find (label_rep
);
2325 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2326 unify_nodes (graph
, label_rep
, i
, false);
/* NOTE(review): mangled extraction — braces and the guard skipping NULL
   constraints are missing.  Code is left byte-identical; only comments
   were added.  */
2331 /* Move complex constraints to the GRAPH nodes they belong to. */
2334 move_complex_constraints (constraint_graph_t graph
)
2339 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2343 struct constraint_expr lhs
= c
->lhs
;
2344 struct constraint_expr rhs
= c
->rhs
;
/* *x = y constraints live on the LHS variable's node.  */
2346 if (lhs
.type
== DEREF
)
2348 insert_into_complex (graph
, lhs
.var
, c
);
/* x = *y constraints live on the RHS variable's node.  */
2350 else if (rhs
.type
== DEREF
)
2352 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2353 insert_into_complex (graph
, rhs
.var
, c
);
/* Offsetted scalar copies are complex too.  */
2355 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2356 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2358 insert_into_complex (graph
, rhs
.var
, c
);
/* NOTE(review): mangled extraction — braces and the `if (lhslabel == 0)'
   / `if (rhslabel == 0)' conditions that guard the two "non-pointer"
   dump/discard arms are missing.  Code is left byte-identical; only
   comments were added.  */
2365 /* Optimize and rewrite complex constraints while performing
2366 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2367 result of perform_variable_substitution. */
2370 rewrite_constraints (constraint_graph_t graph
,
2371 struct scc_info
*si
)
/* Sanity check: nothing may have been unified yet at this point.  */
2377 for (j
= 0; j
< graph
->size
; j
++)
2378 gcc_assert (find (j
) == j
);
2380 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2382 struct constraint_expr lhs
= c
->lhs
;
2383 struct constraint_expr rhs
= c
->rhs
;
2384 unsigned int lhsvar
= find (lhs
.var
);
2385 unsigned int rhsvar
= find (rhs
.var
);
2386 unsigned int lhsnode
, rhsnode
;
2387 unsigned int lhslabel
, rhslabel
;
2389 lhsnode
= si
->node_mapping
[lhsvar
];
2390 rhsnode
= si
->node_mapping
[rhsvar
];
2391 lhslabel
= graph
->pointer_label
[lhsnode
];
2392 rhslabel
= graph
->pointer_label
[rhsnode
];
2394 /* See if it is really a non-pointer variable, and if so, ignore
/* LHS is a non-pointer: drop the constraint entirely.  */
2398 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2401 fprintf (dump_file
, "%s is a non-pointer variable,"
2402 "ignoring constraint:",
2403 get_varinfo (lhs
.var
)->name
);
2404 dump_constraint (dump_file
, c
);
2405 fprintf (dump_file
, "\n");
2407 constraints
[i
] = NULL
;
/* Same treatment when the RHS is a non-pointer.  */
2413 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2416 fprintf (dump_file
, "%s is a non-pointer variable,"
2417 "ignoring constraint:",
2418 get_varinfo (rhs
.var
)->name
);
2419 dump_constraint (dump_file
, c
);
2420 fprintf (dump_file
, "\n");
2422 constraints
[i
] = NULL
;
/* Rewrite both sides to their equivalence-class representatives.  */
2426 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2427 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2428 c
->lhs
.var
= lhsvar
;
2429 c
->rhs
.var
= rhsvar
;
/* NOTE(review): mangled extraction — return type, braces, the iteration
   over QUEUE and the `return true/false;' lines are missing.  Code is
   left byte-identical; only comments were added.  */
2434 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2435 part of an SCC, false otherwise. */
2438 eliminate_indirect_cycles (unsigned int node
)
2440 if (graph
->indirect_cycles
[node
] != -1
2441 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2444 vec
<unsigned> queue
= vNULL
;
2446 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2449 /* We can't touch the solution set and call unify_nodes
2450 at the same time, because unify_nodes is going to do
2451 bitmap unions into it. */
/* First pass: collect representatives to merge, without mutating.  */
2453 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2455 if (find (i
) == i
&& i
!= to
)
2458 queue
.safe_push (i
);
/* Second pass: perform the deferred unifications.  */
2463 queue
.iterate (queuepos
, &i
);
2466 unify_nodes (graph
, to
, i
, true);
2474 /* Solve the constraint graph GRAPH using our worklist solver.
2475 This is based on the PW* family of solvers from the "Efficient Field
2476 Sensitive Pointer Analysis for C" paper.
2477 It works by iterating over all the graph nodes, processing the complex
2478 constraints and propagating the copy constraints, until everything stops
2479 changed. This corresponds to steps 6-8 in the solving list given above. */
2482 solve_graph (constraint_graph_t graph
)
2484 unsigned int size
= graph
->size
;
2488 changed
= BITMAP_ALLOC (NULL
);
2490 /* Mark all initial non-collapsed nodes as changed. */
2491 for (i
= 0; i
< size
; i
++)
2493 varinfo_t ivi
= get_varinfo (i
);
2494 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2495 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2496 || graph
->complex[i
].length () > 0))
2497 bitmap_set_bit (changed
, i
);
2500 /* Allocate a bitmap to be used to store the changed bits. */
2501 pts
= BITMAP_ALLOC (&pta_obstack
);
2503 while (!bitmap_empty_p (changed
))
2506 struct topo_info
*ti
= init_topo_info ();
2509 bitmap_obstack_initialize (&iteration_obstack
);
2511 compute_topo_order (graph
, ti
);
2513 while (ti
->topo_order
.length () != 0)
2516 i
= ti
->topo_order
.pop ();
2518 /* If this variable is not a representative, skip it. */
2522 /* In certain indirect cycle cases, we may merge this
2523 variable to another. */
2524 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2527 /* If the node has changed, we need to process the
2528 complex constraints and outgoing edges again. */
2529 if (bitmap_clear_bit (changed
, i
))
2534 vec
<constraint_t
> complex = graph
->complex[i
];
2535 varinfo_t vi
= get_varinfo (i
);
2536 bool solution_empty
;
2538 /* Compute the changed set of solution bits. */
2539 if (vi
->oldsolution
)
2540 bitmap_and_compl (pts
, vi
->solution
, vi
->oldsolution
);
2542 bitmap_copy (pts
, vi
->solution
);
2544 if (bitmap_empty_p (pts
))
2547 if (vi
->oldsolution
)
2548 bitmap_ior_into (vi
->oldsolution
, pts
);
2551 vi
->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
2552 bitmap_copy (vi
->oldsolution
, pts
);
2555 solution
= vi
->solution
;
2556 solution_empty
= bitmap_empty_p (solution
);
2558 /* Process the complex constraints */
2559 FOR_EACH_VEC_ELT (complex, j
, c
)
2561 /* XXX: This is going to unsort the constraints in
2562 some cases, which will occasionally add duplicate
2563 constraints during unification. This does not
2564 affect correctness. */
2565 c
->lhs
.var
= find (c
->lhs
.var
);
2566 c
->rhs
.var
= find (c
->rhs
.var
);
2568 /* The only complex constraint that can change our
2569 solution to non-empty, given an empty solution,
2570 is a constraint where the lhs side is receiving
2571 some set from elsewhere. */
2572 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2573 do_complex_constraint (graph
, c
, pts
);
2576 solution_empty
= bitmap_empty_p (solution
);
2578 if (!solution_empty
)
2581 unsigned eff_escaped_id
= find (escaped_id
);
2583 /* Propagate solution to all successors. */
2584 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2590 unsigned int to
= find (j
);
2591 tmp
= get_varinfo (to
)->solution
;
2594 /* Don't try to propagate to ourselves. */
2598 /* If we propagate from ESCAPED use ESCAPED as
2600 if (i
== eff_escaped_id
)
2601 flag
= bitmap_set_bit (tmp
, escaped_id
);
2603 flag
= set_union_with_increment (tmp
, pts
, 0);
2607 get_varinfo (to
)->solution
= tmp
;
2608 bitmap_set_bit (changed
, to
);
2614 free_topo_info (ti
);
2615 bitmap_obstack_release (&iteration_obstack
);
2619 BITMAP_FREE (changed
);
2620 bitmap_obstack_release (&oldpta_obstack
);
2623 /* Map from trees to variable infos. */
2624 static struct pointer_map_t
*vi_for_tree
;
2627 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
/* NOTE(review): garbled extraction -- the embedded original line numbers
   jump (2627 -> 2630 -> 2632 -> 2634), so the return type, the opening
   brace, the actual "*slot = vi" store and the closing brace were dropped.
   Only the fragments below survive; confirm against the original file.  */
2630 insert_vi_for_tree (tree t
, varinfo_t vi
)
/* Reserve (or look up) the map slot keyed by T in VI_FOR_TREE ...  */
2632 void **slot
= pointer_map_insert (vi_for_tree
, t
);
/* ... and assert T was not already mapped.  The line that writes VI into
   the slot is among the dropped lines -- TODO confirm.  */
2634 gcc_assert (*slot
== NULL
);
2638 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2639 exist in the map, return NULL, otherwise, return the varinfo we found. */
/* NOTE(review): incomplete fragment -- original lines 2640-2641, 2643 and
   2645-2647 were dropped, including the return type and, per the comment
   above, the "slot == NULL -> return NULL" guard.  */
2642 lookup_vi_for_tree (tree t
)
/* Query-only lookup (pointer_map_contains does not create a slot).  */
2644 void **slot
= pointer_map_contains (vi_for_tree
, t
);
/* Found: the slot holds the varinfo for T.  */
2648 return (varinfo_t
) *slot
;
2651 /* Return a printable name for DECL */
2654 alias_get_name (tree decl
)
2656 const char *res
= NULL
;
2658 int num_printed
= 0;
2663 if (TREE_CODE (decl
) == SSA_NAME
)
2665 res
= get_name (decl
);
2667 num_printed
= asprintf (&temp
, "%s_%u", res
, SSA_NAME_VERSION (decl
));
2669 num_printed
= asprintf (&temp
, "_%u", SSA_NAME_VERSION (decl
));
2670 if (num_printed
> 0)
2672 res
= ggc_strdup (temp
);
2676 else if (DECL_P (decl
))
2678 if (DECL_ASSEMBLER_NAME_SET_P (decl
))
2679 res
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
2682 res
= get_name (decl
);
2685 num_printed
= asprintf (&temp
, "D.%u", DECL_UID (decl
));
2686 if (num_printed
> 0)
2688 res
= ggc_strdup (temp
);
2700 /* Find the variable id for tree T in the map.
2701 If T doesn't exist in the map, create an entry for it and return it. */
/* NOTE(review): incomplete fragment -- the dropped lines (2702-2703, 2705,
   2707, 2709) presumably include the "if (slot == NULL)" test that selects
   between the two return statements below; as written the second return is
   unreachable.  TODO confirm against the original file.  */
2704 get_vi_for_tree (tree t
)
2706 void **slot
= pointer_map_contains (vi_for_tree
, t
);
/* Not in the map: create a variable info for T (named via
   alias_get_name) and return it.  */
2708 return get_varinfo (create_variable_info_for (t
, alias_get_name (t
)));
/* Already mapped: return the cached varinfo.  */
2710 return (varinfo_t
) *slot
;
2713 /* Get a scalar constraint expression for a new temporary variable. */
2715 static struct constraint_expr
2716 new_scalar_tmp_constraint_exp (const char *name
)
2718 struct constraint_expr tmp
;
2721 vi
= new_var_info (NULL_TREE
, name
);
2725 vi
->is_full_var
= 1;
2734 /* Get a constraint expression vector from an SSA_VAR_P node.
2735 If address_p is true, the result will be taken its address of. */
2738 get_constraint_for_ssa_var (tree t
, vec
<ce_s
> *results
, bool address_p
)
2740 struct constraint_expr cexpr
;
2743 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2744 gcc_assert (TREE_CODE (t
) == SSA_NAME
|| DECL_P (t
));
2746 /* For parameters, get at the points-to set for the actual parm
2748 if (TREE_CODE (t
) == SSA_NAME
2749 && SSA_NAME_IS_DEFAULT_DEF (t
)
2750 && (TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2751 || TREE_CODE (SSA_NAME_VAR (t
)) == RESULT_DECL
))
2753 get_constraint_for_ssa_var (SSA_NAME_VAR (t
), results
, address_p
);
2757 /* For global variables resort to the alias target. */
2758 if (TREE_CODE (t
) == VAR_DECL
2759 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
2761 struct varpool_node
*node
= varpool_get_node (t
);
2762 if (node
&& node
->alias
)
2764 node
= varpool_variable_node (node
, NULL
);
2765 t
= node
->symbol
.decl
;
2769 vi
= get_vi_for_tree (t
);
2771 cexpr
.type
= SCALAR
;
2773 /* If we determine the result is "anything", and we know this is readonly,
2774 say it points to readonly memory instead. */
2775 if (cexpr
.var
== anything_id
&& TREE_READONLY (t
))
2778 cexpr
.type
= ADDRESSOF
;
2779 cexpr
.var
= readonly_id
;
2782 /* If we are not taking the address of the constraint expr, add all
2783 sub-fiels of the variable as well. */
2785 && !vi
->is_full_var
)
2787 for (; vi
; vi
= vi
->next
)
2790 results
->safe_push (cexpr
);
2795 results
->safe_push (cexpr
);
2798 /* Process constraint T, performing various simplifications and then
2799 adding it to our list of overall constraints. */
2802 process_constraint (constraint_t t
)
2804 struct constraint_expr rhs
= t
->rhs
;
2805 struct constraint_expr lhs
= t
->lhs
;
2807 gcc_assert (rhs
.var
< varmap
.length ());
2808 gcc_assert (lhs
.var
< varmap
.length ());
2810 /* If we didn't get any useful constraint from the lhs we get
2811 &ANYTHING as fallback from get_constraint_for. Deal with
2812 it here by turning it into *ANYTHING. */
2813 if (lhs
.type
== ADDRESSOF
2814 && lhs
.var
== anything_id
)
2817 /* ADDRESSOF on the lhs is invalid. */
2818 gcc_assert (lhs
.type
!= ADDRESSOF
);
2820 /* We shouldn't add constraints from things that cannot have pointers.
2821 It's not completely trivial to avoid in the callers, so do it here. */
2822 if (rhs
.type
!= ADDRESSOF
2823 && !get_varinfo (rhs
.var
)->may_have_pointers
)
2826 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2827 if (!get_varinfo (lhs
.var
)->may_have_pointers
)
2830 /* This can happen in our IR with things like n->a = *p */
2831 if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
2833 /* Split into tmp = *rhs, *lhs = tmp */
2834 struct constraint_expr tmplhs
;
2835 tmplhs
= new_scalar_tmp_constraint_exp ("doubledereftmp");
2836 process_constraint (new_constraint (tmplhs
, rhs
));
2837 process_constraint (new_constraint (lhs
, tmplhs
));
2839 else if (rhs
.type
== ADDRESSOF
&& lhs
.type
== DEREF
)
2841 /* Split into tmp = &rhs, *lhs = tmp */
2842 struct constraint_expr tmplhs
;
2843 tmplhs
= new_scalar_tmp_constraint_exp ("derefaddrtmp");
2844 process_constraint (new_constraint (tmplhs
, rhs
));
2845 process_constraint (new_constraint (lhs
, tmplhs
));
2849 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
2850 constraints
.safe_push (t
);
2855 /* Return the position, in bits, of FIELD_DECL from the beginning of its
/* NOTE(review): the comment's continuation (orig. lines 2856-2857) and the
   body of the "if" below (orig. lines 2863-2864, presumably a failure
   return) were dropped by the extraction.  TODO confirm.  */
2858 static HOST_WIDE_INT
2859 bitpos_of_field (const tree fdecl
)
/* Bail out when either offset component is not a host-representable
   integer constant.  */
2861 if (!host_integerp (DECL_FIELD_OFFSET (fdecl
), 0)
2862 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl
), 0))
/* Combine the byte offset (scaled to bits) with the residual bit
   offset of the field.  */
2865 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl
)) * BITS_PER_UNIT
2866 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl
)));
2870 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2871 resulting constraint expressions in *RESULTS. */
2874 get_constraint_for_ptr_offset (tree ptr
, tree offset
,
2877 struct constraint_expr c
;
2879 HOST_WIDE_INT rhsoffset
;
2881 /* If we do not do field-sensitive PTA adding offsets to pointers
2882 does not change the points-to solution. */
2883 if (!use_field_sensitive
)
2885 get_constraint_for_rhs (ptr
, results
);
2889 /* If the offset is not a non-negative integer constant that fits
2890 in a HOST_WIDE_INT, we have to fall back to a conservative
2891 solution which includes all sub-fields of all pointed-to
2892 variables of ptr. */
2893 if (offset
== NULL_TREE
2894 || TREE_CODE (offset
) != INTEGER_CST
)
2895 rhsoffset
= UNKNOWN_OFFSET
;
2898 /* Sign-extend the offset. */
2899 double_int soffset
= tree_to_double_int (offset
)
2900 .sext (TYPE_PRECISION (TREE_TYPE (offset
)));
2901 if (!soffset
.fits_shwi ())
2902 rhsoffset
= UNKNOWN_OFFSET
;
2905 /* Make sure the bit-offset also fits. */
2906 HOST_WIDE_INT rhsunitoffset
= soffset
.low
;
2907 rhsoffset
= rhsunitoffset
* BITS_PER_UNIT
;
2908 if (rhsunitoffset
!= rhsoffset
/ BITS_PER_UNIT
)
2909 rhsoffset
= UNKNOWN_OFFSET
;
2913 get_constraint_for_rhs (ptr
, results
);
2917 /* As we are eventually appending to the solution do not use
2918 vec::iterate here. */
2919 n
= results
->length ();
2920 for (j
= 0; j
< n
; j
++)
2924 curr
= get_varinfo (c
.var
);
2926 if (c
.type
== ADDRESSOF
2927 /* If this varinfo represents a full variable just use it. */
2928 && curr
->is_full_var
)
2930 else if (c
.type
== ADDRESSOF
2931 /* If we do not know the offset add all subfields. */
2932 && rhsoffset
== UNKNOWN_OFFSET
)
2934 varinfo_t temp
= lookup_vi_for_tree (curr
->decl
);
2937 struct constraint_expr c2
;
2939 c2
.type
= ADDRESSOF
;
2941 if (c2
.var
!= c
.var
)
2942 results
->safe_push (c2
);
2947 else if (c
.type
== ADDRESSOF
)
2950 unsigned HOST_WIDE_INT offset
= curr
->offset
+ rhsoffset
;
2952 /* Search the sub-field which overlaps with the
2953 pointed-to offset. If the result is outside of the variable
2954 we have to provide a conservative result, as the variable is
2955 still reachable from the resulting pointer (even though it
2956 technically cannot point to anything). The last and first
2957 sub-fields are such conservative results.
2958 ??? If we always had a sub-field for &object + 1 then
2959 we could represent this in a more precise way. */
2961 && curr
->offset
< offset
)
2963 temp
= first_or_preceding_vi_for_offset (curr
, offset
);
2965 /* If the found variable is not exactly at the pointed to
2966 result, we have to include the next variable in the
2967 solution as well. Otherwise two increments by offset / 2
2968 do not result in the same or a conservative superset
2970 if (temp
->offset
!= offset
2971 && temp
->next
!= NULL
)
2973 struct constraint_expr c2
;
2974 c2
.var
= temp
->next
->id
;
2975 c2
.type
= ADDRESSOF
;
2977 results
->safe_push (c2
);
2983 c
.offset
= rhsoffset
;
2990 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2991 If address_p is true the result will be taken its address of.
2992 If lhs_p is true then the constraint expression is assumed to be used
2996 get_constraint_for_component_ref (tree t
, vec
<ce_s
> *results
,
2997 bool address_p
, bool lhs_p
)
3000 HOST_WIDE_INT bitsize
= -1;
3001 HOST_WIDE_INT bitmaxsize
= -1;
3002 HOST_WIDE_INT bitpos
;
3005 /* Some people like to do cute things like take the address of
3008 while (handled_component_p (forzero
)
3009 || INDIRECT_REF_P (forzero
)
3010 || TREE_CODE (forzero
) == MEM_REF
)
3011 forzero
= TREE_OPERAND (forzero
, 0);
3013 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
3015 struct constraint_expr temp
;
3018 temp
.var
= integer_id
;
3020 results
->safe_push (temp
);
3024 /* Handle type-punning through unions. If we are extracting a pointer
3025 from a union via a possibly type-punning access that pointer
3026 points to anything, similar to a conversion of an integer to
3032 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
3033 u
= TREE_OPERAND (u
, 0))
3034 if (TREE_CODE (u
) == COMPONENT_REF
3035 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
3037 struct constraint_expr temp
;
3040 temp
.var
= anything_id
;
3041 temp
.type
= ADDRESSOF
;
3042 results
->safe_push (temp
);
3047 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
);
3049 /* Pretend to take the address of the base, we'll take care of
3050 adding the required subset of sub-fields below. */
3051 get_constraint_for_1 (t
, results
, true, lhs_p
);
3052 gcc_assert (results
->length () == 1);
3053 struct constraint_expr
&result
= results
->last ();
3055 if (result
.type
== SCALAR
3056 && get_varinfo (result
.var
)->is_full_var
)
3057 /* For single-field vars do not bother about the offset. */
3059 else if (result
.type
== SCALAR
)
3061 /* In languages like C, you can access one past the end of an
3062 array. You aren't allowed to dereference it, so we can
3063 ignore this constraint. When we handle pointer subtraction,
3064 we may have to do something cute here. */
3066 if ((unsigned HOST_WIDE_INT
)bitpos
< get_varinfo (result
.var
)->fullsize
3069 /* It's also not true that the constraint will actually start at the
3070 right offset, it may start in some padding. We only care about
3071 setting the constraint to the first actual field it touches, so
3073 struct constraint_expr cexpr
= result
;
3077 for (curr
= get_varinfo (cexpr
.var
); curr
; curr
= curr
->next
)
3079 if (ranges_overlap_p (curr
->offset
, curr
->size
,
3080 bitpos
, bitmaxsize
))
3082 cexpr
.var
= curr
->id
;
3083 results
->safe_push (cexpr
);
3088 /* If we are going to take the address of this field then
3089 to be able to compute reachability correctly add at least
3090 the last field of the variable. */
3091 if (address_p
&& results
->length () == 0)
3093 curr
= get_varinfo (cexpr
.var
);
3094 while (curr
->next
!= NULL
)
3096 cexpr
.var
= curr
->id
;
3097 results
->safe_push (cexpr
);
3099 else if (results
->length () == 0)
3100 /* Assert that we found *some* field there. The user couldn't be
3101 accessing *only* padding. */
3102 /* Still the user could access one past the end of an array
3103 embedded in a struct resulting in accessing *only* padding. */
3104 /* Or accessing only padding via type-punning to a type
3105 that has a filed just in padding space. */
3107 cexpr
.type
= SCALAR
;
3108 cexpr
.var
= anything_id
;
3110 results
->safe_push (cexpr
);
3113 else if (bitmaxsize
== 0)
3115 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3116 fprintf (dump_file
, "Access to zero-sized part of variable,"
3120 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3121 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
3123 else if (result
.type
== DEREF
)
3125 /* If we do not know exactly where the access goes say so. Note
3126 that only for non-structure accesses we know that we access
3127 at most one subfiled of any variable. */
3129 || bitsize
!= bitmaxsize
3130 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t
))
3131 || result
.offset
== UNKNOWN_OFFSET
)
3132 result
.offset
= UNKNOWN_OFFSET
;
3134 result
.offset
+= bitpos
;
3136 else if (result
.type
== ADDRESSOF
)
3138 /* We can end up here for component references on a
3139 VIEW_CONVERT_EXPR <>(&foobar). */
3140 result
.type
= SCALAR
;
3141 result
.var
= anything_id
;
3149 /* Dereference the constraint expression CONS, and return the result.
3150 DEREF (ADDRESSOF) = SCALAR
3151 DEREF (SCALAR) = DEREF
3152 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3153 This is needed so that we can handle dereferencing DEREF constraints. */
3156 do_deref (vec
<ce_s
> *constraints
)
3158 struct constraint_expr
*c
;
3161 FOR_EACH_VEC_ELT (*constraints
, i
, c
)
3163 if (c
->type
== SCALAR
)
3165 else if (c
->type
== ADDRESSOF
)
3167 else if (c
->type
== DEREF
)
3169 struct constraint_expr tmplhs
;
3170 tmplhs
= new_scalar_tmp_constraint_exp ("dereftmp");
3171 process_constraint (new_constraint (tmplhs
, *c
));
3172 c
->var
= tmplhs
.var
;
3179 /* Given a tree T, return the constraint expression for taking the
3183 get_constraint_for_address_of (tree t
, vec
<ce_s
> *results
)
3185 struct constraint_expr
*c
;
3188 get_constraint_for_1 (t
, results
, true, true);
3190 FOR_EACH_VEC_ELT (*results
, i
, c
)
3192 if (c
->type
== DEREF
)
3195 c
->type
= ADDRESSOF
;
3199 /* Given a tree T, return the constraint expression for it. */
3202 get_constraint_for_1 (tree t
, vec
<ce_s
> *results
, bool address_p
,
3205 struct constraint_expr temp
;
3207 /* x = integer is all glommed to a single variable, which doesn't
3208 point to anything by itself. That is, of course, unless it is an
3209 integer constant being treated as a pointer, in which case, we
3210 will return that this is really the addressof anything. This
3211 happens below, since it will fall into the default case. The only
3212 case we know something about an integer treated like a pointer is
3213 when it is the NULL pointer, and then we just say it points to
3216 Do not do that if -fno-delete-null-pointer-checks though, because
3217 in that case *NULL does not fail, so it _should_ alias *anything.
3218 It is not worth adding a new option or renaming the existing one,
3219 since this case is relatively obscure. */
3220 if ((TREE_CODE (t
) == INTEGER_CST
3221 && integer_zerop (t
))
3222 /* The only valid CONSTRUCTORs in gimple with pointer typed
3223 elements are zero-initializer. But in IPA mode we also
3224 process global initializers, so verify at least. */
3225 || (TREE_CODE (t
) == CONSTRUCTOR
3226 && CONSTRUCTOR_NELTS (t
) == 0))
3228 if (flag_delete_null_pointer_checks
)
3229 temp
.var
= nothing_id
;
3231 temp
.var
= nonlocal_id
;
3232 temp
.type
= ADDRESSOF
;
3234 results
->safe_push (temp
);
3238 /* String constants are read-only. */
3239 if (TREE_CODE (t
) == STRING_CST
)
3241 temp
.var
= readonly_id
;
3244 results
->safe_push (temp
);
3248 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
3250 case tcc_expression
:
3252 switch (TREE_CODE (t
))
3255 get_constraint_for_address_of (TREE_OPERAND (t
, 0), results
);
3263 switch (TREE_CODE (t
))
3267 struct constraint_expr cs
;
3269 get_constraint_for_ptr_offset (TREE_OPERAND (t
, 0),
3270 TREE_OPERAND (t
, 1), results
);
3273 /* If we are not taking the address then make sure to process
3274 all subvariables we might access. */
3278 cs
= results
->last ();
3279 if (cs
.type
== DEREF
3280 && type_can_have_subvars (TREE_TYPE (t
)))
3282 /* For dereferences this means we have to defer it
3284 results
->last ().offset
= UNKNOWN_OFFSET
;
3287 if (cs
.type
!= SCALAR
)
3290 vi
= get_varinfo (cs
.var
);
3292 if (!vi
->is_full_var
3295 unsigned HOST_WIDE_INT size
;
3296 if (host_integerp (TYPE_SIZE (TREE_TYPE (t
)), 1))
3297 size
= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t
)));
3300 for (; curr
; curr
= curr
->next
)
3302 if (curr
->offset
- vi
->offset
< size
)
3305 results
->safe_push (cs
);
3314 case ARRAY_RANGE_REF
:
3316 get_constraint_for_component_ref (t
, results
, address_p
, lhs_p
);
3318 case VIEW_CONVERT_EXPR
:
3319 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, address_p
,
3322 /* We are missing handling for TARGET_MEM_REF here. */
3327 case tcc_exceptional
:
3329 switch (TREE_CODE (t
))
3333 get_constraint_for_ssa_var (t
, results
, address_p
);
3340 vec
<ce_s
> tmp
= vNULL
;
3341 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
3343 struct constraint_expr
*rhsp
;
3345 get_constraint_for_1 (val
, &tmp
, address_p
, lhs_p
);
3346 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
3347 results
->safe_push (*rhsp
);
3351 /* We do not know whether the constructor was complete,
3352 so technically we have to add &NOTHING or &ANYTHING
3353 like we do for an empty constructor as well. */
3360 case tcc_declaration
:
3362 get_constraint_for_ssa_var (t
, results
, address_p
);
3367 /* We cannot refer to automatic variables through constants. */
3368 temp
.type
= ADDRESSOF
;
3369 temp
.var
= nonlocal_id
;
3371 results
->safe_push (temp
);
3377 /* The default fallback is a constraint from anything. */
3378 temp
.type
= ADDRESSOF
;
3379 temp
.var
= anything_id
;
3381 results
->safe_push (temp
);
3384 /* Given a gimple tree T, return the constraint expression vector for it. */
3387 get_constraint_for (tree t
, vec
<ce_s
> *results
)
3389 gcc_assert (results
->length () == 0);
3391 get_constraint_for_1 (t
, results
, false, true);
3394 /* Given a gimple tree T, return the constraint expression vector for it
3395 to be used as the rhs of a constraint. */
3398 get_constraint_for_rhs (tree t
, vec
<ce_s
> *results
)
3400 gcc_assert (results
->length () == 0);
3402 get_constraint_for_1 (t
, results
, false, false);
3406 /* Efficiently generates constraints from all entries in *RHSC to all
3407 entries in *LHSC. */
3410 process_all_all_constraints (vec
<ce_s
> lhsc
,
3413 struct constraint_expr
*lhsp
, *rhsp
;
3416 if (lhsc
.length () <= 1 || rhsc
.length () <= 1)
3418 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3419 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
3420 process_constraint (new_constraint (*lhsp
, *rhsp
));
3424 struct constraint_expr tmp
;
3425 tmp
= new_scalar_tmp_constraint_exp ("allalltmp");
3426 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
3427 process_constraint (new_constraint (tmp
, *rhsp
));
3428 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3429 process_constraint (new_constraint (*lhsp
, tmp
));
3433 /* Handle aggregate copies by expanding into copies of the respective
3434 fields of the structures. */
3437 do_structure_copy (tree lhsop
, tree rhsop
)
3439 struct constraint_expr
*lhsp
, *rhsp
;
3440 vec
<ce_s
> lhsc
= vNULL
;
3441 vec
<ce_s
> rhsc
= vNULL
;
3444 get_constraint_for (lhsop
, &lhsc
);
3445 get_constraint_for_rhs (rhsop
, &rhsc
);
3448 if (lhsp
->type
== DEREF
3449 || (lhsp
->type
== ADDRESSOF
&& lhsp
->var
== anything_id
)
3450 || rhsp
->type
== DEREF
)
3452 if (lhsp
->type
== DEREF
)
3454 gcc_assert (lhsc
.length () == 1);
3455 lhsp
->offset
= UNKNOWN_OFFSET
;
3457 if (rhsp
->type
== DEREF
)
3459 gcc_assert (rhsc
.length () == 1);
3460 rhsp
->offset
= UNKNOWN_OFFSET
;
3462 process_all_all_constraints (lhsc
, rhsc
);
3464 else if (lhsp
->type
== SCALAR
3465 && (rhsp
->type
== SCALAR
3466 || rhsp
->type
== ADDRESSOF
))
3468 HOST_WIDE_INT lhssize
, lhsmaxsize
, lhsoffset
;
3469 HOST_WIDE_INT rhssize
, rhsmaxsize
, rhsoffset
;
3471 get_ref_base_and_extent (lhsop
, &lhsoffset
, &lhssize
, &lhsmaxsize
);
3472 get_ref_base_and_extent (rhsop
, &rhsoffset
, &rhssize
, &rhsmaxsize
);
3473 for (j
= 0; lhsc
.iterate (j
, &lhsp
);)
3475 varinfo_t lhsv
, rhsv
;
3477 lhsv
= get_varinfo (lhsp
->var
);
3478 rhsv
= get_varinfo (rhsp
->var
);
3479 if (lhsv
->may_have_pointers
3480 && (lhsv
->is_full_var
3481 || rhsv
->is_full_var
3482 || ranges_overlap_p (lhsv
->offset
+ rhsoffset
, lhsv
->size
,
3483 rhsv
->offset
+ lhsoffset
, rhsv
->size
)))
3484 process_constraint (new_constraint (*lhsp
, *rhsp
));
3485 if (!rhsv
->is_full_var
3486 && (lhsv
->is_full_var
3487 || (lhsv
->offset
+ rhsoffset
+ lhsv
->size
3488 > rhsv
->offset
+ lhsoffset
+ rhsv
->size
)))
3491 if (k
>= rhsc
.length ())
3505 /* Create constraints ID = { rhsc }. */
3508 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3510 struct constraint_expr
*c
;
3511 struct constraint_expr includes
;
3515 includes
.offset
= 0;
3516 includes
.type
= SCALAR
;
3518 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3519 process_constraint (new_constraint (includes
, *c
));
3522 /* Create a constraint ID = OP. */
3525 make_constraint_to (unsigned id
, tree op
)
3527 vec
<ce_s
> rhsc
= vNULL
;
3528 get_constraint_for_rhs (op
, &rhsc
);
3529 make_constraints_to (id
, rhsc
);
3533 /* Create a constraint ID = &FROM. */
3536 make_constraint_from (varinfo_t vi
, int from
)
3538 struct constraint_expr lhs
, rhs
;
3546 rhs
.type
= ADDRESSOF
;
3547 process_constraint (new_constraint (lhs
, rhs
));
3550 /* Create a constraint ID = FROM. */
3553 make_copy_constraint (varinfo_t vi
, int from
)
3555 struct constraint_expr lhs
, rhs
;
3564 process_constraint (new_constraint (lhs
, rhs
));
3567 /* Make constraints necessary to make OP escape. */
/* NOTE(review): incomplete fragment -- return type, braces and any
   surrounding lines (orig. 3568-3569, 3571, 3573) were dropped.
   What survives is a single delegation: ESCAPED = { OP }.  */
3570 make_escape_constraint (tree op
)
3572 make_constraint_to (escaped_id
, op
);
3575 /* Add constraints to that the solution of VI is transitively closed. */
3578 make_transitive_closure_constraints (varinfo_t vi
)
3580 struct constraint_expr lhs
, rhs
;
3589 process_constraint (new_constraint (lhs
, rhs
));
3591 /* VAR = VAR + UNKNOWN; */
3597 rhs
.offset
= UNKNOWN_OFFSET
;
3598 process_constraint (new_constraint (lhs
, rhs
));
3601 /* Temporary storage for fake var decls. */
3602 struct obstack fake_var_decl_obstack
;
3604 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
/* NOTE(review): incomplete fragment -- return type, braces and the final
   "return decl;" (orig. lines 3605-3608, 3616-3617) were dropped.  The
   surviving statements allocate the decl on fake_var_decl_obstack rather
   than through the GC allocator, which is why it is "fake".  */
3607 build_fake_var_decl (tree type
)
/* Raw obstack allocation of a tree_var_decl-sized node.  */
3609 tree decl
= (tree
) XOBNEW (&fake_var_decl_obstack
, struct tree_var_decl
);
/* Zero it before setting any fields.  */
3610 memset (decl
, 0, sizeof (struct tree_var_decl
));
3611 TREE_SET_CODE (decl
, VAR_DECL
);
3612 TREE_TYPE (decl
) = type
;
/* Give it a fresh DECL_UID but an invalid points-to UID (-1);
   presumably the PT UID is filled in by the caller -- TODO confirm.  */
3613 DECL_UID (decl
) = allocate_decl_uid ();
3614 SET_DECL_PT_UID (decl
, -1);
3615 layout_decl (decl
, 0);
3619 /* Create a new artificial heap variable with NAME.
3620 Return the created variable. */
3623 make_heapvar (const char *name
)
3628 heapvar
= build_fake_var_decl (ptr_type_node
);
3629 DECL_EXTERNAL (heapvar
) = 1;
3631 vi
= new_var_info (heapvar
, name
);
3632 vi
->is_artificial_var
= true;
3633 vi
->is_heap_var
= true;
3634 vi
->is_unknown_size_var
= true;
3638 vi
->is_full_var
= true;
3639 insert_vi_for_tree (heapvar
, vi
);
3644 /* Create a new artificial heap variable with NAME and make a
3645 constraint from it to LHS. Set flags according to a tag used
3646 for tracking restrict pointers. */
/* NOTE(review): incomplete fragment -- return type, braces and the final
   return of VI (orig. lines 3647-3648, 3650, 3655-3656) were dropped.  */
3649 make_constraint_from_restrict (varinfo_t lhs
, const char *name
)
/* Fresh artificial heap var to stand in for the restrict-qualified
   object ...  */
3651 varinfo_t vi
= make_heapvar (name
);
/* ... treated as global and pointer-bearing ...  */
3652 vi
->is_global_var
= 1;
3653 vi
->may_have_pointers
= 1;
/* ... and made the (address-of) target of LHS.  */
3654 make_constraint_from (lhs
, vi
->id
);
3658 /* Create a new artificial heap variable with NAME and make a
3659 constraint from it to LHS. Set flags according to a tag used
3660 for tracking restrict pointers and make the artificial heap
3661 point to global memory. */
/* NOTE(review): incomplete fragment -- return type, braces and any
   trailing return (orig. lines 3662-3663, 3665, 3668-3669) were
   dropped.  Builds on make_constraint_from_restrict and additionally
   lets the heap var point to NONLOCAL (global) memory.  */
3664 make_constraint_from_global_restrict (varinfo_t lhs
, const char *name
)
3666 varinfo_t vi
= make_constraint_from_restrict (lhs
, name
);
/* vi = NONLOCAL: the restrict tag may point anywhere global.  */
3667 make_copy_constraint (vi
, nonlocal_id
);
3671 /* In IPA mode there are varinfos for different aspects of reach
3672 function designator. One for the points-to set of the return
3673 value, one for the variables that are clobbered by the function,
3674 one for its uses and one for each parameter (including a single
3675 glob for remaining variadic arguments). */
3677 enum { fi_clobbers
= 1, fi_uses
= 2,
3678 fi_static_chain
= 3, fi_result
= 4, fi_parm_base
= 5 };
3680 /* Get a constraint for the requested part of a function designator FI
3681 when operating in IPA mode. */
3683 static struct constraint_expr
3684 get_function_part_constraint (varinfo_t fi
, unsigned part
)
3686 struct constraint_expr c
;
3688 gcc_assert (in_ipa_mode
);
3690 if (fi
->id
== anything_id
)
3692 /* ??? We probably should have a ANYFN special variable. */
3693 c
.var
= anything_id
;
3697 else if (TREE_CODE (fi
->decl
) == FUNCTION_DECL
)
3699 varinfo_t ai
= first_vi_for_offset (fi
, part
);
3703 c
.var
= anything_id
;
3717 /* For non-IPA mode, generate constraints necessary for a call on the
3721 handle_rhs_call (gimple stmt
, vec
<ce_s
> *results
)
3723 struct constraint_expr rhsc
;
3725 bool returns_uses
= false;
3727 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3729 tree arg
= gimple_call_arg (stmt
, i
);
3730 int flags
= gimple_call_arg_flags (stmt
, i
);
3732 /* If the argument is not used we can ignore it. */
3733 if (flags
& EAF_UNUSED
)
3736 /* As we compute ESCAPED context-insensitive we do not gain
3737 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3738 set. The argument would still get clobbered through the
3740 if ((flags
& EAF_NOCLOBBER
)
3741 && (flags
& EAF_NOESCAPE
))
3743 varinfo_t uses
= get_call_use_vi (stmt
);
3744 if (!(flags
& EAF_DIRECT
))
3746 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3747 make_constraint_to (tem
->id
, arg
);
3748 make_transitive_closure_constraints (tem
);
3749 make_copy_constraint (uses
, tem
->id
);
3752 make_constraint_to (uses
->id
, arg
);
3753 returns_uses
= true;
3755 else if (flags
& EAF_NOESCAPE
)
3757 struct constraint_expr lhs
, rhs
;
3758 varinfo_t uses
= get_call_use_vi (stmt
);
3759 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
3760 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3761 make_constraint_to (tem
->id
, arg
);
3762 if (!(flags
& EAF_DIRECT
))
3763 make_transitive_closure_constraints (tem
);
3764 make_copy_constraint (uses
, tem
->id
);
3765 make_copy_constraint (clobbers
, tem
->id
);
3766 /* Add *tem = nonlocal, do not add *tem = callused as
3767 EAF_NOESCAPE parameters do not escape to other parameters
3768 and all other uses appear in NONLOCAL as well. */
3773 rhs
.var
= nonlocal_id
;
3775 process_constraint (new_constraint (lhs
, rhs
));
3776 returns_uses
= true;
3779 make_escape_constraint (arg
);
3782 /* If we added to the calls uses solution make sure we account for
3783 pointers to it to be returned. */
3786 rhsc
.var
= get_call_use_vi (stmt
)->id
;
3789 results
->safe_push (rhsc
);
3792 /* The static chain escapes as well. */
3793 if (gimple_call_chain (stmt
))
3794 make_escape_constraint (gimple_call_chain (stmt
));
3796 /* And if we applied NRV the address of the return slot escapes as well. */
3797 if (gimple_call_return_slot_opt_p (stmt
)
3798 && gimple_call_lhs (stmt
) != NULL_TREE
3799 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
3801 vec
<ce_s
> tmpc
= vNULL
;
3802 struct constraint_expr lhsc
, *c
;
3803 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
3804 lhsc
.var
= escaped_id
;
3807 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
3808 process_constraint (new_constraint (lhsc
, *c
));
3812 /* Regular functions return nonlocal memory. */
3813 rhsc
.var
= nonlocal_id
;
3816 results
->safe_push (rhsc
);
3819 /* For non-IPA mode, generate constraints necessary for a call
3820 that returns a pointer and assigns it to LHS. This simply makes
3821 the LHS point to global and escaped variables. */
3824 handle_lhs_call (gimple stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
3827 vec
<ce_s
> lhsc
= vNULL
;
3829 get_constraint_for (lhs
, &lhsc
);
3830 /* If the store is to a global decl make sure to
3831 add proper escape constraints. */
3832 lhs
= get_base_address (lhs
);
3835 && is_global_var (lhs
))
3837 struct constraint_expr tmpc
;
3838 tmpc
.var
= escaped_id
;
3841 lhsc
.safe_push (tmpc
);
3844 /* If the call returns an argument unmodified override the rhs
3846 flags
= gimple_call_return_flags (stmt
);
3847 if (flags
& ERF_RETURNS_ARG
3848 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
3852 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
3853 get_constraint_for (arg
, &rhsc
);
3854 process_all_all_constraints (lhsc
, rhsc
);
3857 else if (flags
& ERF_NOALIAS
)
3860 struct constraint_expr tmpc
;
3862 vi
= make_heapvar ("HEAP");
3863 /* We delay marking allocated storage global until we know if
3865 DECL_EXTERNAL (vi
->decl
) = 0;
3866 vi
->is_global_var
= 0;
3867 /* If this is not a real malloc call assume the memory was
3868 initialized and thus may point to global memory. All
3869 builtin functions with the malloc attribute behave in a sane way. */
3871 || DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_NORMAL
)
3872 make_constraint_from (vi
, nonlocal_id
);
3875 tmpc
.type
= ADDRESSOF
;
3876 rhsc
.safe_push (tmpc
);
3877 process_all_all_constraints (lhsc
, rhsc
);
3881 process_all_all_constraints (lhsc
, rhsc
);
3886 /* For non-IPA mode, generate constraints necessary for a call of a
3887 const function that returns a pointer in the statement STMT. */
3890 handle_const_call (gimple stmt
, vec
<ce_s
> *results
)
3892 struct constraint_expr rhsc
;
3895 /* Treat nested const functions the same as pure functions as far
3896 as the static chain is concerned. */
3897 if (gimple_call_chain (stmt
))
3899 varinfo_t uses
= get_call_use_vi (stmt
);
3900 make_transitive_closure_constraints (uses
);
3901 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
3902 rhsc
.var
= uses
->id
;
3905 results
->safe_push (rhsc
);
3908 /* May return arguments. */
3909 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
3911 tree arg
= gimple_call_arg (stmt
, k
);
3912 vec
<ce_s
> argc
= vNULL
;
3914 struct constraint_expr
*argp
;
3915 get_constraint_for_rhs (arg
, &argc
);
3916 FOR_EACH_VEC_ELT (argc
, i
, argp
)
3917 results
->safe_push (*argp
);
3921 /* May return addresses of globals. */
3922 rhsc
.var
= nonlocal_id
;
3924 rhsc
.type
= ADDRESSOF
;
3925 results
->safe_push (rhsc
);
3928 /* For non-IPA mode, generate constraints necessary for a call to a
3929 pure function in statement STMT. */
3932 handle_pure_call (gimple stmt
, vec
<ce_s
> *results
)
3934 struct constraint_expr rhsc
;
3936 varinfo_t uses
= NULL
;
3938 /* Memory reached from pointer arguments is call-used. */
3939 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3941 tree arg
= gimple_call_arg (stmt
, i
);
3944 uses
= get_call_use_vi (stmt
);
3945 make_transitive_closure_constraints (uses
);
3947 make_constraint_to (uses
->id
, arg
);
3950 /* The static chain is used as well. */
3951 if (gimple_call_chain (stmt
))
3955 uses
= get_call_use_vi (stmt
);
3956 make_transitive_closure_constraints (uses
);
3958 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
3961 /* Pure functions may return call-used and nonlocal memory. */
3964 rhsc
.var
= uses
->id
;
3967 results
->safe_push (rhsc
);
3969 rhsc
.var
= nonlocal_id
;
3972 results
->safe_push (rhsc
);
3976 /* Return the varinfo for the callee of CALL. */
3979 get_fi_for_callee (gimple call
)
3981 tree decl
, fn
= gimple_call_fn (call
);
3983 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
3984 fn
= OBJ_TYPE_REF_EXPR (fn
);
3986 /* If we can directly resolve the function being called, do so.
3987 Otherwise, it must be some sort of indirect expression that
3988 we should still be able to handle. */
3989 decl
= gimple_call_addr_fndecl (fn
);
3991 return get_vi_for_tree (decl
);
3993 /* If the function is anything other than a SSA name pointer we have no
3994 clue and should be getting ANYFN (well, ANYTHING for now). */
3995 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
3996 return get_varinfo (anything_id
);
3998 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
3999 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4000 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4001 fn
= SSA_NAME_VAR (fn
);
4003 return get_vi_for_tree (fn
);
4006 /* Create constraints for the builtin call T. Return true if the call
4007 was handled, otherwise false. */
4010 find_func_aliases_for_builtin_call (gimple t
)
4012 tree fndecl
= gimple_call_fndecl (t
);
4013 vec
<ce_s
> lhsc
= vNULL
;
4014 vec
<ce_s
> rhsc
= vNULL
;
4017 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4018 /* ??? All builtins that are handled here need to be handled
4019 in the alias-oracle query functions explicitly! */
4020 switch (DECL_FUNCTION_CODE (fndecl
))
4022 /* All the following functions return a pointer to the same object
4023 as their first argument points to. The functions do not add
4024 to the ESCAPED solution. The functions make the first argument
4025 pointed to memory point to what the second argument pointed to
4026 memory points to. */
4027 case BUILT_IN_STRCPY
:
4028 case BUILT_IN_STRNCPY
:
4029 case BUILT_IN_BCOPY
:
4030 case BUILT_IN_MEMCPY
:
4031 case BUILT_IN_MEMMOVE
:
4032 case BUILT_IN_MEMPCPY
:
4033 case BUILT_IN_STPCPY
:
4034 case BUILT_IN_STPNCPY
:
4035 case BUILT_IN_STRCAT
:
4036 case BUILT_IN_STRNCAT
:
4037 case BUILT_IN_STRCPY_CHK
:
4038 case BUILT_IN_STRNCPY_CHK
:
4039 case BUILT_IN_MEMCPY_CHK
:
4040 case BUILT_IN_MEMMOVE_CHK
:
4041 case BUILT_IN_MEMPCPY_CHK
:
4042 case BUILT_IN_STPCPY_CHK
:
4043 case BUILT_IN_STPNCPY_CHK
:
4044 case BUILT_IN_STRCAT_CHK
:
4045 case BUILT_IN_STRNCAT_CHK
:
4046 case BUILT_IN_TM_MEMCPY
:
4047 case BUILT_IN_TM_MEMMOVE
:
4049 tree res
= gimple_call_lhs (t
);
4050 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4051 == BUILT_IN_BCOPY
? 1 : 0));
4052 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4053 == BUILT_IN_BCOPY
? 0 : 1));
4054 if (res
!= NULL_TREE
)
4056 get_constraint_for (res
, &lhsc
);
4057 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4058 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4059 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4060 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4061 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4062 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4063 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4065 get_constraint_for (dest
, &rhsc
);
4066 process_all_all_constraints (lhsc
, rhsc
);
4070 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4071 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4074 process_all_all_constraints (lhsc
, rhsc
);
4079 case BUILT_IN_MEMSET
:
4080 case BUILT_IN_MEMSET_CHK
:
4081 case BUILT_IN_TM_MEMSET
:
4083 tree res
= gimple_call_lhs (t
);
4084 tree dest
= gimple_call_arg (t
, 0);
4087 struct constraint_expr ac
;
4088 if (res
!= NULL_TREE
)
4090 get_constraint_for (res
, &lhsc
);
4091 get_constraint_for (dest
, &rhsc
);
4092 process_all_all_constraints (lhsc
, rhsc
);
4096 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4098 if (flag_delete_null_pointer_checks
4099 && integer_zerop (gimple_call_arg (t
, 1)))
4101 ac
.type
= ADDRESSOF
;
4102 ac
.var
= nothing_id
;
4107 ac
.var
= integer_id
;
4110 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4111 process_constraint (new_constraint (*lhsp
, ac
));
4115 case BUILT_IN_ASSUME_ALIGNED
:
4117 tree res
= gimple_call_lhs (t
);
4118 tree dest
= gimple_call_arg (t
, 0);
4119 if (res
!= NULL_TREE
)
4121 get_constraint_for (res
, &lhsc
);
4122 get_constraint_for (dest
, &rhsc
);
4123 process_all_all_constraints (lhsc
, rhsc
);
4129 /* All the following functions do not return pointers, do not
4130 modify the points-to sets of memory reachable from their
4131 arguments and do not add to the ESCAPED solution. */
4132 case BUILT_IN_SINCOS
:
4133 case BUILT_IN_SINCOSF
:
4134 case BUILT_IN_SINCOSL
:
4135 case BUILT_IN_FREXP
:
4136 case BUILT_IN_FREXPF
:
4137 case BUILT_IN_FREXPL
:
4138 case BUILT_IN_GAMMA_R
:
4139 case BUILT_IN_GAMMAF_R
:
4140 case BUILT_IN_GAMMAL_R
:
4141 case BUILT_IN_LGAMMA_R
:
4142 case BUILT_IN_LGAMMAF_R
:
4143 case BUILT_IN_LGAMMAL_R
:
4145 case BUILT_IN_MODFF
:
4146 case BUILT_IN_MODFL
:
4147 case BUILT_IN_REMQUO
:
4148 case BUILT_IN_REMQUOF
:
4149 case BUILT_IN_REMQUOL
:
4152 case BUILT_IN_STRDUP
:
4153 case BUILT_IN_STRNDUP
:
4154 if (gimple_call_lhs (t
))
4156 handle_lhs_call (t
, gimple_call_lhs (t
), gimple_call_flags (t
),
4158 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4160 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4164 process_all_all_constraints (lhsc
, rhsc
);
4170 /* Trampolines are special - they set up passing the static
4172 case BUILT_IN_INIT_TRAMPOLINE
:
4174 tree tramp
= gimple_call_arg (t
, 0);
4175 tree nfunc
= gimple_call_arg (t
, 1);
4176 tree frame
= gimple_call_arg (t
, 2);
4178 struct constraint_expr lhs
, *rhsp
;
4181 varinfo_t nfi
= NULL
;
4182 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4183 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4186 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4187 get_constraint_for (frame
, &rhsc
);
4188 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4189 process_constraint (new_constraint (lhs
, *rhsp
));
4192 /* Make the frame point to the function for
4193 the trampoline adjustment call. */
4194 get_constraint_for (tramp
, &lhsc
);
4196 get_constraint_for (nfunc
, &rhsc
);
4197 process_all_all_constraints (lhsc
, rhsc
);
4204 /* Else fallthru to generic handling which will let
4205 the frame escape. */
4208 case BUILT_IN_ADJUST_TRAMPOLINE
:
4210 tree tramp
= gimple_call_arg (t
, 0);
4211 tree res
= gimple_call_lhs (t
);
4212 if (in_ipa_mode
&& res
)
4214 get_constraint_for (res
, &lhsc
);
4215 get_constraint_for (tramp
, &rhsc
);
4217 process_all_all_constraints (lhsc
, rhsc
);
4223 CASE_BUILT_IN_TM_STORE (1):
4224 CASE_BUILT_IN_TM_STORE (2):
4225 CASE_BUILT_IN_TM_STORE (4):
4226 CASE_BUILT_IN_TM_STORE (8):
4227 CASE_BUILT_IN_TM_STORE (FLOAT
):
4228 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4229 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4230 CASE_BUILT_IN_TM_STORE (M64
):
4231 CASE_BUILT_IN_TM_STORE (M128
):
4232 CASE_BUILT_IN_TM_STORE (M256
):
4234 tree addr
= gimple_call_arg (t
, 0);
4235 tree src
= gimple_call_arg (t
, 1);
4237 get_constraint_for (addr
, &lhsc
);
4239 get_constraint_for (src
, &rhsc
);
4240 process_all_all_constraints (lhsc
, rhsc
);
4245 CASE_BUILT_IN_TM_LOAD (1):
4246 CASE_BUILT_IN_TM_LOAD (2):
4247 CASE_BUILT_IN_TM_LOAD (4):
4248 CASE_BUILT_IN_TM_LOAD (8):
4249 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4250 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4251 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4252 CASE_BUILT_IN_TM_LOAD (M64
):
4253 CASE_BUILT_IN_TM_LOAD (M128
):
4254 CASE_BUILT_IN_TM_LOAD (M256
):
4256 tree dest
= gimple_call_lhs (t
);
4257 tree addr
= gimple_call_arg (t
, 0);
4259 get_constraint_for (dest
, &lhsc
);
4260 get_constraint_for (addr
, &rhsc
);
4262 process_all_all_constraints (lhsc
, rhsc
);
4267 /* Variadic argument handling needs to be handled in IPA
4269 case BUILT_IN_VA_START
:
4271 tree valist
= gimple_call_arg (t
, 0);
4272 struct constraint_expr rhs
, *lhsp
;
4274 get_constraint_for (valist
, &lhsc
);
4276 /* The va_list gets access to pointers in variadic
4277 arguments. Which we know in the case of IPA analysis
4278 and otherwise are just all nonlocal variables. */
4281 fi
= lookup_vi_for_tree (cfun
->decl
);
4282 rhs
= get_function_part_constraint (fi
, ~0);
4283 rhs
.type
= ADDRESSOF
;
4287 rhs
.var
= nonlocal_id
;
4288 rhs
.type
= ADDRESSOF
;
4291 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4292 process_constraint (new_constraint (*lhsp
, rhs
));
4294 /* va_list is clobbered. */
4295 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4298 /* va_end doesn't have any effect that matters. */
4299 case BUILT_IN_VA_END
:
4301 /* Alternate return. Simply give up for now. */
4302 case BUILT_IN_RETURN
:
4306 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4307 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4308 else if (in_ipa_mode
4311 struct constraint_expr lhs
, rhs
;
4312 lhs
= get_function_part_constraint (fi
, fi_result
);
4313 rhs
.var
= anything_id
;
4316 process_constraint (new_constraint (lhs
, rhs
));
4320 /* printf-style functions may have hooks to set pointers to
4321 point to somewhere into the generated string. Leave them
4322 for a later excercise... */
4324 /* Fallthru to general call handling. */;
4330 /* Create constraints for the call T. */
4333 find_func_aliases_for_call (gimple t
)
4335 tree fndecl
= gimple_call_fndecl (t
);
4336 vec
<ce_s
> lhsc
= vNULL
;
4337 vec
<ce_s
> rhsc
= vNULL
;
4340 if (fndecl
!= NULL_TREE
4341 && DECL_BUILT_IN (fndecl
)
4342 && find_func_aliases_for_builtin_call (t
))
4345 fi
= get_fi_for_callee (t
);
4347 || (fndecl
&& !fi
->is_fn_info
))
4349 vec
<ce_s
> rhsc
= vNULL
;
4350 int flags
= gimple_call_flags (t
);
4352 /* Const functions can return their arguments and addresses
4353 of global memory but not of escaped memory. */
4354 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4356 if (gimple_call_lhs (t
))
4357 handle_const_call (t
, &rhsc
);
4359 /* Pure functions can return addresses in and of memory
4360 reachable from their arguments, but they are not an escape
4361 point for reachable memory of their arguments. */
4362 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4363 handle_pure_call (t
, &rhsc
);
4365 handle_rhs_call (t
, &rhsc
);
4366 if (gimple_call_lhs (t
))
4367 handle_lhs_call (t
, gimple_call_lhs (t
), flags
, rhsc
, fndecl
);
4375 /* Assign all the passed arguments to the appropriate incoming
4376 parameters of the function. */
4377 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4379 struct constraint_expr lhs
;
4380 struct constraint_expr
*rhsp
;
4381 tree arg
= gimple_call_arg (t
, j
);
4383 get_constraint_for_rhs (arg
, &rhsc
);
4384 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ j
);
4385 while (rhsc
.length () != 0)
4387 rhsp
= &rhsc
.last ();
4388 process_constraint (new_constraint (lhs
, *rhsp
));
4393 /* If we are returning a value, assign it to the result. */
4394 lhsop
= gimple_call_lhs (t
);
4397 struct constraint_expr rhs
;
4398 struct constraint_expr
*lhsp
;
4400 get_constraint_for (lhsop
, &lhsc
);
4401 rhs
= get_function_part_constraint (fi
, fi_result
);
4403 && DECL_RESULT (fndecl
)
4404 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4406 vec
<ce_s
> tem
= vNULL
;
4407 tem
.safe_push (rhs
);
4412 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4413 process_constraint (new_constraint (*lhsp
, rhs
));
4416 /* If we pass the result decl by reference, honor that. */
4419 && DECL_RESULT (fndecl
)
4420 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4422 struct constraint_expr lhs
;
4423 struct constraint_expr
*rhsp
;
4425 get_constraint_for_address_of (lhsop
, &rhsc
);
4426 lhs
= get_function_part_constraint (fi
, fi_result
);
4427 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4428 process_constraint (new_constraint (lhs
, *rhsp
));
4432 /* If we use a static chain, pass it along. */
4433 if (gimple_call_chain (t
))
4435 struct constraint_expr lhs
;
4436 struct constraint_expr
*rhsp
;
4438 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4439 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4440 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4441 process_constraint (new_constraint (lhs
, *rhsp
));
4446 /* Walk statement T setting up aliasing constraints according to the
4447 references found in T. This function is the main part of the
4448 constraint builder. AI points to auxiliary alias information used
4449 when building alias sets and computing alias grouping heuristics. */
4452 find_func_aliases (gimple origt
)
4455 vec
<ce_s
> lhsc
= vNULL
;
4456 vec
<ce_s
> rhsc
= vNULL
;
4457 struct constraint_expr
*c
;
4460 /* Now build constraints expressions. */
4461 if (gimple_code (t
) == GIMPLE_PHI
)
4466 /* For a phi node, assign all the arguments to
4468 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4469 for (i
= 0; i
< gimple_phi_num_args (t
); i
++)
4471 tree strippedrhs
= PHI_ARG_DEF (t
, i
);
4473 STRIP_NOPS (strippedrhs
);
4474 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4476 FOR_EACH_VEC_ELT (lhsc
, j
, c
)
4478 struct constraint_expr
*c2
;
4479 while (rhsc
.length () > 0)
4482 process_constraint (new_constraint (*c
, *c2
));
4488 /* In IPA mode, we need to generate constraints to pass call
4489 arguments through their calls. There are two cases,
4490 either a GIMPLE_CALL returning a value, or just a plain
4491 GIMPLE_CALL when we are not.
4493 In non-ipa mode, we need to generate constraints for each
4494 pointer passed by address. */
4495 else if (is_gimple_call (t
))
4496 find_func_aliases_for_call (t
);
4498 /* Otherwise, just a regular assignment statement. Only care about
4499 operations with pointer result, others are dealt with as escape
4500 points if they have pointer operands. */
4501 else if (is_gimple_assign (t
))
4503 /* Otherwise, just a regular assignment statement. */
4504 tree lhsop
= gimple_assign_lhs (t
);
4505 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
4507 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
4508 /* Ignore clobbers, they don't actually store anything into
4511 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
4512 do_structure_copy (lhsop
, rhsop
);
4515 enum tree_code code
= gimple_assign_rhs_code (t
);
4517 get_constraint_for (lhsop
, &lhsc
);
4519 if (code
== POINTER_PLUS_EXPR
)
4520 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4521 gimple_assign_rhs2 (t
), &rhsc
);
4522 else if (code
== BIT_AND_EXPR
4523 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
4525 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4526 the pointer. Handle it by offsetting it by UNKNOWN. */
4527 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4530 else if ((CONVERT_EXPR_CODE_P (code
)
4531 && !(POINTER_TYPE_P (gimple_expr_type (t
))
4532 && !POINTER_TYPE_P (TREE_TYPE (rhsop
))))
4533 || gimple_assign_single_p (t
))
4534 get_constraint_for_rhs (rhsop
, &rhsc
);
4535 else if (code
== COND_EXPR
)
4537 /* The result is a merge of both COND_EXPR arms. */
4538 vec
<ce_s
> tmp
= vNULL
;
4539 struct constraint_expr
*rhsp
;
4541 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
4542 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
4543 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
4544 rhsc
.safe_push (*rhsp
);
4547 else if (truth_value_p (code
))
4548 /* Truth value results are not pointer (parts). Or at least
4549 very very unreasonable obfuscation of a part. */
4553 /* All other operations are merges. */
4554 vec
<ce_s
> tmp
= vNULL
;
4555 struct constraint_expr
*rhsp
;
4557 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
4558 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
4560 get_constraint_for_rhs (gimple_op (t
, i
), &tmp
);
4561 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
4562 rhsc
.safe_push (*rhsp
);
4567 process_all_all_constraints (lhsc
, rhsc
);
4569 /* If there is a store to a global variable the rhs escapes. */
4570 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
4572 && is_global_var (lhsop
)
4574 || DECL_EXTERNAL (lhsop
) || TREE_PUBLIC (lhsop
)))
4575 make_escape_constraint (rhsop
);
4577 /* Handle escapes through return. */
4578 else if (gimple_code (t
) == GIMPLE_RETURN
4579 && gimple_return_retval (t
) != NULL_TREE
)
4583 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4584 make_escape_constraint (gimple_return_retval (t
));
4585 else if (in_ipa_mode
4588 struct constraint_expr lhs
;
4589 struct constraint_expr
*rhsp
;
4592 lhs
= get_function_part_constraint (fi
, fi_result
);
4593 get_constraint_for_rhs (gimple_return_retval (t
), &rhsc
);
4594 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4595 process_constraint (new_constraint (lhs
, *rhsp
));
4598 /* Handle asms conservatively by adding escape constraints to everything. */
4599 else if (gimple_code (t
) == GIMPLE_ASM
)
4601 unsigned i
, noutputs
;
4602 const char **oconstraints
;
4603 const char *constraint
;
4604 bool allows_mem
, allows_reg
, is_inout
;
4606 noutputs
= gimple_asm_noutputs (t
);
4607 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4609 for (i
= 0; i
< noutputs
; ++i
)
4611 tree link
= gimple_asm_output_op (t
, i
);
4612 tree op
= TREE_VALUE (link
);
4614 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4615 oconstraints
[i
] = constraint
;
4616 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
4617 &allows_reg
, &is_inout
);
4619 /* A memory constraint makes the address of the operand escape. */
4620 if (!allows_reg
&& allows_mem
)
4621 make_escape_constraint (build_fold_addr_expr (op
));
4623 /* The asm may read global memory, so outputs may point to
4624 any global memory. */
4627 vec
<ce_s
> lhsc
= vNULL
;
4628 struct constraint_expr rhsc
, *lhsp
;
4630 get_constraint_for (op
, &lhsc
);
4631 rhsc
.var
= nonlocal_id
;
4634 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4635 process_constraint (new_constraint (*lhsp
, rhsc
));
4639 for (i
= 0; i
< gimple_asm_ninputs (t
); ++i
)
4641 tree link
= gimple_asm_input_op (t
, i
);
4642 tree op
= TREE_VALUE (link
);
4644 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4646 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
4647 &allows_mem
, &allows_reg
);
4649 /* A memory constraint makes the address of the operand escape. */
4650 if (!allows_reg
&& allows_mem
)
4651 make_escape_constraint (build_fold_addr_expr (op
));
4652 /* Strictly we'd only need the constraint to ESCAPED if
4653 the asm clobbers memory, otherwise using something
4654 along the lines of per-call clobbers/uses would be enough. */
4656 make_escape_constraint (op
);
4665 /* Create a constraint adding to the clobber set of FI the memory
4666 pointed to by PTR. */
4669 process_ipa_clobber (varinfo_t fi
, tree ptr
)
4671 vec
<ce_s
> ptrc
= vNULL
;
4672 struct constraint_expr
*c
, lhs
;
4674 get_constraint_for_rhs (ptr
, &ptrc
);
4675 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4676 FOR_EACH_VEC_ELT (ptrc
, i
, c
)
4677 process_constraint (new_constraint (lhs
, *c
));
4681 /* Walk statement T setting up clobber and use constraints according to the
4682 references found in T. This function is a main part of the
4683 IPA constraint builder. */
4686 find_func_clobbers (gimple origt
)
4689 vec
<ce_s
> lhsc
= vNULL
;
4690 vec
<ce_s
> rhsc
= vNULL
;
4693 /* Add constraints for clobbered/used in IPA mode.
4694 We are not interested in what automatic variables are clobbered
4695 or used as we only use the information in the caller to which
4696 they do not escape. */
4697 gcc_assert (in_ipa_mode
);
4699 /* If the stmt refers to memory in any way it better had a VUSE. */
4700 if (gimple_vuse (t
) == NULL_TREE
)
4703 /* We'd better have function information for the current function. */
4704 fi
= lookup_vi_for_tree (cfun
->decl
);
4705 gcc_assert (fi
!= NULL
);
4707 /* Account for stores in assignments and calls. */
4708 if (gimple_vdef (t
) != NULL_TREE
4709 && gimple_has_lhs (t
))
4711 tree lhs
= gimple_get_lhs (t
);
4713 while (handled_component_p (tem
))
4714 tem
= TREE_OPERAND (tem
, 0);
4716 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4717 || INDIRECT_REF_P (tem
)
4718 || (TREE_CODE (tem
) == MEM_REF
4719 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4721 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4723 struct constraint_expr lhsc
, *rhsp
;
4725 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
4726 get_constraint_for_address_of (lhs
, &rhsc
);
4727 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4728 process_constraint (new_constraint (lhsc
, *rhsp
));
4733 /* Account for uses in assigments and returns. */
4734 if (gimple_assign_single_p (t
)
4735 || (gimple_code (t
) == GIMPLE_RETURN
4736 && gimple_return_retval (t
) != NULL_TREE
))
4738 tree rhs
= (gimple_assign_single_p (t
)
4739 ? gimple_assign_rhs1 (t
) : gimple_return_retval (t
));
4741 while (handled_component_p (tem
))
4742 tem
= TREE_OPERAND (tem
, 0);
4744 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4745 || INDIRECT_REF_P (tem
)
4746 || (TREE_CODE (tem
) == MEM_REF
4747 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4749 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4751 struct constraint_expr lhs
, *rhsp
;
4753 lhs
= get_function_part_constraint (fi
, fi_uses
);
4754 get_constraint_for_address_of (rhs
, &rhsc
);
4755 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4756 process_constraint (new_constraint (lhs
, *rhsp
));
4761 if (is_gimple_call (t
))
4763 varinfo_t cfi
= NULL
;
4764 tree decl
= gimple_call_fndecl (t
);
4765 struct constraint_expr lhs
, rhs
;
4768 /* For builtins we do not have separate function info. For those
4769 we do not generate escapes for we have to generate clobbers/uses. */
4770 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4771 switch (DECL_FUNCTION_CODE (decl
))
4773 /* The following functions use and clobber memory pointed to
4774 by their arguments. */
4775 case BUILT_IN_STRCPY
:
4776 case BUILT_IN_STRNCPY
:
4777 case BUILT_IN_BCOPY
:
4778 case BUILT_IN_MEMCPY
:
4779 case BUILT_IN_MEMMOVE
:
4780 case BUILT_IN_MEMPCPY
:
4781 case BUILT_IN_STPCPY
:
4782 case BUILT_IN_STPNCPY
:
4783 case BUILT_IN_STRCAT
:
4784 case BUILT_IN_STRNCAT
:
4785 case BUILT_IN_STRCPY_CHK
:
4786 case BUILT_IN_STRNCPY_CHK
:
4787 case BUILT_IN_MEMCPY_CHK
:
4788 case BUILT_IN_MEMMOVE_CHK
:
4789 case BUILT_IN_MEMPCPY_CHK
:
4790 case BUILT_IN_STPCPY_CHK
:
4791 case BUILT_IN_STPNCPY_CHK
:
4792 case BUILT_IN_STRCAT_CHK
:
4793 case BUILT_IN_STRNCAT_CHK
:
4795 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4796 == BUILT_IN_BCOPY
? 1 : 0));
4797 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4798 == BUILT_IN_BCOPY
? 0 : 1));
4800 struct constraint_expr
*rhsp
, *lhsp
;
4801 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4802 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4803 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4804 process_constraint (new_constraint (lhs
, *lhsp
));
4806 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4807 lhs
= get_function_part_constraint (fi
, fi_uses
);
4808 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4809 process_constraint (new_constraint (lhs
, *rhsp
));
4813 /* The following function clobbers memory pointed to by
4815 case BUILT_IN_MEMSET
:
4816 case BUILT_IN_MEMSET_CHK
:
4818 tree dest
= gimple_call_arg (t
, 0);
4821 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4822 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4823 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4824 process_constraint (new_constraint (lhs
, *lhsp
));
4828 /* The following functions clobber their second and third
4830 case BUILT_IN_SINCOS
:
4831 case BUILT_IN_SINCOSF
:
4832 case BUILT_IN_SINCOSL
:
4834 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4835 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4838 /* The following functions clobber their second argument. */
4839 case BUILT_IN_FREXP
:
4840 case BUILT_IN_FREXPF
:
4841 case BUILT_IN_FREXPL
:
4842 case BUILT_IN_LGAMMA_R
:
4843 case BUILT_IN_LGAMMAF_R
:
4844 case BUILT_IN_LGAMMAL_R
:
4845 case BUILT_IN_GAMMA_R
:
4846 case BUILT_IN_GAMMAF_R
:
4847 case BUILT_IN_GAMMAL_R
:
4849 case BUILT_IN_MODFF
:
4850 case BUILT_IN_MODFL
:
4852 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4855 /* The following functions clobber their third argument. */
4856 case BUILT_IN_REMQUO
:
4857 case BUILT_IN_REMQUOF
:
4858 case BUILT_IN_REMQUOL
:
4860 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4863 /* The following functions neither read nor clobber memory. */
4864 case BUILT_IN_ASSUME_ALIGNED
:
4867 /* Trampolines are of no interest to us. */
4868 case BUILT_IN_INIT_TRAMPOLINE
:
4869 case BUILT_IN_ADJUST_TRAMPOLINE
:
4871 case BUILT_IN_VA_START
:
4872 case BUILT_IN_VA_END
:
4874 /* printf-style functions may have hooks to set pointers to
4875 point to somewhere into the generated string. Leave them
4876 for a later excercise... */
4878 /* Fallthru to general call handling. */;
4881 /* Parameters passed by value are used. */
4882 lhs
= get_function_part_constraint (fi
, fi_uses
);
4883 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
4885 struct constraint_expr
*rhsp
;
4886 tree arg
= gimple_call_arg (t
, i
);
4888 if (TREE_CODE (arg
) == SSA_NAME
4889 || is_gimple_min_invariant (arg
))
4892 get_constraint_for_address_of (arg
, &rhsc
);
4893 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4894 process_constraint (new_constraint (lhs
, *rhsp
));
4898 /* Build constraints for propagating clobbers/uses along the
4900 cfi
= get_fi_for_callee (t
);
4901 if (cfi
->id
== anything_id
)
4903 if (gimple_vdef (t
))
4904 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
4906 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
4911 /* For callees without function info (that's external functions),
4912 ESCAPED is clobbered and used. */
4913 if (gimple_call_fndecl (t
)
4914 && !cfi
->is_fn_info
)
4918 if (gimple_vdef (t
))
4919 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
4921 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
4923 /* Also honor the call statement use/clobber info. */
4924 if ((vi
= lookup_call_clobber_vi (t
)) != NULL
)
4925 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
4927 if ((vi
= lookup_call_use_vi (t
)) != NULL
)
4928 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
4933 /* Otherwise the caller clobbers and uses what the callee does.
4934 ??? This should use a new complex constraint that filters
4935 local variables of the callee. */
4936 if (gimple_vdef (t
))
4938 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4939 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
4940 process_constraint (new_constraint (lhs
, rhs
));
4942 lhs
= get_function_part_constraint (fi
, fi_uses
);
4943 rhs
= get_function_part_constraint (cfi
, fi_uses
);
4944 process_constraint (new_constraint (lhs
, rhs
));
4946 else if (gimple_code (t
) == GIMPLE_ASM
)
4948 /* ??? Ick. We can do better. */
4949 if (gimple_vdef (t
))
4950 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
4952 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
4960 /* Find the first varinfo in the same variable as START that overlaps with
4961 OFFSET. Return NULL if we can't find one. */
4964 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
4966 /* If the offset is outside of the variable, bail out. */
4967 if (offset
>= start
->fullsize
)
4970 /* If we cannot reach offset from start, lookup the first field
4971 and start from there. */
4972 if (start
->offset
> offset
)
4973 start
= lookup_vi_for_tree (start
->decl
);
4977 /* We may not find a variable in the field list with the actual
4978 offset when when we have glommed a structure to a variable.
4979 In that case, however, offset should still be within the size
4981 if (offset
>= start
->offset
4982 && (offset
- start
->offset
) < start
->size
)
4991 /* Find the first varinfo in the same variable as START that overlaps with
4992 OFFSET. If there is no such varinfo the varinfo directly preceding
4993 OFFSET is returned. */
4996 first_or_preceding_vi_for_offset (varinfo_t start
,
4997 unsigned HOST_WIDE_INT offset
)
4999 /* If we cannot reach offset from start, lookup the first field
5000 and start from there. */
5001 if (start
->offset
> offset
)
5002 start
= lookup_vi_for_tree (start
->decl
);
5004 /* We may not find a variable in the field list with the actual
5005 offset when when we have glommed a structure to a variable.
5006 In that case, however, offset should still be within the size
5008 If we got beyond the offset we look for return the field
5009 directly preceding offset which may be the last field. */
5011 && offset
>= start
->offset
5012 && !((offset
- start
->offset
) < start
->size
))
5013 start
= start
->next
;
5019 /* This structure is used during pushing fields onto the fieldstack
5020 to track the offset of the field, since bitpos_of_field gives it
5021 relative to its immediate containing type, and we want it relative
5022 to the ultimate containing object. */
5026 /* Offset from the base of the base containing object to this field. */
5027 HOST_WIDE_INT offset
;
5029 /* Size, in bits, of the field. */
5030 unsigned HOST_WIDE_INT size
;
5032 unsigned has_unknown_size
: 1;
5034 unsigned must_have_pointers
: 1;
5036 unsigned may_have_pointers
: 1;
5038 unsigned only_restrict_pointers
: 1;
5040 typedef struct fieldoff fieldoff_s
;
5043 /* qsort comparison function for two fieldoff's PA and PB */
5046 fieldoff_compare (const void *pa
, const void *pb
)
5048 const fieldoff_s
*foa
= (const fieldoff_s
*)pa
;
5049 const fieldoff_s
*fob
= (const fieldoff_s
*)pb
;
5050 unsigned HOST_WIDE_INT foasize
, fobsize
;
5052 if (foa
->offset
< fob
->offset
)
5054 else if (foa
->offset
> fob
->offset
)
5057 foasize
= foa
->size
;
5058 fobsize
= fob
->size
;
5059 if (foasize
< fobsize
)
5061 else if (foasize
> fobsize
)
5066 /* Sort a fieldstack according to the field offset and sizes. */
5068 sort_fieldstack (vec
<fieldoff_s
> fieldstack
)
5070 fieldstack
.qsort (fieldoff_compare
);
5073 /* Return true if T is a type that can have subvars. */
5076 type_can_have_subvars (const_tree t
)
5078 /* Aggregates without overlapping fields can have subvars. */
5079 return TREE_CODE (t
) == RECORD_TYPE
;
5082 /* Return true if V is a tree that we can have subvars for.
5083 Normally, this is any aggregate type. Also complex
5084 types which are not gimple registers can have subvars. */
5087 var_can_have_subvars (const_tree v
)
5089 /* Volatile variables should never have subvars. */
5090 if (TREE_THIS_VOLATILE (v
))
5093 /* Non decls or memory tags can never have subvars. */
5097 return type_can_have_subvars (TREE_TYPE (v
));
5100 /* Return true if T is a type that does contain pointers. */
5103 type_must_have_pointers (tree type
)
5105 if (POINTER_TYPE_P (type
))
5108 if (TREE_CODE (type
) == ARRAY_TYPE
)
5109 return type_must_have_pointers (TREE_TYPE (type
));
5111 /* A function or method can have pointers as arguments, so track
5112 those separately. */
5113 if (TREE_CODE (type
) == FUNCTION_TYPE
5114 || TREE_CODE (type
) == METHOD_TYPE
)
5121 field_must_have_pointers (tree t
)
5123 return type_must_have_pointers (TREE_TYPE (t
));
5126 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5127 the fields of TYPE onto fieldstack, recording their offsets along
5130 OFFSET is used to keep track of the offset in this entire
5131 structure, rather than just the immediately containing structure.
5132 Returns false if the caller is supposed to handle the field we
5136 push_fields_onto_fieldstack (tree type
, vec
<fieldoff_s
> *fieldstack
,
5137 HOST_WIDE_INT offset
)
5140 bool empty_p
= true;
5142 if (TREE_CODE (type
) != RECORD_TYPE
)
5145 /* If the vector of fields is growing too big, bail out early.
5146 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5148 if (fieldstack
->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5151 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5152 if (TREE_CODE (field
) == FIELD_DECL
)
5155 HOST_WIDE_INT foff
= bitpos_of_field (field
);
5157 if (!var_can_have_subvars (field
)
5158 || TREE_CODE (TREE_TYPE (field
)) == QUAL_UNION_TYPE
5159 || TREE_CODE (TREE_TYPE (field
)) == UNION_TYPE
)
5161 else if (!push_fields_onto_fieldstack
5162 (TREE_TYPE (field
), fieldstack
, offset
+ foff
)
5163 && (DECL_SIZE (field
)
5164 && !integer_zerop (DECL_SIZE (field
))))
5165 /* Empty structures may have actual size, like in C++. So
5166 see if we didn't push any subfields and the size is
5167 nonzero, push the field onto the stack. */
5172 fieldoff_s
*pair
= NULL
;
5173 bool has_unknown_size
= false;
5174 bool must_have_pointers_p
;
5176 if (!fieldstack
->is_empty ())
5177 pair
= &fieldstack
->last ();
5179 /* If there isn't anything at offset zero, create sth. */
5181 && offset
+ foff
!= 0)
5183 fieldoff_s e
= {0, offset
+ foff
, false, false, false, false};
5184 pair
= fieldstack
->safe_push (e
);
5187 if (!DECL_SIZE (field
)
5188 || !host_integerp (DECL_SIZE (field
), 1))
5189 has_unknown_size
= true;
5191 /* If adjacent fields do not contain pointers merge them. */
5192 must_have_pointers_p
= field_must_have_pointers (field
);
5194 && !has_unknown_size
5195 && !must_have_pointers_p
5196 && !pair
->must_have_pointers
5197 && !pair
->has_unknown_size
5198 && pair
->offset
+ (HOST_WIDE_INT
)pair
->size
== offset
+ foff
)
5200 pair
->size
+= TREE_INT_CST_LOW (DECL_SIZE (field
));
5205 e
.offset
= offset
+ foff
;
5206 e
.has_unknown_size
= has_unknown_size
;
5207 if (!has_unknown_size
)
5208 e
.size
= TREE_INT_CST_LOW (DECL_SIZE (field
));
5211 e
.must_have_pointers
= must_have_pointers_p
;
5212 e
.may_have_pointers
= true;
5213 e
.only_restrict_pointers
5214 = (!has_unknown_size
5215 && POINTER_TYPE_P (TREE_TYPE (field
))
5216 && TYPE_RESTRICT (TREE_TYPE (field
)));
5217 fieldstack
->safe_push (e
);
5227 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5228 if it is a varargs function. */
5231 count_num_arguments (tree decl
, bool *is_varargs
)
5233 unsigned int num
= 0;
5236 /* Capture named arguments for K&R functions. They do not
5237 have a prototype and thus no TYPE_ARG_TYPES. */
5238 for (t
= DECL_ARGUMENTS (decl
); t
; t
= DECL_CHAIN (t
))
5241 /* Check if the function has variadic arguments. */
5242 for (t
= TYPE_ARG_TYPES (TREE_TYPE (decl
)); t
; t
= TREE_CHAIN (t
))
5243 if (TREE_VALUE (t
) == void_type_node
)
5251 /* Creation function node for DECL, using NAME, and return the index
5252 of the variable we've created for the function. */
5255 create_function_info_for (tree decl
, const char *name
)
5257 struct function
*fn
= DECL_STRUCT_FUNCTION (decl
);
5258 varinfo_t vi
, prev_vi
;
5261 bool is_varargs
= false;
5262 unsigned int num_args
= count_num_arguments (decl
, &is_varargs
);
5264 /* Create the variable info. */
5266 vi
= new_var_info (decl
, name
);
5269 vi
->fullsize
= fi_parm_base
+ num_args
;
5271 vi
->may_have_pointers
= false;
5274 insert_vi_for_tree (vi
->decl
, vi
);
5278 /* Create a variable for things the function clobbers and one for
5279 things the function uses. */
5281 varinfo_t clobbervi
, usevi
;
5282 const char *newname
;
5285 asprintf (&tempname
, "%s.clobber", name
);
5286 newname
= ggc_strdup (tempname
);
5289 clobbervi
= new_var_info (NULL
, newname
);
5290 clobbervi
->offset
= fi_clobbers
;
5291 clobbervi
->size
= 1;
5292 clobbervi
->fullsize
= vi
->fullsize
;
5293 clobbervi
->is_full_var
= true;
5294 clobbervi
->is_global_var
= false;
5295 gcc_assert (prev_vi
->offset
< clobbervi
->offset
);
5296 prev_vi
->next
= clobbervi
;
5297 prev_vi
= clobbervi
;
5299 asprintf (&tempname
, "%s.use", name
);
5300 newname
= ggc_strdup (tempname
);
5303 usevi
= new_var_info (NULL
, newname
);
5304 usevi
->offset
= fi_uses
;
5306 usevi
->fullsize
= vi
->fullsize
;
5307 usevi
->is_full_var
= true;
5308 usevi
->is_global_var
= false;
5309 gcc_assert (prev_vi
->offset
< usevi
->offset
);
5310 prev_vi
->next
= usevi
;
5314 /* And one for the static chain. */
5315 if (fn
->static_chain_decl
!= NULL_TREE
)
5318 const char *newname
;
5321 asprintf (&tempname
, "%s.chain", name
);
5322 newname
= ggc_strdup (tempname
);
5325 chainvi
= new_var_info (fn
->static_chain_decl
, newname
);
5326 chainvi
->offset
= fi_static_chain
;
5328 chainvi
->fullsize
= vi
->fullsize
;
5329 chainvi
->is_full_var
= true;
5330 chainvi
->is_global_var
= false;
5331 gcc_assert (prev_vi
->offset
< chainvi
->offset
);
5332 prev_vi
->next
= chainvi
;
5334 insert_vi_for_tree (fn
->static_chain_decl
, chainvi
);
5337 /* Create a variable for the return var. */
5338 if (DECL_RESULT (decl
) != NULL
5339 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
5342 const char *newname
;
5344 tree resultdecl
= decl
;
5346 if (DECL_RESULT (decl
))
5347 resultdecl
= DECL_RESULT (decl
);
5349 asprintf (&tempname
, "%s.result", name
);
5350 newname
= ggc_strdup (tempname
);
5353 resultvi
= new_var_info (resultdecl
, newname
);
5354 resultvi
->offset
= fi_result
;
5356 resultvi
->fullsize
= vi
->fullsize
;
5357 resultvi
->is_full_var
= true;
5358 if (DECL_RESULT (decl
))
5359 resultvi
->may_have_pointers
= true;
5360 gcc_assert (prev_vi
->offset
< resultvi
->offset
);
5361 prev_vi
->next
= resultvi
;
5363 if (DECL_RESULT (decl
))
5364 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
5367 /* Set up variables for each argument. */
5368 arg
= DECL_ARGUMENTS (decl
);
5369 for (i
= 0; i
< num_args
; i
++)
5372 const char *newname
;
5374 tree argdecl
= decl
;
5379 asprintf (&tempname
, "%s.arg%d", name
, i
);
5380 newname
= ggc_strdup (tempname
);
5383 argvi
= new_var_info (argdecl
, newname
);
5384 argvi
->offset
= fi_parm_base
+ i
;
5386 argvi
->is_full_var
= true;
5387 argvi
->fullsize
= vi
->fullsize
;
5389 argvi
->may_have_pointers
= true;
5390 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5391 prev_vi
->next
= argvi
;
5395 insert_vi_for_tree (arg
, argvi
);
5396 arg
= DECL_CHAIN (arg
);
5400 /* Add one representative for all further args. */
5404 const char *newname
;
5408 asprintf (&tempname
, "%s.varargs", name
);
5409 newname
= ggc_strdup (tempname
);
5412 /* We need sth that can be pointed to for va_start. */
5413 decl
= build_fake_var_decl (ptr_type_node
);
5415 argvi
= new_var_info (decl
, newname
);
5416 argvi
->offset
= fi_parm_base
+ num_args
;
5418 argvi
->is_full_var
= true;
5419 argvi
->is_heap_var
= true;
5420 argvi
->fullsize
= vi
->fullsize
;
5421 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5422 prev_vi
->next
= argvi
;
5430 /* Return true if FIELDSTACK contains fields that overlap.
5431 FIELDSTACK is assumed to be sorted by offset. */
5434 check_for_overlaps (vec
<fieldoff_s
> fieldstack
)
5436 fieldoff_s
*fo
= NULL
;
5438 HOST_WIDE_INT lastoffset
= -1;
5440 FOR_EACH_VEC_ELT (fieldstack
, i
, fo
)
5442 if (fo
->offset
== lastoffset
)
5444 lastoffset
= fo
->offset
;
5449 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5450 This will also create any varinfo structures necessary for fields
5454 create_variable_info_for_1 (tree decl
, const char *name
)
5456 varinfo_t vi
, newvi
;
5457 tree decl_type
= TREE_TYPE (decl
);
5458 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decl_type
);
5459 vec
<fieldoff_s
> fieldstack
= vNULL
;
5464 || !host_integerp (declsize
, 1))
5466 vi
= new_var_info (decl
, name
);
5470 vi
->is_unknown_size_var
= true;
5471 vi
->is_full_var
= true;
5472 vi
->may_have_pointers
= true;
5476 /* Collect field information. */
5477 if (use_field_sensitive
5478 && var_can_have_subvars (decl
)
5479 /* ??? Force us to not use subfields for global initializers
5480 in IPA mode. Else we'd have to parse arbitrary initializers. */
5482 && is_global_var (decl
)
5483 && DECL_INITIAL (decl
)))
5485 fieldoff_s
*fo
= NULL
;
5486 bool notokay
= false;
5489 push_fields_onto_fieldstack (decl_type
, &fieldstack
, 0);
5491 for (i
= 0; !notokay
&& fieldstack
.iterate (i
, &fo
); i
++)
5492 if (fo
->has_unknown_size
5499 /* We can't sort them if we have a field with a variable sized type,
5500 which will make notokay = true. In that case, we are going to return
5501 without creating varinfos for the fields anyway, so sorting them is a
5505 sort_fieldstack (fieldstack
);
5506 /* Due to some C++ FE issues, like PR 22488, we might end up
5507 what appear to be overlapping fields even though they,
5508 in reality, do not overlap. Until the C++ FE is fixed,
5509 we will simply disable field-sensitivity for these cases. */
5510 notokay
= check_for_overlaps (fieldstack
);
5514 fieldstack
.release ();
5517 /* If we didn't end up collecting sub-variables create a full
5518 variable for the decl. */
5519 if (fieldstack
.length () <= 1
5520 || fieldstack
.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5522 vi
= new_var_info (decl
, name
);
5524 vi
->may_have_pointers
= true;
5525 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
5526 vi
->size
= vi
->fullsize
;
5527 vi
->is_full_var
= true;
5528 fieldstack
.release ();
5532 vi
= new_var_info (decl
, name
);
5533 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
5534 for (i
= 0, newvi
= vi
;
5535 fieldstack
.iterate (i
, &fo
);
5536 ++i
, newvi
= newvi
->next
)
5538 const char *newname
= "NULL";
5543 asprintf (&tempname
, "%s." HOST_WIDE_INT_PRINT_DEC
5544 "+" HOST_WIDE_INT_PRINT_DEC
, name
, fo
->offset
, fo
->size
);
5545 newname
= ggc_strdup (tempname
);
5548 newvi
->name
= newname
;
5549 newvi
->offset
= fo
->offset
;
5550 newvi
->size
= fo
->size
;
5551 newvi
->fullsize
= vi
->fullsize
;
5552 newvi
->may_have_pointers
= fo
->may_have_pointers
;
5553 newvi
->only_restrict_pointers
= fo
->only_restrict_pointers
;
5554 if (i
+ 1 < fieldstack
.length ())
5555 newvi
->next
= new_var_info (decl
, name
);
5558 fieldstack
.release ();
5564 create_variable_info_for (tree decl
, const char *name
)
5566 varinfo_t vi
= create_variable_info_for_1 (decl
, name
);
5567 unsigned int id
= vi
->id
;
5569 insert_vi_for_tree (decl
, vi
);
5571 if (TREE_CODE (decl
) != VAR_DECL
)
5574 /* Create initial constraints for globals. */
5575 for (; vi
; vi
= vi
->next
)
5577 if (!vi
->may_have_pointers
5578 || !vi
->is_global_var
)
5581 /* Mark global restrict qualified pointers. */
5582 if ((POINTER_TYPE_P (TREE_TYPE (decl
))
5583 && TYPE_RESTRICT (TREE_TYPE (decl
)))
5584 || vi
->only_restrict_pointers
)
5586 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5590 /* In non-IPA mode the initializer from nonlocal is all we need. */
5592 || DECL_HARD_REGISTER (decl
))
5593 make_copy_constraint (vi
, nonlocal_id
);
5595 /* In IPA mode parse the initializer and generate proper constraints
5599 struct varpool_node
*vnode
= varpool_get_node (decl
);
5601 /* For escaped variables initialize them from nonlocal. */
5602 if (!varpool_all_refs_explicit_p (vnode
))
5603 make_copy_constraint (vi
, nonlocal_id
);
5605 /* If this is a global variable with an initializer and we are in
5606 IPA mode generate constraints for it. */
5607 if (DECL_INITIAL (decl
)
5610 vec
<ce_s
> rhsc
= vNULL
;
5611 struct constraint_expr lhs
, *rhsp
;
5613 get_constraint_for_rhs (DECL_INITIAL (decl
), &rhsc
);
5617 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5618 process_constraint (new_constraint (lhs
, *rhsp
));
5619 /* If this is a variable that escapes from the unit
5620 the initializer escapes as well. */
5621 if (!varpool_all_refs_explicit_p (vnode
))
5623 lhs
.var
= escaped_id
;
5626 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5627 process_constraint (new_constraint (lhs
, *rhsp
));
5637 /* Print out the points-to solution for VAR to FILE. */
5640 dump_solution_for_var (FILE *file
, unsigned int var
)
5642 varinfo_t vi
= get_varinfo (var
);
5646 /* Dump the solution for unified vars anyway, this avoids difficulties
5647 in scanning dumps in the testsuite. */
5648 fprintf (file
, "%s = { ", vi
->name
);
5649 vi
= get_varinfo (find (var
));
5650 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
5651 fprintf (file
, "%s ", get_varinfo (i
)->name
);
5652 fprintf (file
, "}");
5654 /* But note when the variable was unified. */
5656 fprintf (file
, " same as %s", vi
->name
);
5658 fprintf (file
, "\n");
5661 /* Print the points-to solution for VAR to stdout. */
5664 debug_solution_for_var (unsigned int var
)
5666 dump_solution_for_var (stdout
, var
);
5669 /* Create varinfo structures for all of the variables in the
5670 function for intraprocedural mode. */
5673 intra_create_variable_infos (void)
5677 /* For each incoming pointer argument arg, create the constraint ARG
5678 = NONLOCAL or a dummy variable if it is a restrict qualified
5679 passed-by-reference argument. */
5680 for (t
= DECL_ARGUMENTS (current_function_decl
); t
; t
= DECL_CHAIN (t
))
5682 varinfo_t p
= get_vi_for_tree (t
);
5684 /* For restrict qualified pointers to objects passed by
5685 reference build a real representative for the pointed-to object.
5686 Treat restrict qualified references the same. */
5687 if (TYPE_RESTRICT (TREE_TYPE (t
))
5688 && ((DECL_BY_REFERENCE (t
) && POINTER_TYPE_P (TREE_TYPE (t
)))
5689 || TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
)
5690 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t
))))
5692 struct constraint_expr lhsc
, rhsc
;
5694 tree heapvar
= build_fake_var_decl (TREE_TYPE (TREE_TYPE (t
)));
5695 DECL_EXTERNAL (heapvar
) = 1;
5696 vi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS");
5697 insert_vi_for_tree (heapvar
, vi
);
5702 rhsc
.type
= ADDRESSOF
;
5704 process_constraint (new_constraint (lhsc
, rhsc
));
5705 for (; vi
; vi
= vi
->next
)
5706 if (vi
->may_have_pointers
)
5708 if (vi
->only_restrict_pointers
)
5709 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5711 make_copy_constraint (vi
, nonlocal_id
);
5716 if (POINTER_TYPE_P (TREE_TYPE (t
))
5717 && TYPE_RESTRICT (TREE_TYPE (t
)))
5718 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5721 for (; p
; p
= p
->next
)
5723 if (p
->only_restrict_pointers
)
5724 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5725 else if (p
->may_have_pointers
)
5726 make_constraint_from (p
, nonlocal_id
);
5731 /* Add a constraint for a result decl that is passed by reference. */
5732 if (DECL_RESULT (cfun
->decl
)
5733 && DECL_BY_REFERENCE (DECL_RESULT (cfun
->decl
)))
5735 varinfo_t p
, result_vi
= get_vi_for_tree (DECL_RESULT (cfun
->decl
));
5737 for (p
= result_vi
; p
; p
= p
->next
)
5738 make_constraint_from (p
, nonlocal_id
);
5741 /* Add a constraint for the incoming static chain parameter. */
5742 if (cfun
->static_chain_decl
!= NULL_TREE
)
5744 varinfo_t p
, chain_vi
= get_vi_for_tree (cfun
->static_chain_decl
);
5746 for (p
= chain_vi
; p
; p
= p
->next
)
5747 make_constraint_from (p
, nonlocal_id
);
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to bitmap.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS, used as the hashtable key.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared points-to bitmaps, keyed by bitmap hash.  */
static htab_t shared_bitmap_table;
5763 /* Hash function for a shared_bitmap_info_t */
5766 shared_bitmap_hash (const void *p
)
5768 const_shared_bitmap_info_t
const bi
= (const_shared_bitmap_info_t
) p
;
5769 return bi
->hashcode
;
5772 /* Equality function for two shared_bitmap_info_t's. */
5775 shared_bitmap_eq (const void *p1
, const void *p2
)
5777 const_shared_bitmap_info_t
const sbi1
= (const_shared_bitmap_info_t
) p1
;
5778 const_shared_bitmap_info_t
const sbi2
= (const_shared_bitmap_info_t
) p2
;
5779 return bitmap_equal_p (sbi1
->pt_vars
, sbi2
->pt_vars
);
5782 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5783 existing instance if there is one, NULL otherwise. */
5786 shared_bitmap_lookup (bitmap pt_vars
)
5789 struct shared_bitmap_info sbi
;
5791 sbi
.pt_vars
= pt_vars
;
5792 sbi
.hashcode
= bitmap_hash (pt_vars
);
5794 slot
= htab_find_slot_with_hash (shared_bitmap_table
, &sbi
,
5795 sbi
.hashcode
, NO_INSERT
);
5799 return ((shared_bitmap_info_t
) *slot
)->pt_vars
;
5803 /* Add a bitmap to the shared bitmap hashtable. */
5806 shared_bitmap_add (bitmap pt_vars
)
5809 shared_bitmap_info_t sbi
= XNEW (struct shared_bitmap_info
);
5811 sbi
->pt_vars
= pt_vars
;
5812 sbi
->hashcode
= bitmap_hash (pt_vars
);
5814 slot
= htab_find_slot_with_hash (shared_bitmap_table
, sbi
,
5815 sbi
->hashcode
, INSERT
);
5816 gcc_assert (!*slot
);
5817 *slot
= (void *) sbi
;
5821 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5824 set_uids_in_ptset (bitmap into
, bitmap from
, struct pt_solution
*pt
)
5829 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
5831 varinfo_t vi
= get_varinfo (i
);
5833 /* The only artificial variables that are allowed in a may-alias
5834 set are heap variables. */
5835 if (vi
->is_artificial_var
&& !vi
->is_heap_var
)
5838 if (TREE_CODE (vi
->decl
) == VAR_DECL
5839 || TREE_CODE (vi
->decl
) == PARM_DECL
5840 || TREE_CODE (vi
->decl
) == RESULT_DECL
)
5842 /* If we are in IPA mode we will not recompute points-to
5843 sets after inlining so make sure they stay valid. */
5845 && !DECL_PT_UID_SET_P (vi
->decl
))
5846 SET_DECL_PT_UID (vi
->decl
, DECL_UID (vi
->decl
));
5848 /* Add the decl to the points-to set. Note that the points-to
5849 set contains global variables. */
5850 bitmap_set_bit (into
, DECL_PT_UID (vi
->decl
));
5851 if (vi
->is_global_var
)
5852 pt
->vars_contains_global
= true;
5858 /* Compute the points-to solution *PT for the variable VI. */
5861 find_what_var_points_to (varinfo_t orig_vi
, struct pt_solution
*pt
)
5865 bitmap finished_solution
;
5869 memset (pt
, 0, sizeof (struct pt_solution
));
5871 /* This variable may have been collapsed, let's get the real
5873 vi
= get_varinfo (find (orig_vi
->id
));
5875 /* Translate artificial variables into SSA_NAME_PTR_INFO
5877 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
5879 varinfo_t vi
= get_varinfo (i
);
5881 if (vi
->is_artificial_var
)
5883 if (vi
->id
== nothing_id
)
5885 else if (vi
->id
== escaped_id
)
5888 pt
->ipa_escaped
= 1;
5892 else if (vi
->id
== nonlocal_id
)
5894 else if (vi
->is_heap_var
)
5895 /* We represent heapvars in the points-to set properly. */
5897 else if (vi
->id
== readonly_id
)
5900 else if (vi
->id
== anything_id
5901 || vi
->id
== integer_id
)
5906 /* Instead of doing extra work, simply do not create
5907 elaborate points-to information for pt_anything pointers. */
5911 /* Share the final set of variables when possible. */
5912 finished_solution
= BITMAP_GGC_ALLOC ();
5913 stats
.points_to_sets_created
++;
5915 set_uids_in_ptset (finished_solution
, vi
->solution
, pt
);
5916 result
= shared_bitmap_lookup (finished_solution
);
5919 shared_bitmap_add (finished_solution
);
5920 pt
->vars
= finished_solution
;
5925 bitmap_clear (finished_solution
);
5929 /* Given a pointer variable P, fill in its points-to set. */
5932 find_what_p_points_to (tree p
)
5934 struct ptr_info_def
*pi
;
5938 /* For parameters, get at the points-to set for the actual parm
5940 if (TREE_CODE (p
) == SSA_NAME
5941 && SSA_NAME_IS_DEFAULT_DEF (p
)
5942 && (TREE_CODE (SSA_NAME_VAR (p
)) == PARM_DECL
5943 || TREE_CODE (SSA_NAME_VAR (p
)) == RESULT_DECL
))
5944 lookup_p
= SSA_NAME_VAR (p
);
5946 vi
= lookup_vi_for_tree (lookup_p
);
5950 pi
= get_ptr_info (p
);
5951 find_what_var_points_to (vi
, &pi
->pt
);
/* Query statistics for points-to solutions.  */
static struct {
  /* Number of pt_solution_includes calls that could not disambiguate.  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* Number of pt_solution_includes calls that proved no alias.  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Number of pt_solutions_intersect calls that could not disambiguate.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* Number of pt_solutions_intersect calls that proved no alias.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
5965 dump_pta_stats (FILE *s
)
5967 fprintf (s
, "\nPTA query stats:\n");
5968 fprintf (s
, " pt_solution_includes: "
5969 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
5970 HOST_WIDE_INT_PRINT_DEC
" queries\n",
5971 pta_stats
.pt_solution_includes_no_alias
,
5972 pta_stats
.pt_solution_includes_no_alias
5973 + pta_stats
.pt_solution_includes_may_alias
);
5974 fprintf (s
, " pt_solutions_intersect: "
5975 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
5976 HOST_WIDE_INT_PRINT_DEC
" queries\n",
5977 pta_stats
.pt_solutions_intersect_no_alias
,
5978 pta_stats
.pt_solutions_intersect_no_alias
5979 + pta_stats
.pt_solutions_intersect_may_alias
);
5983 /* Reset the points-to solution *PT to a conservative default
5984 (point to anything). */
5987 pt_solution_reset (struct pt_solution
*pt
)
5989 memset (pt
, 0, sizeof (struct pt_solution
));
5990 pt
->anything
= true;
5993 /* Set the points-to solution *PT to point only to the variables
5994 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
5995 global variables and VARS_CONTAINS_RESTRICT specifies whether
5996 it contains restrict tag variables. */
5999 pt_solution_set (struct pt_solution
*pt
, bitmap vars
, bool vars_contains_global
)
6001 memset (pt
, 0, sizeof (struct pt_solution
));
6003 pt
->vars_contains_global
= vars_contains_global
;
6006 /* Set the points-to solution *PT to point only to the variable VAR. */
6009 pt_solution_set_var (struct pt_solution
*pt
, tree var
)
6011 memset (pt
, 0, sizeof (struct pt_solution
));
6012 pt
->vars
= BITMAP_GGC_ALLOC ();
6013 bitmap_set_bit (pt
->vars
, DECL_PT_UID (var
));
6014 pt
->vars_contains_global
= is_global_var (var
);
6017 /* Computes the union of the points-to solutions *DEST and *SRC and
6018 stores the result in *DEST. This changes the points-to bitmap
6019 of *DEST and thus may not be used if that might be shared.
6020 The points-to bitmap of *SRC and *DEST will not be shared after
6021 this function if they were not before. */
6024 pt_solution_ior_into (struct pt_solution
*dest
, struct pt_solution
*src
)
6026 dest
->anything
|= src
->anything
;
6029 pt_solution_reset (dest
);
6033 dest
->nonlocal
|= src
->nonlocal
;
6034 dest
->escaped
|= src
->escaped
;
6035 dest
->ipa_escaped
|= src
->ipa_escaped
;
6036 dest
->null
|= src
->null
;
6037 dest
->vars_contains_global
|= src
->vars_contains_global
;
6042 dest
->vars
= BITMAP_GGC_ALLOC ();
6043 bitmap_ior_into (dest
->vars
, src
->vars
);
6046 /* Return true if the points-to solution *PT is empty. */
6049 pt_solution_empty_p (struct pt_solution
*pt
)
6056 && !bitmap_empty_p (pt
->vars
))
6059 /* If the solution includes ESCAPED, check if that is empty. */
6061 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6064 /* If the solution includes ESCAPED, check if that is empty. */
6066 && !pt_solution_empty_p (&ipa_escaped_pt
))
6072 /* Return true if the points-to solution *PT only point to a single var, and
6073 return the var uid in *UID. */
6076 pt_solution_singleton_p (struct pt_solution
*pt
, unsigned *uid
)
6078 if (pt
->anything
|| pt
->nonlocal
|| pt
->escaped
|| pt
->ipa_escaped
6079 || pt
->null
|| pt
->vars
== NULL
6080 || !bitmap_single_bit_set_p (pt
->vars
))
6083 *uid
= bitmap_first_set_bit (pt
->vars
);
6087 /* Return true if the points-to solution *PT includes global memory. */
6090 pt_solution_includes_global (struct pt_solution
*pt
)
6094 || pt
->vars_contains_global
)
6098 return pt_solution_includes_global (&cfun
->gimple_df
->escaped
);
6100 if (pt
->ipa_escaped
)
6101 return pt_solution_includes_global (&ipa_escaped_pt
);
6103 /* ??? This predicate is not correct for the IPA-PTA solution
6104 as we do not properly distinguish between unit escape points
6105 and global variables. */
6106 if (cfun
->gimple_df
->ipa_pta
)
6112 /* Return true if the points-to solution *PT includes the variable
6113 declaration DECL. */
6116 pt_solution_includes_1 (struct pt_solution
*pt
, const_tree decl
)
6122 && is_global_var (decl
))
6126 && bitmap_bit_p (pt
->vars
, DECL_PT_UID (decl
)))
6129 /* If the solution includes ESCAPED, check it. */
6131 && pt_solution_includes_1 (&cfun
->gimple_df
->escaped
, decl
))
6134 /* If the solution includes ESCAPED, check it. */
6136 && pt_solution_includes_1 (&ipa_escaped_pt
, decl
))
6143 pt_solution_includes (struct pt_solution
*pt
, const_tree decl
)
6145 bool res
= pt_solution_includes_1 (pt
, decl
);
6147 ++pta_stats
.pt_solution_includes_may_alias
;
6149 ++pta_stats
.pt_solution_includes_no_alias
;
6153 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6157 pt_solutions_intersect_1 (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6159 if (pt1
->anything
|| pt2
->anything
)
6162 /* If either points to unknown global memory and the other points to
6163 any global memory they alias. */
6166 || pt2
->vars_contains_global
))
6168 && pt1
->vars_contains_global
))
6171 /* Check the escaped solution if required. */
6172 if ((pt1
->escaped
|| pt2
->escaped
)
6173 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6175 /* If both point to escaped memory and that solution
6176 is not empty they alias. */
6177 if (pt1
->escaped
&& pt2
->escaped
)
6180 /* If either points to escaped memory see if the escaped solution
6181 intersects with the other. */
6183 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt2
))
6185 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt1
)))
6189 /* Check the escaped solution if required.
6190 ??? Do we need to check the local against the IPA escaped sets? */
6191 if ((pt1
->ipa_escaped
|| pt2
->ipa_escaped
)
6192 && !pt_solution_empty_p (&ipa_escaped_pt
))
6194 /* If both point to escaped memory and that solution
6195 is not empty they alias. */
6196 if (pt1
->ipa_escaped
&& pt2
->ipa_escaped
)
6199 /* If either points to escaped memory see if the escaped solution
6200 intersects with the other. */
6201 if ((pt1
->ipa_escaped
6202 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt2
))
6203 || (pt2
->ipa_escaped
6204 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt1
)))
6208 /* Now both pointers alias if their points-to solution intersects. */
6211 && bitmap_intersect_p (pt1
->vars
, pt2
->vars
));
6215 pt_solutions_intersect (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6217 bool res
= pt_solutions_intersect_1 (pt1
, pt2
);
6219 ++pta_stats
.pt_solutions_intersect_may_alias
;
6221 ++pta_stats
.pt_solutions_intersect_no_alias
;
6226 /* Dump points-to information to OUTFILE. */
6229 dump_sa_points_to_info (FILE *outfile
)
6233 fprintf (outfile
, "\nPoints-to sets\n\n");
6235 if (dump_flags
& TDF_STATS
)
6237 fprintf (outfile
, "Stats:\n");
6238 fprintf (outfile
, "Total vars: %d\n", stats
.total_vars
);
6239 fprintf (outfile
, "Non-pointer vars: %d\n",
6240 stats
.nonpointer_vars
);
6241 fprintf (outfile
, "Statically unified vars: %d\n",
6242 stats
.unified_vars_static
);
6243 fprintf (outfile
, "Dynamically unified vars: %d\n",
6244 stats
.unified_vars_dynamic
);
6245 fprintf (outfile
, "Iterations: %d\n", stats
.iterations
);
6246 fprintf (outfile
, "Number of edges: %d\n", stats
.num_edges
);
6247 fprintf (outfile
, "Number of implicit edges: %d\n",
6248 stats
.num_implicit_edges
);
6251 for (i
= 0; i
< varmap
.length (); i
++)
6253 varinfo_t vi
= get_varinfo (i
);
6254 if (!vi
->may_have_pointers
)
6256 dump_solution_for_var (outfile
, i
);
6261 /* Debug points-to information to stderr. */
6264 debug_sa_points_to_info (void)
6266 dump_sa_points_to_info (stderr
);
6270 /* Initialize the always-existing constraint variables for NULL
6271 ANYTHING, READONLY, and INTEGER */
6274 init_base_vars (void)
6276 struct constraint_expr lhs
, rhs
;
6277 varinfo_t var_anything
;
6278 varinfo_t var_nothing
;
6279 varinfo_t var_readonly
;
6280 varinfo_t var_escaped
;
6281 varinfo_t var_nonlocal
;
6282 varinfo_t var_storedanything
;
6283 varinfo_t var_integer
;
6285 /* Create the NULL variable, used to represent that a variable points
6287 var_nothing
= new_var_info (NULL_TREE
, "NULL");
6288 gcc_assert (var_nothing
->id
== nothing_id
);
6289 var_nothing
->is_artificial_var
= 1;
6290 var_nothing
->offset
= 0;
6291 var_nothing
->size
= ~0;
6292 var_nothing
->fullsize
= ~0;
6293 var_nothing
->is_special_var
= 1;
6294 var_nothing
->may_have_pointers
= 0;
6295 var_nothing
->is_global_var
= 0;
6297 /* Create the ANYTHING variable, used to represent that a variable
6298 points to some unknown piece of memory. */
6299 var_anything
= new_var_info (NULL_TREE
, "ANYTHING");
6300 gcc_assert (var_anything
->id
== anything_id
);
6301 var_anything
->is_artificial_var
= 1;
6302 var_anything
->size
= ~0;
6303 var_anything
->offset
= 0;
6304 var_anything
->next
= NULL
;
6305 var_anything
->fullsize
= ~0;
6306 var_anything
->is_special_var
= 1;
6308 /* Anything points to anything. This makes deref constraints just
6309 work in the presence of linked list and other p = *p type loops,
6310 by saying that *ANYTHING = ANYTHING. */
6312 lhs
.var
= anything_id
;
6314 rhs
.type
= ADDRESSOF
;
6315 rhs
.var
= anything_id
;
6318 /* This specifically does not use process_constraint because
6319 process_constraint ignores all anything = anything constraints, since all
6320 but this one are redundant. */
6321 constraints
.safe_push (new_constraint (lhs
, rhs
));
6323 /* Create the READONLY variable, used to represent that a variable
6324 points to readonly memory. */
6325 var_readonly
= new_var_info (NULL_TREE
, "READONLY");
6326 gcc_assert (var_readonly
->id
== readonly_id
);
6327 var_readonly
->is_artificial_var
= 1;
6328 var_readonly
->offset
= 0;
6329 var_readonly
->size
= ~0;
6330 var_readonly
->fullsize
= ~0;
6331 var_readonly
->next
= NULL
;
6332 var_readonly
->is_special_var
= 1;
6334 /* readonly memory points to anything, in order to make deref
6335 easier. In reality, it points to anything the particular
6336 readonly variable can point to, but we don't track this
6339 lhs
.var
= readonly_id
;
6341 rhs
.type
= ADDRESSOF
;
6342 rhs
.var
= readonly_id
; /* FIXME */
6344 process_constraint (new_constraint (lhs
, rhs
));
6346 /* Create the ESCAPED variable, used to represent the set of escaped
6348 var_escaped
= new_var_info (NULL_TREE
, "ESCAPED");
6349 gcc_assert (var_escaped
->id
== escaped_id
);
6350 var_escaped
->is_artificial_var
= 1;
6351 var_escaped
->offset
= 0;
6352 var_escaped
->size
= ~0;
6353 var_escaped
->fullsize
= ~0;
6354 var_escaped
->is_special_var
= 0;
6356 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6358 var_nonlocal
= new_var_info (NULL_TREE
, "NONLOCAL");
6359 gcc_assert (var_nonlocal
->id
== nonlocal_id
);
6360 var_nonlocal
->is_artificial_var
= 1;
6361 var_nonlocal
->offset
= 0;
6362 var_nonlocal
->size
= ~0;
6363 var_nonlocal
->fullsize
= ~0;
6364 var_nonlocal
->is_special_var
= 1;
6366 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6368 lhs
.var
= escaped_id
;
6371 rhs
.var
= escaped_id
;
6373 process_constraint (new_constraint (lhs
, rhs
));
6375 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6376 whole variable escapes. */
6378 lhs
.var
= escaped_id
;
6381 rhs
.var
= escaped_id
;
6382 rhs
.offset
= UNKNOWN_OFFSET
;
6383 process_constraint (new_constraint (lhs
, rhs
));
6385 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6386 everything pointed to by escaped points to what global memory can
6389 lhs
.var
= escaped_id
;
6392 rhs
.var
= nonlocal_id
;
6394 process_constraint (new_constraint (lhs
, rhs
));
6396 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6397 global memory may point to global memory and escaped memory. */
6399 lhs
.var
= nonlocal_id
;
6401 rhs
.type
= ADDRESSOF
;
6402 rhs
.var
= nonlocal_id
;
6404 process_constraint (new_constraint (lhs
, rhs
));
6405 rhs
.type
= ADDRESSOF
;
6406 rhs
.var
= escaped_id
;
6408 process_constraint (new_constraint (lhs
, rhs
));
6410 /* Create the STOREDANYTHING variable, used to represent the set of
6411 variables stored to *ANYTHING. */
6412 var_storedanything
= new_var_info (NULL_TREE
, "STOREDANYTHING");
6413 gcc_assert (var_storedanything
->id
== storedanything_id
);
6414 var_storedanything
->is_artificial_var
= 1;
6415 var_storedanything
->offset
= 0;
6416 var_storedanything
->size
= ~0;
6417 var_storedanything
->fullsize
= ~0;
6418 var_storedanything
->is_special_var
= 0;
6420 /* Create the INTEGER variable, used to represent that a variable points
6421 to what an INTEGER "points to". */
6422 var_integer
= new_var_info (NULL_TREE
, "INTEGER");
6423 gcc_assert (var_integer
->id
== integer_id
);
6424 var_integer
->is_artificial_var
= 1;
6425 var_integer
->size
= ~0;
6426 var_integer
->fullsize
= ~0;
6427 var_integer
->offset
= 0;
6428 var_integer
->next
= NULL
;
6429 var_integer
->is_special_var
= 1;
6431 /* INTEGER = ANYTHING, because we don't know where a dereference of
6432 a random integer will point to. */
6434 lhs
.var
= integer_id
;
6436 rhs
.type
= ADDRESSOF
;
6437 rhs
.var
= anything_id
;
6439 process_constraint (new_constraint (lhs
, rhs
));
6442 /* Initialize things necessary to perform PTA */
6445 init_alias_vars (void)
6447 use_field_sensitive
= (MAX_FIELDS_FOR_FIELD_SENSITIVE
> 1);
6449 bitmap_obstack_initialize (&pta_obstack
);
6450 bitmap_obstack_initialize (&oldpta_obstack
);
6451 bitmap_obstack_initialize (&predbitmap_obstack
);
6453 constraint_pool
= create_alloc_pool ("Constraint pool",
6454 sizeof (struct constraint
), 30);
6455 variable_info_pool
= create_alloc_pool ("Variable info pool",
6456 sizeof (struct variable_info
), 30);
6457 constraints
.create (8);
6459 vi_for_tree
= pointer_map_create ();
6460 call_stmt_vars
= pointer_map_create ();
6462 memset (&stats
, 0, sizeof (stats
));
6463 shared_bitmap_table
= htab_create (511, shared_bitmap_hash
,
6464 shared_bitmap_eq
, free
);
6467 gcc_obstack_init (&fake_var_decl_obstack
);
6470 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6471 predecessor edges. */
6474 remove_preds_and_fake_succs (constraint_graph_t graph
)
6478 /* Clear the implicit ref and address nodes from the successor
6480 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
6482 if (graph
->succs
[i
])
6483 bitmap_clear_range (graph
->succs
[i
], FIRST_REF_NODE
,
6484 FIRST_REF_NODE
* 2);
6487 /* Free the successor list for the non-ref nodes. */
6488 for (i
= FIRST_REF_NODE
; i
< graph
->size
; i
++)
6490 if (graph
->succs
[i
])
6491 BITMAP_FREE (graph
->succs
[i
]);
6494 /* Now reallocate the size of the successor list as, and blow away
6495 the predecessor bitmaps. */
6496 graph
->size
= varmap
.length ();
6497 graph
->succs
= XRESIZEVEC (bitmap
, graph
->succs
, graph
->size
);
6499 free (graph
->implicit_preds
);
6500 graph
->implicit_preds
= NULL
;
6501 free (graph
->preds
);
6502 graph
->preds
= NULL
;
6503 bitmap_obstack_release (&predbitmap_obstack
);
6506 /* Solve the constraint set. */
6509 solve_constraints (void)
6511 struct scc_info
*si
;
6515 "\nCollapsing static cycles and doing variable "
6518 init_graph (varmap
.length () * 2);
6521 fprintf (dump_file
, "Building predecessor graph\n");
6522 build_pred_graph ();
6525 fprintf (dump_file
, "Detecting pointer and location "
6527 si
= perform_var_substitution (graph
);
6530 fprintf (dump_file
, "Rewriting constraints and unifying "
6532 rewrite_constraints (graph
, si
);
6534 build_succ_graph ();
6536 free_var_substitution_info (si
);
6538 /* Attach complex constraints to graph nodes. */
6539 move_complex_constraints (graph
);
6542 fprintf (dump_file
, "Uniting pointer but not location equivalent "
6544 unite_pointer_equivalences (graph
);
6547 fprintf (dump_file
, "Finding indirect cycles\n");
6548 find_indirect_cycles (graph
);
6550 /* Implicit nodes and predecessors are no longer necessary at this
6552 remove_preds_and_fake_succs (graph
);
6554 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6556 fprintf (dump_file
, "\n\n// The constraint graph before solve-graph "
6557 "in dot format:\n");
6558 dump_constraint_graph (dump_file
);
6559 fprintf (dump_file
, "\n\n");
6563 fprintf (dump_file
, "Solving graph\n");
6565 solve_graph (graph
);
6567 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6569 fprintf (dump_file
, "\n\n// The constraint graph after solve-graph "
6570 "in dot format:\n");
6571 dump_constraint_graph (dump_file
);
6572 fprintf (dump_file
, "\n\n");
6576 dump_sa_points_to_info (dump_file
);
6579 /* Create points-to sets for the current function. See the comments
6580 at the start of the file for an algorithmic overview. */
6583 compute_points_to_sets (void)
6589 timevar_push (TV_TREE_PTA
);
6593 intra_create_variable_infos ();
6595 /* Now walk all statements and build the constraint set. */
6598 gimple_stmt_iterator gsi
;
6600 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6602 gimple phi
= gsi_stmt (gsi
);
6604 if (! virtual_operand_p (gimple_phi_result (phi
)))
6605 find_func_aliases (phi
);
6608 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6610 gimple stmt
= gsi_stmt (gsi
);
6612 find_func_aliases (stmt
);
6618 fprintf (dump_file
, "Points-to analysis\n\nConstraints:\n\n");
6619 dump_constraints (dump_file
, 0);
6622 /* From the constraints compute the points-to sets. */
6623 solve_constraints ();
6625 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6626 find_what_var_points_to (get_varinfo (escaped_id
),
6627 &cfun
->gimple_df
->escaped
);
6629 /* Make sure the ESCAPED solution (which is used as placeholder in
6630 other solutions) does not reference itself. This simplifies
6631 points-to solution queries. */
6632 cfun
->gimple_df
->escaped
.escaped
= 0;
6634 /* Mark escaped HEAP variables as global. */
6635 FOR_EACH_VEC_ELT (varmap
, i
, vi
)
6637 && !vi
->is_global_var
)
6638 DECL_EXTERNAL (vi
->decl
) = vi
->is_global_var
6639 = pt_solution_includes (&cfun
->gimple_df
->escaped
, vi
->decl
);
6641 /* Compute the points-to sets for pointer SSA_NAMEs. */
6642 for (i
= 0; i
< num_ssa_names
; ++i
)
6644 tree ptr
= ssa_name (i
);
6646 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
6647 find_what_p_points_to (ptr
);
6650 /* Compute the call-used/clobbered sets. */
6653 gimple_stmt_iterator gsi
;
6655 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6657 gimple stmt
= gsi_stmt (gsi
);
6658 struct pt_solution
*pt
;
6659 if (!is_gimple_call (stmt
))
6662 pt
= gimple_call_use_set (stmt
);
6663 if (gimple_call_flags (stmt
) & ECF_CONST
)
6664 memset (pt
, 0, sizeof (struct pt_solution
));
6665 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
6667 find_what_var_points_to (vi
, pt
);
6668 /* Escaped (and thus nonlocal) variables are always
6669 implicitly used by calls. */
6670 /* ??? ESCAPED can be empty even though NONLOCAL
6677 /* If there is nothing special about this call then
6678 we have made everything that is used also escape. */
6679 *pt
= cfun
->gimple_df
->escaped
;
6683 pt
= gimple_call_clobber_set (stmt
);
6684 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
6685 memset (pt
, 0, sizeof (struct pt_solution
));
6686 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
6688 find_what_var_points_to (vi
, pt
);
6689 /* Escaped (and thus nonlocal) variables are always
6690 implicitly clobbered by calls. */
6691 /* ??? ESCAPED can be empty even though NONLOCAL
6698 /* If there is nothing special about this call then
6699 we have made everything that is used also escape. */
6700 *pt
= cfun
->gimple_df
->escaped
;
6706 timevar_pop (TV_TREE_PTA
);
6710 /* Delete created points-to sets. */
6713 delete_points_to_sets (void)
6717 htab_delete (shared_bitmap_table
);
6718 if (dump_file
&& (dump_flags
& TDF_STATS
))
6719 fprintf (dump_file
, "Points to sets created:%d\n",
6720 stats
.points_to_sets_created
);
6722 pointer_map_destroy (vi_for_tree
);
6723 pointer_map_destroy (call_stmt_vars
);
6724 bitmap_obstack_release (&pta_obstack
);
6725 constraints
.release ();
6727 for (i
= 0; i
< graph
->size
; i
++)
6728 graph
->complex[i
].release ();
6729 free (graph
->complex);
6732 free (graph
->succs
);
6734 free (graph
->pe_rep
);
6735 free (graph
->indirect_cycles
);
6739 free_alloc_pool (variable_info_pool
);
6740 free_alloc_pool (constraint_pool
);
6742 obstack_free (&fake_var_decl_obstack
, NULL
);
6746 /* Compute points-to information for every SSA_NAME pointer in the
6747 current function and compute the transitive closure of escaped
6748 variables to re-initialize the call-clobber states of local variables. */
6751 compute_may_aliases (void)
6753 if (cfun
->gimple_df
->ipa_pta
)
6757 fprintf (dump_file
, "\nNot re-computing points-to information "
6758 "because IPA points-to information is available.\n\n");
6760 /* But still dump what we have remaining it. */
6761 dump_alias_info (dump_file
);
6767 /* For each pointer P_i, determine the sets of variables that P_i may
6768 point-to. Compute the reachability set of escaped and call-used
6770 compute_points_to_sets ();
6772 /* Debugging dumps. */
6774 dump_alias_info (dump_file
);
6776 /* Deallocate memory used by aliasing data structures and the internal
6777 points-to solution. */
6778 delete_points_to_sets ();
6780 gcc_assert (!need_ssa_update_p (cfun
));
6786 gate_tree_pta (void)
6788 return flag_tree_pta
;
6791 /* A dummy pass to cause points-to information to be computed via
6792 TODO_rebuild_alias. */
6794 struct gimple_opt_pass pass_build_alias
=
6799 OPTGROUP_NONE
, /* optinfo_flags */
6800 gate_tree_pta
, /* gate */
6804 0, /* static_pass_number */
6805 TV_NONE
, /* tv_id */
6806 PROP_cfg
| PROP_ssa
, /* properties_required */
6807 0, /* properties_provided */
6808 0, /* properties_destroyed */
6809 0, /* todo_flags_start */
6810 TODO_rebuild_alias
/* todo_flags_finish */
6814 /* A dummy pass to cause points-to information to be computed via
6815 TODO_rebuild_alias. */
6817 struct gimple_opt_pass pass_build_ealias
=
6821 "ealias", /* name */
6822 OPTGROUP_NONE
, /* optinfo_flags */
6823 gate_tree_pta
, /* gate */
6827 0, /* static_pass_number */
6828 TV_NONE
, /* tv_id */
6829 PROP_cfg
| PROP_ssa
, /* properties_required */
6830 0, /* properties_provided */
6831 0, /* properties_destroyed */
6832 0, /* todo_flags_start */
6833 TODO_rebuild_alias
/* todo_flags_finish */
6838 /* Return true if we should execute IPA PTA. */
6844 /* Don't bother doing anything if the program has errors. */
6848 /* IPA PTA solutions for ESCAPED. */
6849 struct pt_solution ipa_escaped_pt
6850 = { true, false, false, false, false, false, NULL
};
6852 /* Associate node with varinfo DATA. Worker for
6853 cgraph_for_node_and_aliases. */
6855 associate_varinfo_to_alias (struct cgraph_node
*node
, void *data
)
6857 if (node
->alias
|| node
->thunk
.thunk_p
)
6858 insert_vi_for_tree (node
->symbol
.decl
, (varinfo_t
)data
);
6862 /* Execute the driver for IPA PTA. */
6864 ipa_pta_execute (void)
6866 struct cgraph_node
*node
;
6867 struct varpool_node
*var
;
6874 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6876 dump_symtab (dump_file
);
6877 fprintf (dump_file
, "\n");
6880 /* Build the constraints. */
6881 FOR_EACH_DEFINED_FUNCTION (node
)
6884 /* Nodes without a body are not interesting. Especially do not
6885 visit clones at this point for now - we get duplicate decls
6886 there for inline clones at least. */
6887 if (!cgraph_function_with_gimple_body_p (node
))
6890 gcc_assert (!node
->clone_of
);
6892 vi
= create_function_info_for (node
->symbol
.decl
,
6893 alias_get_name (node
->symbol
.decl
));
6894 cgraph_for_node_and_aliases (node
, associate_varinfo_to_alias
, vi
, true);
6897 /* Create constraints for global variables and their initializers. */
6898 FOR_EACH_VARIABLE (var
)
6903 get_vi_for_tree (var
->symbol
.decl
);
6909 "Generating constraints for global initializers\n\n");
6910 dump_constraints (dump_file
, 0);
6911 fprintf (dump_file
, "\n");
6913 from
= constraints
.length ();
6915 FOR_EACH_DEFINED_FUNCTION (node
)
6917 struct function
*func
;
6920 /* Nodes without a body are not interesting. */
6921 if (!cgraph_function_with_gimple_body_p (node
))
6927 "Generating constraints for %s", cgraph_node_name (node
));
6928 if (DECL_ASSEMBLER_NAME_SET_P (node
->symbol
.decl
))
6929 fprintf (dump_file
, " (%s)",
6931 (DECL_ASSEMBLER_NAME (node
->symbol
.decl
)));
6932 fprintf (dump_file
, "\n");
6935 func
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
6938 /* For externally visible or attribute used annotated functions use
6939 local constraints for their arguments.
6940 For local functions we see all callers and thus do not need initial
6941 constraints for parameters. */
6942 if (node
->symbol
.used_from_other_partition
6943 || node
->symbol
.externally_visible
6944 || node
->symbol
.force_output
)
6946 intra_create_variable_infos ();
6948 /* We also need to make function return values escape. Nothing
6949 escapes by returning from main though. */
6950 if (!MAIN_NAME_P (DECL_NAME (node
->symbol
.decl
)))
6953 fi
= lookup_vi_for_tree (node
->symbol
.decl
);
6954 rvi
= first_vi_for_offset (fi
, fi_result
);
6955 if (rvi
&& rvi
->offset
== fi_result
)
6957 struct constraint_expr includes
;
6958 struct constraint_expr var
;
6959 includes
.var
= escaped_id
;
6960 includes
.offset
= 0;
6961 includes
.type
= SCALAR
;
6965 process_constraint (new_constraint (includes
, var
));
6970 /* Build constriants for the function body. */
6971 FOR_EACH_BB_FN (bb
, func
)
6973 gimple_stmt_iterator gsi
;
6975 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6978 gimple phi
= gsi_stmt (gsi
);
6980 if (! virtual_operand_p (gimple_phi_result (phi
)))
6981 find_func_aliases (phi
);
6984 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6986 gimple stmt
= gsi_stmt (gsi
);
6988 find_func_aliases (stmt
);
6989 find_func_clobbers (stmt
);
6997 fprintf (dump_file
, "\n");
6998 dump_constraints (dump_file
, from
);
6999 fprintf (dump_file
, "\n");
7001 from
= constraints
.length ();
7004 /* From the constraints compute the points-to sets. */
7005 solve_constraints ();
7007 /* Compute the global points-to sets for ESCAPED.
7008 ??? Note that the computed escape set is not correct
7009 for the whole unit as we fail to consider graph edges to
7010 externally visible functions. */
7011 find_what_var_points_to (get_varinfo (escaped_id
), &ipa_escaped_pt
);
7013 /* Make sure the ESCAPED solution (which is used as placeholder in
7014 other solutions) does not reference itself. This simplifies
7015 points-to solution queries. */
7016 ipa_escaped_pt
.ipa_escaped
= 0;
7018 /* Assign the points-to sets to the SSA names in the unit. */
7019 FOR_EACH_DEFINED_FUNCTION (node
)
7022 struct function
*fn
;
7026 struct pt_solution uses
, clobbers
;
7027 struct cgraph_edge
*e
;
7029 /* Nodes without a body are not interesting. */
7030 if (!cgraph_function_with_gimple_body_p (node
))
7033 fn
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
7035 /* Compute the points-to sets for pointer SSA_NAMEs. */
7036 FOR_EACH_VEC_ELT (*fn
->gimple_df
->ssa_names
, i
, ptr
)
7039 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
7040 find_what_p_points_to (ptr
);
7043 /* Compute the call-use and call-clobber sets for all direct calls. */
7044 fi
= lookup_vi_for_tree (node
->symbol
.decl
);
7045 gcc_assert (fi
->is_fn_info
);
7046 find_what_var_points_to (first_vi_for_offset (fi
, fi_clobbers
),
7048 find_what_var_points_to (first_vi_for_offset (fi
, fi_uses
), &uses
);
7049 for (e
= node
->callers
; e
; e
= e
->next_caller
)
7054 *gimple_call_clobber_set (e
->call_stmt
) = clobbers
;
7055 *gimple_call_use_set (e
->call_stmt
) = uses
;
7058 /* Compute the call-use and call-clobber sets for indirect calls
7059 and calls to external functions. */
7060 FOR_EACH_BB_FN (bb
, fn
)
7062 gimple_stmt_iterator gsi
;
7064 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7066 gimple stmt
= gsi_stmt (gsi
);
7067 struct pt_solution
*pt
;
7071 if (!is_gimple_call (stmt
))
7074 /* Handle direct calls to external functions. */
7075 decl
= gimple_call_fndecl (stmt
);
7077 && (!(fi
= lookup_vi_for_tree (decl
))
7078 || !fi
->is_fn_info
))
7080 pt
= gimple_call_use_set (stmt
);
7081 if (gimple_call_flags (stmt
) & ECF_CONST
)
7082 memset (pt
, 0, sizeof (struct pt_solution
));
7083 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
7085 find_what_var_points_to (vi
, pt
);
7086 /* Escaped (and thus nonlocal) variables are always
7087 implicitly used by calls. */
7088 /* ??? ESCAPED can be empty even though NONLOCAL
7091 pt
->ipa_escaped
= 1;
7095 /* If there is nothing special about this call then
7096 we have made everything that is used also escape. */
7097 *pt
= ipa_escaped_pt
;
7101 pt
= gimple_call_clobber_set (stmt
);
7102 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
7103 memset (pt
, 0, sizeof (struct pt_solution
));
7104 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
7106 find_what_var_points_to (vi
, pt
);
7107 /* Escaped (and thus nonlocal) variables are always
7108 implicitly clobbered by calls. */
7109 /* ??? ESCAPED can be empty even though NONLOCAL
7112 pt
->ipa_escaped
= 1;
7116 /* If there is nothing special about this call then
7117 we have made everything that is used also escape. */
7118 *pt
= ipa_escaped_pt
;
7123 /* Handle indirect calls. */
7125 && (fi
= get_fi_for_callee (stmt
)))
7127 /* We need to accumulate all clobbers/uses of all possible
7129 fi
= get_varinfo (find (fi
->id
));
7130 /* If we cannot constrain the set of functions we'll end up
7131 calling we end up using/clobbering everything. */
7132 if (bitmap_bit_p (fi
->solution
, anything_id
)
7133 || bitmap_bit_p (fi
->solution
, nonlocal_id
)
7134 || bitmap_bit_p (fi
->solution
, escaped_id
))
7136 pt_solution_reset (gimple_call_clobber_set (stmt
));
7137 pt_solution_reset (gimple_call_use_set (stmt
));
7143 struct pt_solution
*uses
, *clobbers
;
7145 uses
= gimple_call_use_set (stmt
);
7146 clobbers
= gimple_call_clobber_set (stmt
);
7147 memset (uses
, 0, sizeof (struct pt_solution
));
7148 memset (clobbers
, 0, sizeof (struct pt_solution
));
7149 EXECUTE_IF_SET_IN_BITMAP (fi
->solution
, 0, i
, bi
)
7151 struct pt_solution sol
;
7153 vi
= get_varinfo (i
);
7154 if (!vi
->is_fn_info
)
7156 /* ??? We could be more precise here? */
7158 uses
->ipa_escaped
= 1;
7159 clobbers
->nonlocal
= 1;
7160 clobbers
->ipa_escaped
= 1;
7164 if (!uses
->anything
)
7166 find_what_var_points_to
7167 (first_vi_for_offset (vi
, fi_uses
), &sol
);
7168 pt_solution_ior_into (uses
, &sol
);
7170 if (!clobbers
->anything
)
7172 find_what_var_points_to
7173 (first_vi_for_offset (vi
, fi_clobbers
), &sol
);
7174 pt_solution_ior_into (clobbers
, &sol
);
7182 fn
->gimple_df
->ipa_pta
= true;
7185 delete_points_to_sets ();
7192 struct simple_ipa_opt_pass pass_ipa_pta
=
7197 OPTGROUP_NONE
, /* optinfo_flags */
7198 gate_ipa_pta
, /* gate */
7199 ipa_pta_execute
, /* execute */
7202 0, /* static_pass_number */
7203 TV_IPA_PTA
, /* tv_id */
7204 0, /* properties_required */
7205 0, /* properties_provided */
7206 0, /* properties_destroyed */
7207 0, /* todo_flags_start */
7208 TODO_update_ssa
/* todo_flags_finish */