1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "basic-block.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
44 #include "pointer-set.h"
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
63 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
83 Each variable for a structure field has
85 1. "size", that tells the size in bits of that field.
86 2. "fullsize", that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
105 In order to solve the system of set constraints, the following is
108 1. Each constraint variable x has a solution set associated with it,
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
136 8. The process of walking the graph is iterated until no solution
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
156 /* IPA-PTA optimizations possible.
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode. This is
166 also needed to fix the pt_solution_includes_global predicate
167 (and thus ptr_deref_may_alias_global_p).
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
190 solution can go alongside the non-IPA escaped solution and be
191 used to query which vars escape the unit through a function.
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
196 And probably more. */
/* Whether the analysis tracks individual structure fields as separate
   constraint variables (field-sensitive analysis).  */
198 static bool use_field_sensitive
= true;
/* Nonzero when running whole-program (IPA) points-to analysis.  */
199 static int in_ipa_mode
= 0;
201 /* Used for predecessor bitmaps. */
202 static bitmap_obstack predbitmap_obstack
;
204 /* Used for points-to sets. */
205 static bitmap_obstack pta_obstack
;
207 /* Used for oldsolution members of variables. */
208 static bitmap_obstack oldpta_obstack
;
210 /* Used for per-solver-iteration bitmaps. */
211 static bitmap_obstack iteration_obstack
;
/* Forward declarations for the constraint machinery defined below.  */
213 static unsigned int create_variable_info_for (tree
, const char *);
214 typedef struct constraint_graph
*constraint_graph_t
;
215 static void unify_nodes (constraint_graph_t
, unsigned int, unsigned int, bool);
218 typedef struct constraint
*constraint_t
;
/* Iterate over the set bits of bitmap A, tolerating a NULL bitmap.
   NOTE(review): the guarding "if (a)" line of this macro appears to be
   elided in this excerpt -- confirm against the unabridged source.  */
221 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
223 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225 static struct constraint_stats
227 unsigned int total_vars
;
228 unsigned int nonpointer_vars
;
229 unsigned int unified_vars_static
;
230 unsigned int unified_vars_dynamic
;
231 unsigned int iterations
;
232 unsigned int num_edges
;
233 unsigned int num_implicit_edges
;
234 unsigned int points_to_sets_created
;
239 /* ID of this variable */
242 /* True if this is a variable created by the constraint analysis, such as
243 heap variables and constraints we had to break up. */
244 unsigned int is_artificial_var
: 1;
246 /* True if this is a special variable whose solution set should not be
248 unsigned int is_special_var
: 1;
250 /* True for variables whose size is not known or variable. */
251 unsigned int is_unknown_size_var
: 1;
253 /* True for (sub-)fields that represent a whole variable. */
254 unsigned int is_full_var
: 1;
256 /* True if this is a heap variable. */
257 unsigned int is_heap_var
: 1;
259 /* True if this field may contain pointers. */
260 unsigned int may_have_pointers
: 1;
262 /* True if this field has only restrict qualified pointers. */
263 unsigned int only_restrict_pointers
: 1;
265 /* True if this represents a global variable. */
266 unsigned int is_global_var
: 1;
268 /* True if this represents a IPA function info. */
269 unsigned int is_fn_info
: 1;
271 /* The ID of the variable for the next field in this structure
272 or zero for the last field in this structure. */
275 /* The ID of the variable for the first field in this structure. */
278 /* Offset of this variable, in bits, from the base variable */
279 unsigned HOST_WIDE_INT offset
;
281 /* Size of the variable, in bits. */
282 unsigned HOST_WIDE_INT size
;
284 /* Full size of the base variable, in bits. */
285 unsigned HOST_WIDE_INT fullsize
;
287 /* Name of this variable */
290 /* Tree that this variable is associated with. */
293 /* Points-to set for this variable. */
296 /* Old points-to set for this variable. */
299 typedef struct variable_info
*varinfo_t
;
301 static varinfo_t
first_vi_for_offset (varinfo_t
, unsigned HOST_WIDE_INT
);
302 static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t
,
303 unsigned HOST_WIDE_INT
);
304 static varinfo_t
lookup_vi_for_tree (tree
);
305 static inline bool type_can_have_subvars (const_tree
);
307 /* Pool of variable info structures. */
308 static alloc_pool variable_info_pool
;
310 /* Map varinfo to final pt_solution. */
311 static pointer_map_t
*final_solutions
;
312 struct obstack final_solutions_obstack
;
314 /* Table of variable info structures for constraint variables.
315 Indexed directly by variable info id. */
316 static vec
<varinfo_t
> varmap
;
318 /* Return the varmap element N */
320 static inline varinfo_t
321 get_varinfo (unsigned int n
)
/* NOTE(review): the body of get_varinfo (presumably an indexed access
   into the varmap vector) is elided in this excerpt -- confirm.  */
326 /* Return the next variable in the list of sub-variables of VI
327 or NULL if VI is the last sub-variable. */
329 static inline varinfo_t
330 vi_next (varinfo_t vi
)
/* vi->next holds a variable id; per the comment at the special-id enum
   below, id zero terminates the sub-variable chain.  NOTE(review):
   confirm get_varinfo of id 0 yields NULL in the full source.  */
332 return get_varinfo (vi
->next
);
335 /* Static IDs for the special variables. Variable ID zero is unused
336 and used as terminator for the sub-variable chain. */
/* These artificial variables model NOTHING (the empty set), ANYTHING
   (the universal set), read-only memory, memory escaping the unit,
   nonlocal memory, the "stored to anything" sink, and integers used
   as pointers.  Their ids are fixed so the solver can test them
   directly.  */
337 enum { nothing_id
= 1, anything_id
= 2, readonly_id
= 3,
338 escaped_id
= 4, nonlocal_id
= 5,
339 storedanything_id
= 6, integer_id
= 7 };
341 /* Return a new variable info structure consisting for a variable
342 named NAME, and using constraint graph node NODE. Append it
343 to the vector of variable info structures. */
346 new_var_info (tree t
, const char *name
)
348 unsigned index
= varmap
.length ();
349 varinfo_t ret
= (varinfo_t
) pool_alloc (variable_info_pool
);
354 /* Vars without decl are artificial and do not have sub-variables. */
355 ret
->is_artificial_var
= (t
== NULL_TREE
);
356 ret
->is_special_var
= false;
357 ret
->is_unknown_size_var
= false;
358 ret
->is_full_var
= (t
== NULL_TREE
);
359 ret
->is_heap_var
= false;
360 ret
->may_have_pointers
= true;
361 ret
->only_restrict_pointers
= false;
362 ret
->is_global_var
= (t
== NULL_TREE
);
363 ret
->is_fn_info
= false;
365 ret
->is_global_var
= (is_global_var (t
)
366 /* We have to treat even local register variables
368 || (TREE_CODE (t
) == VAR_DECL
369 && DECL_HARD_REGISTER (t
)));
370 ret
->solution
= BITMAP_ALLOC (&pta_obstack
);
371 ret
->oldsolution
= NULL
;
377 varmap
.safe_push (ret
);
383 /* A map mapping call statements to per-stmt variables for uses
384 and clobbers specific to the call. */
385 struct pointer_map_t
*call_stmt_vars
;
387 /* Lookup or create the variable for the call statement CALL. */
390 get_call_vi (gimple call
)
395 slot_p
= pointer_map_insert (call_stmt_vars
, call
);
397 return (varinfo_t
) *slot_p
;
399 vi
= new_var_info (NULL_TREE
, "CALLUSED");
403 vi
->is_full_var
= true;
405 vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED");
409 vi2
->is_full_var
= true;
413 *slot_p
= (void *) vi
;
417 /* Lookup the variable for the call statement CALL representing
418 the uses. Returns NULL if there is nothing special about this call. */
421 lookup_call_use_vi (gimple call
)
425 slot_p
= pointer_map_contains (call_stmt_vars
, call
);
427 return (varinfo_t
) *slot_p
;
432 /* Lookup the variable for the call statement CALL representing
433 the clobbers. Returns NULL if there is nothing special about this call. */
436 lookup_call_clobber_vi (gimple call
)
438 varinfo_t uses
= lookup_call_use_vi (call
);
442 return vi_next (uses
);
445 /* Lookup or create the variable for the call statement CALL representing
449 get_call_use_vi (gimple call
)
451 return get_call_vi (call
);
454 /* Lookup or create the variable for the call statement CALL representing
457 static varinfo_t ATTRIBUTE_UNUSED
458 get_call_clobber_vi (gimple call
)
460 return vi_next (get_call_vi (call
));
464 typedef enum {SCALAR
, DEREF
, ADDRESSOF
} constraint_expr_type
;
466 /* An expression that appears in a constraint. */
468 struct constraint_expr
470 /* Constraint type. */
471 constraint_expr_type type
;
473 /* Variable we are referring to in the constraint. */
476 /* Offset, in bits, of this constraint from the beginning of
477 variables it ends up referring to.
479 IOW, in a deref constraint, we would deref, get the result set,
480 then add OFFSET to each member. */
481 HOST_WIDE_INT offset
;
484 /* Use 0x8000... as special unknown offset. */
485 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
487 typedef struct constraint_expr ce_s
;
488 static void get_constraint_for_1 (tree
, vec
<ce_s
> *, bool, bool);
489 static void get_constraint_for (tree
, vec
<ce_s
> *);
490 static void get_constraint_for_rhs (tree
, vec
<ce_s
> *);
491 static void do_deref (vec
<ce_s
> *);
493 /* Our set constraints are made up of two constraint expressions, one
496 As described in the introduction, our set constraints each represent an
497 operation between set valued variables.
501 struct constraint_expr lhs
;
502 struct constraint_expr rhs
;
505 /* List of constraints that we use to build the constraint graph from. */
507 static vec
<constraint_t
> constraints
;
508 static alloc_pool constraint_pool
;
510 /* The constraint graph is represented as an array of bitmaps
511 containing successor nodes. */
513 struct constraint_graph
515 /* Size of this graph, which may be different than the number of
516 nodes in the variable map. */
519 /* Explicit successors of each node. */
522 /* Implicit predecessors of each node (Used for variable
524 bitmap
*implicit_preds
;
526 /* Explicit predecessors of each node (Used for variable substitution). */
529 /* Indirect cycle representatives, or -1 if the node has no indirect
531 int *indirect_cycles
;
533 /* Representative node for a node. rep[a] == a unless the node has
537 /* Equivalence class representative for a label. This is used for
538 variable substitution. */
541 /* Pointer equivalence label for a node. All nodes with the same
542 pointer equivalence label can be unified together at some point
543 (either during constraint optimization or after the constraint
547 /* Pointer equivalence representative for a label. This is used to
548 handle nodes that are pointer equivalent but not location
549 equivalent. We can unite these once the addressof constraints
550 are transformed into initial points-to sets. */
553 /* Pointer equivalence label for each node, used during variable
555 unsigned int *pointer_label
;
557 /* Location equivalence label for each node, used during location
558 equivalence finding. */
559 unsigned int *loc_label
;
561 /* Pointed-by set for each node, used during location equivalence
562 finding. This is pointed-by rather than pointed-to, because it
563 is constructed using the predecessor graph. */
566 /* Points to sets for pointer equivalence. This is *not* the actual
567 points-to sets for nodes. */
570 /* Bitmap of nodes where the bit is set if the node is a direct
571 node. Used for variable substitution. */
572 sbitmap direct_nodes
;
574 /* Bitmap of nodes where the bit is set if the node is address
575 taken. Used for variable substitution. */
576 bitmap address_taken
;
578 /* Vector of complex constraints for each graph node. Complex
579 constraints are those involving dereferences or offsets that are
581 vec
<constraint_t
> *complex;
584 static constraint_graph_t graph
;
586 /* During variable substitution and the offline version of indirect
587 cycle finding, we create nodes to represent dereferences and
588 address taken constraints. These represent where these start and
590 #define FIRST_REF_NODE (varmap).length ()
591 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
593 /* Return the representative node for NODE, if NODE has been unioned
595 This function performs path compression along the way to finding
596 the representative. */
599 find (unsigned int node
)
601 gcc_checking_assert (node
< graph
->size
);
/* Recurse to the root of the union-find tree and compress the path:
   every node visited is re-parented directly onto the root.  */
602 if (graph
->rep
[node
] != node
)
603 return graph
->rep
[node
] = find (graph
->rep
[node
]);
/* NOTE(review): the fall-through "return node;" for a node that is
   its own representative is elided in this excerpt -- confirm.  */
607 /* Union the TO and FROM nodes to the TO nodes.
608 Note that at some point in the future, we may want to do
609 union-by-rank, in which case we are going to have to return the
610 node we unified to. */
613 unite (unsigned int to
, unsigned int from
)
615 gcc_checking_assert (to
< graph
->size
&& from
< graph
->size
);
/* Only union distinct nodes that are not already in the same set;
   FROM's representative becomes TO (no union-by-rank, see above).  */
616 if (to
!= from
&& graph
->rep
[from
] != to
)
618 graph
->rep
[from
] = to
;
/* NOTE(review): the true/false result reporting whether a union
   actually happened is elided in this excerpt -- confirm.  */
624 /* Create a new constraint consisting of LHS and RHS expressions. */
627 new_constraint (const struct constraint_expr lhs
,
628 const struct constraint_expr rhs
)
630 constraint_t ret
= (constraint_t
) pool_alloc (constraint_pool
);
636 /* Print out constraint C to FILE. */
639 dump_constraint (FILE *file
, constraint_t c
)
641 if (c
->lhs
.type
== ADDRESSOF
)
643 else if (c
->lhs
.type
== DEREF
)
645 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
646 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
647 fprintf (file
, " + UNKNOWN");
648 else if (c
->lhs
.offset
!= 0)
649 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
650 fprintf (file
, " = ");
651 if (c
->rhs
.type
== ADDRESSOF
)
653 else if (c
->rhs
.type
== DEREF
)
655 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
656 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
657 fprintf (file
, " + UNKNOWN");
658 else if (c
->rhs
.offset
!= 0)
659 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
663 void debug_constraint (constraint_t
);
664 void debug_constraints (void);
665 void debug_constraint_graph (void);
666 void debug_solution_for_var (unsigned int);
667 void debug_sa_points_to_info (void);
669 /* Print out constraint C to stderr. */
672 debug_constraint (constraint_t c
)
/* Delegate the formatting to dump_constraint and append a newline;
   intended to be called from the debugger.  */
674 dump_constraint (stderr
, c
);
675 fprintf (stderr
, "\n");
678 /* Print out all constraints to FILE */
681 dump_constraints (FILE *file
, int from
)
685 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
688 dump_constraint (file
, c
);
689 fprintf (file
, "\n");
693 /* Print out all constraints to stderr. */
696 debug_constraints (void)
/* Dump the whole constraints vector starting at index 0.  */
698 dump_constraints (stderr
, 0);
701 /* Print the constraint graph in dot format. */
704 dump_constraint_graph (FILE *file
)
708 /* Only print the graph if it has already been initialized: */
712 /* Prints the header of the dot file: */
713 fprintf (file
, "strict digraph {\n");
714 fprintf (file
, " node [\n shape = box\n ]\n");
715 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
716 fprintf (file
, "\n // List of nodes and complex constraints in "
717 "the constraint graph:\n");
719 /* The next lines print the nodes in the graph together with the
720 complex constraints attached to them. */
721 for (i
= 1; i
< graph
->size
; i
++)
723 if (i
== FIRST_REF_NODE
)
727 if (i
< FIRST_REF_NODE
)
728 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
730 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
731 if (graph
->complex[i
].exists ())
735 fprintf (file
, " [label=\"\\N\\n");
736 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
738 dump_constraint (file
, c
);
739 fprintf (file
, "\\l");
741 fprintf (file
, "\"]");
743 fprintf (file
, ";\n");
746 /* Go over the edges. */
747 fprintf (file
, "\n // Edges in the constraint graph:\n");
748 for (i
= 1; i
< graph
->size
; i
++)
754 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
756 unsigned to
= find (j
);
759 if (i
< FIRST_REF_NODE
)
760 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
762 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
763 fprintf (file
, " -> ");
764 if (to
< FIRST_REF_NODE
)
765 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
767 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
768 fprintf (file
, ";\n");
772 /* Prints the tail of the dot file. */
773 fprintf (file
, "}\n");
776 /* Print out the constraint graph to stderr. */
779 debug_constraint_graph (void)
781 dump_constraint_graph (stderr
);
786 The solver is a simple worklist solver, that works on the following
789 sbitmap changed_nodes = all zeroes;
791 For each node that is not already collapsed:
793 set bit in changed nodes
795 while (changed_count > 0)
797 compute topological ordering for constraint graph
799 find and collapse cycles in the constraint graph (updating
800 changed if necessary)
802 for each node (n) in the graph in topological order:
805 Process each complex constraint associated with the node,
806 updating changed if necessary.
808 For each outgoing edge from n, propagate the solution from n to
809 the destination of the edge, updating changed as necessary.
813 /* Return true if two constraint expressions A and B are equal. */
816 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
/* Two constraint expressions are equal only when the constraint type,
   the variable id and the bit offset all agree.  */
818 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
821 /* Return true if constraint expression A is less than constraint expression
822 B. This is just arbitrary, but consistent, in order to give them an
/* Lexicographic ordering: compare type first, then variable id, then
   offset.  NOTE(review): the "if (a.var == b.var)" guard between the
   offset and var comparisons appears to be elided in this excerpt --
   confirm against the unabridged source.  */
826 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
828 if (a
.type
== b
.type
)
831 return a
.offset
< b
.offset
;
833 return a
.var
< b
.var
;
836 return a
.type
< b
.type
;
839 /* Return true if constraint A is less than constraint B. This is just
840 arbitrary, but consistent, in order to give them an ordering. */
843 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
/* Order by LHS first; only when the LHS expressions compare equal in
   both directions fall back to comparing the RHS expressions.
   NOTE(review): the "return true" / "return false" bodies of the two
   LHS branches are elided in this excerpt -- confirm.  */
845 if (constraint_expr_less (a
->lhs
, b
->lhs
))
847 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
850 return constraint_expr_less (a
->rhs
, b
->rhs
);
853 /* Return true if two constraints A and B are equal. */
856 constraint_equal (struct constraint a
, struct constraint b
)
/* Equal iff both the left-hand and right-hand constraint expressions
   match component-wise (see constraint_expr_equal).  */
858 return constraint_expr_equal (a
.lhs
, b
.lhs
)
859 && constraint_expr_equal (a
.rhs
, b
.rhs
);
863 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
866 constraint_vec_find (vec
<constraint_t
> vec
,
867 struct constraint lookfor
)
875 place
= vec
.lower_bound (&lookfor
, constraint_less
);
876 if (place
>= vec
.length ())
879 if (!constraint_equal (*found
, lookfor
))
884 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
887 constraint_set_union (vec
<constraint_t
> *to
,
888 vec
<constraint_t
> *from
)
893 FOR_EACH_VEC_ELT (*from
, i
, c
)
895 if (constraint_vec_find (*to
, *c
) == NULL
)
897 unsigned int place
= to
->lower_bound (c
, constraint_less
);
898 to
->safe_insert (place
, c
);
903 /* Expands the solution in SET to all sub-fields of variables included. */
906 solution_set_expand (bitmap set
)
911 /* In a first pass expand to the head of the variables we need to
912 add all sub-fields off. This avoids quadratic behavior. */
913 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
915 varinfo_t v
= get_varinfo (j
);
916 if (v
->is_artificial_var
919 bitmap_set_bit (set
, v
->head
);
922 /* In the second pass now expand all head variables with subfields. */
923 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
925 varinfo_t v
= get_varinfo (j
);
926 if (v
->is_artificial_var
930 for (v
= vi_next (v
); v
!= NULL
; v
= vi_next (v
))
931 bitmap_set_bit (set
, v
->id
);
935 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
939 set_union_with_increment (bitmap to
, bitmap from
, HOST_WIDE_INT inc
)
941 bool changed
= false;
945 /* If the solution of FROM contains anything it is good enough to transfer
947 if (bitmap_bit_p (from
, anything_id
))
948 return bitmap_set_bit (to
, anything_id
);
950 /* For zero offset simply union the solution into the destination. */
952 return bitmap_ior_into (to
, from
);
954 /* If the offset is unknown we have to expand the solution to
956 if (inc
== UNKNOWN_OFFSET
)
958 bitmap tmp
= BITMAP_ALLOC (&iteration_obstack
);
959 bitmap_copy (tmp
, from
);
960 solution_set_expand (tmp
);
961 changed
|= bitmap_ior_into (to
, tmp
);
966 /* For non-zero offset union the offsetted solution into the destination. */
967 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
969 varinfo_t vi
= get_varinfo (i
);
971 /* If this is a variable with just one field just set its bit
973 if (vi
->is_artificial_var
974 || vi
->is_unknown_size_var
976 changed
|= bitmap_set_bit (to
, i
);
979 unsigned HOST_WIDE_INT fieldoffset
= vi
->offset
+ inc
;
981 /* If the offset makes the pointer point to before the
982 variable use offset zero for the field lookup. */
984 && fieldoffset
> vi
->offset
)
987 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
989 changed
|= bitmap_set_bit (to
, vi
->id
);
990 /* If the result is not exactly at fieldoffset include the next
991 field as well. See get_constraint_for_ptr_offset for more
993 if (vi
->offset
!= fieldoffset
995 changed
|= bitmap_set_bit (to
, vi
->next
);
1002 /* Insert constraint C into the list of complex constraints for graph
1006 insert_into_complex (constraint_graph_t graph
,
1007 unsigned int var
, constraint_t c
)
1009 vec
<constraint_t
> complex = graph
->complex[var
];
1010 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1012 /* Only insert constraints that do not already exist. */
1013 if (place
>= complex.length ()
1014 || !constraint_equal (*c
, *complex[place
]))
1015 graph
->complex[var
].safe_insert (place
, c
);
1019 /* Condense two variable nodes into a single variable node, by moving
1020 all associated info from SRC to TO. */
1023 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1029 gcc_checking_assert (find (from
) == to
);
1031 /* Move all complex constraints from src node into to node */
1032 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
1034 /* In complex constraints for node src, we may have either
1035 a = *src, and *src = a, or an offseted constraint which are
1036 always added to the rhs node's constraints. */
1038 if (c
->rhs
.type
== DEREF
)
1040 else if (c
->lhs
.type
== DEREF
)
1045 constraint_set_union (&graph
->complex[to
], &graph
->complex[from
]);
1046 graph
->complex[from
].release ();
1050 /* Remove edges involving NODE from GRAPH. */
1053 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1055 if (graph
->succs
[node
])
1056 BITMAP_FREE (graph
->succs
[node
]);
1059 /* Merge GRAPH nodes FROM and TO into node TO. */
1062 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1065 if (graph
->indirect_cycles
[from
] != -1)
1067 /* If we have indirect cycles with the from node, and we have
1068 none on the to node, the to node has indirect cycles from the
1069 from node now that they are unified.
1070 If indirect cycles exist on both, unify the nodes that they
1071 are in a cycle with, since we know they are in a cycle with
1073 if (graph
->indirect_cycles
[to
] == -1)
1074 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1077 /* Merge all the successor edges. */
1078 if (graph
->succs
[from
])
1080 if (!graph
->succs
[to
])
1081 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1082 bitmap_ior_into (graph
->succs
[to
],
1083 graph
->succs
[from
]);
1086 clear_edges_for_node (graph
, from
);
1090 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1091 it doesn't exist in the graph already. */
1094 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
1100 if (!graph
->implicit_preds
[to
])
1101 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1103 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1104 stats
.num_implicit_edges
++;
1107 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1108 it doesn't exist in the graph already.
1109 Return false if the edge already existed, true otherwise. */
1112 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
/* NOTE(review): the "unsigned int from" parameter line and the
   returned bitmap_set_bit result are elided in this excerpt --
   confirm against the unabridged source.  */
/* Lazily allocate TO's predecessor bitmap on the predecessor obstack,
   then record the edge FROM -> TO.  */
1115 if (!graph
->preds
[to
])
1116 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1117 bitmap_set_bit (graph
->preds
[to
], from
);
1120 /* Add a graph edge to GRAPH, going from FROM to TO if
1121 it doesn't exist in the graph already.
1122 Return false if the edge already existed, true otherwise. */
1125 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1136 if (!graph
->succs
[from
])
1137 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1138 if (bitmap_set_bit (graph
->succs
[from
], to
))
1141 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1149 /* Initialize the constraint graph structure to contain SIZE nodes. */
1152 init_graph (unsigned int size
)
1156 graph
= XCNEW (struct constraint_graph
);
1158 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1159 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1160 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1161 /* ??? Macros do not support template types with multiple arguments,
1162 so we use a typedef to work around it. */
1163 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1164 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1165 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1166 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1168 for (j
= 0; j
< graph
->size
; j
++)
1171 graph
->pe_rep
[j
] = -1;
1172 graph
->indirect_cycles
[j
] = -1;
/* build_pred_graph: walk all constraints and record predecessor,
   implicit and points-to/pointed-by information used by offline
   variable substitution.  NOTE(review): extraction dropped the
   function header, locals and many brace lines (orig 1177-1184,
   1195-1197, 1200-1201, etc.) — restore from upstream.  */
1176 /* Build the constraint graph, adding only predecessor edges right now. */
1179 build_pred_graph (void)
/* Allocate the substitution-phase side arrays; XCNEWVEC zeroes them,
   XNEWVEC (eq_rep) is initialized explicitly below.  */
1185 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1186 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1187 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1188 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1189 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1190 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1191 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1192 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1193 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1194 bitmap_clear (graph
->direct_nodes
);
/* Every non-special variable starts out as a direct node; constraints
   below may demote it.  */
1196 for (j
= 1; j
< FIRST_REF_NODE
; j
++)
1198 if (!get_varinfo (j
)->is_special_var
)
1199 bitmap_set_bit (graph
->direct_nodes
, j
);
1202 for (j
= 0; j
< graph
->size
; j
++)
1203 graph
->eq_rep
[j
] = -1;
1205 for (j
= 0; j
< varmap
.length (); j
++)
1206 graph
->indirect_cycles
[j
] = -1;
/* Classify each constraint by the DEREF/ADDRESSOF/SCALAR shape of its
   two sides and record the corresponding pred-graph edges.  */
1208 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1210 struct constraint_expr lhs
= c
->lhs
;
1211 struct constraint_expr rhs
= c
->rhs
;
1212 unsigned int lhsvar
= lhs
.var
;
1213 unsigned int rhsvar
= rhs
.var
;
/* *x = y: edge from y to the REF node of x.  */
1215 if (lhs
.type
== DEREF
)
1218 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1219 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
/* x = *y: edge from the REF node of y to x; x can no longer be a
   direct node.  */
1221 else if (rhs
.type
== DEREF
)
1224 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1225 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1227 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
/* x = &y: record y in points_to(x) and x in pointed_by(y), both
   allocated lazily.  */
1229 else if (rhs
.type
== ADDRESSOF
)
1234 if (graph
->points_to
[lhsvar
] == NULL
)
1235 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1236 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1238 if (graph
->pointed_by
[rhsvar
] == NULL
)
1239 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1240 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1242 /* Implicitly, *x = y */
1243 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1245 /* All related variables are no longer direct nodes. */
1246 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
/* For field-sensitive vars walk from the head field; the loop
   advancing v was dropped by the extraction (orig 1251-1252, 1254).  */
1247 v
= get_varinfo (rhsvar
);
1248 if (!v
->is_full_var
)
1250 v
= get_varinfo (v
->head
);
1253 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1258 bitmap_set_bit (graph
->address_taken
, rhsvar
);
/* Plain x = y copy (offsets zero, x not a special var): explicit
   pred edge plus the implicit *x = *y edge between REF nodes.  */
1260 else if (lhsvar
> anything_id
1261 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1264 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1265 /* Implicitly, *x = *y */
1266 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1267 FIRST_REF_NODE
+ rhsvar
);
/* Offsetted copies make the offsetted side non-direct.  */
1269 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1271 if (rhs
.offset
!= 0)
1272 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1273 else if (lhs
.offset
!= 0)
1274 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
/* build_succ_graph: second graph-construction pass, run after
   variable substitution; adds the successor edges the solver
   iterates over.  NOTE(review): extraction dropped the function
   header, locals, braces and the lines assigning lhs/rhs from the
   constraint (orig 1280-1286, 1288, 1293-1298) — restore from
   upstream.  */
1279 /* Build the constraint graph, adding successor edges. */
1282 build_succ_graph (void)
1287 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1289 struct constraint_expr lhs
;
1290 struct constraint_expr rhs
;
1291 unsigned int lhsvar
;
1292 unsigned int rhsvar
;
/* Work on union-find representatives, since substitution may have
   collapsed nodes.  */
1299 lhsvar
= find (lhs
.var
);
1300 rhsvar
= find (rhs
.var
);
/* *x = y: edge y -> REF(x).  */
1302 if (lhs
.type
== DEREF
)
1304 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1305 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
/* x = *y: edge REF(y) -> x.  */
1307 else if (rhs
.type
== DEREF
)
1309 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1310 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
/* x = &y: no edge needed, seed y directly into x's solution.  */
1312 else if (rhs
.type
== ADDRESSOF
)
1315 gcc_checking_assert (find (rhs
.var
) == rhs
.var
);
1316 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
/* Plain copy x = y.  */
1318 else if (lhsvar
> anything_id
1319 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1321 add_graph_edge (graph
, lhsvar
, rhsvar
);
1325 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1326 receive pointers. */
1327 t
= find (storedanything_id
);
1328 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1330 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1331 && get_varinfo (i
)->may_have_pointers
)
1332 add_graph_edge (graph
, find (i
), t
);
1335 /* Everything stored to ANYTHING also potentially escapes. */
1336 add_graph_edge (graph
, find (escaped_id
), t
);
/* Solver worklist bitmap plus fragments of the scc_info structure
   used by Nuutila's SCC algorithm below.  NOTE(review): the struct
   definition is incomplete — the extraction dropped its opening
   (orig lines 1344-1349: struct scc_info { visited; deleted; dfs;
   current_index; ...) and closing brace; restore from upstream.  */
1340 /* Changed variables on the last iteration. */
1341 static bitmap changed
;
1343 /* Strongly Connected Component visitation info. */
/* node_mapping: maps each node to its SCC representative during
   condensation.  */
1350 unsigned int *node_mapping
;
/* scc_stack: Nuutila's stack of candidate non-root nodes.  */
1352 vec
<unsigned> scc_stack
;
1356 /* Recursive routine to find strongly connected components in GRAPH.
1357 SI is the SCC info to store the information in, and N is the id of current
1358 graph node we are processing.
1360 This is Tarjan's strongly connected component finding algorithm, as
1361 modified by Nuutila to keep only non-root nodes on the stack.
1362 The algorithm can be found in "On finding the strongly connected
1363 connected components in a directed graph" by Esko Nuutila and Eljas
1364 Soisalon-Soininen, in Information Processing Letters volume 49,
1365 number 1, pages 9-14. */
/* scc_visit: recursive Nuutila/Tarjan SCC visit over GRAPH->succs,
   collapsing each discovered SCC into its lowest-numbered real node
   and recording indirect (REF-node) cycles.  NOTE(review): extraction
   dropped the return type, locals (i, bi, w), braces and several
   statement lines (orig 1369-1371, 1380-1382, 1384-1386, etc.) —
   restore from upstream.  */
1368 scc_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1372 unsigned int my_dfs
;
/* Assign N its DFS number and remember it: nodes in N's SCC will end
   up with dfs[] >= my_dfs.  */
1374 bitmap_set_bit (si
->visited
, n
);
1375 si
->dfs
[n
] = si
->current_index
++;
1376 my_dfs
= si
->dfs
[n
];
1378 /* Visit all the successors. */
1379 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
/* Successors above LAST_REF_NODE are skipped (dropped `break'/
   `continue' presumably followed — TODO confirm against upstream).  */
1383 if (i
> LAST_REF_NODE
)
1387 if (bitmap_bit_p (si
->deleted
, w
))
1390 if (!bitmap_bit_p (si
->visited
, w
))
1391 scc_visit (graph
, si
, w
);
/* Propagate the minimum DFS number from the successor's
   representative.  */
1393 unsigned int t
= find (w
);
1394 gcc_checking_assert (find (n
) == n
);
1395 if (si
->dfs
[t
] < si
->dfs
[n
])
1396 si
->dfs
[n
] = si
->dfs
[t
];
1399 /* See if any components have been identified. */
1400 if (si
->dfs
[n
] == my_dfs
)
1402 if (si
->scc_stack
.length () > 0
1403 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1405 bitmap scc
= BITMAP_ALLOC (NULL
);
1406 unsigned int lowest_node
;
/* Collect N plus every stacked node belonging to this SCC.  */
1409 bitmap_set_bit (scc
, n
);
1411 while (si
->scc_stack
.length () != 0
1412 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1414 unsigned int w
= si
->scc_stack
.pop ();
1416 bitmap_set_bit (scc
, w
);
/* The SCC leader must be a real (non-REF) node.  */
1419 lowest_node
= bitmap_first_set_bit (scc
);
1420 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1422 /* Collapse the SCC nodes into a single node, and mark the
/* Real nodes are united into the leader; REF nodes instead record
   the leader in indirect_cycles for later collapsing.  */
1424 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1426 if (i
< FIRST_REF_NODE
)
1428 if (unite (lowest_node
, i
))
1429 unify_nodes (graph
, lowest_node
, i
, false);
1433 unite (lowest_node
, i
);
1434 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1438 bitmap_set_bit (si
->deleted
, n
);
/* N is not an SCC root yet: keep it on the stack (Nuutila's
   non-root-only stacking).  */
1441 si
->scc_stack
.safe_push (n
);
/* unify_nodes: merge node FROM into its union-find representative TO,
   combining graph edges, constraints and solution bitmaps, and
   keeping the `changed' worklist consistent.  NOTE(review):
   extraction dropped the return type, braces and a few lines (e.g.
   the UPDATE_CHANGED test at orig 1457-1458/1468-1469 and the tovi
   declaration context) — restore from upstream.  */
1444 /* Unify node FROM into node TO, updating the changed count if
1445 necessary when UPDATE_CHANGED is true. */
1448 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1449 bool update_changed
)
1451 gcc_checking_assert (to
!= from
&& find (to
) == to
);
1453 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1454 fprintf (dump_file
, "Unifying %s to %s\n",
1455 get_varinfo (from
)->name
,
1456 get_varinfo (to
)->name
);
/* Two stats counters: one path presumably guarded by update_changed
   (guard dropped by the extraction).  */
1459 stats
.unified_vars_dynamic
++;
1461 stats
.unified_vars_static
++;
1463 merge_graph_nodes (graph
, to
, from
);
1464 merge_node_constraints (graph
, to
, from
);
1466 /* Mark TO as changed if FROM was changed. If TO was already marked
1467 as changed, decrease the changed count. */
1470 && bitmap_clear_bit (changed
, from
))
1471 bitmap_set_bit (changed
, to
);
/* Fold FROM's solution into TO's and free FROM's bitmaps; oring in
   new bits marks TO changed.  */
1472 varinfo_t fromvi
= get_varinfo (from
);
1473 if (fromvi
->solution
)
1475 /* If the solution changes because of the merging, we need to mark
1476 the variable as changed. */
1477 varinfo_t tovi
= get_varinfo (to
);
1478 if (bitmap_ior_into (tovi
->solution
, fromvi
->solution
))
1481 bitmap_set_bit (changed
, to
);
1484 BITMAP_FREE (fromvi
->solution
);
1485 if (fromvi
->oldsolution
)
1486 BITMAP_FREE (fromvi
->oldsolution
);
1488 if (stats
.iterations
> 0
1489 && tovi
->oldsolution
)
1490 BITMAP_FREE (tovi
->oldsolution
);
/* Self-edges are useless after merging.  */
1492 if (graph
->succs
[to
])
1493 bitmap_clear_bit (graph
->succs
[to
], to
);
/* Fragments of `struct topo_info' (visited sbitmap + topo_order vec);
   the struct's opening/closing lines were dropped by the extraction
   (orig 1497-1507) — restore from upstream.  */
1496 /* Information needed to compute the topological ordering of a graph. */
1500 /* sbitmap of visited nodes. */
1502 /* Array that stores the topological order of the graph, *in
1504 vec
<unsigned> topo_order
;
/* init_topo_info: allocate a topo_info sized for the current graph,
   with an empty visited set and order vector.  NOTE(review): braces
   and the final `return ti;' (orig 1512, 1518-1519) were dropped by
   the extraction.  */
1508 /* Initialize and return a topological info structure. */
1510 static struct topo_info
*
1511 init_topo_info (void)
1513 size_t size
= graph
->size
;
1514 struct topo_info
*ti
= XNEW (struct topo_info
);
1515 ti
->visited
= sbitmap_alloc (size
);
/* sbitmap_alloc does not zero; clear explicitly.  */
1516 bitmap_clear (ti
->visited
);
1517 ti
->topo_order
.create (1);
/* free_topo_info: release the sbitmap and vector owned by TI.
   NOTE(review): the `free (ti);' and braces (orig 1526, 1529-1530)
   were dropped by the extraction — TI itself must also be freed.  */
1522 /* Free the topological sort info pointed to by TI. */
1525 free_topo_info (struct topo_info
*ti
)
1527 sbitmap_free (ti
->visited
);
1528 ti
->topo_order
.release ();
/* topo_visit: post-order DFS over GRAPH->succs from node N, pushing N
   after all successors, so ti->topo_order ends up in reverse
   topological order.  NOTE(review): the parameter line carrying N,
   the locals (j, bi) and braces were dropped by the extraction (orig
   1537-1541, etc.).  */
1532 /* Visit the graph in topological order, and store the order in the
1533 topo_info structure. */
1536 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1542 bitmap_set_bit (ti
->visited
, n
);
1544 if (graph
->succs
[n
])
1545 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1547 if (!bitmap_bit_p (ti
->visited
, j
))
1548 topo_visit (graph
, ti
, j
);
/* Post-order push: N follows all of its successors.  */
1551 ti
->topo_order
.safe_push (n
);
/* do_sd_constraint: solve x = *(y + off) — for each pointed-to var j
   in DELTA, add edge j -> x and union Sol(j) into Sol(x).
   NOTE(review): extraction dropped the DELTA parameter line, locals
   (flag, j, bi, t), braces, the `done'-style labels and the loop that
   advances V across fields (orig 1559-1560, 1562, 1564-1565, 1567,
   1586-1588, 1602-1608, 1624-1633, 1635-1636, 1639) — restore from
   upstream.  */
1554 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1555 starting solution for y. */
1558 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1561 unsigned int lhs
= c
->lhs
.var
;
1563 bitmap sol
= get_varinfo (lhs
)->solution
;
1566 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1568 /* Our IL does not allow this. */
1569 gcc_checking_assert (c
->lhs
.offset
== 0);
1571 /* If the solution of Y contains anything it is good enough to transfer
/* ANYTHING short-circuits: just set the anything bit on the LHS.  */
1573 if (bitmap_bit_p (delta
, anything_id
))
1575 flag
|= bitmap_set_bit (sol
, anything_id
);
1579 /* If we do not know at with offset the rhs is dereferenced compute
1580 the reachability set of DELTA, conservatively assuming it is
1581 dereferenced at all valid offsets. */
1582 if (roffset
== UNKNOWN_OFFSET
)
1584 solution_set_expand (delta
);
1585 /* No further offset processing is necessary. */
1589 /* For each variable j in delta (Sol(y)), add
1590 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1591 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1593 varinfo_t v
= get_varinfo (j
);
1594 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
/* Full vars have no sub-fields: keep v's own offset; otherwise find
   the field at the computed offset.  */
1598 fieldoffset
= v
->offset
;
1599 else if (roffset
!= 0)
1600 v
= first_vi_for_offset (v
, fieldoffset
);
1601 /* If the access is outside of the variable we can ignore it. */
1609 /* Adding edges from the special vars is pointless.
1610 They don't have sets that can change. */
1611 if (get_varinfo (t
)->is_special_var
)
1612 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1613 /* Merging the solution from ESCAPED needlessly increases
1614 the set. Use ESCAPED as representative instead. */
1615 else if (v
->id
== escaped_id
)
1616 flag
|= bitmap_set_bit (sol
, escaped_id
);
1617 else if (v
->may_have_pointers
1618 && add_graph_edge (graph
, lhs
, t
))
1619 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1621 /* If the variable is not exactly at the requested offset
1622 we have to include the next one. */
1623 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1628 fieldoffset
= v
->offset
;
1634 /* If the LHS solution changed, mark the var as changed. */
1637 get_varinfo (lhs
)->solution
= sol
;
1638 bitmap_set_bit (changed
, lhs
);
/* do_ds_constraint: solve *(x + off) = y — for each pointed-to var j
   in DELTA, add edge y -> j and union Sol(y) into Sol(j); stores
   through ANYTHING funnel into STOREDANYTHING, stores to globals
   escape once via ESCAPED.  NOTE(review): extraction dropped locals
   (j, bi, t), braces, `return'/`continue' lines and the field-walk
   continuation (orig 1650-1651, 1654, 1657, 1662, 1665, 1673-1676,
   1683-1685, 1693-1696, 1699-1703, 1715-1717, 1719-1721, 1725-1726,
   1730-1737) — restore from upstream.  */
1642 /* Process a constraint C that represents *(x + off) = y using DELTA
1643 as the starting solution for x. */
1646 do_ds_constraint (constraint_t c
, bitmap delta
)
1648 unsigned int rhs
= c
->rhs
.var
;
1649 bitmap sol
= get_varinfo (rhs
)->solution
;
1652 HOST_WIDE_INT loff
= c
->lhs
.offset
;
/* Once RHS has been merged into ESCAPED we need not do it again for
   further global targets.  */
1653 bool escaped_p
= false;
1655 /* Our IL does not allow this. */
1656 gcc_checking_assert (c
->rhs
.offset
== 0);
1658 /* If the solution of y contains ANYTHING simply use the ANYTHING
1659 solution. This avoids needlessly increasing the points-to sets. */
1660 if (bitmap_bit_p (sol
, anything_id
))
1661 sol
= get_varinfo (find (anything_id
))->solution
;
1663 /* If the solution for x contains ANYTHING we have to merge the
1664 solution of y into all pointer variables which we do via
1666 if (bitmap_bit_p (delta
, anything_id
))
1668 unsigned t
= find (storedanything_id
);
1669 if (add_graph_edge (graph
, t
, rhs
))
1671 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1672 bitmap_set_bit (changed
, t
);
1677 /* If we do not know at with offset the rhs is dereferenced compute
1678 the reachability set of DELTA, conservatively assuming it is
1679 dereferenced at all valid offsets. */
1680 if (loff
== UNKNOWN_OFFSET
)
1682 solution_set_expand (delta
);
1686 /* For each member j of delta (Sol(x)), add an edge from y to j and
1687 union Sol(y) into Sol(j) */
1688 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1690 varinfo_t v
= get_varinfo (j
);
1692 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
/* As in do_sd_constraint: full vars keep their own offset, otherwise
   locate the field at the target offset.  */
1695 fieldoffset
= v
->offset
;
1697 v
= first_vi_for_offset (v
, fieldoffset
);
1698 /* If the access is outside of the variable we can ignore it. */
1704 if (v
->may_have_pointers
)
1706 /* If v is a global variable then this is an escape point. */
1707 if (v
->is_global_var
1710 t
= find (escaped_id
);
1711 if (add_graph_edge (graph
, t
, rhs
)
1712 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1713 bitmap_set_bit (changed
, t
);
1714 /* Enough to let rhs escape once. */
1718 if (v
->is_special_var
)
1722 if (add_graph_edge (graph
, t
, rhs
)
1723 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1724 bitmap_set_bit (changed
, t
);
1727 /* If the variable is not exactly at the requested offset
1728 we have to include the next one. */
1729 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1734 fieldoffset
= v
->offset
;
/* do_complex_constraint: dispatch a complex constraint to the
   store (*x = y), load (x = *y) or offsetted-copy handler.
   NOTE(review): extraction dropped the function header/locals
   (solution, tmp, flag) and braces (orig 1745, 1747, 1749-1754,
   1756-1757, 1759-1760, 1763-1769, 1773, 1775-1776, 1778-1779) —
   restore from upstream.  */
1740 /* Handle a non-simple (simple meaning requires no iteration),
1741 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1744 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
)
1746 if (c
->lhs
.type
== DEREF
)
/* *x = &y has a dedicated path (dropped here); otherwise a store.  */
1748 if (c
->rhs
.type
== ADDRESSOF
)
1755 do_ds_constraint (c
, delta
);
1758 else if (c
->rhs
.type
== DEREF
)
/* Loads into special vars are pointless — their solutions are fixed.  */
1761 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1762 do_sd_constraint (graph
, c
, delta
);
/* Remaining case: scalar copy with offset; union with increment and
   flag the LHS when its solution grew.  */
1770 gcc_checking_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
);
1771 solution
= get_varinfo (c
->rhs
.var
)->solution
;
1772 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1774 flag
= set_union_with_increment (tmp
, solution
, c
->rhs
.offset
);
1777 bitmap_set_bit (changed
, c
->lhs
.var
);
/* init_scc_info: allocate an scc_info for SIZE nodes — cleared
   visited/deleted sbitmaps, identity node_mapping, zeroed dfs
   numbers.  NOTE(review): braces, the loop variable declaration and
   the trailing `return si;' (orig 1785, 1787-1788, 1796, 1799,
   1801-1802) were dropped by the extraction.  */
1781 /* Initialize and return a new SCC info structure. */
1783 static struct scc_info
*
1784 init_scc_info (size_t size
)
1786 struct scc_info
*si
= XNEW (struct scc_info
);
1789 si
->current_index
= 0;
1790 si
->visited
= sbitmap_alloc (size
);
1791 bitmap_clear (si
->visited
);
1792 si
->deleted
= sbitmap_alloc (size
);
1793 bitmap_clear (si
->deleted
);
1794 si
->node_mapping
= XNEWVEC (unsigned int, size
);
1795 si
->dfs
= XCNEWVEC (unsigned int, size
);
/* Every node starts as its own representative.  */
1797 for (i
= 0; i
< size
; i
++)
1798 si
->node_mapping
[i
] = i
;
1800 si
->scc_stack
.create (1);
/* free_scc_info: release everything owned by SI.  NOTE(review): the
   dfs array free and `free (si);' lines (orig 1812, 1814-1815) were
   dropped by the extraction — SI and si->dfs must also be freed.  */
1804 /* Free an SCC info structure pointed to by SI */
1807 free_scc_info (struct scc_info
*si
)
1809 sbitmap_free (si
->visited
);
1810 sbitmap_free (si
->deleted
);
1811 free (si
->node_mapping
);
1813 si
->scc_stack
.release ();
/* find_indirect_cycles: run scc_visit from every unvisited
   representative real node to populate graph->indirect_cycles.
   NOTE(review): the function header line, local i, braces and the
   final free_scc_info call (orig 1827-1828, 1831, 1835-1836) were
   dropped by the extraction.  */
1818 /* Find indirect cycles in GRAPH that occur, using strongly connected
1819 components, and note them in the indirect cycles map.
1821 This technique comes from Ben Hardekopf and Calvin Lin,
1822 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1823 Lines of Code", submitted to PLDI 2007. */
1826 find_indirect_cycles (constraint_graph_t graph
)
1829 unsigned int size
= graph
->size
;
1830 struct scc_info
*si
= init_scc_info (size
);
/* Only visit union-find representatives; merged nodes are handled
   through their reps.  */
1832 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1833 if (!bitmap_bit_p (si
->visited
, i
) && find (i
) == i
)
1834 scc_visit (graph
, si
, i
);
/* compute_topo_order: topo_visit every unvisited representative node,
   filling TI->topo_order.  NOTE(review): the local i declaration and
   braces (orig 1845-1846, 1848, 1852) were dropped by the
   extraction.  */
1839 /* Compute a topological ordering for GRAPH, and store the result in the
1840 topo_info structure TI. */
1843 compute_topo_order (constraint_graph_t graph
,
1844 struct topo_info
*ti
)
1847 unsigned int size
= graph
->size
;
1849 for (i
= 0; i
!= size
; ++i
)
1850 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1851 topo_visit (graph
, ti
, i
);
/* equiv_class_label: hash-table entry mapping a bitmap of labels to
   an equivalence-class id.  NOTE(review): the struct's opening line
   and its hashcode/labels members (orig 1858-1859, 1861) were dropped
   by the extraction.  */
1854 /* Structure used to for hash value numbering of pointer equivalence
1857 typedef struct equiv_class_label
1860 unsigned int equivalence_class
;
1862 } *equiv_class_label_t
;
1863 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1865 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1867 static htab_t pointer_equiv_class_table
;
1869 /* A hashtable for mapping a bitmap of labels->location equivalence
1871 static htab_t location_equiv_class_table
;
/* equiv_class_label_hash: htab hash callback — returns the cached
   hashcode stored in the entry (computed once from the label bitmap).
   NOTE(review): the return type line and braces were dropped by the
   extraction.  */
1873 /* Hash function for a equiv_class_label_t */
1876 equiv_class_label_hash (const void *p
)
1878 const_equiv_class_label_t
const ecl
= (const_equiv_class_label_t
) p
;
1879 return ecl
->hashcode
;
/* equiv_class_label_eq: htab equality callback — cheap hashcode
   compare first, then full bitmap equality.  NOTE(review): the return
   type line and braces were dropped by the extraction.  */
1882 /* Equality function for two equiv_class_label_t's. */
1885 equiv_class_label_eq (const void *p1
, const void *p2
)
1887 const_equiv_class_label_t
const eql1
= (const_equiv_class_label_t
) p1
;
1888 const_equiv_class_label_t
const eql2
= (const_equiv_class_label_t
) p2
;
1889 return (eql1
->hashcode
== eql2
->hashcode
1890 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
/* equiv_class_lookup_or_add: find (or insert a fresh, class-0) entry
   for LABELS in TABLE, keyed by bitmap hash.  NOTE(review): braces,
   the `if (!*slot)' guard and the trailing `return *slot;' (orig
   1899, 1902, 1907-1908, 1913-1915) were dropped by the
   extraction.  */
1893 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1894 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1895 is equivalent to. */
1897 static equiv_class_label
*
1898 equiv_class_lookup_or_add (htab_t table
, bitmap labels
)
1900 equiv_class_label
**slot
;
1901 equiv_class_label ecl
;
/* Probe with a stack key; INSERT reserves a slot when absent.  */
1903 ecl
.labels
= labels
;
1904 ecl
.hashcode
= bitmap_hash (labels
);
1905 slot
= (equiv_class_label
**) htab_find_slot_with_hash (table
, &ecl
,
1906 ecl
.hashcode
, INSERT
);
/* Fresh entry: class 0 means "not yet assigned" to callers.  */
1909 *slot
= XNEW (struct equiv_class_label
);
1910 (*slot
)->labels
= labels
;
1911 (*slot
)->hashcode
= ecl
.hashcode
;
1912 (*slot
)->equivalence_class
= 0;
/* Commentary for the offline variable substitution ("HU") phase plus
   the two running equivalence-class counters it uses.  */
1918 /* Perform offline variable substitution.
1920 This is a worst case quadratic time way of identifying variables
1921 that must have equivalent points-to sets, including those caused by
1922 static cycles, and single entry subgraphs, in the constraint graph.
1924 The technique is described in "Exploiting Pointer and Location
1925 Equivalence to Optimize Pointer Analysis. In the 14th International
1926 Static Analysis Symposium (SAS), August 2007." It is known as the
1927 "HU" algorithm, and is equivalent to value numbering the collapsed
1928 constraint graph including evaluating unions.
1930 The general method of finding equivalence classes is as follows:
1931 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1932 Initialize all non-REF nodes to be direct nodes.
1933 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1935 For each constraint containing the dereference, we also do the same
1938 We then compute SCC's in the graph and unify nodes in the same SCC,
1941 For each non-collapsed node x:
1942 Visit all unvisited explicit incoming edges.
1943 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1945 Lookup the equivalence class for pts(x).
1946 If we found one, equivalence_class(x) = found class.
1947 Otherwise, equivalence_class(x) = new class, and new_class is
1948 added to the lookup table.
1950 All direct nodes with the same equivalence class can be replaced
1951 with a single representative node.
1952 All unlabeled nodes (label == 0) are not pointers and all edges
1953 involving them can be eliminated.
1954 We perform these optimizations during rewrite_constraints
1956 In addition to pointer equivalence class finding, we also perform
1957 location equivalence class finding. This is the set of variables
1958 that always appear together in points-to sets. We use this to
1959 compress the size of the points-to sets. */
1961 /* Current maximum pointer equivalence class id. */
1962 static int pointer_equiv_class
;
1964 /* Current maximum location equivalence class id. */
1965 static int location_equiv_class
;
/* condense_visit: Nuutila SCC pass over the PRED graph (explicit and
   implicit predecessor edges), merging each SCC's preds /
   implicit_preds / points_to bitmaps into the root via node_mapping.
   NOTE(review): extraction dropped the return type, locals (i, bi),
   braces and scattered statement lines (orig 1972-1974, 1976, 1981,
   1984, 1986, 1988-1989, 1992, 1997-1998, etc.) — restore from
   upstream.  */
1967 /* Recursive routine to find strongly connected components in GRAPH,
1968 and label it's nodes with DFS numbers. */
1971 condense_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1975 unsigned int my_dfs
;
1977 gcc_checking_assert (si
->node_mapping
[n
] == n
);
1978 bitmap_set_bit (si
->visited
, n
);
1979 si
->dfs
[n
] = si
->current_index
++;
1980 my_dfs
= si
->dfs
[n
];
1982 /* Visit all the successors. */
/* NB: despite the comment, this iterates the PREDECESSOR bitmap — in
   the pred graph that is the edge direction being condensed.  */
1983 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
1985 unsigned int w
= si
->node_mapping
[i
];
1987 if (bitmap_bit_p (si
->deleted
, w
))
1990 if (!bitmap_bit_p (si
->visited
, w
))
1991 condense_visit (graph
, si
, w
);
1993 unsigned int t
= si
->node_mapping
[w
];
1994 gcc_checking_assert (si
->node_mapping
[n
] == n
);
1995 if (si
->dfs
[t
] < si
->dfs
[n
])
1996 si
->dfs
[n
] = si
->dfs
[t
];
1999 /* Visit all the implicit predecessors. */
2000 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2002 unsigned int w
= si
->node_mapping
[i
];
2004 if (bitmap_bit_p (si
->deleted
, w
))
2007 if (!bitmap_bit_p (si
->visited
, w
))
2008 condense_visit (graph
, si
, w
);
2010 unsigned int t
= si
->node_mapping
[w
];
2011 gcc_assert (si
->node_mapping
[n
] == n
);
2012 if (si
->dfs
[t
] < si
->dfs
[n
])
2013 si
->dfs
[n
] = si
->dfs
[t
];
2016 /* See if any components have been identified. */
2017 if (si
->dfs
[n
] == my_dfs
)
/* N is an SCC root: drain stacked members into N, redirecting their
   node_mapping and merging their bitmaps into N's.  */
2019 while (si
->scc_stack
.length () != 0
2020 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2022 unsigned int w
= si
->scc_stack
.pop ();
2023 si
->node_mapping
[w
] = n
;
/* An SCC containing any indirect node makes the whole SCC
   indirect.  */
2025 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2026 bitmap_clear_bit (graph
->direct_nodes
, n
);
2028 /* Unify our nodes. */
2029 if (graph
->preds
[w
])
2031 if (!graph
->preds
[n
])
2032 graph
->preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2033 bitmap_ior_into (graph
->preds
[n
], graph
->preds
[w
]);
2035 if (graph
->implicit_preds
[w
])
2037 if (!graph
->implicit_preds
[n
])
2038 graph
->implicit_preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2039 bitmap_ior_into (graph
->implicit_preds
[n
],
2040 graph
->implicit_preds
[w
]);
2042 if (graph
->points_to
[w
])
2044 if (!graph
->points_to
[n
])
2045 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2046 bitmap_ior_into (graph
->points_to
[n
],
2047 graph
->points_to
[w
]);
2050 bitmap_set_bit (si
->deleted
, n
);
/* Not a root: keep N for its eventual SCC root to collect.  */
2053 si
->scc_stack
.safe_push (n
);
/* label_visit: assign pointer-equivalence labels by value-numbering
   points_to sets bottom-up over predecessor edges; indirect nodes get
   a fresh variable (FIRST_REF_NODE + n) and a unique class.
   NOTE(review): extraction dropped the return type, locals (bi, the
   first_pred initialization to -1U), braces, `return' lines and some
   guards (orig 2060, 2062-2063, 2065, 2067, 2069, 2073, 2076-2077,
   2079, 2081, 2083-2085, 2090-2092, 2094-2096, 2099, 2101, 2105,
   2112-2114, 2118, 2120, 2123-2126, 2128, 2134-2135, 2138, 2140-2141)
   — restore from upstream.  */
2056 /* Label pointer equivalences. */
2059 label_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2061 unsigned int i
, first_pred
;
2064 bitmap_set_bit (si
->visited
, n
);
2066 /* Label and union our incoming edges's points to sets. */
2068 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2070 unsigned int w
= si
->node_mapping
[i
];
2071 if (!bitmap_bit_p (si
->visited
, w
))
2072 label_visit (graph
, si
, w
);
2074 /* Skip unused edges */
/* Label 0 == known non-pointer; self-edges contribute nothing.  */
2075 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2078 if (graph
->points_to
[w
])
2080 if (!graph
->points_to
[n
])
/* Defer allocating N's set until a second contributing pred shows
   up; a single pred can be shared (see below).  */
2082 if (first_pred
== -1U)
2086 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2087 bitmap_ior (graph
->points_to
[n
],
2088 graph
->points_to
[first_pred
],
2089 graph
->points_to
[w
]);
2093 bitmap_ior_into(graph
->points_to
[n
], graph
->points_to
[w
]);
2097 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2098 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2100 if (!graph
->points_to
[n
])
2102 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2103 if (first_pred
!= -1U)
2104 bitmap_copy (graph
->points_to
[n
], graph
->points_to
[first_pred
]);
/* The fresh variable is N's own REF node — guaranteed unique, so the
   class below is unique too.  */
2106 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
2107 graph
->pointer_label
[n
] = pointer_equiv_class
++;
2108 equiv_class_label_t ecl
;
2109 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2110 graph
->points_to
[n
]);
2111 ecl
->equivalence_class
= graph
->pointer_label
[n
];
2115 /* If there was only a single non-empty predecessor the pointer equiv
2116 class is the same. */
2117 if (!graph
->points_to
[n
])
2119 if (first_pred
!= -1U)
/* Share the predecessor's label AND its points_to bitmap.  */
2121 graph
->pointer_label
[n
] = graph
->pointer_label
[first_pred
];
2122 graph
->points_to
[n
] = graph
->points_to
[first_pred
];
/* Otherwise value-number the accumulated set: equal sets get equal
   labels; the freshly-built bitmap is replaced by the canonical one.  */
2127 if (!bitmap_empty_p (graph
->points_to
[n
]))
2129 equiv_class_label_t ecl
;
2130 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2131 graph
->points_to
[n
]);
2132 if (ecl
->equivalence_class
== 0)
2133 ecl
->equivalence_class
= pointer_equiv_class
++;
2136 BITMAP_FREE (graph
->points_to
[n
]);
2137 graph
->points_to
[n
] = ecl
->labels
;
2139 graph
->pointer_label
[n
] = ecl
->equivalence_class
;
/* dump_pred_graph: emit the condensed predecessor graph as Graphviz
   dot to FILE — node list with points_to annotations, then pred
   edges, skipping nodes merged away by SI->node_mapping.
   NOTE(review): extraction dropped the function header, locals
   (i, j, bi), braces, the early-return when graph is uninitialized
   and some loop scaffolding (orig 2147-2153, 2160, 2164, 2166, 2168,
   2171, 2175, 2177-2178, 2182, 2184-2185, 2189-2191, 2193, 2195, 2199,
   2204, 2207-2209, 2212) — restore from upstream.  */
2143 /* Print the pred graph in dot format. */
2146 dump_pred_graph (struct scc_info
*si
, FILE *file
)
2150 /* Only print the graph if it has already been initialized: */
2154 /* Prints the header of the dot file: */
2155 fprintf (file
, "strict digraph {\n");
2156 fprintf (file
, " node [\n shape = box\n ]\n");
2157 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
2158 fprintf (file
, "\n // List of nodes and complex constraints in "
2159 "the constraint graph:\n");
2161 /* The next lines print the nodes in the graph together with the
2162 complex constraints attached to them. */
2163 for (i
= 1; i
< graph
->size
; i
++)
/* FIRST_REF_NODE marks the boundary between real and REF nodes;
   REF nodes print with a `*' prefix.  */
2165 if (i
== FIRST_REF_NODE
)
2167 if (si
->node_mapping
[i
] != i
)
2169 if (i
< FIRST_REF_NODE
)
2170 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2172 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2173 if (graph
->points_to
[i
]
2174 && !bitmap_empty_p (graph
->points_to
[i
]))
2176 fprintf (file
, "[label=\"%s = {", get_varinfo (i
)->name
);
2179 EXECUTE_IF_SET_IN_BITMAP (graph
->points_to
[i
], 0, j
, bi
)
2180 fprintf (file
, " %d", j
);
2181 fprintf (file
, " }\"]");
2183 fprintf (file
, ";\n");
2186 /* Go over the edges. */
2187 fprintf (file
, "\n // Edges in the constraint graph:\n");
2188 for (i
= 1; i
< graph
->size
; i
++)
2192 if (si
->node_mapping
[i
] != i
)
2194 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[i
], 0, j
, bi
)
2196 unsigned from
= si
->node_mapping
[j
];
2197 if (from
< FIRST_REF_NODE
)
2198 fprintf (file
, "\"%s\"", get_varinfo (from
)->name
);
2200 fprintf (file
, "\"*%s\"", get_varinfo (from
- FIRST_REF_NODE
)->name
);
2201 fprintf (file
, " -> ");
2202 if (i
< FIRST_REF_NODE
)
2203 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2205 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2206 fprintf (file
, ";\n");
2210 /* Prints the tail of the dot file. */
2211 fprintf (file
, "}\n");
/* perform_var_substitution: driver for the HU offline substitution —
   condense SCCs, label pointer equivalences, compute location
   equivalence labels, then strip edges from non-pointer nodes;
   returns the scc_info for rewrite_constraints.  NOTE(review):
   extraction dropped locals (i, j, bi, pointed_by), braces, several
   `continue'/`return si;' lines and parts of the dump loops (orig
   2219-2220, 2223, 2231, 2237, 2239, 2244-2245, 2254-2258, 2260, 2262,
   2264, 2266, 2269, 2272-2274, 2279-2280, 2285, 2287-2289, 2292, 2294,
   2300-2301, 2303, 2307-2308, 2310, 2312, 2314, 2316, 2318, 2323-2328)
   — restore from upstream.  */
2214 /* Perform offline variable substitution, discovering equivalence
2215 classes, and eliminating non-pointer variables. */
2217 static struct scc_info
*
2218 perform_var_substitution (constraint_graph_t graph
)
2221 unsigned int size
= graph
->size
;
2222 struct scc_info
*si
= init_scc_info (size
);
/* Both htabs free their entries via `free' on deletion; class id 0 is
   reserved for "unassigned", hence the counters start at 1.  */
2224 bitmap_obstack_initialize (&iteration_obstack
);
2225 pointer_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
2226 equiv_class_label_eq
, free
);
2227 location_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
2228 equiv_class_label_eq
, free
);
2229 pointer_equiv_class
= 1;
2230 location_equiv_class
= 1;
2232 /* Condense the nodes, which means to find SCC's, count incoming
2233 predecessors, and unite nodes in SCC's. */
2234 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2235 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2236 condense_visit (graph
, si
, si
->node_mapping
[i
]);
2238 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
2240 fprintf (dump_file
, "\n\n// The constraint graph before var-substitution "
2241 "in dot format:\n");
2242 dump_pred_graph (si
, dump_file
);
2243 fprintf (dump_file
, "\n\n");
/* Reuse the visited sbitmap for the second (labeling) pass.  */
2246 bitmap_clear (si
->visited
);
2247 /* Actually the label the nodes for pointer equivalences */
2248 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2249 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2250 label_visit (graph
, si
, si
->node_mapping
[i
]);
2252 /* Calculate location equivalence labels. */
2253 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2259 if (!graph
->pointed_by
[i
])
2261 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
2263 /* Translate the pointed-by mapping for pointer equivalence
2265 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
2267 bitmap_set_bit (pointed_by
,
2268 graph
->pointer_label
[si
->node_mapping
[j
]]);
2270 /* The original pointed_by is now dead. */
2271 BITMAP_FREE (graph
->pointed_by
[i
]);
2273 /* Look up the location equivalence label if one exists, or make
2275 equiv_class_label_t ecl
;
2276 ecl
= equiv_class_lookup_or_add (location_equiv_class_table
, pointed_by
);
2277 if (ecl
->equivalence_class
== 0)
2278 ecl
->equivalence_class
= location_equiv_class
++;
2281 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2282 fprintf (dump_file
, "Found location equivalence for node %s\n",
2283 get_varinfo (i
)->name
);
2284 BITMAP_FREE (pointed_by
);
2286 graph
->loc_label
[i
] = ecl
->equivalence_class
;
2290 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2291 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2293 unsigned j
= si
->node_mapping
[i
];
2295 fprintf (dump_file
, "%s node id %d (%s) mapped to SCC leader "
2296 "node id %d (%s)\n",
2297 bitmap_bit_p (graph
->direct_nodes
, i
)
2298 ? "Direct" : "Indirect", i
, get_varinfo (i
)->name
,
2299 j
, get_varinfo (j
)->name
);
/* Second dump branch; its fprintf header line was dropped by the
   extraction.  */
2302 "Equivalence classes for %s node id %d (%s): pointer %d"
2304 bitmap_bit_p (graph
->direct_nodes
, i
)
2305 ? "direct" : "indirect", i
, get_varinfo (i
)->name
,
2306 graph
->pointer_label
[i
], graph
->loc_label
[i
]);
2309 /* Quickly eliminate our non-pointer variables. */
2311 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2313 unsigned int node
= si
->node_mapping
[i
];
/* Label 0 == provably not a pointer — its edges are dead weight.  */
2315 if (graph
->pointer_label
[node
] == 0)
2317 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2319 "%s is a non-pointer variable, eliminating edges.\n",
2320 get_varinfo (node
)->name
);
2321 stats
.nonpointer_vars
++;
2322 clear_edges_for_node (graph
, node
);
/* free_var_substitution_info: tear down every side structure that was
   only needed during offline substitution (label arrays, pred-phase
   bitmaps, equivalence htabs, iteration obstack).  NOTE(review): the
   function's `static void' line, braces and a free_scc_info (si) call
   (orig 2330-2332, 2334-2335, 2345) were presumably dropped by the
   extraction — confirm against upstream.  */
2329 /* Free information that was only necessary for variable
2333 free_var_substitution_info (struct scc_info
*si
)
2336 free (graph
->pointer_label
);
2337 free (graph
->loc_label
);
2338 free (graph
->pointed_by
);
2339 free (graph
->points_to
);
2340 free (graph
->eq_rep
);
2341 sbitmap_free (graph
->direct_nodes
);
2342 htab_delete (pointer_equiv_class_table
);
2343 htab_delete (location_equiv_class_table
);
2344 bitmap_obstack_release (&iteration_obstack
);
/* find_equivalent_node: for NODE with pointer-equivalence class
   LABEL, return an earlier node of the same class (uniting with it
   when the address is never taken) or NODE itself.  NOTE(review):
   extraction dropped the return type, braces, the `else' structure
   and `return node;' lines (orig 2349-2350, 2353, 2358, 2360, 2362,
   2364, 2369-2371, 2374-2377, 2382-2385) — restore from upstream.  */
2347 /* Return an existing node that is equivalent to NODE, which has
2348 equivalence class LABEL, if one exists. Return NODE otherwise. */
2351 find_equivalent_node (constraint_graph_t graph
,
2352 unsigned int node
, unsigned int label
)
2354 /* If the address version of this variable is unused, we can
2355 substitute it for anything else with the same label.
2356 Otherwise, we know the pointers are equivalent, but not the
2357 locations, and we can unite them later. */
2359 if (!bitmap_bit_p (graph
->address_taken
, node
))
2361 gcc_checking_assert (label
< graph
->size
);
/* eq_rep[label] == -1 means no representative chosen yet.  */
2363 if (graph
->eq_rep
[label
] != -1)
2365 /* Unify the two variables since we know they are equivalent. */
2366 if (unite (graph
->eq_rep
[label
], node
))
2367 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2368 return graph
->eq_rep
[label
];
2372 graph
->eq_rep
[label
] = node
;
2373 graph
->pe_rep
[label
] = node
;
/* Address-taken path: only record the pointer-equivalence label for
   deferred uniting in unite_pointer_equivalences.  */
2378 gcc_checking_assert (label
< graph
->size
);
2379 graph
->pe
[node
] = label
;
2380 if (graph
->pe_rep
[label
] == -1)
2381 graph
->pe_rep
[label
] = node
;
/* unite_pointer_equivalences: late pass uniting each node with the
   representative of its recorded pointer-equivalence label.
   NOTE(review): extraction dropped the function header, local i,
   braces and the `if (label)' guard (orig 2389-2391, 2393-2395, 2399,
   2401-2402, 2404, 2406-2407, 2411-2412) — restore from upstream.  */
2387 /* Unite pointer equivalent but not location equivalent nodes in
2388 GRAPH. This may only be performed once variable substitution is
2392 unite_pointer_equivalences (constraint_graph_t graph
)
2396 /* Go through the pointer equivalences and unite them to their
2397 representative, if they aren't already. */
2398 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2400 unsigned int label
= graph
->pe
[i
];
2403 int label_rep
= graph
->pe_rep
[label
];
2405 if (label_rep
== -1)
/* Chase the representative through union-find before uniting.  */
2408 label_rep
= find (label_rep
);
2409 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2410 unify_nodes (graph
, label_rep
, i
, false);
/* move_complex_constraints: attach each non-copy constraint to the
   graph node whose solution drives it (LHS var for stores, RHS var
   for loads and offsetted copies).  NOTE(review): extraction dropped
   the function header, locals (i, c), braces and an `if (c)' guard
   (orig 2419-2422, 2424-2426, 2429, 2431, 2433, 2435, 2438, 2441,
   2443-2446) — restore from upstream.  */
2415 /* Move complex constraints to the GRAPH nodes they belong to. */
2418 move_complex_constraints (constraint_graph_t graph
)
2423 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2427 struct constraint_expr lhs
= c
->lhs
;
2428 struct constraint_expr rhs
= c
->rhs
;
/* *x = ...: iterated when x's solution grows.  */
2430 if (lhs
.type
== DEREF
)
2432 insert_into_complex (graph
, lhs
.var
, c
);
/* x = *y: iterated when y's solution grows (unless x is special).  */
2434 else if (rhs
.type
== DEREF
)
2436 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2437 insert_into_complex (graph
, rhs
.var
, c
);
/* Offsetted scalar copy: also requires iteration.  */
2439 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2440 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2442 insert_into_complex (graph
, rhs
.var
, c
);
2449 /* Optimize and rewrite complex constraints while performing
2450 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2451 result of perform_variable_substitution. */
2454 rewrite_constraints (constraint_graph_t graph
,
2455 struct scc_info
*si
)
2460 #ifdef ENABLE_CHECKING
2461 for (unsigned int j
= 0; j
< graph
->size
; j
++)
2462 gcc_assert (find (j
) == j
);
2465 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2467 struct constraint_expr lhs
= c
->lhs
;
2468 struct constraint_expr rhs
= c
->rhs
;
2469 unsigned int lhsvar
= find (lhs
.var
);
2470 unsigned int rhsvar
= find (rhs
.var
);
2471 unsigned int lhsnode
, rhsnode
;
2472 unsigned int lhslabel
, rhslabel
;
2474 lhsnode
= si
->node_mapping
[lhsvar
];
2475 rhsnode
= si
->node_mapping
[rhsvar
];
2476 lhslabel
= graph
->pointer_label
[lhsnode
];
2477 rhslabel
= graph
->pointer_label
[rhsnode
];
2479 /* See if it is really a non-pointer variable, and if so, ignore
2483 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2486 fprintf (dump_file
, "%s is a non-pointer variable,"
2487 "ignoring constraint:",
2488 get_varinfo (lhs
.var
)->name
);
2489 dump_constraint (dump_file
, c
);
2490 fprintf (dump_file
, "\n");
2492 constraints
[i
] = NULL
;
2498 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2501 fprintf (dump_file
, "%s is a non-pointer variable,"
2502 "ignoring constraint:",
2503 get_varinfo (rhs
.var
)->name
);
2504 dump_constraint (dump_file
, c
);
2505 fprintf (dump_file
, "\n");
2507 constraints
[i
] = NULL
;
2511 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2512 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2513 c
->lhs
.var
= lhsvar
;
2514 c
->rhs
.var
= rhsvar
;
2518 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2519 part of an SCC, false otherwise. */
2522 eliminate_indirect_cycles (unsigned int node
)
2524 if (graph
->indirect_cycles
[node
] != -1
2525 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2528 vec
<unsigned> queue
= vNULL
;
2530 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2533 /* We can't touch the solution set and call unify_nodes
2534 at the same time, because unify_nodes is going to do
2535 bitmap unions into it. */
2537 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2539 if (find (i
) == i
&& i
!= to
)
2542 queue
.safe_push (i
);
2547 queue
.iterate (queuepos
, &i
);
2550 unify_nodes (graph
, to
, i
, true);
2558 /* Solve the constraint graph GRAPH using our worklist solver.
2559 This is based on the PW* family of solvers from the "Efficient Field
2560 Sensitive Pointer Analysis for C" paper.
2561 It works by iterating over all the graph nodes, processing the complex
2562 constraints and propagating the copy constraints, until everything stops
2563 changed. This corresponds to steps 6-8 in the solving list given above. */
2566 solve_graph (constraint_graph_t graph
)
2568 unsigned int size
= graph
->size
;
2572 changed
= BITMAP_ALLOC (NULL
);
2574 /* Mark all initial non-collapsed nodes as changed. */
2575 for (i
= 1; i
< size
; i
++)
2577 varinfo_t ivi
= get_varinfo (i
);
2578 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2579 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2580 || graph
->complex[i
].length () > 0))
2581 bitmap_set_bit (changed
, i
);
2584 /* Allocate a bitmap to be used to store the changed bits. */
2585 pts
= BITMAP_ALLOC (&pta_obstack
);
2587 while (!bitmap_empty_p (changed
))
2590 struct topo_info
*ti
= init_topo_info ();
2593 bitmap_obstack_initialize (&iteration_obstack
);
2595 compute_topo_order (graph
, ti
);
2597 while (ti
->topo_order
.length () != 0)
2600 i
= ti
->topo_order
.pop ();
2602 /* If this variable is not a representative, skip it. */
2606 /* In certain indirect cycle cases, we may merge this
2607 variable to another. */
2608 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2611 /* If the node has changed, we need to process the
2612 complex constraints and outgoing edges again. */
2613 if (bitmap_clear_bit (changed
, i
))
2618 vec
<constraint_t
> complex = graph
->complex[i
];
2619 varinfo_t vi
= get_varinfo (i
);
2620 bool solution_empty
;
2622 /* Compute the changed set of solution bits. If anything
2623 is in the solution just propagate that. */
2624 if (bitmap_bit_p (vi
->solution
, anything_id
))
2626 /* If anything is also in the old solution there is
2628 ??? But we shouldn't ended up with "changed" set ... */
2630 && bitmap_bit_p (vi
->oldsolution
, anything_id
))
2632 bitmap_copy (pts
, get_varinfo (find (anything_id
))->solution
);
2634 else if (vi
->oldsolution
)
2635 bitmap_and_compl (pts
, vi
->solution
, vi
->oldsolution
);
2637 bitmap_copy (pts
, vi
->solution
);
2639 if (bitmap_empty_p (pts
))
2642 if (vi
->oldsolution
)
2643 bitmap_ior_into (vi
->oldsolution
, pts
);
2646 vi
->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
2647 bitmap_copy (vi
->oldsolution
, pts
);
2650 solution
= vi
->solution
;
2651 solution_empty
= bitmap_empty_p (solution
);
2653 /* Process the complex constraints */
2654 FOR_EACH_VEC_ELT (complex, j
, c
)
2656 /* XXX: This is going to unsort the constraints in
2657 some cases, which will occasionally add duplicate
2658 constraints during unification. This does not
2659 affect correctness. */
2660 c
->lhs
.var
= find (c
->lhs
.var
);
2661 c
->rhs
.var
= find (c
->rhs
.var
);
2663 /* The only complex constraint that can change our
2664 solution to non-empty, given an empty solution,
2665 is a constraint where the lhs side is receiving
2666 some set from elsewhere. */
2667 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2668 do_complex_constraint (graph
, c
, pts
);
2671 solution_empty
= bitmap_empty_p (solution
);
2673 if (!solution_empty
)
2676 unsigned eff_escaped_id
= find (escaped_id
);
2678 /* Propagate solution to all successors. */
2679 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2685 unsigned int to
= find (j
);
2686 tmp
= get_varinfo (to
)->solution
;
2689 /* Don't try to propagate to ourselves. */
2693 /* If we propagate from ESCAPED use ESCAPED as
2695 if (i
== eff_escaped_id
)
2696 flag
= bitmap_set_bit (tmp
, escaped_id
);
2698 flag
= bitmap_ior_into (tmp
, pts
);
2701 bitmap_set_bit (changed
, to
);
2706 free_topo_info (ti
);
2707 bitmap_obstack_release (&iteration_obstack
);
2711 BITMAP_FREE (changed
);
2712 bitmap_obstack_release (&oldpta_obstack
);
/* Map from trees to variable infos.  */
static struct pointer_map_t *vi_for_tree;
2719 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2722 insert_vi_for_tree (tree t
, varinfo_t vi
)
2724 void **slot
= pointer_map_insert (vi_for_tree
, t
);
2726 gcc_assert (*slot
== NULL
);
2730 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2731 exist in the map, return NULL, otherwise, return the varinfo we found. */
2734 lookup_vi_for_tree (tree t
)
2736 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2740 return (varinfo_t
) *slot
;
2743 /* Return a printable name for DECL */
2746 alias_get_name (tree decl
)
2748 const char *res
= NULL
;
2750 int num_printed
= 0;
2755 if (TREE_CODE (decl
) == SSA_NAME
)
2757 res
= get_name (decl
);
2759 num_printed
= asprintf (&temp
, "%s_%u", res
, SSA_NAME_VERSION (decl
));
2761 num_printed
= asprintf (&temp
, "_%u", SSA_NAME_VERSION (decl
));
2762 if (num_printed
> 0)
2764 res
= ggc_strdup (temp
);
2768 else if (DECL_P (decl
))
2770 if (DECL_ASSEMBLER_NAME_SET_P (decl
))
2771 res
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
2774 res
= get_name (decl
);
2777 num_printed
= asprintf (&temp
, "D.%u", DECL_UID (decl
));
2778 if (num_printed
> 0)
2780 res
= ggc_strdup (temp
);
2792 /* Find the variable id for tree T in the map.
2793 If T doesn't exist in the map, create an entry for it and return it. */
2796 get_vi_for_tree (tree t
)
2798 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2800 return get_varinfo (create_variable_info_for (t
, alias_get_name (t
)));
2802 return (varinfo_t
) *slot
;
2805 /* Get a scalar constraint expression for a new temporary variable. */
2807 static struct constraint_expr
2808 new_scalar_tmp_constraint_exp (const char *name
)
2810 struct constraint_expr tmp
;
2813 vi
= new_var_info (NULL_TREE
, name
);
2817 vi
->is_full_var
= 1;
2826 /* Get a constraint expression vector from an SSA_VAR_P node.
2827 If address_p is true, the result will be taken its address of. */
2830 get_constraint_for_ssa_var (tree t
, vec
<ce_s
> *results
, bool address_p
)
2832 struct constraint_expr cexpr
;
2835 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2836 gcc_assert (TREE_CODE (t
) == SSA_NAME
|| DECL_P (t
));
2838 /* For parameters, get at the points-to set for the actual parm
2840 if (TREE_CODE (t
) == SSA_NAME
2841 && SSA_NAME_IS_DEFAULT_DEF (t
)
2842 && (TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2843 || TREE_CODE (SSA_NAME_VAR (t
)) == RESULT_DECL
))
2845 get_constraint_for_ssa_var (SSA_NAME_VAR (t
), results
, address_p
);
2849 /* For global variables resort to the alias target. */
2850 if (TREE_CODE (t
) == VAR_DECL
2851 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
2853 struct varpool_node
*node
= varpool_get_node (t
);
2854 if (node
&& node
->alias
)
2856 node
= varpool_variable_node (node
, NULL
);
2857 t
= node
->symbol
.decl
;
2861 vi
= get_vi_for_tree (t
);
2863 cexpr
.type
= SCALAR
;
2865 /* If we determine the result is "anything", and we know this is readonly,
2866 say it points to readonly memory instead. */
2867 if (cexpr
.var
== anything_id
&& TREE_READONLY (t
))
2870 cexpr
.type
= ADDRESSOF
;
2871 cexpr
.var
= readonly_id
;
2874 /* If we are not taking the address of the constraint expr, add all
2875 sub-fiels of the variable as well. */
2877 && !vi
->is_full_var
)
2879 for (; vi
; vi
= vi_next (vi
))
2882 results
->safe_push (cexpr
);
2887 results
->safe_push (cexpr
);
2890 /* Process constraint T, performing various simplifications and then
2891 adding it to our list of overall constraints. */
2894 process_constraint (constraint_t t
)
2896 struct constraint_expr rhs
= t
->rhs
;
2897 struct constraint_expr lhs
= t
->lhs
;
2899 gcc_assert (rhs
.var
< varmap
.length ());
2900 gcc_assert (lhs
.var
< varmap
.length ());
2902 /* If we didn't get any useful constraint from the lhs we get
2903 &ANYTHING as fallback from get_constraint_for. Deal with
2904 it here by turning it into *ANYTHING. */
2905 if (lhs
.type
== ADDRESSOF
2906 && lhs
.var
== anything_id
)
2909 /* ADDRESSOF on the lhs is invalid. */
2910 gcc_assert (lhs
.type
!= ADDRESSOF
);
2912 /* We shouldn't add constraints from things that cannot have pointers.
2913 It's not completely trivial to avoid in the callers, so do it here. */
2914 if (rhs
.type
!= ADDRESSOF
2915 && !get_varinfo (rhs
.var
)->may_have_pointers
)
2918 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2919 if (!get_varinfo (lhs
.var
)->may_have_pointers
)
2922 /* This can happen in our IR with things like n->a = *p */
2923 if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
2925 /* Split into tmp = *rhs, *lhs = tmp */
2926 struct constraint_expr tmplhs
;
2927 tmplhs
= new_scalar_tmp_constraint_exp ("doubledereftmp");
2928 process_constraint (new_constraint (tmplhs
, rhs
));
2929 process_constraint (new_constraint (lhs
, tmplhs
));
2931 else if (rhs
.type
== ADDRESSOF
&& lhs
.type
== DEREF
)
2933 /* Split into tmp = &rhs, *lhs = tmp */
2934 struct constraint_expr tmplhs
;
2935 tmplhs
= new_scalar_tmp_constraint_exp ("derefaddrtmp");
2936 process_constraint (new_constraint (tmplhs
, rhs
));
2937 process_constraint (new_constraint (lhs
, tmplhs
));
2941 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
2942 constraints
.safe_push (t
);
2947 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2950 static HOST_WIDE_INT
2951 bitpos_of_field (const tree fdecl
)
2953 if (!host_integerp (DECL_FIELD_OFFSET (fdecl
), 0)
2954 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl
), 0))
2957 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl
)) * BITS_PER_UNIT
2958 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl
)));
2962 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2963 resulting constraint expressions in *RESULTS. */
2966 get_constraint_for_ptr_offset (tree ptr
, tree offset
,
2969 struct constraint_expr c
;
2971 HOST_WIDE_INT rhsoffset
;
2973 /* If we do not do field-sensitive PTA adding offsets to pointers
2974 does not change the points-to solution. */
2975 if (!use_field_sensitive
)
2977 get_constraint_for_rhs (ptr
, results
);
2981 /* If the offset is not a non-negative integer constant that fits
2982 in a HOST_WIDE_INT, we have to fall back to a conservative
2983 solution which includes all sub-fields of all pointed-to
2984 variables of ptr. */
2985 if (offset
== NULL_TREE
2986 || TREE_CODE (offset
) != INTEGER_CST
)
2987 rhsoffset
= UNKNOWN_OFFSET
;
2990 /* Sign-extend the offset. */
2991 double_int soffset
= tree_to_double_int (offset
)
2992 .sext (TYPE_PRECISION (TREE_TYPE (offset
)));
2993 if (!soffset
.fits_shwi ())
2994 rhsoffset
= UNKNOWN_OFFSET
;
2997 /* Make sure the bit-offset also fits. */
2998 HOST_WIDE_INT rhsunitoffset
= soffset
.low
;
2999 rhsoffset
= rhsunitoffset
* BITS_PER_UNIT
;
3000 if (rhsunitoffset
!= rhsoffset
/ BITS_PER_UNIT
)
3001 rhsoffset
= UNKNOWN_OFFSET
;
3005 get_constraint_for_rhs (ptr
, results
);
3009 /* As we are eventually appending to the solution do not use
3010 vec::iterate here. */
3011 n
= results
->length ();
3012 for (j
= 0; j
< n
; j
++)
3016 curr
= get_varinfo (c
.var
);
3018 if (c
.type
== ADDRESSOF
3019 /* If this varinfo represents a full variable just use it. */
3020 && curr
->is_full_var
)
3022 else if (c
.type
== ADDRESSOF
3023 /* If we do not know the offset add all subfields. */
3024 && rhsoffset
== UNKNOWN_OFFSET
)
3026 varinfo_t temp
= get_varinfo (curr
->head
);
3029 struct constraint_expr c2
;
3031 c2
.type
= ADDRESSOF
;
3033 if (c2
.var
!= c
.var
)
3034 results
->safe_push (c2
);
3035 temp
= vi_next (temp
);
3039 else if (c
.type
== ADDRESSOF
)
3042 unsigned HOST_WIDE_INT offset
= curr
->offset
+ rhsoffset
;
3044 /* Search the sub-field which overlaps with the
3045 pointed-to offset. If the result is outside of the variable
3046 we have to provide a conservative result, as the variable is
3047 still reachable from the resulting pointer (even though it
3048 technically cannot point to anything). The last and first
3049 sub-fields are such conservative results.
3050 ??? If we always had a sub-field for &object + 1 then
3051 we could represent this in a more precise way. */
3053 && curr
->offset
< offset
)
3055 temp
= first_or_preceding_vi_for_offset (curr
, offset
);
3057 /* If the found variable is not exactly at the pointed to
3058 result, we have to include the next variable in the
3059 solution as well. Otherwise two increments by offset / 2
3060 do not result in the same or a conservative superset
3062 if (temp
->offset
!= offset
3065 struct constraint_expr c2
;
3066 c2
.var
= temp
->next
;
3067 c2
.type
= ADDRESSOF
;
3069 results
->safe_push (c2
);
3075 c
.offset
= rhsoffset
;
3082 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3083 If address_p is true the result will be taken its address of.
3084 If lhs_p is true then the constraint expression is assumed to be used
3088 get_constraint_for_component_ref (tree t
, vec
<ce_s
> *results
,
3089 bool address_p
, bool lhs_p
)
3092 HOST_WIDE_INT bitsize
= -1;
3093 HOST_WIDE_INT bitmaxsize
= -1;
3094 HOST_WIDE_INT bitpos
;
3097 /* Some people like to do cute things like take the address of
3100 while (handled_component_p (forzero
)
3101 || INDIRECT_REF_P (forzero
)
3102 || TREE_CODE (forzero
) == MEM_REF
)
3103 forzero
= TREE_OPERAND (forzero
, 0);
3105 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
3107 struct constraint_expr temp
;
3110 temp
.var
= integer_id
;
3112 results
->safe_push (temp
);
3116 /* Handle type-punning through unions. If we are extracting a pointer
3117 from a union via a possibly type-punning access that pointer
3118 points to anything, similar to a conversion of an integer to
3124 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
3125 u
= TREE_OPERAND (u
, 0))
3126 if (TREE_CODE (u
) == COMPONENT_REF
3127 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
3129 struct constraint_expr temp
;
3132 temp
.var
= anything_id
;
3133 temp
.type
= ADDRESSOF
;
3134 results
->safe_push (temp
);
3139 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
);
3141 /* Pretend to take the address of the base, we'll take care of
3142 adding the required subset of sub-fields below. */
3143 get_constraint_for_1 (t
, results
, true, lhs_p
);
3144 gcc_assert (results
->length () == 1);
3145 struct constraint_expr
&result
= results
->last ();
3147 if (result
.type
== SCALAR
3148 && get_varinfo (result
.var
)->is_full_var
)
3149 /* For single-field vars do not bother about the offset. */
3151 else if (result
.type
== SCALAR
)
3153 /* In languages like C, you can access one past the end of an
3154 array. You aren't allowed to dereference it, so we can
3155 ignore this constraint. When we handle pointer subtraction,
3156 we may have to do something cute here. */
3158 if ((unsigned HOST_WIDE_INT
)bitpos
< get_varinfo (result
.var
)->fullsize
3161 /* It's also not true that the constraint will actually start at the
3162 right offset, it may start in some padding. We only care about
3163 setting the constraint to the first actual field it touches, so
3165 struct constraint_expr cexpr
= result
;
3169 for (curr
= get_varinfo (cexpr
.var
); curr
; curr
= vi_next (curr
))
3171 if (ranges_overlap_p (curr
->offset
, curr
->size
,
3172 bitpos
, bitmaxsize
))
3174 cexpr
.var
= curr
->id
;
3175 results
->safe_push (cexpr
);
3180 /* If we are going to take the address of this field then
3181 to be able to compute reachability correctly add at least
3182 the last field of the variable. */
3183 if (address_p
&& results
->length () == 0)
3185 curr
= get_varinfo (cexpr
.var
);
3186 while (curr
->next
!= 0)
3187 curr
= vi_next (curr
);
3188 cexpr
.var
= curr
->id
;
3189 results
->safe_push (cexpr
);
3191 else if (results
->length () == 0)
3192 /* Assert that we found *some* field there. The user couldn't be
3193 accessing *only* padding. */
3194 /* Still the user could access one past the end of an array
3195 embedded in a struct resulting in accessing *only* padding. */
3196 /* Or accessing only padding via type-punning to a type
3197 that has a filed just in padding space. */
3199 cexpr
.type
= SCALAR
;
3200 cexpr
.var
= anything_id
;
3202 results
->safe_push (cexpr
);
3205 else if (bitmaxsize
== 0)
3207 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3208 fprintf (dump_file
, "Access to zero-sized part of variable,"
3212 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3213 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
3215 else if (result
.type
== DEREF
)
3217 /* If we do not know exactly where the access goes say so. Note
3218 that only for non-structure accesses we know that we access
3219 at most one subfiled of any variable. */
3221 || bitsize
!= bitmaxsize
3222 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t
))
3223 || result
.offset
== UNKNOWN_OFFSET
)
3224 result
.offset
= UNKNOWN_OFFSET
;
3226 result
.offset
+= bitpos
;
3228 else if (result
.type
== ADDRESSOF
)
3230 /* We can end up here for component references on a
3231 VIEW_CONVERT_EXPR <>(&foobar). */
3232 result
.type
= SCALAR
;
3233 result
.var
= anything_id
;
3241 /* Dereference the constraint expression CONS, and return the result.
3242 DEREF (ADDRESSOF) = SCALAR
3243 DEREF (SCALAR) = DEREF
3244 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3245 This is needed so that we can handle dereferencing DEREF constraints. */
3248 do_deref (vec
<ce_s
> *constraints
)
3250 struct constraint_expr
*c
;
3253 FOR_EACH_VEC_ELT (*constraints
, i
, c
)
3255 if (c
->type
== SCALAR
)
3257 else if (c
->type
== ADDRESSOF
)
3259 else if (c
->type
== DEREF
)
3261 struct constraint_expr tmplhs
;
3262 tmplhs
= new_scalar_tmp_constraint_exp ("dereftmp");
3263 process_constraint (new_constraint (tmplhs
, *c
));
3264 c
->var
= tmplhs
.var
;
3271 /* Given a tree T, return the constraint expression for taking the
3275 get_constraint_for_address_of (tree t
, vec
<ce_s
> *results
)
3277 struct constraint_expr
*c
;
3280 get_constraint_for_1 (t
, results
, true, true);
3282 FOR_EACH_VEC_ELT (*results
, i
, c
)
3284 if (c
->type
== DEREF
)
3287 c
->type
= ADDRESSOF
;
3291 /* Given a tree T, return the constraint expression for it. */
3294 get_constraint_for_1 (tree t
, vec
<ce_s
> *results
, bool address_p
,
3297 struct constraint_expr temp
;
3299 /* x = integer is all glommed to a single variable, which doesn't
3300 point to anything by itself. That is, of course, unless it is an
3301 integer constant being treated as a pointer, in which case, we
3302 will return that this is really the addressof anything. This
3303 happens below, since it will fall into the default case. The only
3304 case we know something about an integer treated like a pointer is
3305 when it is the NULL pointer, and then we just say it points to
3308 Do not do that if -fno-delete-null-pointer-checks though, because
3309 in that case *NULL does not fail, so it _should_ alias *anything.
3310 It is not worth adding a new option or renaming the existing one,
3311 since this case is relatively obscure. */
3312 if ((TREE_CODE (t
) == INTEGER_CST
3313 && integer_zerop (t
))
3314 /* The only valid CONSTRUCTORs in gimple with pointer typed
3315 elements are zero-initializer. But in IPA mode we also
3316 process global initializers, so verify at least. */
3317 || (TREE_CODE (t
) == CONSTRUCTOR
3318 && CONSTRUCTOR_NELTS (t
) == 0))
3320 if (flag_delete_null_pointer_checks
)
3321 temp
.var
= nothing_id
;
3323 temp
.var
= nonlocal_id
;
3324 temp
.type
= ADDRESSOF
;
3326 results
->safe_push (temp
);
3330 /* String constants are read-only. */
3331 if (TREE_CODE (t
) == STRING_CST
)
3333 temp
.var
= readonly_id
;
3336 results
->safe_push (temp
);
3340 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
3342 case tcc_expression
:
3344 switch (TREE_CODE (t
))
3347 get_constraint_for_address_of (TREE_OPERAND (t
, 0), results
);
3355 switch (TREE_CODE (t
))
3359 struct constraint_expr cs
;
3361 get_constraint_for_ptr_offset (TREE_OPERAND (t
, 0),
3362 TREE_OPERAND (t
, 1), results
);
3365 /* If we are not taking the address then make sure to process
3366 all subvariables we might access. */
3370 cs
= results
->last ();
3371 if (cs
.type
== DEREF
3372 && type_can_have_subvars (TREE_TYPE (t
)))
3374 /* For dereferences this means we have to defer it
3376 results
->last ().offset
= UNKNOWN_OFFSET
;
3379 if (cs
.type
!= SCALAR
)
3382 vi
= get_varinfo (cs
.var
);
3383 curr
= vi_next (vi
);
3384 if (!vi
->is_full_var
3387 unsigned HOST_WIDE_INT size
;
3388 if (host_integerp (TYPE_SIZE (TREE_TYPE (t
)), 1))
3389 size
= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t
)));
3392 for (; curr
; curr
= vi_next (curr
))
3394 if (curr
->offset
- vi
->offset
< size
)
3397 results
->safe_push (cs
);
3406 case ARRAY_RANGE_REF
:
3408 get_constraint_for_component_ref (t
, results
, address_p
, lhs_p
);
3410 case VIEW_CONVERT_EXPR
:
3411 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, address_p
,
3414 /* We are missing handling for TARGET_MEM_REF here. */
3419 case tcc_exceptional
:
3421 switch (TREE_CODE (t
))
3425 get_constraint_for_ssa_var (t
, results
, address_p
);
3432 vec
<ce_s
> tmp
= vNULL
;
3433 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
3435 struct constraint_expr
*rhsp
;
3437 get_constraint_for_1 (val
, &tmp
, address_p
, lhs_p
);
3438 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
3439 results
->safe_push (*rhsp
);
3443 /* We do not know whether the constructor was complete,
3444 so technically we have to add &NOTHING or &ANYTHING
3445 like we do for an empty constructor as well. */
3452 case tcc_declaration
:
3454 get_constraint_for_ssa_var (t
, results
, address_p
);
3459 /* We cannot refer to automatic variables through constants. */
3460 temp
.type
= ADDRESSOF
;
3461 temp
.var
= nonlocal_id
;
3463 results
->safe_push (temp
);
3469 /* The default fallback is a constraint from anything. */
3470 temp
.type
= ADDRESSOF
;
3471 temp
.var
= anything_id
;
3473 results
->safe_push (temp
);
3476 /* Given a gimple tree T, return the constraint expression vector for it. */
3479 get_constraint_for (tree t
, vec
<ce_s
> *results
)
3481 gcc_assert (results
->length () == 0);
3483 get_constraint_for_1 (t
, results
, false, true);
3486 /* Given a gimple tree T, return the constraint expression vector for it
3487 to be used as the rhs of a constraint. */
3490 get_constraint_for_rhs (tree t
, vec
<ce_s
> *results
)
3492 gcc_assert (results
->length () == 0);
3494 get_constraint_for_1 (t
, results
, false, false);
3498 /* Efficiently generates constraints from all entries in *RHSC to all
3499 entries in *LHSC. */
3502 process_all_all_constraints (vec
<ce_s
> lhsc
,
3505 struct constraint_expr
*lhsp
, *rhsp
;
3508 if (lhsc
.length () <= 1 || rhsc
.length () <= 1)
3510 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3511 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
3512 process_constraint (new_constraint (*lhsp
, *rhsp
));
3516 struct constraint_expr tmp
;
3517 tmp
= new_scalar_tmp_constraint_exp ("allalltmp");
3518 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
3519 process_constraint (new_constraint (tmp
, *rhsp
));
3520 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3521 process_constraint (new_constraint (*lhsp
, tmp
));
3525 /* Handle aggregate copies by expanding into copies of the respective
3526 fields of the structures. */
3529 do_structure_copy (tree lhsop
, tree rhsop
)
3531 struct constraint_expr
*lhsp
, *rhsp
;
3532 vec
<ce_s
> lhsc
= vNULL
;
3533 vec
<ce_s
> rhsc
= vNULL
;
3536 get_constraint_for (lhsop
, &lhsc
);
3537 get_constraint_for_rhs (rhsop
, &rhsc
);
3540 if (lhsp
->type
== DEREF
3541 || (lhsp
->type
== ADDRESSOF
&& lhsp
->var
== anything_id
)
3542 || rhsp
->type
== DEREF
)
3544 if (lhsp
->type
== DEREF
)
3546 gcc_assert (lhsc
.length () == 1);
3547 lhsp
->offset
= UNKNOWN_OFFSET
;
3549 if (rhsp
->type
== DEREF
)
3551 gcc_assert (rhsc
.length () == 1);
3552 rhsp
->offset
= UNKNOWN_OFFSET
;
3554 process_all_all_constraints (lhsc
, rhsc
);
3556 else if (lhsp
->type
== SCALAR
3557 && (rhsp
->type
== SCALAR
3558 || rhsp
->type
== ADDRESSOF
))
3560 HOST_WIDE_INT lhssize
, lhsmaxsize
, lhsoffset
;
3561 HOST_WIDE_INT rhssize
, rhsmaxsize
, rhsoffset
;
3563 get_ref_base_and_extent (lhsop
, &lhsoffset
, &lhssize
, &lhsmaxsize
);
3564 get_ref_base_and_extent (rhsop
, &rhsoffset
, &rhssize
, &rhsmaxsize
);
3565 for (j
= 0; lhsc
.iterate (j
, &lhsp
);)
3567 varinfo_t lhsv
, rhsv
;
3569 lhsv
= get_varinfo (lhsp
->var
);
3570 rhsv
= get_varinfo (rhsp
->var
);
3571 if (lhsv
->may_have_pointers
3572 && (lhsv
->is_full_var
3573 || rhsv
->is_full_var
3574 || ranges_overlap_p (lhsv
->offset
+ rhsoffset
, lhsv
->size
,
3575 rhsv
->offset
+ lhsoffset
, rhsv
->size
)))
3576 process_constraint (new_constraint (*lhsp
, *rhsp
));
3577 if (!rhsv
->is_full_var
3578 && (lhsv
->is_full_var
3579 || (lhsv
->offset
+ rhsoffset
+ lhsv
->size
3580 > rhsv
->offset
+ lhsoffset
+ rhsv
->size
)))
3583 if (k
>= rhsc
.length ())
3597 /* Create constraints ID = { rhsc }. */
3600 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3602 struct constraint_expr
*c
;
3603 struct constraint_expr includes
;
3607 includes
.offset
= 0;
3608 includes
.type
= SCALAR
;
3610 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3611 process_constraint (new_constraint (includes
, *c
));
3614 /* Create a constraint ID = OP. */
3617 make_constraint_to (unsigned id
, tree op
)
3619 vec
<ce_s
> rhsc
= vNULL
;
3620 get_constraint_for_rhs (op
, &rhsc
);
3621 make_constraints_to (id
, rhsc
);
3625 /* Create a constraint ID = &FROM. */
3628 make_constraint_from (varinfo_t vi
, int from
)
3630 struct constraint_expr lhs
, rhs
;
3638 rhs
.type
= ADDRESSOF
;
3639 process_constraint (new_constraint (lhs
, rhs
));
3642 /* Create a constraint ID = FROM. */
3645 make_copy_constraint (varinfo_t vi
, int from
)
3647 struct constraint_expr lhs
, rhs
;
3656 process_constraint (new_constraint (lhs
, rhs
));
3659 /* Make constraints necessary to make OP escape. */
3662 make_escape_constraint (tree op
)
3664 make_constraint_to (escaped_id
, op
);
3667 /* Add constraints to that the solution of VI is transitively closed. */
3670 make_transitive_closure_constraints (varinfo_t vi
)
3672 struct constraint_expr lhs
, rhs
;
3681 process_constraint (new_constraint (lhs
, rhs
));
3683 /* VAR = VAR + UNKNOWN; */
3689 rhs
.offset
= UNKNOWN_OFFSET
;
3690 process_constraint (new_constraint (lhs
, rhs
));
3693 /* Temporary storage for fake var decls. */
3694 struct obstack fake_var_decl_obstack
;
3696 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3699 build_fake_var_decl (tree type
)
3701 tree decl
= (tree
) XOBNEW (&fake_var_decl_obstack
, struct tree_var_decl
);
3702 memset (decl
, 0, sizeof (struct tree_var_decl
));
3703 TREE_SET_CODE (decl
, VAR_DECL
);
3704 TREE_TYPE (decl
) = type
;
3705 DECL_UID (decl
) = allocate_decl_uid ();
3706 SET_DECL_PT_UID (decl
, -1);
3707 layout_decl (decl
, 0);
3711 /* Create a new artificial heap variable with NAME.
3712 Return the created variable. */
3715 make_heapvar (const char *name
)
3720 heapvar
= build_fake_var_decl (ptr_type_node
);
3721 DECL_EXTERNAL (heapvar
) = 1;
3723 vi
= new_var_info (heapvar
, name
);
3724 vi
->is_artificial_var
= true;
3725 vi
->is_heap_var
= true;
3726 vi
->is_unknown_size_var
= true;
3730 vi
->is_full_var
= true;
3731 insert_vi_for_tree (heapvar
, vi
);
3736 /* Create a new artificial heap variable with NAME and make a
3737 constraint from it to LHS. Set flags according to a tag used
3738 for tracking restrict pointers. */
3741 make_constraint_from_restrict (varinfo_t lhs
, const char *name
)
3743 varinfo_t vi
= make_heapvar (name
);
3744 vi
->is_global_var
= 1;
3745 vi
->may_have_pointers
= 1;
3746 make_constraint_from (lhs
, vi
->id
);
3750 /* Create a new artificial heap variable with NAME and make a
3751 constraint from it to LHS. Set flags according to a tag used
3752 for tracking restrict pointers and make the artificial heap
3753 point to global memory. */
3756 make_constraint_from_global_restrict (varinfo_t lhs
, const char *name
)
3758 varinfo_t vi
= make_constraint_from_restrict (lhs
, name
);
3759 make_copy_constraint (vi
, nonlocal_id
);
/* In IPA mode there are varinfos for different aspects of reach
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3772 /* Get a constraint for the requested part of a function designator FI
3773 when operating in IPA mode. */
3775 static struct constraint_expr
3776 get_function_part_constraint (varinfo_t fi
, unsigned part
)
3778 struct constraint_expr c
;
3780 gcc_assert (in_ipa_mode
);
3782 if (fi
->id
== anything_id
)
3784 /* ??? We probably should have a ANYFN special variable. */
3785 c
.var
= anything_id
;
3789 else if (TREE_CODE (fi
->decl
) == FUNCTION_DECL
)
3791 varinfo_t ai
= first_vi_for_offset (fi
, part
);
3795 c
.var
= anything_id
;
3809 /* For non-IPA mode, generate constraints necessary for a call on the
3813 handle_rhs_call (gimple stmt
, vec
<ce_s
> *results
)
3815 struct constraint_expr rhsc
;
3817 bool returns_uses
= false;
3819 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3821 tree arg
= gimple_call_arg (stmt
, i
);
3822 int flags
= gimple_call_arg_flags (stmt
, i
);
3824 /* If the argument is not used we can ignore it. */
3825 if (flags
& EAF_UNUSED
)
3828 /* As we compute ESCAPED context-insensitive we do not gain
3829 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3830 set. The argument would still get clobbered through the
3832 if ((flags
& EAF_NOCLOBBER
)
3833 && (flags
& EAF_NOESCAPE
))
3835 varinfo_t uses
= get_call_use_vi (stmt
);
3836 if (!(flags
& EAF_DIRECT
))
3838 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3839 make_constraint_to (tem
->id
, arg
);
3840 make_transitive_closure_constraints (tem
);
3841 make_copy_constraint (uses
, tem
->id
);
3844 make_constraint_to (uses
->id
, arg
);
3845 returns_uses
= true;
3847 else if (flags
& EAF_NOESCAPE
)
3849 struct constraint_expr lhs
, rhs
;
3850 varinfo_t uses
= get_call_use_vi (stmt
);
3851 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
3852 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3853 make_constraint_to (tem
->id
, arg
);
3854 if (!(flags
& EAF_DIRECT
))
3855 make_transitive_closure_constraints (tem
);
3856 make_copy_constraint (uses
, tem
->id
);
3857 make_copy_constraint (clobbers
, tem
->id
);
3858 /* Add *tem = nonlocal, do not add *tem = callused as
3859 EAF_NOESCAPE parameters do not escape to other parameters
3860 and all other uses appear in NONLOCAL as well. */
3865 rhs
.var
= nonlocal_id
;
3867 process_constraint (new_constraint (lhs
, rhs
));
3868 returns_uses
= true;
3871 make_escape_constraint (arg
);
3874 /* If we added to the calls uses solution make sure we account for
3875 pointers to it to be returned. */
3878 rhsc
.var
= get_call_use_vi (stmt
)->id
;
3881 results
->safe_push (rhsc
);
3884 /* The static chain escapes as well. */
3885 if (gimple_call_chain (stmt
))
3886 make_escape_constraint (gimple_call_chain (stmt
));
3888 /* And if we applied NRV the address of the return slot escapes as well. */
3889 if (gimple_call_return_slot_opt_p (stmt
)
3890 && gimple_call_lhs (stmt
) != NULL_TREE
3891 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
3893 vec
<ce_s
> tmpc
= vNULL
;
3894 struct constraint_expr lhsc
, *c
;
3895 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
3896 lhsc
.var
= escaped_id
;
3899 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
3900 process_constraint (new_constraint (lhsc
, *c
));
3904 /* Regular functions return nonlocal memory. */
3905 rhsc
.var
= nonlocal_id
;
3908 results
->safe_push (rhsc
);
3911 /* For non-IPA mode, generate constraints necessary for a call
3912 that returns a pointer and assigns it to LHS. This simply makes
3913 the LHS point to global and escaped variables. */
3916 handle_lhs_call (gimple stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
3919 vec
<ce_s
> lhsc
= vNULL
;
3921 get_constraint_for (lhs
, &lhsc
);
3922 /* If the store is to a global decl make sure to
3923 add proper escape constraints. */
3924 lhs
= get_base_address (lhs
);
3927 && is_global_var (lhs
))
3929 struct constraint_expr tmpc
;
3930 tmpc
.var
= escaped_id
;
3933 lhsc
.safe_push (tmpc
);
3936 /* If the call returns an argument unmodified override the rhs
3938 flags
= gimple_call_return_flags (stmt
);
3939 if (flags
& ERF_RETURNS_ARG
3940 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
3944 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
3945 get_constraint_for (arg
, &rhsc
);
3946 process_all_all_constraints (lhsc
, rhsc
);
3949 else if (flags
& ERF_NOALIAS
)
3952 struct constraint_expr tmpc
;
3954 vi
= make_heapvar ("HEAP");
3955 /* We delay marking allocated storage global until we know if
3957 DECL_EXTERNAL (vi
->decl
) = 0;
3958 vi
->is_global_var
= 0;
3959 /* If this is not a real malloc call assume the memory was
3960 initialized and thus may point to global memory. All
3961 builtin functions with the malloc attribute behave in a sane way. */
3963 || DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_NORMAL
)
3964 make_constraint_from (vi
, nonlocal_id
);
3967 tmpc
.type
= ADDRESSOF
;
3968 rhsc
.safe_push (tmpc
);
3969 process_all_all_constraints (lhsc
, rhsc
);
3973 process_all_all_constraints (lhsc
, rhsc
);
3978 /* For non-IPA mode, generate constraints necessary for a call of a
3979 const function that returns a pointer in the statement STMT. */
3982 handle_const_call (gimple stmt
, vec
<ce_s
> *results
)
3984 struct constraint_expr rhsc
;
3987 /* Treat nested const functions the same as pure functions as far
3988 as the static chain is concerned. */
3989 if (gimple_call_chain (stmt
))
3991 varinfo_t uses
= get_call_use_vi (stmt
);
3992 make_transitive_closure_constraints (uses
);
3993 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
3994 rhsc
.var
= uses
->id
;
3997 results
->safe_push (rhsc
);
4000 /* May return arguments. */
4001 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
4003 tree arg
= gimple_call_arg (stmt
, k
);
4004 vec
<ce_s
> argc
= vNULL
;
4006 struct constraint_expr
*argp
;
4007 get_constraint_for_rhs (arg
, &argc
);
4008 FOR_EACH_VEC_ELT (argc
, i
, argp
)
4009 results
->safe_push (*argp
);
4013 /* May return addresses of globals. */
4014 rhsc
.var
= nonlocal_id
;
4016 rhsc
.type
= ADDRESSOF
;
4017 results
->safe_push (rhsc
);
4020 /* For non-IPA mode, generate constraints necessary for a call to a
4021 pure function in statement STMT. */
4024 handle_pure_call (gimple stmt
, vec
<ce_s
> *results
)
4026 struct constraint_expr rhsc
;
4028 varinfo_t uses
= NULL
;
4030 /* Memory reached from pointer arguments is call-used. */
4031 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4033 tree arg
= gimple_call_arg (stmt
, i
);
4036 uses
= get_call_use_vi (stmt
);
4037 make_transitive_closure_constraints (uses
);
4039 make_constraint_to (uses
->id
, arg
);
4042 /* The static chain is used as well. */
4043 if (gimple_call_chain (stmt
))
4047 uses
= get_call_use_vi (stmt
);
4048 make_transitive_closure_constraints (uses
);
4050 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4053 /* Pure functions may return call-used and nonlocal memory. */
4056 rhsc
.var
= uses
->id
;
4059 results
->safe_push (rhsc
);
4061 rhsc
.var
= nonlocal_id
;
4064 results
->safe_push (rhsc
);
4068 /* Return the varinfo for the callee of CALL. */
4071 get_fi_for_callee (gimple call
)
4073 tree decl
, fn
= gimple_call_fn (call
);
4075 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
4076 fn
= OBJ_TYPE_REF_EXPR (fn
);
4078 /* If we can directly resolve the function being called, do so.
4079 Otherwise, it must be some sort of indirect expression that
4080 we should still be able to handle. */
4081 decl
= gimple_call_addr_fndecl (fn
);
4083 return get_vi_for_tree (decl
);
4085 /* If the function is anything other than a SSA name pointer we have no
4086 clue and should be getting ANYFN (well, ANYTHING for now). */
4087 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
4088 return get_varinfo (anything_id
);
4090 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
4091 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4092 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4093 fn
= SSA_NAME_VAR (fn
);
4095 return get_vi_for_tree (fn
);
4098 /* Create constraints for the builtin call T. Return true if the call
4099 was handled, otherwise false. */
4102 find_func_aliases_for_builtin_call (gimple t
)
4104 tree fndecl
= gimple_call_fndecl (t
);
4105 vec
<ce_s
> lhsc
= vNULL
;
4106 vec
<ce_s
> rhsc
= vNULL
;
4109 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4110 /* ??? All builtins that are handled here need to be handled
4111 in the alias-oracle query functions explicitly! */
4112 switch (DECL_FUNCTION_CODE (fndecl
))
4114 /* All the following functions return a pointer to the same object
4115 as their first argument points to. The functions do not add
4116 to the ESCAPED solution. The functions make the first argument
4117 pointed to memory point to what the second argument pointed to
4118 memory points to. */
4119 case BUILT_IN_STRCPY
:
4120 case BUILT_IN_STRNCPY
:
4121 case BUILT_IN_BCOPY
:
4122 case BUILT_IN_MEMCPY
:
4123 case BUILT_IN_MEMMOVE
:
4124 case BUILT_IN_MEMPCPY
:
4125 case BUILT_IN_STPCPY
:
4126 case BUILT_IN_STPNCPY
:
4127 case BUILT_IN_STRCAT
:
4128 case BUILT_IN_STRNCAT
:
4129 case BUILT_IN_STRCPY_CHK
:
4130 case BUILT_IN_STRNCPY_CHK
:
4131 case BUILT_IN_MEMCPY_CHK
:
4132 case BUILT_IN_MEMMOVE_CHK
:
4133 case BUILT_IN_MEMPCPY_CHK
:
4134 case BUILT_IN_STPCPY_CHK
:
4135 case BUILT_IN_STPNCPY_CHK
:
4136 case BUILT_IN_STRCAT_CHK
:
4137 case BUILT_IN_STRNCAT_CHK
:
4138 case BUILT_IN_TM_MEMCPY
:
4139 case BUILT_IN_TM_MEMMOVE
:
4141 tree res
= gimple_call_lhs (t
);
4142 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4143 == BUILT_IN_BCOPY
? 1 : 0));
4144 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4145 == BUILT_IN_BCOPY
? 0 : 1));
4146 if (res
!= NULL_TREE
)
4148 get_constraint_for (res
, &lhsc
);
4149 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4150 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4151 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4152 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4153 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4154 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4155 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4157 get_constraint_for (dest
, &rhsc
);
4158 process_all_all_constraints (lhsc
, rhsc
);
4162 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4163 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4166 process_all_all_constraints (lhsc
, rhsc
);
4171 case BUILT_IN_MEMSET
:
4172 case BUILT_IN_MEMSET_CHK
:
4173 case BUILT_IN_TM_MEMSET
:
4175 tree res
= gimple_call_lhs (t
);
4176 tree dest
= gimple_call_arg (t
, 0);
4179 struct constraint_expr ac
;
4180 if (res
!= NULL_TREE
)
4182 get_constraint_for (res
, &lhsc
);
4183 get_constraint_for (dest
, &rhsc
);
4184 process_all_all_constraints (lhsc
, rhsc
);
4188 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4190 if (flag_delete_null_pointer_checks
4191 && integer_zerop (gimple_call_arg (t
, 1)))
4193 ac
.type
= ADDRESSOF
;
4194 ac
.var
= nothing_id
;
4199 ac
.var
= integer_id
;
4202 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4203 process_constraint (new_constraint (*lhsp
, ac
));
4207 case BUILT_IN_ASSUME_ALIGNED
:
4209 tree res
= gimple_call_lhs (t
);
4210 tree dest
= gimple_call_arg (t
, 0);
4211 if (res
!= NULL_TREE
)
4213 get_constraint_for (res
, &lhsc
);
4214 get_constraint_for (dest
, &rhsc
);
4215 process_all_all_constraints (lhsc
, rhsc
);
4221 /* All the following functions do not return pointers, do not
4222 modify the points-to sets of memory reachable from their
4223 arguments and do not add to the ESCAPED solution. */
4224 case BUILT_IN_SINCOS
:
4225 case BUILT_IN_SINCOSF
:
4226 case BUILT_IN_SINCOSL
:
4227 case BUILT_IN_FREXP
:
4228 case BUILT_IN_FREXPF
:
4229 case BUILT_IN_FREXPL
:
4230 case BUILT_IN_GAMMA_R
:
4231 case BUILT_IN_GAMMAF_R
:
4232 case BUILT_IN_GAMMAL_R
:
4233 case BUILT_IN_LGAMMA_R
:
4234 case BUILT_IN_LGAMMAF_R
:
4235 case BUILT_IN_LGAMMAL_R
:
4237 case BUILT_IN_MODFF
:
4238 case BUILT_IN_MODFL
:
4239 case BUILT_IN_REMQUO
:
4240 case BUILT_IN_REMQUOF
:
4241 case BUILT_IN_REMQUOL
:
4244 case BUILT_IN_STRDUP
:
4245 case BUILT_IN_STRNDUP
:
4246 if (gimple_call_lhs (t
))
4248 handle_lhs_call (t
, gimple_call_lhs (t
), gimple_call_flags (t
),
4250 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4252 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4256 process_all_all_constraints (lhsc
, rhsc
);
4262 /* String / character search functions return a pointer into the
4263 source string or NULL. */
4264 case BUILT_IN_INDEX
:
4265 case BUILT_IN_STRCHR
:
4266 case BUILT_IN_STRRCHR
:
4267 case BUILT_IN_MEMCHR
:
4268 case BUILT_IN_STRSTR
:
4269 case BUILT_IN_STRPBRK
:
4270 if (gimple_call_lhs (t
))
4272 tree src
= gimple_call_arg (t
, 0);
4273 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4274 constraint_expr nul
;
4275 nul
.var
= nothing_id
;
4277 nul
.type
= ADDRESSOF
;
4278 rhsc
.safe_push (nul
);
4279 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4280 process_all_all_constraints (lhsc
, rhsc
);
4285 /* Trampolines are special - they set up passing the static
4287 case BUILT_IN_INIT_TRAMPOLINE
:
4289 tree tramp
= gimple_call_arg (t
, 0);
4290 tree nfunc
= gimple_call_arg (t
, 1);
4291 tree frame
= gimple_call_arg (t
, 2);
4293 struct constraint_expr lhs
, *rhsp
;
4296 varinfo_t nfi
= NULL
;
4297 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4298 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4301 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4302 get_constraint_for (frame
, &rhsc
);
4303 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4304 process_constraint (new_constraint (lhs
, *rhsp
));
4307 /* Make the frame point to the function for
4308 the trampoline adjustment call. */
4309 get_constraint_for (tramp
, &lhsc
);
4311 get_constraint_for (nfunc
, &rhsc
);
4312 process_all_all_constraints (lhsc
, rhsc
);
4319 /* Else fallthru to generic handling which will let
4320 the frame escape. */
4323 case BUILT_IN_ADJUST_TRAMPOLINE
:
4325 tree tramp
= gimple_call_arg (t
, 0);
4326 tree res
= gimple_call_lhs (t
);
4327 if (in_ipa_mode
&& res
)
4329 get_constraint_for (res
, &lhsc
);
4330 get_constraint_for (tramp
, &rhsc
);
4332 process_all_all_constraints (lhsc
, rhsc
);
4338 CASE_BUILT_IN_TM_STORE (1):
4339 CASE_BUILT_IN_TM_STORE (2):
4340 CASE_BUILT_IN_TM_STORE (4):
4341 CASE_BUILT_IN_TM_STORE (8):
4342 CASE_BUILT_IN_TM_STORE (FLOAT
):
4343 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4344 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4345 CASE_BUILT_IN_TM_STORE (M64
):
4346 CASE_BUILT_IN_TM_STORE (M128
):
4347 CASE_BUILT_IN_TM_STORE (M256
):
4349 tree addr
= gimple_call_arg (t
, 0);
4350 tree src
= gimple_call_arg (t
, 1);
4352 get_constraint_for (addr
, &lhsc
);
4354 get_constraint_for (src
, &rhsc
);
4355 process_all_all_constraints (lhsc
, rhsc
);
4360 CASE_BUILT_IN_TM_LOAD (1):
4361 CASE_BUILT_IN_TM_LOAD (2):
4362 CASE_BUILT_IN_TM_LOAD (4):
4363 CASE_BUILT_IN_TM_LOAD (8):
4364 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4365 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4366 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4367 CASE_BUILT_IN_TM_LOAD (M64
):
4368 CASE_BUILT_IN_TM_LOAD (M128
):
4369 CASE_BUILT_IN_TM_LOAD (M256
):
4371 tree dest
= gimple_call_lhs (t
);
4372 tree addr
= gimple_call_arg (t
, 0);
4374 get_constraint_for (dest
, &lhsc
);
4375 get_constraint_for (addr
, &rhsc
);
4377 process_all_all_constraints (lhsc
, rhsc
);
4382 /* Variadic argument handling needs to be handled in IPA
4384 case BUILT_IN_VA_START
:
4386 tree valist
= gimple_call_arg (t
, 0);
4387 struct constraint_expr rhs
, *lhsp
;
4389 get_constraint_for (valist
, &lhsc
);
4391 /* The va_list gets access to pointers in variadic
4392 arguments. Which we know in the case of IPA analysis
4393 and otherwise are just all nonlocal variables. */
4396 fi
= lookup_vi_for_tree (cfun
->decl
);
4397 rhs
= get_function_part_constraint (fi
, ~0);
4398 rhs
.type
= ADDRESSOF
;
4402 rhs
.var
= nonlocal_id
;
4403 rhs
.type
= ADDRESSOF
;
4406 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4407 process_constraint (new_constraint (*lhsp
, rhs
));
4409 /* va_list is clobbered. */
4410 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4413 /* va_end doesn't have any effect that matters. */
4414 case BUILT_IN_VA_END
:
4416 /* Alternate return. Simply give up for now. */
4417 case BUILT_IN_RETURN
:
4421 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4422 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4423 else if (in_ipa_mode
4426 struct constraint_expr lhs
, rhs
;
4427 lhs
= get_function_part_constraint (fi
, fi_result
);
4428 rhs
.var
= anything_id
;
4431 process_constraint (new_constraint (lhs
, rhs
));
4435 /* printf-style functions may have hooks to set pointers to
4436 point to somewhere into the generated string. Leave them
4437 for a later excercise... */
4439 /* Fallthru to general call handling. */;
4445 /* Create constraints for the call T. */
4448 find_func_aliases_for_call (gimple t
)
4450 tree fndecl
= gimple_call_fndecl (t
);
4451 vec
<ce_s
> lhsc
= vNULL
;
4452 vec
<ce_s
> rhsc
= vNULL
;
4455 if (fndecl
!= NULL_TREE
4456 && DECL_BUILT_IN (fndecl
)
4457 && find_func_aliases_for_builtin_call (t
))
4460 fi
= get_fi_for_callee (t
);
4462 || (fndecl
&& !fi
->is_fn_info
))
4464 vec
<ce_s
> rhsc
= vNULL
;
4465 int flags
= gimple_call_flags (t
);
4467 /* Const functions can return their arguments and addresses
4468 of global memory but not of escaped memory. */
4469 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4471 if (gimple_call_lhs (t
))
4472 handle_const_call (t
, &rhsc
);
4474 /* Pure functions can return addresses in and of memory
4475 reachable from their arguments, but they are not an escape
4476 point for reachable memory of their arguments. */
4477 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4478 handle_pure_call (t
, &rhsc
);
4480 handle_rhs_call (t
, &rhsc
);
4481 if (gimple_call_lhs (t
))
4482 handle_lhs_call (t
, gimple_call_lhs (t
), flags
, rhsc
, fndecl
);
4490 /* Assign all the passed arguments to the appropriate incoming
4491 parameters of the function. */
4492 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4494 struct constraint_expr lhs
;
4495 struct constraint_expr
*rhsp
;
4496 tree arg
= gimple_call_arg (t
, j
);
4498 get_constraint_for_rhs (arg
, &rhsc
);
4499 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ j
);
4500 while (rhsc
.length () != 0)
4502 rhsp
= &rhsc
.last ();
4503 process_constraint (new_constraint (lhs
, *rhsp
));
4508 /* If we are returning a value, assign it to the result. */
4509 lhsop
= gimple_call_lhs (t
);
4512 struct constraint_expr rhs
;
4513 struct constraint_expr
*lhsp
;
4515 get_constraint_for (lhsop
, &lhsc
);
4516 rhs
= get_function_part_constraint (fi
, fi_result
);
4518 && DECL_RESULT (fndecl
)
4519 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4521 vec
<ce_s
> tem
= vNULL
;
4522 tem
.safe_push (rhs
);
4527 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4528 process_constraint (new_constraint (*lhsp
, rhs
));
4531 /* If we pass the result decl by reference, honor that. */
4534 && DECL_RESULT (fndecl
)
4535 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4537 struct constraint_expr lhs
;
4538 struct constraint_expr
*rhsp
;
4540 get_constraint_for_address_of (lhsop
, &rhsc
);
4541 lhs
= get_function_part_constraint (fi
, fi_result
);
4542 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4543 process_constraint (new_constraint (lhs
, *rhsp
));
4547 /* If we use a static chain, pass it along. */
4548 if (gimple_call_chain (t
))
4550 struct constraint_expr lhs
;
4551 struct constraint_expr
*rhsp
;
4553 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4554 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4555 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4556 process_constraint (new_constraint (lhs
, *rhsp
));
4561 /* Walk statement T setting up aliasing constraints according to the
4562 references found in T. This function is the main part of the
4563 constraint builder. AI points to auxiliary alias information used
4564 when building alias sets and computing alias grouping heuristics. */
4567 find_func_aliases (gimple origt
)
4570 vec
<ce_s
> lhsc
= vNULL
;
4571 vec
<ce_s
> rhsc
= vNULL
;
4572 struct constraint_expr
*c
;
4575 /* Now build constraints expressions. */
4576 if (gimple_code (t
) == GIMPLE_PHI
)
4581 /* For a phi node, assign all the arguments to
4583 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4584 for (i
= 0; i
< gimple_phi_num_args (t
); i
++)
4586 tree strippedrhs
= PHI_ARG_DEF (t
, i
);
4588 STRIP_NOPS (strippedrhs
);
4589 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4591 FOR_EACH_VEC_ELT (lhsc
, j
, c
)
4593 struct constraint_expr
*c2
;
4594 while (rhsc
.length () > 0)
4597 process_constraint (new_constraint (*c
, *c2
));
4603 /* In IPA mode, we need to generate constraints to pass call
4604 arguments through their calls. There are two cases,
4605 either a GIMPLE_CALL returning a value, or just a plain
4606 GIMPLE_CALL when we are not.
4608 In non-ipa mode, we need to generate constraints for each
4609 pointer passed by address. */
4610 else if (is_gimple_call (t
))
4611 find_func_aliases_for_call (t
);
4613 /* Otherwise, just a regular assignment statement. Only care about
4614 operations with pointer result, others are dealt with as escape
4615 points if they have pointer operands. */
4616 else if (is_gimple_assign (t
))
4618 /* Otherwise, just a regular assignment statement. */
4619 tree lhsop
= gimple_assign_lhs (t
);
4620 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
4622 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
4623 /* Ignore clobbers, they don't actually store anything into
4626 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
4627 do_structure_copy (lhsop
, rhsop
);
4630 enum tree_code code
= gimple_assign_rhs_code (t
);
4632 get_constraint_for (lhsop
, &lhsc
);
4634 if (FLOAT_TYPE_P (TREE_TYPE (lhsop
)))
4635 /* If the operation produces a floating point result then
4636 assume the value is not produced to transfer a pointer. */
4638 else if (code
== POINTER_PLUS_EXPR
)
4639 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4640 gimple_assign_rhs2 (t
), &rhsc
);
4641 else if (code
== BIT_AND_EXPR
4642 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
4644 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4645 the pointer. Handle it by offsetting it by UNKNOWN. */
4646 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4649 else if ((CONVERT_EXPR_CODE_P (code
)
4650 && !(POINTER_TYPE_P (gimple_expr_type (t
))
4651 && !POINTER_TYPE_P (TREE_TYPE (rhsop
))))
4652 || gimple_assign_single_p (t
))
4653 get_constraint_for_rhs (rhsop
, &rhsc
);
4654 else if (code
== COND_EXPR
)
4656 /* The result is a merge of both COND_EXPR arms. */
4657 vec
<ce_s
> tmp
= vNULL
;
4658 struct constraint_expr
*rhsp
;
4660 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
4661 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
4662 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
4663 rhsc
.safe_push (*rhsp
);
4666 else if (truth_value_p (code
))
4667 /* Truth value results are not pointer (parts). Or at least
4668 very very unreasonable obfuscation of a part. */
4672 /* All other operations are merges. */
4673 vec
<ce_s
> tmp
= vNULL
;
4674 struct constraint_expr
*rhsp
;
4676 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
4677 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
4679 get_constraint_for_rhs (gimple_op (t
, i
), &tmp
);
4680 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
4681 rhsc
.safe_push (*rhsp
);
4686 process_all_all_constraints (lhsc
, rhsc
);
4688 /* If there is a store to a global variable the rhs escapes. */
4689 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
4691 && is_global_var (lhsop
)
4693 || DECL_EXTERNAL (lhsop
) || TREE_PUBLIC (lhsop
)))
4694 make_escape_constraint (rhsop
);
4696 /* Handle escapes through return. */
4697 else if (gimple_code (t
) == GIMPLE_RETURN
4698 && gimple_return_retval (t
) != NULL_TREE
)
4702 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4703 make_escape_constraint (gimple_return_retval (t
));
4704 else if (in_ipa_mode
4707 struct constraint_expr lhs
;
4708 struct constraint_expr
*rhsp
;
4711 lhs
= get_function_part_constraint (fi
, fi_result
);
4712 get_constraint_for_rhs (gimple_return_retval (t
), &rhsc
);
4713 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4714 process_constraint (new_constraint (lhs
, *rhsp
));
4717 /* Handle asms conservatively by adding escape constraints to everything. */
4718 else if (gimple_code (t
) == GIMPLE_ASM
)
4720 unsigned i
, noutputs
;
4721 const char **oconstraints
;
4722 const char *constraint
;
4723 bool allows_mem
, allows_reg
, is_inout
;
4725 noutputs
= gimple_asm_noutputs (t
);
4726 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4728 for (i
= 0; i
< noutputs
; ++i
)
4730 tree link
= gimple_asm_output_op (t
, i
);
4731 tree op
= TREE_VALUE (link
);
4733 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4734 oconstraints
[i
] = constraint
;
4735 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
4736 &allows_reg
, &is_inout
);
4738 /* A memory constraint makes the address of the operand escape. */
4739 if (!allows_reg
&& allows_mem
)
4740 make_escape_constraint (build_fold_addr_expr (op
));
4742 /* The asm may read global memory, so outputs may point to
4743 any global memory. */
4746 vec
<ce_s
> lhsc
= vNULL
;
4747 struct constraint_expr rhsc
, *lhsp
;
4749 get_constraint_for (op
, &lhsc
);
4750 rhsc
.var
= nonlocal_id
;
4753 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4754 process_constraint (new_constraint (*lhsp
, rhsc
));
4758 for (i
= 0; i
< gimple_asm_ninputs (t
); ++i
)
4760 tree link
= gimple_asm_input_op (t
, i
);
4761 tree op
= TREE_VALUE (link
);
4763 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4765 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
4766 &allows_mem
, &allows_reg
);
4768 /* A memory constraint makes the address of the operand escape. */
4769 if (!allows_reg
&& allows_mem
)
4770 make_escape_constraint (build_fold_addr_expr (op
));
4771 /* Strictly we'd only need the constraint to ESCAPED if
4772 the asm clobbers memory, otherwise using something
4773 along the lines of per-call clobbers/uses would be enough. */
4775 make_escape_constraint (op
);
4784 /* Create a constraint adding to the clobber set of FI the memory
4785 pointed to by PTR. */
4788 process_ipa_clobber (varinfo_t fi
, tree ptr
)
4790 vec
<ce_s
> ptrc
= vNULL
;
4791 struct constraint_expr
*c
, lhs
;
4793 get_constraint_for_rhs (ptr
, &ptrc
);
4794 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4795 FOR_EACH_VEC_ELT (ptrc
, i
, c
)
4796 process_constraint (new_constraint (lhs
, *c
));
4800 /* Walk statement T setting up clobber and use constraints according to the
4801 references found in T. This function is a main part of the
4802 IPA constraint builder. */
4805 find_func_clobbers (gimple origt
)
4808 vec
<ce_s
> lhsc
= vNULL
;
4809 vec
<ce_s
> rhsc
= vNULL
;
4812 /* Add constraints for clobbered/used in IPA mode.
4813 We are not interested in what automatic variables are clobbered
4814 or used as we only use the information in the caller to which
4815 they do not escape. */
4816 gcc_assert (in_ipa_mode
);
4818 /* If the stmt refers to memory in any way it better had a VUSE. */
4819 if (gimple_vuse (t
) == NULL_TREE
)
4822 /* We'd better have function information for the current function. */
4823 fi
= lookup_vi_for_tree (cfun
->decl
);
4824 gcc_assert (fi
!= NULL
);
4826 /* Account for stores in assignments and calls. */
4827 if (gimple_vdef (t
) != NULL_TREE
4828 && gimple_has_lhs (t
))
4830 tree lhs
= gimple_get_lhs (t
);
4832 while (handled_component_p (tem
))
4833 tem
= TREE_OPERAND (tem
, 0);
4835 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4836 || INDIRECT_REF_P (tem
)
4837 || (TREE_CODE (tem
) == MEM_REF
4838 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4840 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4842 struct constraint_expr lhsc
, *rhsp
;
4844 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
4845 get_constraint_for_address_of (lhs
, &rhsc
);
4846 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4847 process_constraint (new_constraint (lhsc
, *rhsp
));
4852 /* Account for uses in assigments and returns. */
4853 if (gimple_assign_single_p (t
)
4854 || (gimple_code (t
) == GIMPLE_RETURN
4855 && gimple_return_retval (t
) != NULL_TREE
))
4857 tree rhs
= (gimple_assign_single_p (t
)
4858 ? gimple_assign_rhs1 (t
) : gimple_return_retval (t
));
4860 while (handled_component_p (tem
))
4861 tem
= TREE_OPERAND (tem
, 0);
4863 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4864 || INDIRECT_REF_P (tem
)
4865 || (TREE_CODE (tem
) == MEM_REF
4866 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4868 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4870 struct constraint_expr lhs
, *rhsp
;
4872 lhs
= get_function_part_constraint (fi
, fi_uses
);
4873 get_constraint_for_address_of (rhs
, &rhsc
);
4874 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4875 process_constraint (new_constraint (lhs
, *rhsp
));
4880 if (is_gimple_call (t
))
4882 varinfo_t cfi
= NULL
;
4883 tree decl
= gimple_call_fndecl (t
);
4884 struct constraint_expr lhs
, rhs
;
4887 /* For builtins we do not have separate function info. For those
4888 we do not generate escapes for we have to generate clobbers/uses. */
4889 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4890 switch (DECL_FUNCTION_CODE (decl
))
4892 /* The following functions use and clobber memory pointed to
4893 by their arguments. */
4894 case BUILT_IN_STRCPY
:
4895 case BUILT_IN_STRNCPY
:
4896 case BUILT_IN_BCOPY
:
4897 case BUILT_IN_MEMCPY
:
4898 case BUILT_IN_MEMMOVE
:
4899 case BUILT_IN_MEMPCPY
:
4900 case BUILT_IN_STPCPY
:
4901 case BUILT_IN_STPNCPY
:
4902 case BUILT_IN_STRCAT
:
4903 case BUILT_IN_STRNCAT
:
4904 case BUILT_IN_STRCPY_CHK
:
4905 case BUILT_IN_STRNCPY_CHK
:
4906 case BUILT_IN_MEMCPY_CHK
:
4907 case BUILT_IN_MEMMOVE_CHK
:
4908 case BUILT_IN_MEMPCPY_CHK
:
4909 case BUILT_IN_STPCPY_CHK
:
4910 case BUILT_IN_STPNCPY_CHK
:
4911 case BUILT_IN_STRCAT_CHK
:
4912 case BUILT_IN_STRNCAT_CHK
:
4914 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4915 == BUILT_IN_BCOPY
? 1 : 0));
4916 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4917 == BUILT_IN_BCOPY
? 0 : 1));
4919 struct constraint_expr
*rhsp
, *lhsp
;
4920 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4921 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4922 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4923 process_constraint (new_constraint (lhs
, *lhsp
));
4925 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4926 lhs
= get_function_part_constraint (fi
, fi_uses
);
4927 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4928 process_constraint (new_constraint (lhs
, *rhsp
));
4932 /* The following function clobbers memory pointed to by
4934 case BUILT_IN_MEMSET
:
4935 case BUILT_IN_MEMSET_CHK
:
4937 tree dest
= gimple_call_arg (t
, 0);
4940 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4941 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4942 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4943 process_constraint (new_constraint (lhs
, *lhsp
));
4947 /* The following functions clobber their second and third
4949 case BUILT_IN_SINCOS
:
4950 case BUILT_IN_SINCOSF
:
4951 case BUILT_IN_SINCOSL
:
4953 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4954 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4957 /* The following functions clobber their second argument. */
4958 case BUILT_IN_FREXP
:
4959 case BUILT_IN_FREXPF
:
4960 case BUILT_IN_FREXPL
:
4961 case BUILT_IN_LGAMMA_R
:
4962 case BUILT_IN_LGAMMAF_R
:
4963 case BUILT_IN_LGAMMAL_R
:
4964 case BUILT_IN_GAMMA_R
:
4965 case BUILT_IN_GAMMAF_R
:
4966 case BUILT_IN_GAMMAL_R
:
4968 case BUILT_IN_MODFF
:
4969 case BUILT_IN_MODFL
:
4971 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4974 /* The following functions clobber their third argument. */
4975 case BUILT_IN_REMQUO
:
4976 case BUILT_IN_REMQUOF
:
4977 case BUILT_IN_REMQUOL
:
4979 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4982 /* The following functions neither read nor clobber memory. */
4983 case BUILT_IN_ASSUME_ALIGNED
:
4986 /* Trampolines are of no interest to us. */
4987 case BUILT_IN_INIT_TRAMPOLINE
:
4988 case BUILT_IN_ADJUST_TRAMPOLINE
:
4990 case BUILT_IN_VA_START
:
4991 case BUILT_IN_VA_END
:
4993 /* printf-style functions may have hooks to set pointers to
4994 point to somewhere into the generated string. Leave them
4995 for a later excercise... */
4997 /* Fallthru to general call handling. */;
5000 /* Parameters passed by value are used. */
5001 lhs
= get_function_part_constraint (fi
, fi_uses
);
5002 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
5004 struct constraint_expr
*rhsp
;
5005 tree arg
= gimple_call_arg (t
, i
);
5007 if (TREE_CODE (arg
) == SSA_NAME
5008 || is_gimple_min_invariant (arg
))
5011 get_constraint_for_address_of (arg
, &rhsc
);
5012 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5013 process_constraint (new_constraint (lhs
, *rhsp
));
5017 /* Build constraints for propagating clobbers/uses along the
5019 cfi
= get_fi_for_callee (t
);
5020 if (cfi
->id
== anything_id
)
5022 if (gimple_vdef (t
))
5023 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5025 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5030 /* For callees without function info (that's external functions),
5031 ESCAPED is clobbered and used. */
5032 if (gimple_call_fndecl (t
)
5033 && !cfi
->is_fn_info
)
5037 if (gimple_vdef (t
))
5038 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5040 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
5042 /* Also honor the call statement use/clobber info. */
5043 if ((vi
= lookup_call_clobber_vi (t
)) != NULL
)
5044 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5046 if ((vi
= lookup_call_use_vi (t
)) != NULL
)
5047 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
5052 /* Otherwise the caller clobbers and uses what the callee does.
5053 ??? This should use a new complex constraint that filters
5054 local variables of the callee. */
5055 if (gimple_vdef (t
))
5057 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5058 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5059 process_constraint (new_constraint (lhs
, rhs
));
5061 lhs
= get_function_part_constraint (fi
, fi_uses
);
5062 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5063 process_constraint (new_constraint (lhs
, rhs
));
5065 else if (gimple_code (t
) == GIMPLE_ASM
)
5067 /* ??? Ick. We can do better. */
5068 if (gimple_vdef (t
))
5069 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5071 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5079 /* Find the first varinfo in the same variable as START that overlaps with
5080 OFFSET. Return NULL if we can't find one. */
5083 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
5085 /* If the offset is outside of the variable, bail out. */
5086 if (offset
>= start
->fullsize
)
5089 /* If we cannot reach offset from start, lookup the first field
5090 and start from there. */
5091 if (start
->offset
> offset
)
5092 start
= get_varinfo (start
->head
);
5096 /* We may not find a variable in the field list with the actual
5097 offset when when we have glommed a structure to a variable.
5098 In that case, however, offset should still be within the size
5100 if (offset
>= start
->offset
5101 && (offset
- start
->offset
) < start
->size
)
5104 start
= vi_next (start
);
5110 /* Find the first varinfo in the same variable as START that overlaps with
5111 OFFSET. If there is no such varinfo the varinfo directly preceding
5112 OFFSET is returned. */
5115 first_or_preceding_vi_for_offset (varinfo_t start
,
5116 unsigned HOST_WIDE_INT offset
)
5118 /* If we cannot reach offset from start, lookup the first field
5119 and start from there. */
5120 if (start
->offset
> offset
)
5121 start
= get_varinfo (start
->head
);
5123 /* We may not find a variable in the field list with the actual
5124 offset when when we have glommed a structure to a variable.
5125 In that case, however, offset should still be within the size
5127 If we got beyond the offset we look for return the field
5128 directly preceding offset which may be the last field. */
5130 && offset
>= start
->offset
5131 && !((offset
- start
->offset
) < start
->size
))
5132 start
= vi_next (start
);
5138 /* This structure is used during pushing fields onto the fieldstack
5139 to track the offset of the field, since bitpos_of_field gives it
5140 relative to its immediate containing type, and we want it relative
5141 to the ultimate containing object. */
5145 /* Offset from the base of the base containing object to this field. */
5146 HOST_WIDE_INT offset
;
5148 /* Size, in bits, of the field. */
5149 unsigned HOST_WIDE_INT size
;
5151 unsigned has_unknown_size
: 1;
5153 unsigned must_have_pointers
: 1;
5155 unsigned may_have_pointers
: 1;
5157 unsigned only_restrict_pointers
: 1;
5159 typedef struct fieldoff fieldoff_s
;
5162 /* qsort comparison function for two fieldoff's PA and PB */
5165 fieldoff_compare (const void *pa
, const void *pb
)
5167 const fieldoff_s
*foa
= (const fieldoff_s
*)pa
;
5168 const fieldoff_s
*fob
= (const fieldoff_s
*)pb
;
5169 unsigned HOST_WIDE_INT foasize
, fobsize
;
5171 if (foa
->offset
< fob
->offset
)
5173 else if (foa
->offset
> fob
->offset
)
5176 foasize
= foa
->size
;
5177 fobsize
= fob
->size
;
5178 if (foasize
< fobsize
)
5180 else if (foasize
> fobsize
)
5185 /* Sort a fieldstack according to the field offset and sizes. */
5187 sort_fieldstack (vec
<fieldoff_s
> fieldstack
)
5189 fieldstack
.qsort (fieldoff_compare
);
5192 /* Return true if T is a type that can have subvars. */
5195 type_can_have_subvars (const_tree t
)
5197 /* Aggregates without overlapping fields can have subvars. */
5198 return TREE_CODE (t
) == RECORD_TYPE
;
5201 /* Return true if V is a tree that we can have subvars for.
5202 Normally, this is any aggregate type. Also complex
5203 types which are not gimple registers can have subvars. */
5206 var_can_have_subvars (const_tree v
)
5208 /* Volatile variables should never have subvars. */
5209 if (TREE_THIS_VOLATILE (v
))
5212 /* Non decls or memory tags can never have subvars. */
5216 return type_can_have_subvars (TREE_TYPE (v
));
5219 /* Return true if T is a type that does contain pointers. */
5222 type_must_have_pointers (tree type
)
5224 if (POINTER_TYPE_P (type
))
5227 if (TREE_CODE (type
) == ARRAY_TYPE
)
5228 return type_must_have_pointers (TREE_TYPE (type
));
5230 /* A function or method can have pointers as arguments, so track
5231 those separately. */
5232 if (TREE_CODE (type
) == FUNCTION_TYPE
5233 || TREE_CODE (type
) == METHOD_TYPE
)
5240 field_must_have_pointers (tree t
)
5242 return type_must_have_pointers (TREE_TYPE (t
));
5245 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5246 the fields of TYPE onto fieldstack, recording their offsets along
5249 OFFSET is used to keep track of the offset in this entire
5250 structure, rather than just the immediately containing structure.
5251 Returns false if the caller is supposed to handle the field we
5255 push_fields_onto_fieldstack (tree type
, vec
<fieldoff_s
> *fieldstack
,
5256 HOST_WIDE_INT offset
)
5259 bool empty_p
= true;
5261 if (TREE_CODE (type
) != RECORD_TYPE
)
5264 /* If the vector of fields is growing too big, bail out early.
5265 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5267 if (fieldstack
->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5270 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5271 if (TREE_CODE (field
) == FIELD_DECL
)
5274 HOST_WIDE_INT foff
= bitpos_of_field (field
);
5276 if (!var_can_have_subvars (field
)
5277 || TREE_CODE (TREE_TYPE (field
)) == QUAL_UNION_TYPE
5278 || TREE_CODE (TREE_TYPE (field
)) == UNION_TYPE
)
5280 else if (!push_fields_onto_fieldstack
5281 (TREE_TYPE (field
), fieldstack
, offset
+ foff
)
5282 && (DECL_SIZE (field
)
5283 && !integer_zerop (DECL_SIZE (field
))))
5284 /* Empty structures may have actual size, like in C++. So
5285 see if we didn't push any subfields and the size is
5286 nonzero, push the field onto the stack. */
5291 fieldoff_s
*pair
= NULL
;
5292 bool has_unknown_size
= false;
5293 bool must_have_pointers_p
;
5295 if (!fieldstack
->is_empty ())
5296 pair
= &fieldstack
->last ();
5298 /* If there isn't anything at offset zero, create sth. */
5300 && offset
+ foff
!= 0)
5302 fieldoff_s e
= {0, offset
+ foff
, false, false, false, false};
5303 pair
= fieldstack
->safe_push (e
);
5306 if (!DECL_SIZE (field
)
5307 || !host_integerp (DECL_SIZE (field
), 1))
5308 has_unknown_size
= true;
5310 /* If adjacent fields do not contain pointers merge them. */
5311 must_have_pointers_p
= field_must_have_pointers (field
);
5313 && !has_unknown_size
5314 && !must_have_pointers_p
5315 && !pair
->must_have_pointers
5316 && !pair
->has_unknown_size
5317 && pair
->offset
+ (HOST_WIDE_INT
)pair
->size
== offset
+ foff
)
5319 pair
->size
+= TREE_INT_CST_LOW (DECL_SIZE (field
));
5324 e
.offset
= offset
+ foff
;
5325 e
.has_unknown_size
= has_unknown_size
;
5326 if (!has_unknown_size
)
5327 e
.size
= TREE_INT_CST_LOW (DECL_SIZE (field
));
5330 e
.must_have_pointers
= must_have_pointers_p
;
5331 e
.may_have_pointers
= true;
5332 e
.only_restrict_pointers
5333 = (!has_unknown_size
5334 && POINTER_TYPE_P (TREE_TYPE (field
))
5335 && TYPE_RESTRICT (TREE_TYPE (field
)));
5336 fieldstack
->safe_push (e
);
5346 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5347 if it is a varargs function. */
5350 count_num_arguments (tree decl
, bool *is_varargs
)
5352 unsigned int num
= 0;
5355 /* Capture named arguments for K&R functions. They do not
5356 have a prototype and thus no TYPE_ARG_TYPES. */
5357 for (t
= DECL_ARGUMENTS (decl
); t
; t
= DECL_CHAIN (t
))
5360 /* Check if the function has variadic arguments. */
5361 for (t
= TYPE_ARG_TYPES (TREE_TYPE (decl
)); t
; t
= TREE_CHAIN (t
))
5362 if (TREE_VALUE (t
) == void_type_node
)
5370 /* Creation function node for DECL, using NAME, and return the index
5371 of the variable we've created for the function. */
5374 create_function_info_for (tree decl
, const char *name
)
5376 struct function
*fn
= DECL_STRUCT_FUNCTION (decl
);
5377 varinfo_t vi
, prev_vi
;
5380 bool is_varargs
= false;
5381 unsigned int num_args
= count_num_arguments (decl
, &is_varargs
);
5383 /* Create the variable info. */
5385 vi
= new_var_info (decl
, name
);
5388 vi
->fullsize
= fi_parm_base
+ num_args
;
5390 vi
->may_have_pointers
= false;
5393 insert_vi_for_tree (vi
->decl
, vi
);
5397 /* Create a variable for things the function clobbers and one for
5398 things the function uses. */
5400 varinfo_t clobbervi
, usevi
;
5401 const char *newname
;
5404 asprintf (&tempname
, "%s.clobber", name
);
5405 newname
= ggc_strdup (tempname
);
5408 clobbervi
= new_var_info (NULL
, newname
);
5409 clobbervi
->offset
= fi_clobbers
;
5410 clobbervi
->size
= 1;
5411 clobbervi
->fullsize
= vi
->fullsize
;
5412 clobbervi
->is_full_var
= true;
5413 clobbervi
->is_global_var
= false;
5414 gcc_assert (prev_vi
->offset
< clobbervi
->offset
);
5415 prev_vi
->next
= clobbervi
->id
;
5416 prev_vi
= clobbervi
;
5418 asprintf (&tempname
, "%s.use", name
);
5419 newname
= ggc_strdup (tempname
);
5422 usevi
= new_var_info (NULL
, newname
);
5423 usevi
->offset
= fi_uses
;
5425 usevi
->fullsize
= vi
->fullsize
;
5426 usevi
->is_full_var
= true;
5427 usevi
->is_global_var
= false;
5428 gcc_assert (prev_vi
->offset
< usevi
->offset
);
5429 prev_vi
->next
= usevi
->id
;
5433 /* And one for the static chain. */
5434 if (fn
->static_chain_decl
!= NULL_TREE
)
5437 const char *newname
;
5440 asprintf (&tempname
, "%s.chain", name
);
5441 newname
= ggc_strdup (tempname
);
5444 chainvi
= new_var_info (fn
->static_chain_decl
, newname
);
5445 chainvi
->offset
= fi_static_chain
;
5447 chainvi
->fullsize
= vi
->fullsize
;
5448 chainvi
->is_full_var
= true;
5449 chainvi
->is_global_var
= false;
5450 gcc_assert (prev_vi
->offset
< chainvi
->offset
);
5451 prev_vi
->next
= chainvi
->id
;
5453 insert_vi_for_tree (fn
->static_chain_decl
, chainvi
);
5456 /* Create a variable for the return var. */
5457 if (DECL_RESULT (decl
) != NULL
5458 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
5461 const char *newname
;
5463 tree resultdecl
= decl
;
5465 if (DECL_RESULT (decl
))
5466 resultdecl
= DECL_RESULT (decl
);
5468 asprintf (&tempname
, "%s.result", name
);
5469 newname
= ggc_strdup (tempname
);
5472 resultvi
= new_var_info (resultdecl
, newname
);
5473 resultvi
->offset
= fi_result
;
5475 resultvi
->fullsize
= vi
->fullsize
;
5476 resultvi
->is_full_var
= true;
5477 if (DECL_RESULT (decl
))
5478 resultvi
->may_have_pointers
= true;
5479 gcc_assert (prev_vi
->offset
< resultvi
->offset
);
5480 prev_vi
->next
= resultvi
->id
;
5482 if (DECL_RESULT (decl
))
5483 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
5486 /* Set up variables for each argument. */
5487 arg
= DECL_ARGUMENTS (decl
);
5488 for (i
= 0; i
< num_args
; i
++)
5491 const char *newname
;
5493 tree argdecl
= decl
;
5498 asprintf (&tempname
, "%s.arg%d", name
, i
);
5499 newname
= ggc_strdup (tempname
);
5502 argvi
= new_var_info (argdecl
, newname
);
5503 argvi
->offset
= fi_parm_base
+ i
;
5505 argvi
->is_full_var
= true;
5506 argvi
->fullsize
= vi
->fullsize
;
5508 argvi
->may_have_pointers
= true;
5509 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5510 prev_vi
->next
= argvi
->id
;
5514 insert_vi_for_tree (arg
, argvi
);
5515 arg
= DECL_CHAIN (arg
);
5519 /* Add one representative for all further args. */
5523 const char *newname
;
5527 asprintf (&tempname
, "%s.varargs", name
);
5528 newname
= ggc_strdup (tempname
);
5531 /* We need sth that can be pointed to for va_start. */
5532 decl
= build_fake_var_decl (ptr_type_node
);
5534 argvi
= new_var_info (decl
, newname
);
5535 argvi
->offset
= fi_parm_base
+ num_args
;
5537 argvi
->is_full_var
= true;
5538 argvi
->is_heap_var
= true;
5539 argvi
->fullsize
= vi
->fullsize
;
5540 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5541 prev_vi
->next
= argvi
->id
;
5549 /* Return true if FIELDSTACK contains fields that overlap.
5550 FIELDSTACK is assumed to be sorted by offset. */
5553 check_for_overlaps (vec
<fieldoff_s
> fieldstack
)
5555 fieldoff_s
*fo
= NULL
;
5557 HOST_WIDE_INT lastoffset
= -1;
5559 FOR_EACH_VEC_ELT (fieldstack
, i
, fo
)
5561 if (fo
->offset
== lastoffset
)
5563 lastoffset
= fo
->offset
;
5568 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5569 This will also create any varinfo structures necessary for fields
5573 create_variable_info_for_1 (tree decl
, const char *name
)
5575 varinfo_t vi
, newvi
;
5576 tree decl_type
= TREE_TYPE (decl
);
5577 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decl_type
);
5578 vec
<fieldoff_s
> fieldstack
= vNULL
;
5583 || !host_integerp (declsize
, 1))
5585 vi
= new_var_info (decl
, name
);
5589 vi
->is_unknown_size_var
= true;
5590 vi
->is_full_var
= true;
5591 vi
->may_have_pointers
= true;
5595 /* Collect field information. */
5596 if (use_field_sensitive
5597 && var_can_have_subvars (decl
)
5598 /* ??? Force us to not use subfields for global initializers
5599 in IPA mode. Else we'd have to parse arbitrary initializers. */
5601 && is_global_var (decl
)
5602 && DECL_INITIAL (decl
)))
5604 fieldoff_s
*fo
= NULL
;
5605 bool notokay
= false;
5608 push_fields_onto_fieldstack (decl_type
, &fieldstack
, 0);
5610 for (i
= 0; !notokay
&& fieldstack
.iterate (i
, &fo
); i
++)
5611 if (fo
->has_unknown_size
5618 /* We can't sort them if we have a field with a variable sized type,
5619 which will make notokay = true. In that case, we are going to return
5620 without creating varinfos for the fields anyway, so sorting them is a
5624 sort_fieldstack (fieldstack
);
5625 /* Due to some C++ FE issues, like PR 22488, we might end up
5626 what appear to be overlapping fields even though they,
5627 in reality, do not overlap. Until the C++ FE is fixed,
5628 we will simply disable field-sensitivity for these cases. */
5629 notokay
= check_for_overlaps (fieldstack
);
5633 fieldstack
.release ();
5636 /* If we didn't end up collecting sub-variables create a full
5637 variable for the decl. */
5638 if (fieldstack
.length () <= 1
5639 || fieldstack
.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5641 vi
= new_var_info (decl
, name
);
5643 vi
->may_have_pointers
= true;
5644 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
5645 vi
->size
= vi
->fullsize
;
5646 vi
->is_full_var
= true;
5647 fieldstack
.release ();
5651 vi
= new_var_info (decl
, name
);
5652 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
5653 for (i
= 0, newvi
= vi
;
5654 fieldstack
.iterate (i
, &fo
);
5655 ++i
, newvi
= vi_next (newvi
))
5657 const char *newname
= "NULL";
5662 asprintf (&tempname
, "%s." HOST_WIDE_INT_PRINT_DEC
5663 "+" HOST_WIDE_INT_PRINT_DEC
, name
, fo
->offset
, fo
->size
);
5664 newname
= ggc_strdup (tempname
);
5667 newvi
->name
= newname
;
5668 newvi
->offset
= fo
->offset
;
5669 newvi
->size
= fo
->size
;
5670 newvi
->fullsize
= vi
->fullsize
;
5671 newvi
->may_have_pointers
= fo
->may_have_pointers
;
5672 newvi
->only_restrict_pointers
= fo
->only_restrict_pointers
;
5673 if (i
+ 1 < fieldstack
.length ())
5675 varinfo_t tem
= new_var_info (decl
, name
);
5676 newvi
->next
= tem
->id
;
5681 fieldstack
.release ();
5687 create_variable_info_for (tree decl
, const char *name
)
5689 varinfo_t vi
= create_variable_info_for_1 (decl
, name
);
5690 unsigned int id
= vi
->id
;
5692 insert_vi_for_tree (decl
, vi
);
5694 if (TREE_CODE (decl
) != VAR_DECL
)
5697 /* Create initial constraints for globals. */
5698 for (; vi
; vi
= vi_next (vi
))
5700 if (!vi
->may_have_pointers
5701 || !vi
->is_global_var
)
5704 /* Mark global restrict qualified pointers. */
5705 if ((POINTER_TYPE_P (TREE_TYPE (decl
))
5706 && TYPE_RESTRICT (TREE_TYPE (decl
)))
5707 || vi
->only_restrict_pointers
)
5709 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5713 /* In non-IPA mode the initializer from nonlocal is all we need. */
5715 || DECL_HARD_REGISTER (decl
))
5716 make_copy_constraint (vi
, nonlocal_id
);
5718 /* In IPA mode parse the initializer and generate proper constraints
5722 struct varpool_node
*vnode
= varpool_get_node (decl
);
5724 /* For escaped variables initialize them from nonlocal. */
5725 if (!varpool_all_refs_explicit_p (vnode
))
5726 make_copy_constraint (vi
, nonlocal_id
);
5728 /* If this is a global variable with an initializer and we are in
5729 IPA mode generate constraints for it. */
5730 if (DECL_INITIAL (decl
)
5733 vec
<ce_s
> rhsc
= vNULL
;
5734 struct constraint_expr lhs
, *rhsp
;
5736 get_constraint_for_rhs (DECL_INITIAL (decl
), &rhsc
);
5740 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5741 process_constraint (new_constraint (lhs
, *rhsp
));
5742 /* If this is a variable that escapes from the unit
5743 the initializer escapes as well. */
5744 if (!varpool_all_refs_explicit_p (vnode
))
5746 lhs
.var
= escaped_id
;
5749 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5750 process_constraint (new_constraint (lhs
, *rhsp
));
5760 /* Print out the points-to solution for VAR to FILE. */
5763 dump_solution_for_var (FILE *file
, unsigned int var
)
5765 varinfo_t vi
= get_varinfo (var
);
5769 /* Dump the solution for unified vars anyway, this avoids difficulties
5770 in scanning dumps in the testsuite. */
5771 fprintf (file
, "%s = { ", vi
->name
);
5772 vi
= get_varinfo (find (var
));
5773 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
5774 fprintf (file
, "%s ", get_varinfo (i
)->name
);
5775 fprintf (file
, "}");
5777 /* But note when the variable was unified. */
5779 fprintf (file
, " same as %s", vi
->name
);
5781 fprintf (file
, "\n");
5784 /* Print the points-to solution for VAR to stdout. */
5787 debug_solution_for_var (unsigned int var
)
5789 dump_solution_for_var (stdout
, var
);
5792 /* Create varinfo structures for all of the variables in the
5793 function for intraprocedural mode. */
5796 intra_create_variable_infos (void)
5800 /* For each incoming pointer argument arg, create the constraint ARG
5801 = NONLOCAL or a dummy variable if it is a restrict qualified
5802 passed-by-reference argument. */
5803 for (t
= DECL_ARGUMENTS (current_function_decl
); t
; t
= DECL_CHAIN (t
))
5805 varinfo_t p
= get_vi_for_tree (t
);
5807 /* For restrict qualified pointers to objects passed by
5808 reference build a real representative for the pointed-to object.
5809 Treat restrict qualified references the same. */
5810 if (TYPE_RESTRICT (TREE_TYPE (t
))
5811 && ((DECL_BY_REFERENCE (t
) && POINTER_TYPE_P (TREE_TYPE (t
)))
5812 || TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
)
5813 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t
))))
5815 struct constraint_expr lhsc
, rhsc
;
5817 tree heapvar
= build_fake_var_decl (TREE_TYPE (TREE_TYPE (t
)));
5818 DECL_EXTERNAL (heapvar
) = 1;
5819 vi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS");
5820 insert_vi_for_tree (heapvar
, vi
);
5825 rhsc
.type
= ADDRESSOF
;
5827 process_constraint (new_constraint (lhsc
, rhsc
));
5828 for (; vi
; vi
= vi_next (vi
))
5829 if (vi
->may_have_pointers
)
5831 if (vi
->only_restrict_pointers
)
5832 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5834 make_copy_constraint (vi
, nonlocal_id
);
5839 if (POINTER_TYPE_P (TREE_TYPE (t
))
5840 && TYPE_RESTRICT (TREE_TYPE (t
)))
5841 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5844 for (; p
; p
= vi_next (p
))
5846 if (p
->only_restrict_pointers
)
5847 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5848 else if (p
->may_have_pointers
)
5849 make_constraint_from (p
, nonlocal_id
);
5854 /* Add a constraint for a result decl that is passed by reference. */
5855 if (DECL_RESULT (cfun
->decl
)
5856 && DECL_BY_REFERENCE (DECL_RESULT (cfun
->decl
)))
5858 varinfo_t p
, result_vi
= get_vi_for_tree (DECL_RESULT (cfun
->decl
));
5860 for (p
= result_vi
; p
; p
= vi_next (p
))
5861 make_constraint_from (p
, nonlocal_id
);
5864 /* Add a constraint for the incoming static chain parameter. */
5865 if (cfun
->static_chain_decl
!= NULL_TREE
)
5867 varinfo_t p
, chain_vi
= get_vi_for_tree (cfun
->static_chain_decl
);
5869 for (p
= chain_vi
; p
; p
= vi_next (p
))
5870 make_constraint_from (p
, nonlocal_id
);
5874 /* Structure used to put solution bitmaps in a hashtable so they can
5875 be shared among variables with the same points-to set. */
5877 typedef struct shared_bitmap_info
5881 } *shared_bitmap_info_t
;
5882 typedef const struct shared_bitmap_info
*const_shared_bitmap_info_t
;
5884 static htab_t shared_bitmap_table
;
5886 /* Hash function for a shared_bitmap_info_t */
5889 shared_bitmap_hash (const void *p
)
5891 const_shared_bitmap_info_t
const bi
= (const_shared_bitmap_info_t
) p
;
5892 return bi
->hashcode
;
5895 /* Equality function for two shared_bitmap_info_t's. */
5898 shared_bitmap_eq (const void *p1
, const void *p2
)
5900 const_shared_bitmap_info_t
const sbi1
= (const_shared_bitmap_info_t
) p1
;
5901 const_shared_bitmap_info_t
const sbi2
= (const_shared_bitmap_info_t
) p2
;
5902 return bitmap_equal_p (sbi1
->pt_vars
, sbi2
->pt_vars
);
5905 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5906 existing instance if there is one, NULL otherwise. */
5909 shared_bitmap_lookup (bitmap pt_vars
)
5912 struct shared_bitmap_info sbi
;
5914 sbi
.pt_vars
= pt_vars
;
5915 sbi
.hashcode
= bitmap_hash (pt_vars
);
5917 slot
= htab_find_slot_with_hash (shared_bitmap_table
, &sbi
,
5918 sbi
.hashcode
, NO_INSERT
);
5922 return ((shared_bitmap_info_t
) *slot
)->pt_vars
;
5926 /* Add a bitmap to the shared bitmap hashtable. */
5929 shared_bitmap_add (bitmap pt_vars
)
5932 shared_bitmap_info_t sbi
= XNEW (struct shared_bitmap_info
);
5934 sbi
->pt_vars
= pt_vars
;
5935 sbi
->hashcode
= bitmap_hash (pt_vars
);
5937 slot
= htab_find_slot_with_hash (shared_bitmap_table
, sbi
,
5938 sbi
->hashcode
, INSERT
);
5939 gcc_assert (!*slot
);
5940 *slot
= (void *) sbi
;
5944 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5947 set_uids_in_ptset (bitmap into
, bitmap from
, struct pt_solution
*pt
)
5952 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
5954 varinfo_t vi
= get_varinfo (i
);
5956 /* The only artificial variables that are allowed in a may-alias
5957 set are heap variables. */
5958 if (vi
->is_artificial_var
&& !vi
->is_heap_var
)
5961 if (TREE_CODE (vi
->decl
) == VAR_DECL
5962 || TREE_CODE (vi
->decl
) == PARM_DECL
5963 || TREE_CODE (vi
->decl
) == RESULT_DECL
)
5965 /* If we are in IPA mode we will not recompute points-to
5966 sets after inlining so make sure they stay valid. */
5968 && !DECL_PT_UID_SET_P (vi
->decl
))
5969 SET_DECL_PT_UID (vi
->decl
, DECL_UID (vi
->decl
));
5971 /* Add the decl to the points-to set. Note that the points-to
5972 set contains global variables. */
5973 bitmap_set_bit (into
, DECL_PT_UID (vi
->decl
));
5974 if (vi
->is_global_var
)
5975 pt
->vars_contains_global
= true;
5981 /* Compute the points-to solution *PT for the variable VI. */
5983 static struct pt_solution
5984 find_what_var_points_to (varinfo_t orig_vi
)
5988 bitmap finished_solution
;
5992 struct pt_solution
*pt
;
5994 /* This variable may have been collapsed, let's get the real
5996 vi
= get_varinfo (find (orig_vi
->id
));
5998 /* See if we have already computed the solution and return it. */
5999 slot
= pointer_map_insert (final_solutions
, vi
);
6001 return *(struct pt_solution
*)*slot
;
6003 *slot
= pt
= XOBNEW (&final_solutions_obstack
, struct pt_solution
);
6004 memset (pt
, 0, sizeof (struct pt_solution
));
6006 /* Translate artificial variables into SSA_NAME_PTR_INFO
6008 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
6010 varinfo_t vi
= get_varinfo (i
);
6012 if (vi
->is_artificial_var
)
6014 if (vi
->id
== nothing_id
)
6016 else if (vi
->id
== escaped_id
)
6019 pt
->ipa_escaped
= 1;
6023 else if (vi
->id
== nonlocal_id
)
6025 else if (vi
->is_heap_var
)
6026 /* We represent heapvars in the points-to set properly. */
6028 else if (vi
->id
== readonly_id
)
6031 else if (vi
->id
== anything_id
6032 || vi
->id
== integer_id
)
6037 /* Instead of doing extra work, simply do not create
6038 elaborate points-to information for pt_anything pointers. */
6042 /* Share the final set of variables when possible. */
6043 finished_solution
= BITMAP_GGC_ALLOC ();
6044 stats
.points_to_sets_created
++;
6046 set_uids_in_ptset (finished_solution
, vi
->solution
, pt
);
6047 result
= shared_bitmap_lookup (finished_solution
);
6050 shared_bitmap_add (finished_solution
);
6051 pt
->vars
= finished_solution
;
6056 bitmap_clear (finished_solution
);
6062 /* Given a pointer variable P, fill in its points-to set. */
6065 find_what_p_points_to (tree p
)
6067 struct ptr_info_def
*pi
;
6071 /* For parameters, get at the points-to set for the actual parm
6073 if (TREE_CODE (p
) == SSA_NAME
6074 && SSA_NAME_IS_DEFAULT_DEF (p
)
6075 && (TREE_CODE (SSA_NAME_VAR (p
)) == PARM_DECL
6076 || TREE_CODE (SSA_NAME_VAR (p
)) == RESULT_DECL
))
6077 lookup_p
= SSA_NAME_VAR (p
);
6079 vi
= lookup_vi_for_tree (lookup_p
);
6083 pi
= get_ptr_info (p
);
6084 pi
->pt
= find_what_var_points_to (vi
);
6088 /* Query statistics for points-to solutions. */
6091 unsigned HOST_WIDE_INT pt_solution_includes_may_alias
;
6092 unsigned HOST_WIDE_INT pt_solution_includes_no_alias
;
6093 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias
;
6094 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias
;
6098 dump_pta_stats (FILE *s
)
6100 fprintf (s
, "\nPTA query stats:\n");
6101 fprintf (s
, " pt_solution_includes: "
6102 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6103 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6104 pta_stats
.pt_solution_includes_no_alias
,
6105 pta_stats
.pt_solution_includes_no_alias
6106 + pta_stats
.pt_solution_includes_may_alias
);
6107 fprintf (s
, " pt_solutions_intersect: "
6108 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6109 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6110 pta_stats
.pt_solutions_intersect_no_alias
,
6111 pta_stats
.pt_solutions_intersect_no_alias
6112 + pta_stats
.pt_solutions_intersect_may_alias
);
6116 /* Reset the points-to solution *PT to a conservative default
6117 (point to anything). */
6120 pt_solution_reset (struct pt_solution
*pt
)
6122 memset (pt
, 0, sizeof (struct pt_solution
));
6123 pt
->anything
= true;
6126 /* Set the points-to solution *PT to point only to the variables
6127 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6128 global variables and VARS_CONTAINS_RESTRICT specifies whether
6129 it contains restrict tag variables. */
6132 pt_solution_set (struct pt_solution
*pt
, bitmap vars
, bool vars_contains_global
)
6134 memset (pt
, 0, sizeof (struct pt_solution
));
6136 pt
->vars_contains_global
= vars_contains_global
;
6139 /* Set the points-to solution *PT to point only to the variable VAR. */
6142 pt_solution_set_var (struct pt_solution
*pt
, tree var
)
6144 memset (pt
, 0, sizeof (struct pt_solution
));
6145 pt
->vars
= BITMAP_GGC_ALLOC ();
6146 bitmap_set_bit (pt
->vars
, DECL_PT_UID (var
));
6147 pt
->vars_contains_global
= is_global_var (var
);
6150 /* Computes the union of the points-to solutions *DEST and *SRC and
6151 stores the result in *DEST. This changes the points-to bitmap
6152 of *DEST and thus may not be used if that might be shared.
6153 The points-to bitmap of *SRC and *DEST will not be shared after
6154 this function if they were not before. */
6157 pt_solution_ior_into (struct pt_solution
*dest
, struct pt_solution
*src
)
6159 dest
->anything
|= src
->anything
;
6162 pt_solution_reset (dest
);
6166 dest
->nonlocal
|= src
->nonlocal
;
6167 dest
->escaped
|= src
->escaped
;
6168 dest
->ipa_escaped
|= src
->ipa_escaped
;
6169 dest
->null
|= src
->null
;
6170 dest
->vars_contains_global
|= src
->vars_contains_global
;
6175 dest
->vars
= BITMAP_GGC_ALLOC ();
6176 bitmap_ior_into (dest
->vars
, src
->vars
);
6179 /* Return true if the points-to solution *PT is empty. */
6182 pt_solution_empty_p (struct pt_solution
*pt
)
6189 && !bitmap_empty_p (pt
->vars
))
6192 /* If the solution includes ESCAPED, check if that is empty. */
6194 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6197 /* If the solution includes ESCAPED, check if that is empty. */
6199 && !pt_solution_empty_p (&ipa_escaped_pt
))
6205 /* Return true if the points-to solution *PT only point to a single var, and
6206 return the var uid in *UID. */
6209 pt_solution_singleton_p (struct pt_solution
*pt
, unsigned *uid
)
6211 if (pt
->anything
|| pt
->nonlocal
|| pt
->escaped
|| pt
->ipa_escaped
6212 || pt
->null
|| pt
->vars
== NULL
6213 || !bitmap_single_bit_set_p (pt
->vars
))
6216 *uid
= bitmap_first_set_bit (pt
->vars
);
6220 /* Return true if the points-to solution *PT includes global memory. */
6223 pt_solution_includes_global (struct pt_solution
*pt
)
6227 || pt
->vars_contains_global
)
6231 return pt_solution_includes_global (&cfun
->gimple_df
->escaped
);
6233 if (pt
->ipa_escaped
)
6234 return pt_solution_includes_global (&ipa_escaped_pt
);
6236 /* ??? This predicate is not correct for the IPA-PTA solution
6237 as we do not properly distinguish between unit escape points
6238 and global variables. */
6239 if (cfun
->gimple_df
->ipa_pta
)
6245 /* Return true if the points-to solution *PT includes the variable
6246 declaration DECL. */
6249 pt_solution_includes_1 (struct pt_solution
*pt
, const_tree decl
)
6255 && is_global_var (decl
))
6259 && bitmap_bit_p (pt
->vars
, DECL_PT_UID (decl
)))
6262 /* If the solution includes ESCAPED, check it. */
6264 && pt_solution_includes_1 (&cfun
->gimple_df
->escaped
, decl
))
6267 /* If the solution includes ESCAPED, check it. */
6269 && pt_solution_includes_1 (&ipa_escaped_pt
, decl
))
6276 pt_solution_includes (struct pt_solution
*pt
, const_tree decl
)
6278 bool res
= pt_solution_includes_1 (pt
, decl
);
6280 ++pta_stats
.pt_solution_includes_may_alias
;
6282 ++pta_stats
.pt_solution_includes_no_alias
;
6286 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6290 pt_solutions_intersect_1 (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6292 if (pt1
->anything
|| pt2
->anything
)
6295 /* If either points to unknown global memory and the other points to
6296 any global memory they alias. */
6299 || pt2
->vars_contains_global
))
6301 && pt1
->vars_contains_global
))
6304 /* Check the escaped solution if required. */
6305 if ((pt1
->escaped
|| pt2
->escaped
)
6306 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6308 /* If both point to escaped memory and that solution
6309 is not empty they alias. */
6310 if (pt1
->escaped
&& pt2
->escaped
)
6313 /* If either points to escaped memory see if the escaped solution
6314 intersects with the other. */
6316 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt2
))
6318 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt1
)))
6322 /* Check the escaped solution if required.
6323 ??? Do we need to check the local against the IPA escaped sets? */
6324 if ((pt1
->ipa_escaped
|| pt2
->ipa_escaped
)
6325 && !pt_solution_empty_p (&ipa_escaped_pt
))
6327 /* If both point to escaped memory and that solution
6328 is not empty they alias. */
6329 if (pt1
->ipa_escaped
&& pt2
->ipa_escaped
)
6332 /* If either points to escaped memory see if the escaped solution
6333 intersects with the other. */
6334 if ((pt1
->ipa_escaped
6335 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt2
))
6336 || (pt2
->ipa_escaped
6337 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt1
)))
6341 /* Now both pointers alias if their points-to solution intersects. */
6344 && bitmap_intersect_p (pt1
->vars
, pt2
->vars
));
6348 pt_solutions_intersect (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6350 bool res
= pt_solutions_intersect_1 (pt1
, pt2
);
6352 ++pta_stats
.pt_solutions_intersect_may_alias
;
6354 ++pta_stats
.pt_solutions_intersect_no_alias
;
6359 /* Dump points-to information to OUTFILE. */
6362 dump_sa_points_to_info (FILE *outfile
)
6366 fprintf (outfile
, "\nPoints-to sets\n\n");
6368 if (dump_flags
& TDF_STATS
)
6370 fprintf (outfile
, "Stats:\n");
6371 fprintf (outfile
, "Total vars: %d\n", stats
.total_vars
);
6372 fprintf (outfile
, "Non-pointer vars: %d\n",
6373 stats
.nonpointer_vars
);
6374 fprintf (outfile
, "Statically unified vars: %d\n",
6375 stats
.unified_vars_static
);
6376 fprintf (outfile
, "Dynamically unified vars: %d\n",
6377 stats
.unified_vars_dynamic
);
6378 fprintf (outfile
, "Iterations: %d\n", stats
.iterations
);
6379 fprintf (outfile
, "Number of edges: %d\n", stats
.num_edges
);
6380 fprintf (outfile
, "Number of implicit edges: %d\n",
6381 stats
.num_implicit_edges
);
6384 for (i
= 1; i
< varmap
.length (); i
++)
6386 varinfo_t vi
= get_varinfo (i
);
6387 if (!vi
->may_have_pointers
)
6389 dump_solution_for_var (outfile
, i
);
6394 /* Debug points-to information to stderr. */
6397 debug_sa_points_to_info (void)
6399 dump_sa_points_to_info (stderr
);
6403 /* Initialize the always-existing constraint variables for NULL,
6404 ANYTHING, READONLY, and INTEGER. */
6407 init_base_vars (void)
6409 struct constraint_expr lhs
, rhs
;
6410 varinfo_t var_anything
;
6411 varinfo_t var_nothing
;
6412 varinfo_t var_readonly
;
6413 varinfo_t var_escaped
;
6414 varinfo_t var_nonlocal
;
6415 varinfo_t var_storedanything
;
6416 varinfo_t var_integer
;
6418 /* Variable ID zero is reserved and should be NULL. */
6419 varmap
.safe_push (NULL
);
6421 /* Create the NULL variable, used to represent that a variable points
6423 var_nothing
= new_var_info (NULL_TREE
, "NULL");
6424 gcc_assert (var_nothing
->id
== nothing_id
);
6425 var_nothing
->is_artificial_var
= 1;
6426 var_nothing
->offset
= 0;
6427 var_nothing
->size
= ~0;
6428 var_nothing
->fullsize
= ~0;
6429 var_nothing
->is_special_var
= 1;
6430 var_nothing
->may_have_pointers
= 0;
6431 var_nothing
->is_global_var
= 0;
6433 /* Create the ANYTHING variable, used to represent that a variable
6434 points to some unknown piece of memory. */
6435 var_anything
= new_var_info (NULL_TREE
, "ANYTHING");
6436 gcc_assert (var_anything
->id
== anything_id
);
6437 var_anything
->is_artificial_var
= 1;
6438 var_anything
->size
= ~0;
6439 var_anything
->offset
= 0;
6440 var_anything
->fullsize
= ~0;
6441 var_anything
->is_special_var
= 1;
6443 /* Anything points to anything. This makes deref constraints just
6444 work in the presence of linked list and other p = *p type loops,
6445 by saying that *ANYTHING = ANYTHING. */
6447 lhs
.var
= anything_id
;
6449 rhs
.type
= ADDRESSOF
;
6450 rhs
.var
= anything_id
;
6453 /* This specifically does not use process_constraint because
6454 process_constraint ignores all anything = anything constraints, since all
6455 but this one are redundant. */
6456 constraints
.safe_push (new_constraint (lhs
, rhs
));
6458 /* Create the READONLY variable, used to represent that a variable
6459 points to readonly memory. */
6460 var_readonly
= new_var_info (NULL_TREE
, "READONLY");
6461 gcc_assert (var_readonly
->id
== readonly_id
);
6462 var_readonly
->is_artificial_var
= 1;
6463 var_readonly
->offset
= 0;
6464 var_readonly
->size
= ~0;
6465 var_readonly
->fullsize
= ~0;
6466 var_readonly
->is_special_var
= 1;
6468 /* readonly memory points to anything, in order to make deref
6469 easier. In reality, it points to anything the particular
6470 readonly variable can point to, but we don't track this
6473 lhs
.var
= readonly_id
;
6475 rhs
.type
= ADDRESSOF
;
6476 rhs
.var
= readonly_id
; /* FIXME */
6478 process_constraint (new_constraint (lhs
, rhs
));
6480 /* Create the ESCAPED variable, used to represent the set of escaped
6482 var_escaped
= new_var_info (NULL_TREE
, "ESCAPED");
6483 gcc_assert (var_escaped
->id
== escaped_id
);
6484 var_escaped
->is_artificial_var
= 1;
6485 var_escaped
->offset
= 0;
6486 var_escaped
->size
= ~0;
6487 var_escaped
->fullsize
= ~0;
6488 var_escaped
->is_special_var
= 0;
6490 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6492 var_nonlocal
= new_var_info (NULL_TREE
, "NONLOCAL");
6493 gcc_assert (var_nonlocal
->id
== nonlocal_id
);
6494 var_nonlocal
->is_artificial_var
= 1;
6495 var_nonlocal
->offset
= 0;
6496 var_nonlocal
->size
= ~0;
6497 var_nonlocal
->fullsize
= ~0;
6498 var_nonlocal
->is_special_var
= 1;
6500 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6502 lhs
.var
= escaped_id
;
6505 rhs
.var
= escaped_id
;
6507 process_constraint (new_constraint (lhs
, rhs
));
6509 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6510 whole variable escapes. */
6512 lhs
.var
= escaped_id
;
6515 rhs
.var
= escaped_id
;
6516 rhs
.offset
= UNKNOWN_OFFSET
;
6517 process_constraint (new_constraint (lhs
, rhs
));
6519 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6520 everything pointed to by escaped points to what global memory can
6523 lhs
.var
= escaped_id
;
6526 rhs
.var
= nonlocal_id
;
6528 process_constraint (new_constraint (lhs
, rhs
));
6530 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6531 global memory may point to global memory and escaped memory. */
6533 lhs
.var
= nonlocal_id
;
6535 rhs
.type
= ADDRESSOF
;
6536 rhs
.var
= nonlocal_id
;
6538 process_constraint (new_constraint (lhs
, rhs
));
6539 rhs
.type
= ADDRESSOF
;
6540 rhs
.var
= escaped_id
;
6542 process_constraint (new_constraint (lhs
, rhs
));
6544 /* Create the STOREDANYTHING variable, used to represent the set of
6545 variables stored to *ANYTHING. */
6546 var_storedanything
= new_var_info (NULL_TREE
, "STOREDANYTHING");
6547 gcc_assert (var_storedanything
->id
== storedanything_id
);
6548 var_storedanything
->is_artificial_var
= 1;
6549 var_storedanything
->offset
= 0;
6550 var_storedanything
->size
= ~0;
6551 var_storedanything
->fullsize
= ~0;
6552 var_storedanything
->is_special_var
= 0;
6554 /* Create the INTEGER variable, used to represent that a variable points
6555 to what an INTEGER "points to". */
6556 var_integer
= new_var_info (NULL_TREE
, "INTEGER");
6557 gcc_assert (var_integer
->id
== integer_id
);
6558 var_integer
->is_artificial_var
= 1;
6559 var_integer
->size
= ~0;
6560 var_integer
->fullsize
= ~0;
6561 var_integer
->offset
= 0;
6562 var_integer
->is_special_var
= 1;
6564 /* INTEGER = ANYTHING, because we don't know where a dereference of
6565 a random integer will point to. */
6567 lhs
.var
= integer_id
;
6569 rhs
.type
= ADDRESSOF
;
6570 rhs
.var
= anything_id
;
6572 process_constraint (new_constraint (lhs
, rhs
));
6575 /* Initialize things necessary to perform PTA */
6578 init_alias_vars (void)
6580 use_field_sensitive
= (MAX_FIELDS_FOR_FIELD_SENSITIVE
> 1);
6582 bitmap_obstack_initialize (&pta_obstack
);
6583 bitmap_obstack_initialize (&oldpta_obstack
);
6584 bitmap_obstack_initialize (&predbitmap_obstack
);
6586 constraint_pool
= create_alloc_pool ("Constraint pool",
6587 sizeof (struct constraint
), 30);
6588 variable_info_pool
= create_alloc_pool ("Variable info pool",
6589 sizeof (struct variable_info
), 30);
6590 constraints
.create (8);
6592 vi_for_tree
= pointer_map_create ();
6593 call_stmt_vars
= pointer_map_create ();
6595 memset (&stats
, 0, sizeof (stats
));
6596 shared_bitmap_table
= htab_create (511, shared_bitmap_hash
,
6597 shared_bitmap_eq
, free
);
6600 gcc_obstack_init (&fake_var_decl_obstack
);
6602 final_solutions
= pointer_map_create ();
6603 gcc_obstack_init (&final_solutions_obstack
);
6606 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6607 predecessor edges. */
6610 remove_preds_and_fake_succs (constraint_graph_t graph
)
6614 /* Clear the implicit ref and address nodes from the successor
6616 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
6618 if (graph
->succs
[i
])
6619 bitmap_clear_range (graph
->succs
[i
], FIRST_REF_NODE
,
6620 FIRST_REF_NODE
* 2);
6623 /* Free the successor list for the non-ref nodes. */
6624 for (i
= FIRST_REF_NODE
+ 1; i
< graph
->size
; i
++)
6626 if (graph
->succs
[i
])
6627 BITMAP_FREE (graph
->succs
[i
]);
6630 /* Now reallocate the size of the successor list as, and blow away
6631 the predecessor bitmaps. */
6632 graph
->size
= varmap
.length ();
6633 graph
->succs
= XRESIZEVEC (bitmap
, graph
->succs
, graph
->size
);
6635 free (graph
->implicit_preds
);
6636 graph
->implicit_preds
= NULL
;
6637 free (graph
->preds
);
6638 graph
->preds
= NULL
;
6639 bitmap_obstack_release (&predbitmap_obstack
);
6642 /* Solve the constraint set. */
6645 solve_constraints (void)
6647 struct scc_info
*si
;
6651 "\nCollapsing static cycles and doing variable "
6654 init_graph (varmap
.length () * 2);
6657 fprintf (dump_file
, "Building predecessor graph\n");
6658 build_pred_graph ();
6661 fprintf (dump_file
, "Detecting pointer and location "
6663 si
= perform_var_substitution (graph
);
6666 fprintf (dump_file
, "Rewriting constraints and unifying "
6668 rewrite_constraints (graph
, si
);
6670 build_succ_graph ();
6672 free_var_substitution_info (si
);
6674 /* Attach complex constraints to graph nodes. */
6675 move_complex_constraints (graph
);
6678 fprintf (dump_file
, "Uniting pointer but not location equivalent "
6680 unite_pointer_equivalences (graph
);
6683 fprintf (dump_file
, "Finding indirect cycles\n");
6684 find_indirect_cycles (graph
);
6686 /* Implicit nodes and predecessors are no longer necessary at this
6688 remove_preds_and_fake_succs (graph
);
6690 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6692 fprintf (dump_file
, "\n\n// The constraint graph before solve-graph "
6693 "in dot format:\n");
6694 dump_constraint_graph (dump_file
);
6695 fprintf (dump_file
, "\n\n");
6699 fprintf (dump_file
, "Solving graph\n");
6701 solve_graph (graph
);
6703 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6705 fprintf (dump_file
, "\n\n// The constraint graph after solve-graph "
6706 "in dot format:\n");
6707 dump_constraint_graph (dump_file
);
6708 fprintf (dump_file
, "\n\n");
6712 dump_sa_points_to_info (dump_file
);
6715 /* Create points-to sets for the current function. See the comments
6716 at the start of the file for an algorithmic overview. */
6719 compute_points_to_sets (void)
6725 timevar_push (TV_TREE_PTA
);
6729 intra_create_variable_infos ();
6731 /* Now walk all statements and build the constraint set. */
6734 gimple_stmt_iterator gsi
;
6736 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6738 gimple phi
= gsi_stmt (gsi
);
6740 if (! virtual_operand_p (gimple_phi_result (phi
)))
6741 find_func_aliases (phi
);
6744 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6746 gimple stmt
= gsi_stmt (gsi
);
6748 find_func_aliases (stmt
);
6754 fprintf (dump_file
, "Points-to analysis\n\nConstraints:\n\n");
6755 dump_constraints (dump_file
, 0);
6758 /* From the constraints compute the points-to sets. */
6759 solve_constraints ();
6761 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6762 cfun
->gimple_df
->escaped
= find_what_var_points_to (get_varinfo (escaped_id
));
6764 /* Make sure the ESCAPED solution (which is used as placeholder in
6765 other solutions) does not reference itself. This simplifies
6766 points-to solution queries. */
6767 cfun
->gimple_df
->escaped
.escaped
= 0;
6769 /* Mark escaped HEAP variables as global. */
6770 FOR_EACH_VEC_ELT (varmap
, i
, vi
)
6773 && !vi
->is_global_var
)
6774 DECL_EXTERNAL (vi
->decl
) = vi
->is_global_var
6775 = pt_solution_includes (&cfun
->gimple_df
->escaped
, vi
->decl
);
6777 /* Compute the points-to sets for pointer SSA_NAMEs. */
6778 for (i
= 0; i
< num_ssa_names
; ++i
)
6780 tree ptr
= ssa_name (i
);
6782 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
6783 find_what_p_points_to (ptr
);
6786 /* Compute the call-used/clobbered sets. */
6789 gimple_stmt_iterator gsi
;
6791 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6793 gimple stmt
= gsi_stmt (gsi
);
6794 struct pt_solution
*pt
;
6795 if (!is_gimple_call (stmt
))
6798 pt
= gimple_call_use_set (stmt
);
6799 if (gimple_call_flags (stmt
) & ECF_CONST
)
6800 memset (pt
, 0, sizeof (struct pt_solution
));
6801 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
6803 *pt
= find_what_var_points_to (vi
);
6804 /* Escaped (and thus nonlocal) variables are always
6805 implicitly used by calls. */
6806 /* ??? ESCAPED can be empty even though NONLOCAL
6813 /* If there is nothing special about this call then
6814 we have made everything that is used also escape. */
6815 *pt
= cfun
->gimple_df
->escaped
;
6819 pt
= gimple_call_clobber_set (stmt
);
6820 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
6821 memset (pt
, 0, sizeof (struct pt_solution
));
6822 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
6824 *pt
= find_what_var_points_to (vi
);
6825 /* Escaped (and thus nonlocal) variables are always
6826 implicitly clobbered by calls. */
6827 /* ??? ESCAPED can be empty even though NONLOCAL
6834 /* If there is nothing special about this call then
6835 we have made everything that is used also escape. */
6836 *pt
= cfun
->gimple_df
->escaped
;
6842 timevar_pop (TV_TREE_PTA
);
6846 /* Delete created points-to sets. */
6849 delete_points_to_sets (void)
6853 htab_delete (shared_bitmap_table
);
6854 if (dump_file
&& (dump_flags
& TDF_STATS
))
6855 fprintf (dump_file
, "Points to sets created:%d\n",
6856 stats
.points_to_sets_created
);
6858 pointer_map_destroy (vi_for_tree
);
6859 pointer_map_destroy (call_stmt_vars
);
6860 bitmap_obstack_release (&pta_obstack
);
6861 constraints
.release ();
6863 for (i
= 0; i
< graph
->size
; i
++)
6864 graph
->complex[i
].release ();
6865 free (graph
->complex);
6868 free (graph
->succs
);
6870 free (graph
->pe_rep
);
6871 free (graph
->indirect_cycles
);
6875 free_alloc_pool (variable_info_pool
);
6876 free_alloc_pool (constraint_pool
);
6878 obstack_free (&fake_var_decl_obstack
, NULL
);
6880 pointer_map_destroy (final_solutions
);
6881 obstack_free (&final_solutions_obstack
, NULL
);
6885 /* Compute points-to information for every SSA_NAME pointer in the
6886 current function and compute the transitive closure of escaped
6887 variables to re-initialize the call-clobber states of local variables. */
6890 compute_may_aliases (void)
6892 if (cfun
->gimple_df
->ipa_pta
)
6896 fprintf (dump_file
, "\nNot re-computing points-to information "
6897 "because IPA points-to information is available.\n\n");
6899 /* But still dump what points-to information we have remaining. */
6900 dump_alias_info (dump_file
);
6906 /* For each pointer P_i, determine the sets of variables that P_i may
6907 point-to. Compute the reachability set of escaped and call-used
6909 compute_points_to_sets ();
6911 /* Debugging dumps. */
6913 dump_alias_info (dump_file
);
6915 /* Deallocate memory used by aliasing data structures and the internal
6916 points-to solution. */
6917 delete_points_to_sets ();
6919 gcc_assert (!need_ssa_update_p (cfun
));
6925 gate_tree_pta (void)
6927 return flag_tree_pta
;
6930 /* A dummy pass to cause points-to information to be computed via
6931 TODO_rebuild_alias. */
6933 struct gimple_opt_pass pass_build_alias
=
6938 OPTGROUP_NONE
, /* optinfo_flags */
6939 gate_tree_pta
, /* gate */
6943 0, /* static_pass_number */
6944 TV_NONE
, /* tv_id */
6945 PROP_cfg
| PROP_ssa
, /* properties_required */
6946 0, /* properties_provided */
6947 0, /* properties_destroyed */
6948 0, /* todo_flags_start */
6949 TODO_rebuild_alias
/* todo_flags_finish */
6953 /* A dummy pass to cause points-to information to be computed via
6954 TODO_rebuild_alias. */
6956 struct gimple_opt_pass pass_build_ealias
=
6960 "ealias", /* name */
6961 OPTGROUP_NONE
, /* optinfo_flags */
6962 gate_tree_pta
, /* gate */
6966 0, /* static_pass_number */
6967 TV_NONE
, /* tv_id */
6968 PROP_cfg
| PROP_ssa
, /* properties_required */
6969 0, /* properties_provided */
6970 0, /* properties_destroyed */
6971 0, /* todo_flags_start */
6972 TODO_rebuild_alias
/* todo_flags_finish */
6977 /* Return true if we should execute IPA PTA. */
6983 /* Don't bother doing anything if the program has errors. */
6987 /* IPA PTA solutions for ESCAPED. */
6988 struct pt_solution ipa_escaped_pt
6989 = { true, false, false, false, false, false, NULL
};
6991 /* Associate node with varinfo DATA. Worker for
6992 cgraph_for_node_and_aliases. */
6994 associate_varinfo_to_alias (struct cgraph_node
*node
, void *data
)
6996 if (node
->alias
|| node
->thunk
.thunk_p
)
6997 insert_vi_for_tree (node
->symbol
.decl
, (varinfo_t
)data
);
7001 /* Execute the driver for IPA PTA. */
7003 ipa_pta_execute (void)
7005 struct cgraph_node
*node
;
7006 struct varpool_node
*var
;
7013 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7015 dump_symtab (dump_file
);
7016 fprintf (dump_file
, "\n");
7019 /* Build the constraints. */
7020 FOR_EACH_DEFINED_FUNCTION (node
)
7023 /* Nodes without a body are not interesting. Especially do not
7024 visit clones at this point for now - we get duplicate decls
7025 there for inline clones at least. */
7026 if (!cgraph_function_with_gimple_body_p (node
))
7029 gcc_assert (!node
->clone_of
);
7031 vi
= create_function_info_for (node
->symbol
.decl
,
7032 alias_get_name (node
->symbol
.decl
));
7033 cgraph_for_node_and_aliases (node
, associate_varinfo_to_alias
, vi
, true);
7036 /* Create constraints for global variables and their initializers. */
7037 FOR_EACH_VARIABLE (var
)
7042 get_vi_for_tree (var
->symbol
.decl
);
7048 "Generating constraints for global initializers\n\n");
7049 dump_constraints (dump_file
, 0);
7050 fprintf (dump_file
, "\n");
7052 from
= constraints
.length ();
7054 FOR_EACH_DEFINED_FUNCTION (node
)
7056 struct function
*func
;
7059 /* Nodes without a body are not interesting. */
7060 if (!cgraph_function_with_gimple_body_p (node
))
7066 "Generating constraints for %s", cgraph_node_name (node
));
7067 if (DECL_ASSEMBLER_NAME_SET_P (node
->symbol
.decl
))
7068 fprintf (dump_file
, " (%s)",
7070 (DECL_ASSEMBLER_NAME (node
->symbol
.decl
)));
7071 fprintf (dump_file
, "\n");
7074 func
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
7077 /* For externally visible or attribute used annotated functions use
7078 local constraints for their arguments.
7079 For local functions we see all callers and thus do not need initial
7080 constraints for parameters. */
7081 if (node
->symbol
.used_from_other_partition
7082 || node
->symbol
.externally_visible
7083 || node
->symbol
.force_output
)
7085 intra_create_variable_infos ();
7087 /* We also need to make function return values escape. Nothing
7088 escapes by returning from main though. */
7089 if (!MAIN_NAME_P (DECL_NAME (node
->symbol
.decl
)))
7092 fi
= lookup_vi_for_tree (node
->symbol
.decl
);
7093 rvi
= first_vi_for_offset (fi
, fi_result
);
7094 if (rvi
&& rvi
->offset
== fi_result
)
7096 struct constraint_expr includes
;
7097 struct constraint_expr var
;
7098 includes
.var
= escaped_id
;
7099 includes
.offset
= 0;
7100 includes
.type
= SCALAR
;
7104 process_constraint (new_constraint (includes
, var
));
7109 /* Build constraints for the function body. */
7110 FOR_EACH_BB_FN (bb
, func
)
7112 gimple_stmt_iterator gsi
;
7114 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
7117 gimple phi
= gsi_stmt (gsi
);
7119 if (! virtual_operand_p (gimple_phi_result (phi
)))
7120 find_func_aliases (phi
);
7123 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7125 gimple stmt
= gsi_stmt (gsi
);
7127 find_func_aliases (stmt
);
7128 find_func_clobbers (stmt
);
7136 fprintf (dump_file
, "\n");
7137 dump_constraints (dump_file
, from
);
7138 fprintf (dump_file
, "\n");
7140 from
= constraints
.length ();
7143 /* From the constraints compute the points-to sets. */
7144 solve_constraints ();
7146 /* Compute the global points-to sets for ESCAPED.
7147 ??? Note that the computed escape set is not correct
7148 for the whole unit as we fail to consider graph edges to
7149 externally visible functions. */
7150 ipa_escaped_pt
= find_what_var_points_to (get_varinfo (escaped_id
));
7152 /* Make sure the ESCAPED solution (which is used as placeholder in
7153 other solutions) does not reference itself. This simplifies
7154 points-to solution queries. */
7155 ipa_escaped_pt
.ipa_escaped
= 0;
7157 /* Assign the points-to sets to the SSA names in the unit. */
7158 FOR_EACH_DEFINED_FUNCTION (node
)
7161 struct function
*fn
;
7165 struct pt_solution uses
, clobbers
;
7166 struct cgraph_edge
*e
;
7168 /* Nodes without a body are not interesting. */
7169 if (!cgraph_function_with_gimple_body_p (node
))
7172 fn
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
7174 /* Compute the points-to sets for pointer SSA_NAMEs. */
7175 FOR_EACH_VEC_ELT (*fn
->gimple_df
->ssa_names
, i
, ptr
)
7178 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
7179 find_what_p_points_to (ptr
);
7182 /* Compute the call-use and call-clobber sets for all direct calls. */
7183 fi
= lookup_vi_for_tree (node
->symbol
.decl
);
7184 gcc_assert (fi
->is_fn_info
);
7186 = find_what_var_points_to (first_vi_for_offset (fi
, fi_clobbers
));
7187 uses
= find_what_var_points_to (first_vi_for_offset (fi
, fi_uses
));
7188 for (e
= node
->callers
; e
; e
= e
->next_caller
)
7193 *gimple_call_clobber_set (e
->call_stmt
) = clobbers
;
7194 *gimple_call_use_set (e
->call_stmt
) = uses
;
7197 /* Compute the call-use and call-clobber sets for indirect calls
7198 and calls to external functions. */
7199 FOR_EACH_BB_FN (bb
, fn
)
7201 gimple_stmt_iterator gsi
;
7203 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7205 gimple stmt
= gsi_stmt (gsi
);
7206 struct pt_solution
*pt
;
7210 if (!is_gimple_call (stmt
))
7213 /* Handle direct calls to external functions. */
7214 decl
= gimple_call_fndecl (stmt
);
7216 && (!(fi
= lookup_vi_for_tree (decl
))
7217 || !fi
->is_fn_info
))
7219 pt
= gimple_call_use_set (stmt
);
7220 if (gimple_call_flags (stmt
) & ECF_CONST
)
7221 memset (pt
, 0, sizeof (struct pt_solution
));
7222 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
7224 *pt
= find_what_var_points_to (vi
);
7225 /* Escaped (and thus nonlocal) variables are always
7226 implicitly used by calls. */
7227 /* ??? ESCAPED can be empty even though NONLOCAL
7230 pt
->ipa_escaped
= 1;
7234 /* If there is nothing special about this call then
7235 we have made everything that is used also escape. */
7236 *pt
= ipa_escaped_pt
;
7240 pt
= gimple_call_clobber_set (stmt
);
7241 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
7242 memset (pt
, 0, sizeof (struct pt_solution
));
7243 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
7245 *pt
= find_what_var_points_to (vi
);
7246 /* Escaped (and thus nonlocal) variables are always
7247 implicitly clobbered by calls. */
7248 /* ??? ESCAPED can be empty even though NONLOCAL
7251 pt
->ipa_escaped
= 1;
7255 /* If there is nothing special about this call then
7256 we have made everything that is used also escape. */
7257 *pt
= ipa_escaped_pt
;
7262 /* Handle indirect calls. */
7264 && (fi
= get_fi_for_callee (stmt
)))
7266 /* We need to accumulate all clobbers/uses of all possible
7268 fi
= get_varinfo (find (fi
->id
));
7269 /* If we cannot constrain the set of functions we'll end up
7270 calling we end up using/clobbering everything. */
7271 if (bitmap_bit_p (fi
->solution
, anything_id
)
7272 || bitmap_bit_p (fi
->solution
, nonlocal_id
)
7273 || bitmap_bit_p (fi
->solution
, escaped_id
))
7275 pt_solution_reset (gimple_call_clobber_set (stmt
));
7276 pt_solution_reset (gimple_call_use_set (stmt
));
7282 struct pt_solution
*uses
, *clobbers
;
7284 uses
= gimple_call_use_set (stmt
);
7285 clobbers
= gimple_call_clobber_set (stmt
);
7286 memset (uses
, 0, sizeof (struct pt_solution
));
7287 memset (clobbers
, 0, sizeof (struct pt_solution
));
7288 EXECUTE_IF_SET_IN_BITMAP (fi
->solution
, 0, i
, bi
)
7290 struct pt_solution sol
;
7292 vi
= get_varinfo (i
);
7293 if (!vi
->is_fn_info
)
7295 /* ??? We could be more precise here? */
7297 uses
->ipa_escaped
= 1;
7298 clobbers
->nonlocal
= 1;
7299 clobbers
->ipa_escaped
= 1;
7303 if (!uses
->anything
)
7305 sol
= find_what_var_points_to
7306 (first_vi_for_offset (vi
, fi_uses
));
7307 pt_solution_ior_into (uses
, &sol
);
7309 if (!clobbers
->anything
)
7311 sol
= find_what_var_points_to
7312 (first_vi_for_offset (vi
, fi_clobbers
));
7313 pt_solution_ior_into (clobbers
, &sol
);
7321 fn
->gimple_df
->ipa_pta
= true;
7324 delete_points_to_sets ();
7331 struct simple_ipa_opt_pass pass_ipa_pta
=
7336 OPTGROUP_NONE
, /* optinfo_flags */
7337 gate_ipa_pta
, /* gate */
7338 ipa_pta_execute
, /* execute */
7341 0, /* static_pass_number */
7342 TV_IPA_PTA
, /* tv_id */
7343 0, /* properties_required */
7344 0, /* properties_provided */
7345 0, /* properties_destroyed */
7346 0, /* todo_flags_start */
7347 TODO_update_ssa
/* todo_flags_finish */