decl.c, [...]: Replace host_integerp (..., 1) with tree_fits_uhwi_p throughout.
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "sbitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "gimple-iterator.h"
34 #include "gimple-ssa.h"
35 #include "cgraph.h"
36 #include "tree-ssanames.h"
37 #include "tree-into-ssa.h"
38 #include "tree-dfa.h"
39 #include "tree-inline.h"
40 #include "diagnostic-core.h"
41 #include "hash-table.h"
42 #include "function.h"
43 #include "tree-pass.h"
44 #include "alloc-pool.h"
45 #include "splay-tree.h"
46 #include "params.h"
47 #include "alias.h"
48 #include "pointer-set.h"
49
50 /* The idea behind this analyzer is to generate set constraints from the
51 program, then solve the resulting constraints in order to generate the
52 points-to sets.
53
54 Set constraints are a way of modeling program analysis problems that
55 involve sets. They consist of an inclusion constraint language,
56 describing the variables (each variable is a set) and operations that
57 are involved on the variables, and a set of rules that derive facts
58 from these operations. To solve a system of set constraints, you derive
59 all possible facts under the rules, which gives you the correct sets
60 as a consequence.
61
62 See "Efficient Field-sensitive pointer analysis for C" by "David
63 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
64 http://citeseer.ist.psu.edu/pearce04efficient.html
65
66 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
68 http://citeseer.ist.psu.edu/heintze01ultrafast.html
69
70 There are three types of real constraint expressions, DEREF,
71 ADDRESSOF, and SCALAR. Each constraint expression consists
72 of a constraint type, a variable, and an offset.
73
74 SCALAR is a constraint expression type used to represent x, whether
75 it appears on the LHS or the RHS of a statement.
76 DEREF is a constraint expression type used to represent *x, whether
77 it appears on the LHS or the RHS of a statement.
78 ADDRESSOF is a constraint expression used to represent &x, whether
79 it appears on the LHS or the RHS of a statement.
80
81 Each pointer variable in the program is assigned an integer id, and
82 each field of a structure variable is assigned an integer id as well.
83
84 Structure variables are linked to their list of fields through a "next
85 field" in each variable that points to the next field in offset
86 order.
87 Each variable for a structure field has
88
89 1. "size", that tells the size in bits of that field.
2. "fullsize", that tells the size in bits of the entire structure.
91 3. "offset", that tells the offset in bits from the beginning of the
92 structure to this field.
93
94 Thus,
95 struct f
96 {
97 int a;
98 int b;
99 } foo;
100 int *bar;
101
102 looks like
103
104 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
105 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
106 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
107
108
109 In order to solve the system of set constraints, the following is
110 done:
111
112 1. Each constraint variable x has a solution set associated with it,
113 Sol(x).
114
115 2. Constraints are separated into direct, copy, and complex.
116 Direct constraints are ADDRESSOF constraints that require no extra
117 processing, such as P = &Q
118 Copy constraints are those of the form P = Q.
119 Complex constraints are all the constraints involving dereferences
120 and offsets (including offsetted copies).
121
122 3. All direct constraints of the form P = &Q are processed, such
123 that Q is added to Sol(P)
124
125 4. All complex constraints for a given constraint variable are stored in a
126 linked list attached to that variable's node.
127
128 5. A directed graph is built out of the copy constraints. Each
129 constraint variable is a node in the graph, and an edge from
130 Q to P is added for each copy constraint of the form P = Q
131
132 6. The graph is then walked, and solution sets are
133 propagated along the copy edges, such that an edge from Q to P
134 causes Sol(P) <- Sol(P) union Sol(Q).
135
136 7. As we visit each node, all complex constraints associated with
137 that node are processed by adding appropriate copy edges to the graph, or the
138 appropriate variables to the solution set.
139
140 8. The process of walking the graph is iterated until no solution
141 sets change.
142
143 Prior to walking the graph in steps 6 and 7, We perform static
144 cycle elimination on the constraint graph, as well
145 as off-line variable substitution.
146
147 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
148 on and turned into anything), but isn't. You can just see what offset
149 inside the pointed-to struct it's going to access.
150
151 TODO: Constant bounded arrays can be handled as if they were structs of the
152 same number of elements.
153
154 TODO: Modeling heap and incoming pointers becomes much better if we
155 add fields to them as we discover them, which we could do.
156
157 TODO: We could handle unions, but to be honest, it's probably not
158 worth the pain or slowdown. */
159
160 /* IPA-PTA optimizations possible.
161
162 When the indirect function called is ANYTHING we can add disambiguation
163 based on the function signatures (or simply the parameter count which
164 is the varinfo size). We also do not need to consider functions that
165 do not have their address taken.
166
167 The is_global_var bit which marks escape points is overly conservative
168 in IPA mode. Split it to is_escape_point and is_global_var - only
169 externally visible globals are escape points in IPA mode. This is
170 also needed to fix the pt_solution_includes_global predicate
171 (and thus ptr_deref_may_alias_global_p).
172
173 The way we introduce DECL_PT_UID to avoid fixing up all points-to
174 sets in the translation unit when we copy a DECL during inlining
175 pessimizes precision. The advantage is that the DECL_PT_UID keeps
176 compile-time and memory usage overhead low - the points-to sets
177 do not grow or get unshared as they would during a fixup phase.
178 An alternative solution is to delay IPA PTA until after all
179 inlining transformations have been applied.
180
181 The way we propagate clobber/use information isn't optimized.
182 It should use a new complex constraint that properly filters
183 out local variables of the callee (though that would make
184 the sets invalid after inlining). OTOH we might as well
185 admit defeat to WHOPR and simply do all the clobber/use analysis
186 and propagation after PTA finished but before we threw away
187 points-to information for memory variables. WHOPR and PTA
188 do not play along well anyway - the whole constraint solving
189 would need to be done in WPA phase and it will be very interesting
190 to apply the results to local SSA names during LTRANS phase.
191
192 We probably should compute a per-function unit-ESCAPE solution
193 propagating it simply like the clobber / uses solutions. The
solution can go alongside the non-IPA escaped solution and be
195 used to query which vars escape the unit through a function.
196
197 We never put function decls in points-to sets so we do not
198 keep the set of called functions for indirect calls.
199
200 And probably more. */
201
/* True if the analysis should be field-sensitive, i.e. model each
   structure field as a separate constraint variable.  */
static bool use_field_sensitive = true;
/* Nonzero when running in whole-program (IPA) mode rather than on a
   single function.  */
static int in_ipa_mode = 0;
204
205 /* Used for predecessor bitmaps. */
206 static bitmap_obstack predbitmap_obstack;
207
208 /* Used for points-to sets. */
209 static bitmap_obstack pta_obstack;
210
211 /* Used for oldsolution members of variables. */
212 static bitmap_obstack oldpta_obstack;
213
214 /* Used for per-solver-iteration bitmaps. */
215 static bitmap_obstack iteration_obstack;
216
217 static unsigned int create_variable_info_for (tree, const char *);
218 typedef struct constraint_graph *constraint_graph_t;
219 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
220
221 struct constraint;
222 typedef struct constraint *constraint_t;
223
224
225 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
226 if (a) \
227 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
228
229 static struct constraint_stats
230 {
231 unsigned int total_vars;
232 unsigned int nonpointer_vars;
233 unsigned int unified_vars_static;
234 unsigned int unified_vars_dynamic;
235 unsigned int iterations;
236 unsigned int num_edges;
237 unsigned int num_implicit_edges;
238 unsigned int points_to_sets_created;
239 } stats;
240
/* Description of a single constraint variable.  A structure variable
   is represented by a chain of these, one per field, linked through
   the NEXT id in offset order.  */
struct variable_info
{
  /* ID of this variable (its index in the varmap vector).  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
304
305 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
306 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
307 unsigned HOST_WIDE_INT);
308 static varinfo_t lookup_vi_for_tree (tree);
309 static inline bool type_can_have_subvars (const_tree);
310
311 /* Pool of variable info structures. */
312 static alloc_pool variable_info_pool;
313
314 /* Map varinfo to final pt_solution. */
315 static pointer_map_t *final_solutions;
316 struct obstack final_solutions_obstack;
317
318 /* Table of variable info structures for constraint variables.
319 Indexed directly by variable info id. */
320 static vec<varinfo_t> varmap;
321
/* Return the varmap element N, i.e. the variable info structure whose
   id is N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
329
/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  /* A NEXT id of zero terminates the chain; varmap slot zero is unused
     and yields NULL.  */
  return get_varinfo (vi->next);
}
338
339 /* Static IDs for the special variables. Variable ID zero is unused
340 and used as terminator for the sub-variable chain. */
341 enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
342 escaped_id = 4, nonlocal_id = 5,
343 storedanything_id = 6, integer_id = 7 };
344
/* Return a new variable info structure named NAME for the tree T
   (NULL_TREE for artificial variables).  Append it to the vector of
   variable info structures.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  /* The new variable's id is its index in varmap.  */
  unsigned index = varmap.length ();
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  /* No old solution yet.  */
  ret->oldsolution = NULL;
  /* Initially a singleton field chain headed by itself.  */
  ret->next = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}
385
386
387 /* A map mapping call statements to per-stmt variables for uses
388 and clobbers specific to the call. */
389 static struct pointer_map_t *call_stmt_vars;
390
391 /* Lookup or create the variable for the call statement CALL. */
392
393 static varinfo_t
394 get_call_vi (gimple call)
395 {
396 void **slot_p;
397 varinfo_t vi, vi2;
398
399 slot_p = pointer_map_insert (call_stmt_vars, call);
400 if (*slot_p)
401 return (varinfo_t) *slot_p;
402
403 vi = new_var_info (NULL_TREE, "CALLUSED");
404 vi->offset = 0;
405 vi->size = 1;
406 vi->fullsize = 2;
407 vi->is_full_var = true;
408
409 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
410 vi2->offset = 1;
411 vi2->size = 1;
412 vi2->fullsize = 2;
413 vi2->is_full_var = true;
414
415 vi->next = vi2->id;
416
417 *slot_p = (void *) vi;
418 return vi;
419 }
420
421 /* Lookup the variable for the call statement CALL representing
422 the uses. Returns NULL if there is nothing special about this call. */
423
424 static varinfo_t
425 lookup_call_use_vi (gimple call)
426 {
427 void **slot_p;
428
429 slot_p = pointer_map_contains (call_stmt_vars, call);
430 if (slot_p)
431 return (varinfo_t) *slot_p;
432
433 return NULL;
434 }
435
436 /* Lookup the variable for the call statement CALL representing
437 the clobbers. Returns NULL if there is nothing special about this call. */
438
439 static varinfo_t
440 lookup_call_clobber_vi (gimple call)
441 {
442 varinfo_t uses = lookup_call_use_vi (call);
443 if (!uses)
444 return NULL;
445
446 return vi_next (uses);
447 }
448
/* Lookup or create the variable for the call statement CALL representing
   the uses.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  /* The use variable is the first of the pair created by get_call_vi.  */
  return get_call_vi (call);
}
457
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  /* The clobber variable is chained directly after the use variable.  */
  return vi_next (get_call_vi (call));
}
466
467
/* The three kinds of constraint expression: a plain variable x
   (SCALAR), a dereference *x (DEREF) and an address-of &x
   (ADDRESSOF).  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
490
491 typedef struct constraint_expr ce_s;
492 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
493 static void get_constraint_for (tree, vec<ce_s> *);
494 static void get_constraint_for_rhs (tree, vec<ce_s> *);
495 static void do_deref (vec<ce_s> *);
496
497 /* Our set constraints are made up of two constraint expressions, one
498 LHS, and one RHS.
499
500 As described in the introduction, our set constraints each represent an
501 operation between set valued variables.
502 */
503 struct constraint
504 {
505 struct constraint_expr lhs;
506 struct constraint_expr rhs;
507 };
508
509 /* List of constraints that we use to build the constraint graph from. */
510
511 static vec<constraint_t> constraints;
512 static alloc_pool constraint_pool;
513
514 /* The constraint graph is represented as an array of bitmaps
515 containing successor nodes. */
516
517 struct constraint_graph
518 {
519 /* Size of this graph, which may be different than the number of
520 nodes in the variable map. */
521 unsigned int size;
522
523 /* Explicit successors of each node. */
524 bitmap *succs;
525
526 /* Implicit predecessors of each node (Used for variable
527 substitution). */
528 bitmap *implicit_preds;
529
530 /* Explicit predecessors of each node (Used for variable substitution). */
531 bitmap *preds;
532
533 /* Indirect cycle representatives, or -1 if the node has no indirect
534 cycles. */
535 int *indirect_cycles;
536
537 /* Representative node for a node. rep[a] == a unless the node has
538 been unified. */
539 unsigned int *rep;
540
541 /* Equivalence class representative for a label. This is used for
542 variable substitution. */
543 int *eq_rep;
544
545 /* Pointer equivalence label for a node. All nodes with the same
546 pointer equivalence label can be unified together at some point
547 (either during constraint optimization or after the constraint
548 graph is built). */
549 unsigned int *pe;
550
551 /* Pointer equivalence representative for a label. This is used to
552 handle nodes that are pointer equivalent but not location
553 equivalent. We can unite these once the addressof constraints
554 are transformed into initial points-to sets. */
555 int *pe_rep;
556
557 /* Pointer equivalence label for each node, used during variable
558 substitution. */
559 unsigned int *pointer_label;
560
561 /* Location equivalence label for each node, used during location
562 equivalence finding. */
563 unsigned int *loc_label;
564
565 /* Pointed-by set for each node, used during location equivalence
566 finding. This is pointed-by rather than pointed-to, because it
567 is constructed using the predecessor graph. */
568 bitmap *pointed_by;
569
570 /* Points to sets for pointer equivalence. This is *not* the actual
571 points-to sets for nodes. */
572 bitmap *points_to;
573
574 /* Bitmap of nodes where the bit is set if the node is a direct
575 node. Used for variable substitution. */
576 sbitmap direct_nodes;
577
578 /* Bitmap of nodes where the bit is set if the node is address
579 taken. Used for variable substitution. */
580 bitmap address_taken;
581
582 /* Vector of complex constraints for each graph node. Complex
583 constraints are those involving dereferences or offsets that are
584 not 0. */
585 vec<constraint_t> *complex;
586 };
587
588 static constraint_graph_t graph;
589
590 /* During variable substitution and the offline version of indirect
591 cycle finding, we create nodes to represent dereferences and
592 address taken constraints. These represent where these start and
593 end. */
594 #define FIRST_REF_NODE (varmap).length ()
595 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
596
597 /* Return the representative node for NODE, if NODE has been unioned
598 with another NODE.
599 This function performs path compression along the way to finding
600 the representative. */
601
602 static unsigned int
603 find (unsigned int node)
604 {
605 gcc_checking_assert (node < graph->size);
606 if (graph->rep[node] != node)
607 return graph->rep[node] = find (graph->rep[node]);
608 return node;
609 }
610
611 /* Union the TO and FROM nodes to the TO nodes.
612 Note that at some point in the future, we may want to do
613 union-by-rank, in which case we are going to have to return the
614 node we unified to. */
615
616 static bool
617 unite (unsigned int to, unsigned int from)
618 {
619 gcc_checking_assert (to < graph->size && from < graph->size);
620 if (to != from && graph->rep[from] != to)
621 {
622 graph->rep[from] = to;
623 return true;
624 }
625 return false;
626 }
627
628 /* Create a new constraint consisting of LHS and RHS expressions. */
629
630 static constraint_t
631 new_constraint (const struct constraint_expr lhs,
632 const struct constraint_expr rhs)
633 {
634 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
635 ret->lhs = lhs;
636 ret->rhs = rhs;
637 return ret;
638 }
639
640 /* Print out constraint C to FILE. */
641
642 static void
643 dump_constraint (FILE *file, constraint_t c)
644 {
645 if (c->lhs.type == ADDRESSOF)
646 fprintf (file, "&");
647 else if (c->lhs.type == DEREF)
648 fprintf (file, "*");
649 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
650 if (c->lhs.offset == UNKNOWN_OFFSET)
651 fprintf (file, " + UNKNOWN");
652 else if (c->lhs.offset != 0)
653 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
654 fprintf (file, " = ");
655 if (c->rhs.type == ADDRESSOF)
656 fprintf (file, "&");
657 else if (c->rhs.type == DEREF)
658 fprintf (file, "*");
659 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
660 if (c->rhs.offset == UNKNOWN_OFFSET)
661 fprintf (file, " + UNKNOWN");
662 else if (c->rhs.offset != 0)
663 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
664 }
665
666
667 void debug_constraint (constraint_t);
668 void debug_constraints (void);
669 void debug_constraint_graph (void);
670 void debug_solution_for_var (unsigned int);
671 void debug_sa_points_to_info (void);
672
673 /* Print out constraint C to stderr. */
674
675 DEBUG_FUNCTION void
676 debug_constraint (constraint_t c)
677 {
678 dump_constraint (stderr, c);
679 fprintf (stderr, "\n");
680 }
681
682 /* Print out all constraints to FILE */
683
684 static void
685 dump_constraints (FILE *file, int from)
686 {
687 int i;
688 constraint_t c;
689 for (i = from; constraints.iterate (i, &c); i++)
690 if (c)
691 {
692 dump_constraint (file, c);
693 fprintf (file, "\n");
694 }
695 }
696
/* Print out all constraints to stderr (for use from the debugger).  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
704
/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      /* Node 0 is unused; FIRST_REF_NODE separates the ordinary
	 variable nodes from the "*var" reference nodes.  */
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip nodes that have been unified into their representative.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the complex constraints as the node's dot label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Do not emit self-edges created by unification.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
779
/* Print out the constraint graph to stderr (for use from the
   debugger).  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
787
788 /* SOLVER FUNCTIONS
789
790 The solver is a simple worklist solver, that works on the following
791 algorithm:
792
793 sbitmap changed_nodes = all zeroes;
794 changed_count = 0;
795 For each node that is not already collapsed:
796 changed_count++;
797 set bit in changed nodes
798
799 while (changed_count > 0)
800 {
801 compute topological ordering for constraint graph
802
803 find and collapse cycles in the constraint graph (updating
804 changed if necessary)
805
806 for each node (n) in the graph in topological order:
807 changed_count--;
808
809 Process each complex constraint associated with the node,
810 updating changed if necessary.
811
812 For each outgoing edge from n, propagate the solution from n to
813 the destination of the edge, updating changed as necessary.
814
815 } */
816
817 /* Return true if two constraint expressions A and B are equal. */
818
819 static bool
820 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
821 {
822 return a.type == b.type && a.var == b.var && a.offset == b.offset;
823 }
824
825 /* Return true if constraint expression A is less than constraint expression
826 B. This is just arbitrary, but consistent, in order to give them an
827 ordering. */
828
829 static bool
830 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
831 {
832 if (a.type == b.type)
833 {
834 if (a.var == b.var)
835 return a.offset < b.offset;
836 else
837 return a.var < b.var;
838 }
839 else
840 return a.type < b.type;
841 }
842
843 /* Return true if constraint A is less than constraint B. This is just
844 arbitrary, but consistent, in order to give them an ordering. */
845
846 static bool
847 constraint_less (const constraint_t &a, const constraint_t &b)
848 {
849 if (constraint_expr_less (a->lhs, b->lhs))
850 return true;
851 else if (constraint_expr_less (b->lhs, a->lhs))
852 return false;
853 else
854 return constraint_expr_less (a->rhs, b->rhs);
855 }
856
857 /* Return true if two constraints A and B are equal. */
858
859 static bool
860 constraint_equal (struct constraint a, struct constraint b)
861 {
862 return constraint_expr_equal (a.lhs, b.lhs)
863 && constraint_expr_equal (a.rhs, b.rhs);
864 }
865
866
867 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
868
869 static constraint_t
870 constraint_vec_find (vec<constraint_t> vec,
871 struct constraint lookfor)
872 {
873 unsigned int place;
874 constraint_t found;
875
876 if (!vec.exists ())
877 return NULL;
878
879 place = vec.lower_bound (&lookfor, constraint_less);
880 if (place >= vec.length ())
881 return NULL;
882 found = vec[place];
883 if (!constraint_equal (*found, lookfor))
884 return NULL;
885 return found;
886 }
887
888 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
889
890 static void
891 constraint_set_union (vec<constraint_t> *to,
892 vec<constraint_t> *from)
893 {
894 int i;
895 constraint_t c;
896
897 FOR_EACH_VEC_ELT (*from, i, c)
898 {
899 if (constraint_vec_find (*to, *c) == NULL)
900 {
901 unsigned int place = to->lower_bound (c, constraint_less);
902 to->safe_insert (place, c);
903 }
904 }
905 }
906
/* Expands the solution in SET to all sub-fields of variables included.  */

static void
solution_set_expand (bitmap set)
{
  bitmap_iterator bi;
  unsigned j;

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (set, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Only process head fields; the others were expanded via their
	 head in the first pass.  */
      if (v->is_artificial_var
	  || v->is_full_var
	  || v->head != j)
	continue;
      /* Add every sub-field following the head.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (set, v->id);
    }
}
938
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  Returns true if TO changed.  */

static bool
set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of FROM contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (from, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* For zero offset simply union the solution into the destination.  */
  if (inc == 0)
    return bitmap_ior_into (to, from);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_expand (tmp);
      changed |= bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.
	     NOTE: with negative INC this test relies on the unsigned
	     wraparound of FIELDOFFSET in the addition above.  */
	  if (inc < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  changed |= bitmap_set_bit (to, vi->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (vi->offset != fieldoffset
	      && vi->next != 0)
	    changed |= bitmap_set_bit (to, vi->next);
	}
    }

  return changed;
}
1005
/* Insert constraint C into the list of complex constraints for graph
   node VAR.  The vector is kept sorted by constraint_less so that
   lower_bound can locate the insertion point, and duplicates (as
   determined by constraint_equal) are never inserted twice.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  vec<constraint_t> complex = graph->complex[var];
  unsigned int place = complex.lower_bound (c, constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= complex.length ()
      || !constraint_equal (*c, *complex[place]))
    graph->complex[var].safe_insert (place, c);
}
1021
1022
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  FROM must already have been
   unified with TO in the union-find structure (see the assert).  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *from, and *from = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  Redirect the
	 dereferenced side (or the rhs for offseted copies) to TO.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Merge the two sorted, deduplicated constraint vectors and release
     FROM's copy.  */
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  graph->complex[from].release ();
}
1052
1053
1054 /* Remove edges involving NODE from GRAPH. */
1055
1056 static void
1057 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1058 {
1059 if (graph->succs[node])
1060 BITMAP_FREE (graph->succs[node]);
1061 }
1062
/* Merge GRAPH nodes FROM and TO into node TO: propagate indirect-cycle
   information and successor edges, then drop FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM's successors are now part of TO; free FROM's bitmap.  */
  clear_edges_for_node (graph, from);
}
1092
1093
1094 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1095 it doesn't exist in the graph already. */
1096
1097 static void
1098 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1099 unsigned int from)
1100 {
1101 if (to == from)
1102 return;
1103
1104 if (!graph->implicit_preds[to])
1105 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1106
1107 if (bitmap_set_bit (graph->implicit_preds[to], from))
1108 stats.num_implicit_edges++;
1109 }
1110
/* Add a predecessor graph edge to GRAPH, going from TO to FROM, if
   it doesn't exist in the graph already.  Setting an already-present
   bit is harmless, so duplicate edges are simply ignored.  (Unlike
   add_graph_edge this returns nothing and keeps no statistics.)  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1123
1124 /* Add a graph edge to GRAPH, going from FROM to TO if
1125 it doesn't exist in the graph already.
1126 Return false if the edge already existed, true otherwise. */
1127
1128 static bool
1129 add_graph_edge (constraint_graph_t graph, unsigned int to,
1130 unsigned int from)
1131 {
1132 if (to == from)
1133 {
1134 return false;
1135 }
1136 else
1137 {
1138 bool r = false;
1139
1140 if (!graph->succs[from])
1141 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1142 if (bitmap_set_bit (graph->succs[from], to))
1143 {
1144 r = true;
1145 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1146 stats.num_edges++;
1147 }
1148 return r;
1149 }
1150 }
1151
1152
1153 /* Initialize the constraint graph structure to contain SIZE nodes. */
1154
1155 static void
1156 init_graph (unsigned int size)
1157 {
1158 unsigned int j;
1159
1160 graph = XCNEW (struct constraint_graph);
1161 graph->size = size;
1162 graph->succs = XCNEWVEC (bitmap, graph->size);
1163 graph->indirect_cycles = XNEWVEC (int, graph->size);
1164 graph->rep = XNEWVEC (unsigned int, graph->size);
1165 /* ??? Macros do not support template types with multiple arguments,
1166 so we use a typedef to work around it. */
1167 typedef vec<constraint_t> vec_constraint_t_heap;
1168 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1169 graph->pe = XCNEWVEC (unsigned int, graph->size);
1170 graph->pe_rep = XNEWVEC (int, graph->size);
1171
1172 for (j = 0; j < graph->size; j++)
1173 {
1174 graph->rep[j] = j;
1175 graph->pe_rep[j] = -1;
1176 graph->indirect_cycles[j] = -1;
1177 }
1178 }
1179
/* Build the constraint graph, adding only predecessor edges right now.
   Also initializes the direct_nodes bitmap (nodes whose points-to set
   is fully determined by their predecessors) and records which
   variables have their address taken.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor / implicit-predecessor
     edges, dispatching on the constraint kind.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* Offsetted loads cannot be modeled; x is no longer
	       direct.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  For a
	     multi-field variable, walk all its subfields.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1282
/* Build the constraint graph, adding successor edges.  Run after
   offline variable substitution, so all variable ids are mapped
   through find () to their representatives first.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by earlier rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Work on union-find representatives.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed the initial solution directly, no edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1342
1343
1344 /* Changed variables on the last iteration. */
1345 static bitmap changed;
1346
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes whose DFS visit has started.  */
  sbitmap visited;
  /* Nodes whose SCC has been fully identified and collapsed.  */
  sbitmap deleted;
  /* Per-node DFS number; lowered to the smallest number reachable
     from the node while the visit is in progress.  */
  unsigned int *dfs;
  /* Maps each node to its SCC representative (used by
     condense_visit; scc_visit uses the union-find instead).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of visited non-root nodes not yet assigned to an SCC.  */
  vec<unsigned> scc_stack;
};
1358
1359
1360 /* Recursive routine to find strongly connected components in GRAPH.
1361 SI is the SCC info to store the information in, and N is the id of current
1362 graph node we are processing.
1363
1364 This is Tarjan's strongly connected component finding algorithm, as
1365 modified by Nuutila to keep only non-root nodes on the stack.
1366 The algorithm can be found in "On finding the strongly connected
1367 connected components in a directed graph" by Esko Nuutila and Eljas
1368 Soisalon-Soininen, in Information Processing Letters volume 49,
1369 number 1, pages 9-14. */
1370
1371 static void
1372 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1373 {
1374 unsigned int i;
1375 bitmap_iterator bi;
1376 unsigned int my_dfs;
1377
1378 bitmap_set_bit (si->visited, n);
1379 si->dfs[n] = si->current_index ++;
1380 my_dfs = si->dfs[n];
1381
1382 /* Visit all the successors. */
1383 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1384 {
1385 unsigned int w;
1386
1387 if (i > LAST_REF_NODE)
1388 break;
1389
1390 w = find (i);
1391 if (bitmap_bit_p (si->deleted, w))
1392 continue;
1393
1394 if (!bitmap_bit_p (si->visited, w))
1395 scc_visit (graph, si, w);
1396
1397 unsigned int t = find (w);
1398 gcc_checking_assert (find (n) == n);
1399 if (si->dfs[t] < si->dfs[n])
1400 si->dfs[n] = si->dfs[t];
1401 }
1402
1403 /* See if any components have been identified. */
1404 if (si->dfs[n] == my_dfs)
1405 {
1406 if (si->scc_stack.length () > 0
1407 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1408 {
1409 bitmap scc = BITMAP_ALLOC (NULL);
1410 unsigned int lowest_node;
1411 bitmap_iterator bi;
1412
1413 bitmap_set_bit (scc, n);
1414
1415 while (si->scc_stack.length () != 0
1416 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1417 {
1418 unsigned int w = si->scc_stack.pop ();
1419
1420 bitmap_set_bit (scc, w);
1421 }
1422
1423 lowest_node = bitmap_first_set_bit (scc);
1424 gcc_assert (lowest_node < FIRST_REF_NODE);
1425
1426 /* Collapse the SCC nodes into a single node, and mark the
1427 indirect cycles. */
1428 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1429 {
1430 if (i < FIRST_REF_NODE)
1431 {
1432 if (unite (lowest_node, i))
1433 unify_nodes (graph, lowest_node, i, false);
1434 }
1435 else
1436 {
1437 unite (lowest_node, i);
1438 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1439 }
1440 }
1441 }
1442 bitmap_set_bit (si->deleted, n);
1443 }
1444 else
1445 si->scc_stack.safe_push (n);
1446 }
1447
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  FROM must already have TO
   as its union-find representative (see the assert).  Edges, complex
   constraints and the solution bitmap all migrate to TO.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution has been folded into TO's; release it.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* Once solving has started, TO's cached old solution is stale
	 after the merge; drop it.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove any self edge the merge may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1499
/* Information needed to compute the topological ordering of a graph.
   Filled in by topo_visit and released by free_topo_info.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1510
1511
1512 /* Initialize and return a topological info structure. */
1513
1514 static struct topo_info *
1515 init_topo_info (void)
1516 {
1517 size_t size = graph->size;
1518 struct topo_info *ti = XNEW (struct topo_info);
1519 ti->visited = sbitmap_alloc (size);
1520 bitmap_clear (ti->visited);
1521 ti->topo_order.create (1);
1522 return ti;
1523 }
1524
1525
1526 /* Free the topological sort info pointed to by TI. */
1527
1528 static void
1529 free_topo_info (struct topo_info *ti)
1530 {
1531 sbitmap_free (ti->visited);
1532 ti->topo_order.release ();
1533 free (ti);
1534 }
1535
1536 /* Visit the graph in topological order, and store the order in the
1537 topo_info structure. */
1538
1539 static void
1540 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1541 unsigned int n)
1542 {
1543 bitmap_iterator bi;
1544 unsigned int j;
1545
1546 bitmap_set_bit (ti->visited, n);
1547
1548 if (graph->succs[n])
1549 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1550 {
1551 if (!bitmap_bit_p (ti->visited, j))
1552 topo_visit (graph, ti, j);
1553 }
1554
1555 ti->topo_order.safe_push (n);
1556 }
1557
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  Unions the solutions of everything DELTA
   may point to (at the given offset) into x's solution, adding graph
   edges so future changes propagate, and marks x changed if its
   solution grew.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1645
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  Unions y's solution into every
   variable DELTA may point to (at the given offset), adds edges so
   future changes propagate, and records escapes through globals.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1743
1744 /* Handle a non-simple (simple meaning requires no iteration),
1745 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1746
1747 static void
1748 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1749 {
1750 if (c->lhs.type == DEREF)
1751 {
1752 if (c->rhs.type == ADDRESSOF)
1753 {
1754 gcc_unreachable ();
1755 }
1756 else
1757 {
1758 /* *x = y */
1759 do_ds_constraint (c, delta);
1760 }
1761 }
1762 else if (c->rhs.type == DEREF)
1763 {
1764 /* x = *y */
1765 if (!(get_varinfo (c->lhs.var)->is_special_var))
1766 do_sd_constraint (graph, c, delta);
1767 }
1768 else
1769 {
1770 bitmap tmp;
1771 bitmap solution;
1772 bool flag = false;
1773
1774 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1775 solution = get_varinfo (c->rhs.var)->solution;
1776 tmp = get_varinfo (c->lhs.var)->solution;
1777
1778 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1779
1780 if (flag)
1781 bitmap_set_bit (changed, c->lhs.var);
1782 }
1783 }
1784
1785 /* Initialize and return a new SCC info structure. */
1786
1787 static struct scc_info *
1788 init_scc_info (size_t size)
1789 {
1790 struct scc_info *si = XNEW (struct scc_info);
1791 size_t i;
1792
1793 si->current_index = 0;
1794 si->visited = sbitmap_alloc (size);
1795 bitmap_clear (si->visited);
1796 si->deleted = sbitmap_alloc (size);
1797 bitmap_clear (si->deleted);
1798 si->node_mapping = XNEWVEC (unsigned int, size);
1799 si->dfs = XCNEWVEC (unsigned int, size);
1800
1801 for (i = 0; i < size; i++)
1802 si->node_mapping[i] = i;
1803
1804 si->scc_stack.create (1);
1805 return si;
1806 }
1807
1808 /* Free an SCC info structure pointed to by SI */
1809
1810 static void
1811 free_scc_info (struct scc_info *si)
1812 {
1813 sbitmap_free (si->visited);
1814 sbitmap_free (si->deleted);
1815 free (si->node_mapping);
1816 free (si->dfs);
1817 si->scc_stack.release ();
1818 free (si);
1819 }
1820
1821
1822 /* Find indirect cycles in GRAPH that occur, using strongly connected
1823 components, and note them in the indirect cycles map.
1824
1825 This technique comes from Ben Hardekopf and Calvin Lin,
1826 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1827 Lines of Code", submitted to PLDI 2007. */
1828
1829 static void
1830 find_indirect_cycles (constraint_graph_t graph)
1831 {
1832 unsigned int i;
1833 unsigned int size = graph->size;
1834 struct scc_info *si = init_scc_info (size);
1835
1836 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1837 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1838 scc_visit (graph, si, i);
1839
1840 free_scc_info (si);
1841 }
1842
1843 /* Compute a topological ordering for GRAPH, and store the result in the
1844 topo_info structure TI. */
1845
1846 static void
1847 compute_topo_order (constraint_graph_t graph,
1848 struct topo_info *ti)
1849 {
1850 unsigned int i;
1851 unsigned int size = graph->size;
1852
1853 for (i = 0; i != size; ++i)
1854 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1855 topo_visit (graph, ti, i);
1856 }
1857
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Hash of LABELS, cached at creation time for table lookups.  */
  hashval_t hashcode;
  /* Equivalence class number assigned to this label set; zero means
     not yet assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1868
/* Equiv_class_label hashtable helpers.  */

struct equiv_class_hasher : typed_free_remove <equiv_class_label>
{
  typedef equiv_class_label value_type;
  typedef equiv_class_label compare_type;
  /* Return the cached hash code of an entry.  */
  static inline hashval_t hash (const value_type *);
  /* Compare two entries by hash and label bitmap.  */
  static inline bool equal (const value_type *, const compare_type *);
};
1878
/* Hash function for a equiv_class_label_t.  The hash is computed from
   the label bitmap once, when the entry is created, and cached.  */

inline hashval_t
equiv_class_hasher::hash (const value_type *ecl)
{
  return ecl->hashcode;
}
1886
1887 /* Equality function for two equiv_class_label_t's. */
1888
1889 inline bool
1890 equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
1891 {
1892 return (eql1->hashcode == eql2->hashcode
1893 && bitmap_equal_p (eql1->labels, eql2->labels));
1894 }
1895
1896 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1897 classes. */
1898 static hash_table <equiv_class_hasher> pointer_equiv_class_table;
1899
1900 /* A hashtable for mapping a bitmap of labels->location equivalence
1901 classes. */
1902 static hash_table <equiv_class_hasher> location_equiv_class_table;
1903
/* Look up the equivalence class entry in TABLE for the bitmap of
   LABELS, keyed by the bitmap's hash.  If no entry exists yet, insert
   a fresh one whose equivalence class is 0 (i.e. not yet assigned).
   Return the (possibly new) entry.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
  if (!*slot)
    {
      /* Not present yet: create an unassigned entry.  */
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1927
1928 /* Perform offline variable substitution.
1929
1930 This is a worst case quadratic time way of identifying variables
1931 that must have equivalent points-to sets, including those caused by
1932 static cycles, and single entry subgraphs, in the constraint graph.
1933
   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis", in the 14th International
   Static Analysis Symposium (SAS), August 2007.  It is known as the
1937 "HU" algorithm, and is equivalent to value numbering the collapsed
1938 constraint graph including evaluating unions.
1939
1940 The general method of finding equivalence classes is as follows:
1941 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1942 Initialize all non-REF nodes to be direct nodes.
1943 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1944 variable}
1945 For each constraint containing the dereference, we also do the same
1946 thing.
1947
1948 We then compute SCC's in the graph and unify nodes in the same SCC,
1949 including pts sets.
1950
1951 For each non-collapsed node x:
1952 Visit all unvisited explicit incoming edges.
     Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
     where y->x.
1955 Lookup the equivalence class for pts(x).
1956 If we found one, equivalence_class(x) = found class.
1957 Otherwise, equivalence_class(x) = new class, and new_class is
1958 added to the lookup table.
1959
1960 All direct nodes with the same equivalence class can be replaced
1961 with a single representative node.
1962 All unlabeled nodes (label == 0) are not pointers and all edges
1963 involving them can be eliminated.
1964 We perform these optimizations during rewrite_constraints
1965
1966 In addition to pointer equivalence class finding, we also perform
1967 location equivalence class finding. This is the set of variables
1968 that always appear together in points-to sets. We use this to
1969 compress the size of the points-to sets. */
1970
1971 /* Current maximum pointer equivalence class id. */
1972 static int pointer_equiv_class;
1973
1974 /* Current maximum location equivalence class id. */
1975 static int location_equiv_class;
1976
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor and
   implicit-predecessor edges, collapsing each SCC onto its root node
   via si->node_mapping.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  /* Remember N's own DFS number so N can be recognized as an SCC root
     after si->dfs[n] has been lowered below.  */
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest DFS number reachable through W.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root: map every stacked member onto N and merge
	 its edge and points-to bitmaps into N's.  */
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* The collapsed node is direct only if all members were.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2065
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

/* Recursively label node N of GRAPH and its unvisited predecessors.
   SI carries the condensation info: si->node_mapping maps every node
   to its SCC representative.  On return graph->pointer_label[n] holds
   N's pointer equivalence class (0 means "not a pointer") and
   graph->points_to[n] the hash set used to assign that class.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges' points-to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges (self edges and edges from non-pointers).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating a bitmap for N until a second
		 contributing predecessor shows up; with exactly one we
		 can share its set (handled after the loop).  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* Adding the REF node for N makes this set unique to N.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	/* First occurrence of this points-to set: start a new class.  */
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* An equal set already has a class; share its bitmap and
	     free ours.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2169
2170 /* Print the pred graph in dot format. */
2171
2172 static void
2173 dump_pred_graph (struct scc_info *si, FILE *file)
2174 {
2175 unsigned int i;
2176
2177 /* Only print the graph if it has already been initialized: */
2178 if (!graph)
2179 return;
2180
2181 /* Prints the header of the dot file: */
2182 fprintf (file, "strict digraph {\n");
2183 fprintf (file, " node [\n shape = box\n ]\n");
2184 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2185 fprintf (file, "\n // List of nodes and complex constraints in "
2186 "the constraint graph:\n");
2187
2188 /* The next lines print the nodes in the graph together with the
2189 complex constraints attached to them. */
2190 for (i = 1; i < graph->size; i++)
2191 {
2192 if (i == FIRST_REF_NODE)
2193 continue;
2194 if (si->node_mapping[i] != i)
2195 continue;
2196 if (i < FIRST_REF_NODE)
2197 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2198 else
2199 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2200 if (graph->points_to[i]
2201 && !bitmap_empty_p (graph->points_to[i]))
2202 {
2203 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2204 unsigned j;
2205 bitmap_iterator bi;
2206 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2207 fprintf (file, " %d", j);
2208 fprintf (file, " }\"]");
2209 }
2210 fprintf (file, ";\n");
2211 }
2212
2213 /* Go over the edges. */
2214 fprintf (file, "\n // Edges in the constraint graph:\n");
2215 for (i = 1; i < graph->size; i++)
2216 {
2217 unsigned j;
2218 bitmap_iterator bi;
2219 if (si->node_mapping[i] != i)
2220 continue;
2221 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2222 {
2223 unsigned from = si->node_mapping[j];
2224 if (from < FIRST_REF_NODE)
2225 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2226 else
2227 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2228 fprintf (file, " -> ");
2229 if (i < FIRST_REF_NODE)
2230 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2231 else
2232 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2233 fprintf (file, ";\n");
2234 }
2235 }
2236
2237 /* Prints the tail of the dot file. */
2238 fprintf (file, "}\n");
2239 }
2240
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the
   scc_info (node mapping etc.) which the caller later releases via
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table.create (511);
  location_equiv_class_table.create (511);
  /* Class 0 is reserved to mean "non-pointer" / "no class".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The table entry already owns an equal bitmap; ours is
	     redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump the computed mapping and equivalence classes.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2372
/* Free information that was only necessary for variable
   substitution: the scc_info SI, the per-node label/equivalence
   arrays of the (global) pred graph, and the two equiv-class
   hash tables.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  pointer_equiv_class_table.dispose ();
  location_equiv_class_table.dispose ();
  /* Bitmaps built during substitution live on this obstack; release
     them all at once.  */
  bitmap_obstack_release (&iteration_obstack);
}
2390
2391 /* Return an existing node that is equivalent to NODE, which has
2392 equivalence class LABEL, if one exists. Return NODE otherwise. */
2393
2394 static unsigned int
2395 find_equivalent_node (constraint_graph_t graph,
2396 unsigned int node, unsigned int label)
2397 {
2398 /* If the address version of this variable is unused, we can
2399 substitute it for anything else with the same label.
2400 Otherwise, we know the pointers are equivalent, but not the
2401 locations, and we can unite them later. */
2402
2403 if (!bitmap_bit_p (graph->address_taken, node))
2404 {
2405 gcc_checking_assert (label < graph->size);
2406
2407 if (graph->eq_rep[label] != -1)
2408 {
2409 /* Unify the two variables since we know they are equivalent. */
2410 if (unite (graph->eq_rep[label], node))
2411 unify_nodes (graph, graph->eq_rep[label], node, false);
2412 return graph->eq_rep[label];
2413 }
2414 else
2415 {
2416 graph->eq_rep[label] = node;
2417 graph->pe_rep[label] = node;
2418 }
2419 }
2420 else
2421 {
2422 gcc_checking_assert (label < graph->size);
2423 graph->pe[node] = label;
2424 if (graph->pe_rep[label] == -1)
2425 graph->pe_rep[label] = node;
2426 }
2427
2428 return node;
2429 }
2430
2431 /* Unite pointer equivalent but not location equivalent nodes in
2432 GRAPH. This may only be performed once variable substitution is
2433 finished. */
2434
2435 static void
2436 unite_pointer_equivalences (constraint_graph_t graph)
2437 {
2438 unsigned int i;
2439
2440 /* Go through the pointer equivalences and unite them to their
2441 representative, if they aren't already. */
2442 for (i = 1; i < FIRST_REF_NODE; i++)
2443 {
2444 unsigned int label = graph->pe[i];
2445 if (label)
2446 {
2447 int label_rep = graph->pe_rep[label];
2448
2449 if (label_rep == -1)
2450 continue;
2451
2452 label_rep = find (label_rep);
2453 if (label_rep >= 0 && unite (label_rep, find (i)))
2454 unify_nodes (graph, label_rep, i, false);
2455 }
2456 }
2457 }
2458
2459 /* Move complex constraints to the GRAPH nodes they belong to. */
2460
2461 static void
2462 move_complex_constraints (constraint_graph_t graph)
2463 {
2464 int i;
2465 constraint_t c;
2466
2467 FOR_EACH_VEC_ELT (constraints, i, c)
2468 {
2469 if (c)
2470 {
2471 struct constraint_expr lhs = c->lhs;
2472 struct constraint_expr rhs = c->rhs;
2473
2474 if (lhs.type == DEREF)
2475 {
2476 insert_into_complex (graph, lhs.var, c);
2477 }
2478 else if (rhs.type == DEREF)
2479 {
2480 if (!(get_varinfo (lhs.var)->is_special_var))
2481 insert_into_complex (graph, rhs.var, c);
2482 }
2483 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2484 && (lhs.offset != 0 || rhs.offset != 0))
2485 {
2486 insert_into_complex (graph, rhs.var, c);
2487 }
2488 }
2489 }
2490 }
2491
2492
2493 /* Optimize and rewrite complex constraints while performing
2494 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2495 result of perform_variable_substitution. */
2496
2497 static void
2498 rewrite_constraints (constraint_graph_t graph,
2499 struct scc_info *si)
2500 {
2501 int i;
2502 constraint_t c;
2503
2504 #ifdef ENABLE_CHECKING
2505 for (unsigned int j = 0; j < graph->size; j++)
2506 gcc_assert (find (j) == j);
2507 #endif
2508
2509 FOR_EACH_VEC_ELT (constraints, i, c)
2510 {
2511 struct constraint_expr lhs = c->lhs;
2512 struct constraint_expr rhs = c->rhs;
2513 unsigned int lhsvar = find (lhs.var);
2514 unsigned int rhsvar = find (rhs.var);
2515 unsigned int lhsnode, rhsnode;
2516 unsigned int lhslabel, rhslabel;
2517
2518 lhsnode = si->node_mapping[lhsvar];
2519 rhsnode = si->node_mapping[rhsvar];
2520 lhslabel = graph->pointer_label[lhsnode];
2521 rhslabel = graph->pointer_label[rhsnode];
2522
2523 /* See if it is really a non-pointer variable, and if so, ignore
2524 the constraint. */
2525 if (lhslabel == 0)
2526 {
2527 if (dump_file && (dump_flags & TDF_DETAILS))
2528 {
2529
2530 fprintf (dump_file, "%s is a non-pointer variable,"
2531 "ignoring constraint:",
2532 get_varinfo (lhs.var)->name);
2533 dump_constraint (dump_file, c);
2534 fprintf (dump_file, "\n");
2535 }
2536 constraints[i] = NULL;
2537 continue;
2538 }
2539
2540 if (rhslabel == 0)
2541 {
2542 if (dump_file && (dump_flags & TDF_DETAILS))
2543 {
2544
2545 fprintf (dump_file, "%s is a non-pointer variable,"
2546 "ignoring constraint:",
2547 get_varinfo (rhs.var)->name);
2548 dump_constraint (dump_file, c);
2549 fprintf (dump_file, "\n");
2550 }
2551 constraints[i] = NULL;
2552 continue;
2553 }
2554
2555 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2556 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2557 c->lhs.var = lhsvar;
2558 c->rhs.var = rhsvar;
2559 }
2560 }
2561
2562 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2563 part of an SCC, false otherwise. */
2564
2565 static bool
2566 eliminate_indirect_cycles (unsigned int node)
2567 {
2568 if (graph->indirect_cycles[node] != -1
2569 && !bitmap_empty_p (get_varinfo (node)->solution))
2570 {
2571 unsigned int i;
2572 vec<unsigned> queue = vNULL;
2573 int queuepos;
2574 unsigned int to = find (graph->indirect_cycles[node]);
2575 bitmap_iterator bi;
2576
2577 /* We can't touch the solution set and call unify_nodes
2578 at the same time, because unify_nodes is going to do
2579 bitmap unions into it. */
2580
2581 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2582 {
2583 if (find (i) == i && i != to)
2584 {
2585 if (unite (to, i))
2586 queue.safe_push (i);
2587 }
2588 }
2589
2590 for (queuepos = 0;
2591 queue.iterate (queuepos, &i);
2592 queuepos++)
2593 {
2594 unify_nodes (graph, to, i, true);
2595 }
2596 queue.release ();
2597 return true;
2598 }
2599 return false;
2600 }
2601
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  Only
     representatives with a non-empty solution and some work attached
     (successor edges or complex constraints) need processing.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Visiting nodes in topological order reaches the fixpoint in
	 fewer iterations.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
		 is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't have ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* Propagate only the delta since the previous visit.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Record what we are about to propagate so the next
		 visit only propagates new bits.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
			 placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Re-queue the successor if its solution grew.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2758
2759 /* Map from trees to variable infos. */
2760 static struct pointer_map_t *vi_for_tree;
2761
2762
2763 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2764
2765 static void
2766 insert_vi_for_tree (tree t, varinfo_t vi)
2767 {
2768 void **slot = pointer_map_insert (vi_for_tree, t);
2769 gcc_assert (vi);
2770 gcc_assert (*slot == NULL);
2771 *slot = vi;
2772 }
2773
2774 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2775 exist in the map, return NULL, otherwise, return the varinfo we found. */
2776
2777 static varinfo_t
2778 lookup_vi_for_tree (tree t)
2779 {
2780 void **slot = pointer_map_contains (vi_for_tree, t);
2781 if (slot == NULL)
2782 return NULL;
2783
2784 return (varinfo_t) *slot;
2785 }
2786
2787 /* Return a printable name for DECL */
2788
2789 static const char *
2790 alias_get_name (tree decl)
2791 {
2792 const char *res = NULL;
2793 char *temp;
2794 int num_printed = 0;
2795
2796 if (!dump_file)
2797 return "NULL";
2798
2799 if (TREE_CODE (decl) == SSA_NAME)
2800 {
2801 res = get_name (decl);
2802 if (res)
2803 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2804 else
2805 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2806 if (num_printed > 0)
2807 {
2808 res = ggc_strdup (temp);
2809 free (temp);
2810 }
2811 }
2812 else if (DECL_P (decl))
2813 {
2814 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2815 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2816 else
2817 {
2818 res = get_name (decl);
2819 if (!res)
2820 {
2821 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2822 if (num_printed > 0)
2823 {
2824 res = ggc_strdup (temp);
2825 free (temp);
2826 }
2827 }
2828 }
2829 }
2830 if (res != NULL)
2831 return res;
2832
2833 return "NULL";
2834 }
2835
2836 /* Find the variable id for tree T in the map.
2837 If T doesn't exist in the map, create an entry for it and return it. */
2838
2839 static varinfo_t
2840 get_vi_for_tree (tree t)
2841 {
2842 void **slot = pointer_map_contains (vi_for_tree, t);
2843 if (slot == NULL)
2844 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2845
2846 return (varinfo_t) *slot;
2847 }
2848
2849 /* Get a scalar constraint expression for a new temporary variable. */
2850
2851 static struct constraint_expr
2852 new_scalar_tmp_constraint_exp (const char *name)
2853 {
2854 struct constraint_expr tmp;
2855 varinfo_t vi;
2856
2857 vi = new_var_info (NULL_TREE, name);
2858 vi->offset = 0;
2859 vi->size = -1;
2860 vi->fullsize = -1;
2861 vi->is_full_var = 1;
2862
2863 tmp.var = vi->id;
2864 tmp.type = SCALAR;
2865 tmp.offset = 0;
2866
2867 return tmp;
2868 }
2869
/* Get a constraint expression vector from an SSA_VAR_P node T and
   append it to *RESULTS.  If ADDRESS_P is true the result will have
   its address taken, so only the base variable is pushed; otherwise
   constraints for all sub-fields of T are appended as well.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias && node->analyzed)
	{
	  node = varpool_variable_node (node, NULL);
	  t = node->decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* NOTE(review): the gcc_unreachable makes this branch
	 deliberately unreachable; the two assignments below it are
	 dead code kept for documentation.  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2933
2934 /* Process constraint T, performing various simplifications and then
2935 adding it to our list of overall constraints. */
2936
2937 static void
2938 process_constraint (constraint_t t)
2939 {
2940 struct constraint_expr rhs = t->rhs;
2941 struct constraint_expr lhs = t->lhs;
2942
2943 gcc_assert (rhs.var < varmap.length ());
2944 gcc_assert (lhs.var < varmap.length ());
2945
2946 /* If we didn't get any useful constraint from the lhs we get
2947 &ANYTHING as fallback from get_constraint_for. Deal with
2948 it here by turning it into *ANYTHING. */
2949 if (lhs.type == ADDRESSOF
2950 && lhs.var == anything_id)
2951 lhs.type = DEREF;
2952
2953 /* ADDRESSOF on the lhs is invalid. */
2954 gcc_assert (lhs.type != ADDRESSOF);
2955
2956 /* We shouldn't add constraints from things that cannot have pointers.
2957 It's not completely trivial to avoid in the callers, so do it here. */
2958 if (rhs.type != ADDRESSOF
2959 && !get_varinfo (rhs.var)->may_have_pointers)
2960 return;
2961
2962 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2963 if (!get_varinfo (lhs.var)->may_have_pointers)
2964 return;
2965
2966 /* This can happen in our IR with things like n->a = *p */
2967 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2968 {
2969 /* Split into tmp = *rhs, *lhs = tmp */
2970 struct constraint_expr tmplhs;
2971 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2972 process_constraint (new_constraint (tmplhs, rhs));
2973 process_constraint (new_constraint (lhs, tmplhs));
2974 }
2975 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2976 {
2977 /* Split into tmp = &rhs, *lhs = tmp */
2978 struct constraint_expr tmplhs;
2979 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2980 process_constraint (new_constraint (tmplhs, rhs));
2981 process_constraint (new_constraint (lhs, tmplhs));
2982 }
2983 else
2984 {
2985 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2986 constraints.safe_push (t);
2987 }
2988 }
2989
2990
/* Return the position, in bits, of FIELD_DECL from the beginning of its
   structure, or -1 if the offsets are not host-integer constants.  */

static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
  /* Bail out for variable or oversized field offsets.  */
  if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
      || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
    return -1;

  /* NOTE(review): this reads the raw low word via TREE_INT_CST_LOW;
     the fits-shwi checks above should guarantee the values are
     representable, making the implicit conversion to HOST_WIDE_INT
     safe -- confirm if this file is updated to tree_to_shwi.  */
  return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
	  + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
}
3004
3005
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET may be
   NULL_TREE or non-constant, in which case the offset is treated as
   unknown.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Push an ADDRESSOF for every sub-field of the variable,
	     starting from its head.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ??? If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != 0)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	/* Otherwise record the (possibly UNKNOWN) bit offset on the
	   expression itself.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3124
3125
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If ADDRESS_P is true the result will have its address taken.
   If LHS_P is true then the constraint expression is assumed to be used
   as the lhs.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  Strip all handled components to check whether the
     innermost base is a constant zero.  */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  /* A component of a literal zero "object" is modeled by the special
     integer variable.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  /* Handle type-punning through unions.  If we are extracting a pointer
     from a union via a possibly type-punning access that pointer
     points to anything, similar to a conversion of an integer to
     a pointer.  */
  if (!lhs_p)
    {
      tree u;
      for (u = t;
	   TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
	   u = TREE_OPERAND (u, 0))
	if (TREE_CODE (u) == COMPONENT_REF
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
	  {
	    struct constraint_expr temp;

	    temp.offset = 0;
	    temp.var = anything_id;
	    temp.type = ADDRESSOF;
	    results->safe_push (temp);
	    return;
	  }
    }

  /* Resolve the access to base + bit range.  */
  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true, lhs_p);
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  /* Push one constraint per subfield overlapping the access;
	     for an address-of a single overlapping field suffices.  */
	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	    {
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      if (bitpos == -1
	  || bitsize != bitmaxsize
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on a
         VIEW_CONVERT_EXPR <>(&foobar).  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3283
3284
3285 /* Dereference the constraint expression CONS, and return the result.
3286 DEREF (ADDRESSOF) = SCALAR
3287 DEREF (SCALAR) = DEREF
3288 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3289 This is needed so that we can handle dereferencing DEREF constraints. */
3290
3291 static void
3292 do_deref (vec<ce_s> *constraints)
3293 {
3294 struct constraint_expr *c;
3295 unsigned int i = 0;
3296
3297 FOR_EACH_VEC_ELT (*constraints, i, c)
3298 {
3299 if (c->type == SCALAR)
3300 c->type = DEREF;
3301 else if (c->type == ADDRESSOF)
3302 c->type = SCALAR;
3303 else if (c->type == DEREF)
3304 {
3305 struct constraint_expr tmplhs;
3306 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3307 process_constraint (new_constraint (tmplhs, *c));
3308 c->var = tmplhs.var;
3309 }
3310 else
3311 gcc_unreachable ();
3312 }
3313 }
3314
3315 /* Given a tree T, return the constraint expression for taking the
3316 address of it. */
3317
3318 static void
3319 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3320 {
3321 struct constraint_expr *c;
3322 unsigned int i;
3323
3324 get_constraint_for_1 (t, results, true, true);
3325
3326 FOR_EACH_VEC_ELT (*results, i, c)
3327 {
3328 if (c->type == DEREF)
3329 c->type = SCALAR;
3330 else
3331 c->type = ADDRESSOF;
3332 }
3333 }
3334
/* Given a tree T, return the constraint expression for it.
   The constraints are appended to RESULTS.  ADDRESS_P is true if the
   caller will take the address of the result; LHS_P is true if the
   expression is used as a lhs.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the class of tree code; each class has a small set of
     codes we model precisely, the rest fall through to &ANYTHING.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a scalar base with subfields push all subvariables
		 that may be covered by the access size.  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    /* Unknown size: -1 wraps to the maximum, covering
		       all following subfields.  */
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      /* Collect the constraints of every initializer value.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3519
3520 /* Given a gimple tree T, return the constraint expression vector for it. */
3521
3522 static void
3523 get_constraint_for (tree t, vec<ce_s> *results)
3524 {
3525 gcc_assert (results->length () == 0);
3526
3527 get_constraint_for_1 (t, results, false, true);
3528 }
3529
3530 /* Given a gimple tree T, return the constraint expression vector for it
3531 to be used as the rhs of a constraint. */
3532
3533 static void
3534 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3535 {
3536 gcc_assert (results->length () == 0);
3537
3538 get_constraint_for_1 (t, results, false, false);
3539 }
3540
3541
3542 /* Efficiently generates constraints from all entries in *RHSC to all
3543 entries in *LHSC. */
3544
3545 static void
3546 process_all_all_constraints (vec<ce_s> lhsc,
3547 vec<ce_s> rhsc)
3548 {
3549 struct constraint_expr *lhsp, *rhsp;
3550 unsigned i, j;
3551
3552 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3553 {
3554 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3555 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3556 process_constraint (new_constraint (*lhsp, *rhsp));
3557 }
3558 else
3559 {
3560 struct constraint_expr tmp;
3561 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3562 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3563 process_constraint (new_constraint (tmp, *rhsp));
3564 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3565 process_constraint (new_constraint (*lhsp, tmp));
3566 }
3567 }
3568
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is an indirect access (or the lhs is completely
     unknown) we cannot match fields pairwise; fall back to an
     all-to-all copy with unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      /* Walk the two subfield lists in lock-step (J over lhs fields,
	 K over rhs fields), emitting a copy for each overlapping
	 lhs/rhs field pair.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the cursor that ends first; the access offsets
	     compensate for differing positions within the objects.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3640
3641 /* Create constraints ID = { rhsc }. */
3642
3643 static void
3644 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3645 {
3646 struct constraint_expr *c;
3647 struct constraint_expr includes;
3648 unsigned int j;
3649
3650 includes.var = id;
3651 includes.offset = 0;
3652 includes.type = SCALAR;
3653
3654 FOR_EACH_VEC_ELT (rhsc, j, c)
3655 process_constraint (new_constraint (includes, *c));
3656 }
3657
3658 /* Create a constraint ID = OP. */
3659
3660 static void
3661 make_constraint_to (unsigned id, tree op)
3662 {
3663 vec<ce_s> rhsc = vNULL;
3664 get_constraint_for_rhs (op, &rhsc);
3665 make_constraints_to (id, rhsc);
3666 rhsc.release ();
3667 }
3668
3669 /* Create a constraint ID = &FROM. */
3670
3671 static void
3672 make_constraint_from (varinfo_t vi, int from)
3673 {
3674 struct constraint_expr lhs, rhs;
3675
3676 lhs.var = vi->id;
3677 lhs.offset = 0;
3678 lhs.type = SCALAR;
3679
3680 rhs.var = from;
3681 rhs.offset = 0;
3682 rhs.type = ADDRESSOF;
3683 process_constraint (new_constraint (lhs, rhs));
3684 }
3685
3686 /* Create a constraint ID = FROM. */
3687
3688 static void
3689 make_copy_constraint (varinfo_t vi, int from)
3690 {
3691 struct constraint_expr lhs, rhs;
3692
3693 lhs.var = vi->id;
3694 lhs.offset = 0;
3695 lhs.type = SCALAR;
3696
3697 rhs.var = from;
3698 rhs.offset = 0;
3699 rhs.type = SCALAR;
3700 process_constraint (new_constraint (lhs, rhs));
3701 }
3702
3703 /* Make constraints necessary to make OP escape. */
3704
3705 static void
3706 make_escape_constraint (tree op)
3707 {
3708 make_constraint_to (escaped_id, op);
3709 }
3710
3711 /* Add constraints to that the solution of VI is transitively closed. */
3712
3713 static void
3714 make_transitive_closure_constraints (varinfo_t vi)
3715 {
3716 struct constraint_expr lhs, rhs;
3717
3718 /* VAR = *VAR; */
3719 lhs.type = SCALAR;
3720 lhs.var = vi->id;
3721 lhs.offset = 0;
3722 rhs.type = DEREF;
3723 rhs.var = vi->id;
3724 rhs.offset = 0;
3725 process_constraint (new_constraint (lhs, rhs));
3726
3727 /* VAR = VAR + UNKNOWN; */
3728 lhs.type = SCALAR;
3729 lhs.var = vi->id;
3730 lhs.offset = 0;
3731 rhs.type = SCALAR;
3732 rhs.var = vi->id;
3733 rhs.offset = UNKNOWN_OFFSET;
3734 process_constraint (new_constraint (lhs, rhs));
3735 }
3736
/* Temporary storage for fake var decls; build_fake_var_decl allocates
   its VAR_DECL nodes from this obstack instead of GC memory.  */
struct obstack fake_var_decl_obstack;
3739
/* Build a fake VAR_DECL acting as referrer to a DECL_UID.
   Returns a zero-initialized VAR_DECL of TYPE with a fresh DECL_UID,
   allocated on fake_var_decl_obstack rather than in GC memory.  */

static tree
build_fake_var_decl (tree type)
{
  /* Allocate the raw node on the local obstack; NOTE(review): this
     assumes the decl never escapes into GC-scanned structures --
     confirm against users of this function.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): -1 presumably makes the points-to UID default to
     DECL_UID -- confirm SET_DECL_PT_UID semantics.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3754
3755 /* Create a new artificial heap variable with NAME.
3756 Return the created variable. */
3757
3758 static varinfo_t
3759 make_heapvar (const char *name)
3760 {
3761 varinfo_t vi;
3762 tree heapvar;
3763
3764 heapvar = build_fake_var_decl (ptr_type_node);
3765 DECL_EXTERNAL (heapvar) = 1;
3766
3767 vi = new_var_info (heapvar, name);
3768 vi->is_artificial_var = true;
3769 vi->is_heap_var = true;
3770 vi->is_unknown_size_var = true;
3771 vi->offset = 0;
3772 vi->fullsize = ~0;
3773 vi->size = ~0;
3774 vi->is_full_var = true;
3775 insert_vi_for_tree (heapvar, vi);
3776
3777 return vi;
3778 }
3779
3780 /* Create a new artificial heap variable with NAME and make a
3781 constraint from it to LHS. Set flags according to a tag used
3782 for tracking restrict pointers. */
3783
3784 static varinfo_t
3785 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3786 {
3787 varinfo_t vi = make_heapvar (name);
3788 vi->is_global_var = 1;
3789 vi->may_have_pointers = 1;
3790 make_constraint_from (lhs, vi->id);
3791 return vi;
3792 }
3793
3794 /* Create a new artificial heap variable with NAME and make a
3795 constraint from it to LHS. Set flags according to a tag used
3796 for tracking restrict pointers and make the artificial heap
3797 point to global memory. */
3798
3799 static varinfo_t
3800 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3801 {
3802 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3803 make_copy_constraint (vi, nonlocal_id);
3804 return vi;
3805 }
3806
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  The values below are used
   as offsets into the function's varinfo (see
   get_function_part_constraint).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3815
3816 /* Get a constraint for the requested part of a function designator FI
3817 when operating in IPA mode. */
3818
3819 static struct constraint_expr
3820 get_function_part_constraint (varinfo_t fi, unsigned part)
3821 {
3822 struct constraint_expr c;
3823
3824 gcc_assert (in_ipa_mode);
3825
3826 if (fi->id == anything_id)
3827 {
3828 /* ??? We probably should have a ANYFN special variable. */
3829 c.var = anything_id;
3830 c.offset = 0;
3831 c.type = SCALAR;
3832 }
3833 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3834 {
3835 varinfo_t ai = first_vi_for_offset (fi, part);
3836 if (ai)
3837 c.var = ai->id;
3838 else
3839 c.var = anything_id;
3840 c.offset = 0;
3841 c.type = SCALAR;
3842 }
3843 else
3844 {
3845 c.var = fi->id;
3846 c.offset = part;
3847 c.type = DEREF;
3848 }
3849
3850 return c;
3851 }
3852
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS.  Argument escape/use constraints are derived from the per-arg
   EAF_* flags; RESULTS receives what the call may return.  */

static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
         any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	   && (flags & EAF_NOESCAPE))
	{
	  /* Not clobbered and not escaping: only the call-use set is
	     affected.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      /* Indirect use: close the set transitively through a
		 temporary before copying into the uses set.  */
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Clobbered but not escaping: feeds both the use and the
	     clobber sets.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3954
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  RHSC holds the
   caller-computed constraints for the call's return value; FNDECL is
   the called function's decl if known, NULL_TREE otherwise.  */

static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.
     NOTE(review): the incoming FLAGS parameter is dead -- it is
     unconditionally overwritten here.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return gets a fresh heap variable to point to.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP");
      /* We're marking allocated storage local, we deal with it becoming
         global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
4021
4022 /* For non-IPA mode, generate constraints necessary for a call of a
4023 const function that returns a pointer in the statement STMT. */
4024
4025 static void
4026 handle_const_call (gimple stmt, vec<ce_s> *results)
4027 {
4028 struct constraint_expr rhsc;
4029 unsigned int k;
4030
4031 /* Treat nested const functions the same as pure functions as far
4032 as the static chain is concerned. */
4033 if (gimple_call_chain (stmt))
4034 {
4035 varinfo_t uses = get_call_use_vi (stmt);
4036 make_transitive_closure_constraints (uses);
4037 make_constraint_to (uses->id, gimple_call_chain (stmt));
4038 rhsc.var = uses->id;
4039 rhsc.offset = 0;
4040 rhsc.type = SCALAR;
4041 results->safe_push (rhsc);
4042 }
4043
4044 /* May return arguments. */
4045 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4046 {
4047 tree arg = gimple_call_arg (stmt, k);
4048 vec<ce_s> argc = vNULL;
4049 unsigned i;
4050 struct constraint_expr *argp;
4051 get_constraint_for_rhs (arg, &argc);
4052 FOR_EACH_VEC_ELT (argc, i, argp)
4053 results->safe_push (*argp);
4054 argc.release ();
4055 }
4056
4057 /* May return addresses of globals. */
4058 rhsc.var = nonlocal_id;
4059 rhsc.offset = 0;
4060 rhsc.type = ADDRESSOF;
4061 results->safe_push (rhsc);
4062 }
4063
4064 /* For non-IPA mode, generate constraints necessary for a call to a
4065 pure function in statement STMT. */
4066
4067 static void
4068 handle_pure_call (gimple stmt, vec<ce_s> *results)
4069 {
4070 struct constraint_expr rhsc;
4071 unsigned i;
4072 varinfo_t uses = NULL;
4073
4074 /* Memory reached from pointer arguments is call-used. */
4075 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4076 {
4077 tree arg = gimple_call_arg (stmt, i);
4078 if (!uses)
4079 {
4080 uses = get_call_use_vi (stmt);
4081 make_transitive_closure_constraints (uses);
4082 }
4083 make_constraint_to (uses->id, arg);
4084 }
4085
4086 /* The static chain is used as well. */
4087 if (gimple_call_chain (stmt))
4088 {
4089 if (!uses)
4090 {
4091 uses = get_call_use_vi (stmt);
4092 make_transitive_closure_constraints (uses);
4093 }
4094 make_constraint_to (uses->id, gimple_call_chain (stmt));
4095 }
4096
4097 /* Pure functions may return call-used and nonlocal memory. */
4098 if (uses)
4099 {
4100 rhsc.var = uses->id;
4101 rhsc.offset = 0;
4102 rhsc.type = SCALAR;
4103 results->safe_push (rhsc);
4104 }
4105 rhsc.var = nonlocal_id;
4106 rhsc.offset = 0;
4107 rhsc.type = SCALAR;
4108 results->safe_push (rhsc);
4109 }
4110
4111
4112 /* Return the varinfo for the callee of CALL. */
4113
4114 static varinfo_t
4115 get_fi_for_callee (gimple call)
4116 {
4117 tree decl, fn = gimple_call_fn (call);
4118
4119 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4120 fn = OBJ_TYPE_REF_EXPR (fn);
4121
4122 /* If we can directly resolve the function being called, do so.
4123 Otherwise, it must be some sort of indirect expression that
4124 we should still be able to handle. */
4125 decl = gimple_call_addr_fndecl (fn);
4126 if (decl)
4127 return get_vi_for_tree (decl);
4128
4129 /* If the function is anything other than a SSA name pointer we have no
4130 clue and should be getting ANYFN (well, ANYTHING for now). */
4131 if (!fn || TREE_CODE (fn) != SSA_NAME)
4132 return get_varinfo (anything_id);
4133
4134 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4135 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4136 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4137 fn = SSA_NAME_VAR (fn);
4138
4139 return get_vi_for_tree (fn);
4140 }
4141
4142 /* Create constraints for the builtin call T. Return true if the call
4143 was handled, otherwise false. */
4144
4145 static bool
4146 find_func_aliases_for_builtin_call (gimple t)
4147 {
4148 tree fndecl = gimple_call_fndecl (t);
4149 vec<ce_s> lhsc = vNULL;
4150 vec<ce_s> rhsc = vNULL;
4151 varinfo_t fi;
4152
4153 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4154 /* ??? All builtins that are handled here need to be handled
4155 in the alias-oracle query functions explicitly! */
4156 switch (DECL_FUNCTION_CODE (fndecl))
4157 {
4158 /* All the following functions return a pointer to the same object
4159 as their first argument points to. The functions do not add
4160 to the ESCAPED solution. The functions make the first argument
4161 pointed to memory point to what the second argument pointed to
4162 memory points to. */
4163 case BUILT_IN_STRCPY:
4164 case BUILT_IN_STRNCPY:
4165 case BUILT_IN_BCOPY:
4166 case BUILT_IN_MEMCPY:
4167 case BUILT_IN_MEMMOVE:
4168 case BUILT_IN_MEMPCPY:
4169 case BUILT_IN_STPCPY:
4170 case BUILT_IN_STPNCPY:
4171 case BUILT_IN_STRCAT:
4172 case BUILT_IN_STRNCAT:
4173 case BUILT_IN_STRCPY_CHK:
4174 case BUILT_IN_STRNCPY_CHK:
4175 case BUILT_IN_MEMCPY_CHK:
4176 case BUILT_IN_MEMMOVE_CHK:
4177 case BUILT_IN_MEMPCPY_CHK:
4178 case BUILT_IN_STPCPY_CHK:
4179 case BUILT_IN_STPNCPY_CHK:
4180 case BUILT_IN_STRCAT_CHK:
4181 case BUILT_IN_STRNCAT_CHK:
4182 case BUILT_IN_TM_MEMCPY:
4183 case BUILT_IN_TM_MEMMOVE:
4184 {
4185 tree res = gimple_call_lhs (t);
4186 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4187 == BUILT_IN_BCOPY ? 1 : 0));
4188 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4189 == BUILT_IN_BCOPY ? 0 : 1));
4190 if (res != NULL_TREE)
4191 {
4192 get_constraint_for (res, &lhsc);
4193 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4194 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4195 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4196 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4197 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4198 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4199 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4200 else
4201 get_constraint_for (dest, &rhsc);
4202 process_all_all_constraints (lhsc, rhsc);
4203 lhsc.release ();
4204 rhsc.release ();
4205 }
4206 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4207 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4208 do_deref (&lhsc);
4209 do_deref (&rhsc);
4210 process_all_all_constraints (lhsc, rhsc);
4211 lhsc.release ();
4212 rhsc.release ();
4213 return true;
4214 }
4215 case BUILT_IN_MEMSET:
4216 case BUILT_IN_MEMSET_CHK:
4217 case BUILT_IN_TM_MEMSET:
4218 {
4219 tree res = gimple_call_lhs (t);
4220 tree dest = gimple_call_arg (t, 0);
4221 unsigned i;
4222 ce_s *lhsp;
4223 struct constraint_expr ac;
4224 if (res != NULL_TREE)
4225 {
4226 get_constraint_for (res, &lhsc);
4227 get_constraint_for (dest, &rhsc);
4228 process_all_all_constraints (lhsc, rhsc);
4229 lhsc.release ();
4230 rhsc.release ();
4231 }
4232 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4233 do_deref (&lhsc);
4234 if (flag_delete_null_pointer_checks
4235 && integer_zerop (gimple_call_arg (t, 1)))
4236 {
4237 ac.type = ADDRESSOF;
4238 ac.var = nothing_id;
4239 }
4240 else
4241 {
4242 ac.type = SCALAR;
4243 ac.var = integer_id;
4244 }
4245 ac.offset = 0;
4246 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4247 process_constraint (new_constraint (*lhsp, ac));
4248 lhsc.release ();
4249 return true;
4250 }
4251 case BUILT_IN_ASSUME_ALIGNED:
4252 {
4253 tree res = gimple_call_lhs (t);
4254 tree dest = gimple_call_arg (t, 0);
4255 if (res != NULL_TREE)
4256 {
4257 get_constraint_for (res, &lhsc);
4258 get_constraint_for (dest, &rhsc);
4259 process_all_all_constraints (lhsc, rhsc);
4260 lhsc.release ();
4261 rhsc.release ();
4262 }
4263 return true;
4264 }
4265 /* All the following functions do not return pointers, do not
4266 modify the points-to sets of memory reachable from their
4267 arguments and do not add to the ESCAPED solution. */
4268 case BUILT_IN_SINCOS:
4269 case BUILT_IN_SINCOSF:
4270 case BUILT_IN_SINCOSL:
4271 case BUILT_IN_FREXP:
4272 case BUILT_IN_FREXPF:
4273 case BUILT_IN_FREXPL:
4274 case BUILT_IN_GAMMA_R:
4275 case BUILT_IN_GAMMAF_R:
4276 case BUILT_IN_GAMMAL_R:
4277 case BUILT_IN_LGAMMA_R:
4278 case BUILT_IN_LGAMMAF_R:
4279 case BUILT_IN_LGAMMAL_R:
4280 case BUILT_IN_MODF:
4281 case BUILT_IN_MODFF:
4282 case BUILT_IN_MODFL:
4283 case BUILT_IN_REMQUO:
4284 case BUILT_IN_REMQUOF:
4285 case BUILT_IN_REMQUOL:
4286 case BUILT_IN_FREE:
4287 return true;
4288 case BUILT_IN_STRDUP:
4289 case BUILT_IN_STRNDUP:
4290 if (gimple_call_lhs (t))
4291 {
4292 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4293 vNULL, fndecl);
4294 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4295 NULL_TREE, &lhsc);
4296 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4297 NULL_TREE, &rhsc);
4298 do_deref (&lhsc);
4299 do_deref (&rhsc);
4300 process_all_all_constraints (lhsc, rhsc);
4301 lhsc.release ();
4302 rhsc.release ();
4303 return true;
4304 }
4305 break;
4306 /* String / character search functions return a pointer into the
4307 source string or NULL. */
4308 case BUILT_IN_INDEX:
4309 case BUILT_IN_STRCHR:
4310 case BUILT_IN_STRRCHR:
4311 case BUILT_IN_MEMCHR:
4312 case BUILT_IN_STRSTR:
4313 case BUILT_IN_STRPBRK:
4314 if (gimple_call_lhs (t))
4315 {
4316 tree src = gimple_call_arg (t, 0);
4317 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4318 constraint_expr nul;
4319 nul.var = nothing_id;
4320 nul.offset = 0;
4321 nul.type = ADDRESSOF;
4322 rhsc.safe_push (nul);
4323 get_constraint_for (gimple_call_lhs (t), &lhsc);
4324 process_all_all_constraints (lhsc, rhsc);
4325 lhsc.release ();
4326 rhsc.release ();
4327 }
4328 return true;
4329 /* Trampolines are special - they set up passing the static
4330 frame. */
4331 case BUILT_IN_INIT_TRAMPOLINE:
4332 {
4333 tree tramp = gimple_call_arg (t, 0);
4334 tree nfunc = gimple_call_arg (t, 1);
4335 tree frame = gimple_call_arg (t, 2);
4336 unsigned i;
4337 struct constraint_expr lhs, *rhsp;
4338 if (in_ipa_mode)
4339 {
4340 varinfo_t nfi = NULL;
4341 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4342 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4343 if (nfi)
4344 {
4345 lhs = get_function_part_constraint (nfi, fi_static_chain);
4346 get_constraint_for (frame, &rhsc);
4347 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4348 process_constraint (new_constraint (lhs, *rhsp));
4349 rhsc.release ();
4350
4351 /* Make the frame point to the function for
4352 the trampoline adjustment call. */
4353 get_constraint_for (tramp, &lhsc);
4354 do_deref (&lhsc);
4355 get_constraint_for (nfunc, &rhsc);
4356 process_all_all_constraints (lhsc, rhsc);
4357 rhsc.release ();
4358 lhsc.release ();
4359
4360 return true;
4361 }
4362 }
4363 /* Else fallthru to generic handling which will let
4364 the frame escape. */
4365 break;
4366 }
4367 case BUILT_IN_ADJUST_TRAMPOLINE:
4368 {
4369 tree tramp = gimple_call_arg (t, 0);
4370 tree res = gimple_call_lhs (t);
4371 if (in_ipa_mode && res)
4372 {
4373 get_constraint_for (res, &lhsc);
4374 get_constraint_for (tramp, &rhsc);
4375 do_deref (&rhsc);
4376 process_all_all_constraints (lhsc, rhsc);
4377 rhsc.release ();
4378 lhsc.release ();
4379 }
4380 return true;
4381 }
4382 CASE_BUILT_IN_TM_STORE (1):
4383 CASE_BUILT_IN_TM_STORE (2):
4384 CASE_BUILT_IN_TM_STORE (4):
4385 CASE_BUILT_IN_TM_STORE (8):
4386 CASE_BUILT_IN_TM_STORE (FLOAT):
4387 CASE_BUILT_IN_TM_STORE (DOUBLE):
4388 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4389 CASE_BUILT_IN_TM_STORE (M64):
4390 CASE_BUILT_IN_TM_STORE (M128):
4391 CASE_BUILT_IN_TM_STORE (M256):
4392 {
4393 tree addr = gimple_call_arg (t, 0);
4394 tree src = gimple_call_arg (t, 1);
4395
4396 get_constraint_for (addr, &lhsc);
4397 do_deref (&lhsc);
4398 get_constraint_for (src, &rhsc);
4399 process_all_all_constraints (lhsc, rhsc);
4400 lhsc.release ();
4401 rhsc.release ();
4402 return true;
4403 }
4404 CASE_BUILT_IN_TM_LOAD (1):
4405 CASE_BUILT_IN_TM_LOAD (2):
4406 CASE_BUILT_IN_TM_LOAD (4):
4407 CASE_BUILT_IN_TM_LOAD (8):
4408 CASE_BUILT_IN_TM_LOAD (FLOAT):
4409 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4410 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4411 CASE_BUILT_IN_TM_LOAD (M64):
4412 CASE_BUILT_IN_TM_LOAD (M128):
4413 CASE_BUILT_IN_TM_LOAD (M256):
4414 {
4415 tree dest = gimple_call_lhs (t);
4416 tree addr = gimple_call_arg (t, 0);
4417
4418 get_constraint_for (dest, &lhsc);
4419 get_constraint_for (addr, &rhsc);
4420 do_deref (&rhsc);
4421 process_all_all_constraints (lhsc, rhsc);
4422 lhsc.release ();
4423 rhsc.release ();
4424 return true;
4425 }
4426 /* Variadic argument handling needs to be handled in IPA
4427 mode as well. */
4428 case BUILT_IN_VA_START:
4429 {
4430 tree valist = gimple_call_arg (t, 0);
4431 struct constraint_expr rhs, *lhsp;
4432 unsigned i;
4433 get_constraint_for (valist, &lhsc);
4434 do_deref (&lhsc);
4435 /* The va_list gets access to pointers in variadic
4436 arguments. Which we know in the case of IPA analysis
4437 and otherwise are just all nonlocal variables. */
4438 if (in_ipa_mode)
4439 {
4440 fi = lookup_vi_for_tree (cfun->decl);
4441 rhs = get_function_part_constraint (fi, ~0);
4442 rhs.type = ADDRESSOF;
4443 }
4444 else
4445 {
4446 rhs.var = nonlocal_id;
4447 rhs.type = ADDRESSOF;
4448 rhs.offset = 0;
4449 }
4450 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4451 process_constraint (new_constraint (*lhsp, rhs));
4452 lhsc.release ();
4453 /* va_list is clobbered. */
4454 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4455 return true;
4456 }
4457 /* va_end doesn't have any effect that matters. */
4458 case BUILT_IN_VA_END:
4459 return true;
4460 /* Alternate return. Simply give up for now. */
4461 case BUILT_IN_RETURN:
4462 {
4463 fi = NULL;
4464 if (!in_ipa_mode
4465 || !(fi = get_vi_for_tree (cfun->decl)))
4466 make_constraint_from (get_varinfo (escaped_id), anything_id);
4467 else if (in_ipa_mode
4468 && fi != NULL)
4469 {
4470 struct constraint_expr lhs, rhs;
4471 lhs = get_function_part_constraint (fi, fi_result);
4472 rhs.var = anything_id;
4473 rhs.offset = 0;
4474 rhs.type = SCALAR;
4475 process_constraint (new_constraint (lhs, rhs));
4476 }
4477 return true;
4478 }
4479 /* printf-style functions may have hooks to set pointers to
4480 point to somewhere into the generated string. Leave them
4481 for a later exercise... */
4482 default:
4483 /* Fallthru to general call handling. */;
4484 }
4485
4486 return false;
4487 }
4488
/* Create constraints for the call statement T.

   Dispatches to precise builtin handling when possible; otherwise in
   non-IPA mode models the call through its ECF flags, and in IPA mode
   wires arguments/result/static chain to the callee's function info.  */

static void
find_func_aliases_for_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Builtins with known semantics are handled precisely; if that
     succeeded there is nothing more to do.  */
  if (fndecl != NULL_TREE
      && DECL_BUILT_IN (fndecl)
      && find_func_aliases_for_builtin_call (t))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fndecl && !fi->is_fn_info))
    {
      /* Conservative handling: classify the call by its ECF flags.
	 NOTE(review): this inner rhsc shadows the outer one above.  */
      vec<ce_s> rhsc = vNULL;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
      rhsc.release ();
    }
  else
    {
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, j);

	  get_constraint_for_rhs (arg, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_parm_base + j);
	  /* Drain rhsc as we go so it is empty for the next argument.  */
	  while (rhsc.length () != 0)
	    {
	      rhsp = &rhsc.last ();
	      process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.pop ();
	    }
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* A result passed by invisible reference needs an extra
	     dereference of the callee's result slot.  */
	  if (fndecl
	      && DECL_RESULT (fndecl)
	      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	    {
	      vec<ce_s> tem = vNULL;
	      tem.safe_push (rhs);
	      do_deref (&tem);
	      rhs = tem[0];
	      tem.release ();
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	}

      /* If we pass the result decl by reference, honor that.  */
      if (lhsop
	  && fndecl
	  && DECL_RESULT (fndecl)
	  && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for_address_of (lhsop, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_result);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4604
/* Walk statement ORIGT setting up aliasing constraints according to the
   references found in it.  This function is the main part of the
   constraint builder.  It handles PHIs, calls, assignments, returns
   and asms; everything else generates no constraints.  */

static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      /* Drain rhsc so it is empty for the next phi argument.  */
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
	    /* If the operation produces a floating point result then
	       assume the value is not produced to transfer a pointer.  */
	    ;
	  else if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Copies and pointer-preserving conversions transfer the
	       points-to set of the operand.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* In IPA mode feed the return value into this function's
	     result slot instead of letting it escape.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      /* NOTE(review): this inner lhsc shadows the outer vec.  */
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4826
4827
4828 /* Create a constraint adding to the clobber set of FI the memory
4829 pointed to by PTR. */
4830
4831 static void
4832 process_ipa_clobber (varinfo_t fi, tree ptr)
4833 {
4834 vec<ce_s> ptrc = vNULL;
4835 struct constraint_expr *c, lhs;
4836 unsigned i;
4837 get_constraint_for_rhs (ptr, &ptrc);
4838 lhs = get_function_part_constraint (fi, fi_clobbers);
4839 FOR_EACH_VEC_ELT (ptrc, i, c)
4840 process_constraint (new_constraint (lhs, *c));
4841 ptrc.release ();
4842 }
4843
/* Walk statement ORIGT setting up clobber and use constraints according
   to the references found in it.  This function is a main part of the
   IPA constraint builder.  Builtins with known semantics get precise
   clobber/use constraints; other calls propagate the callee's
   clobbers/uses to the caller.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip component references down to the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only non-local stores are interesting: a decl that is not an
	 automatic variable of this function, or any indirect store
	 not through the address of such an automatic variable.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  /* NOTE(review): this constraint_expr lhsc shadows the outer vec.  */
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Same non-local filtering as for stores above.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy has its source and destination arguments swapped
		 relative to the mem*/str* family.  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants do not refer to memory.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: it may clobber and use anything.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
5121
5122
5123 /* Find the first varinfo in the same variable as START that overlaps with
5124 OFFSET. Return NULL if we can't find one. */
5125
5126 static varinfo_t
5127 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5128 {
5129 /* If the offset is outside of the variable, bail out. */
5130 if (offset >= start->fullsize)
5131 return NULL;
5132
5133 /* If we cannot reach offset from start, lookup the first field
5134 and start from there. */
5135 if (start->offset > offset)
5136 start = get_varinfo (start->head);
5137
5138 while (start)
5139 {
5140 /* We may not find a variable in the field list with the actual
5141 offset when when we have glommed a structure to a variable.
5142 In that case, however, offset should still be within the size
5143 of the variable. */
5144 if (offset >= start->offset
5145 && (offset - start->offset) < start->size)
5146 return start;
5147
5148 start = vi_next (start);
5149 }
5150
5151 return NULL;
5152 }
5153
5154 /* Find the first varinfo in the same variable as START that overlaps with
5155 OFFSET. If there is no such varinfo the varinfo directly preceding
5156 OFFSET is returned. */
5157
5158 static varinfo_t
5159 first_or_preceding_vi_for_offset (varinfo_t start,
5160 unsigned HOST_WIDE_INT offset)
5161 {
5162 /* If we cannot reach offset from start, lookup the first field
5163 and start from there. */
5164 if (start->offset > offset)
5165 start = get_varinfo (start->head);
5166
5167 /* We may not find a variable in the field list with the actual
5168 offset when when we have glommed a structure to a variable.
5169 In that case, however, offset should still be within the size
5170 of the variable.
5171 If we got beyond the offset we look for return the field
5172 directly preceding offset which may be the last field. */
5173 while (start->next
5174 && offset >= start->offset
5175 && !((offset - start->offset) < start->size))
5176 start = vi_next (start);
5177
5178 return start;
5179 }
5180
5181
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field's size is not known at compile time (in which
     case SIZE holds -1).  */
  unsigned has_unknown_size : 1;

  /* True if the field's type says it must contain pointers.  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5204
5205
5206 /* qsort comparison function for two fieldoff's PA and PB */
5207
5208 static int
5209 fieldoff_compare (const void *pa, const void *pb)
5210 {
5211 const fieldoff_s *foa = (const fieldoff_s *)pa;
5212 const fieldoff_s *fob = (const fieldoff_s *)pb;
5213 unsigned HOST_WIDE_INT foasize, fobsize;
5214
5215 if (foa->offset < fob->offset)
5216 return -1;
5217 else if (foa->offset > fob->offset)
5218 return 1;
5219
5220 foasize = foa->size;
5221 fobsize = fob->size;
5222 if (foasize < fobsize)
5223 return -1;
5224 else if (foasize > fobsize)
5225 return 1;
5226 return 0;
5227 }
5228
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  /* fieldoff_compare orders primarily by offset, secondarily by size.  */
  fieldstack.qsort (fieldoff_compare);
}
5235
5236 /* Return true if T is a type that can have subvars. */
5237
5238 static inline bool
5239 type_can_have_subvars (const_tree t)
5240 {
5241 /* Aggregates without overlapping fields can have subvars. */
5242 return TREE_CODE (t) == RECORD_TYPE;
5243 }
5244
5245 /* Return true if V is a tree that we can have subvars for.
5246 Normally, this is any aggregate type. Also complex
5247 types which are not gimple registers can have subvars. */
5248
5249 static inline bool
5250 var_can_have_subvars (const_tree v)
5251 {
5252 /* Volatile variables should never have subvars. */
5253 if (TREE_THIS_VOLATILE (v))
5254 return false;
5255
5256 /* Non decls or memory tags can never have subvars. */
5257 if (!DECL_P (v))
5258 return false;
5259
5260 return type_can_have_subvars (TREE_TYPE (v));
5261 }
5262
5263 /* Return true if T is a type that does contain pointers. */
5264
5265 static bool
5266 type_must_have_pointers (tree type)
5267 {
5268 if (POINTER_TYPE_P (type))
5269 return true;
5270
5271 if (TREE_CODE (type) == ARRAY_TYPE)
5272 return type_must_have_pointers (TREE_TYPE (type));
5273
5274 /* A function or method can have pointers as arguments, so track
5275 those separately. */
5276 if (TREE_CODE (type) == FUNCTION_TYPE
5277 || TREE_CODE (type) == METHOD_TYPE)
5278 return true;
5279
5280 return false;
5281 }
5282
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5288
5289 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5290 the fields of TYPE onto fieldstack, recording their offsets along
5291 the way.
5292
5293 OFFSET is used to keep track of the offset in this entire
5294 structure, rather than just the immediately containing structure.
5295 Returns false if the caller is supposed to handle the field we
5296 recursed for. */
5297
5298 static bool
5299 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5300 HOST_WIDE_INT offset)
5301 {
5302 tree field;
5303 bool empty_p = true;
5304
5305 if (TREE_CODE (type) != RECORD_TYPE)
5306 return false;
5307
5308 /* If the vector of fields is growing too big, bail out early.
5309 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5310 sure this fails. */
5311 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5312 return false;
5313
5314 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5315 if (TREE_CODE (field) == FIELD_DECL)
5316 {
5317 bool push = false;
5318 HOST_WIDE_INT foff = bitpos_of_field (field);
5319
5320 if (!var_can_have_subvars (field)
5321 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5322 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5323 push = true;
5324 else if (!push_fields_onto_fieldstack
5325 (TREE_TYPE (field), fieldstack, offset + foff)
5326 && (DECL_SIZE (field)
5327 && !integer_zerop (DECL_SIZE (field))))
5328 /* Empty structures may have actual size, like in C++. So
5329 see if we didn't push any subfields and the size is
5330 nonzero, push the field onto the stack. */
5331 push = true;
5332
5333 if (push)
5334 {
5335 fieldoff_s *pair = NULL;
5336 bool has_unknown_size = false;
5337 bool must_have_pointers_p;
5338
5339 if (!fieldstack->is_empty ())
5340 pair = &fieldstack->last ();
5341
5342 /* If there isn't anything at offset zero, create sth. */
5343 if (!pair
5344 && offset + foff != 0)
5345 {
5346 fieldoff_s e = {0, offset + foff, false, false, false, false};
5347 pair = fieldstack->safe_push (e);
5348 }
5349
5350 if (!DECL_SIZE (field)
5351 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5352 has_unknown_size = true;
5353
5354 /* If adjacent fields do not contain pointers merge them. */
5355 must_have_pointers_p = field_must_have_pointers (field);
5356 if (pair
5357 && !has_unknown_size
5358 && !must_have_pointers_p
5359 && !pair->must_have_pointers
5360 && !pair->has_unknown_size
5361 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5362 {
5363 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5364 }
5365 else
5366 {
5367 fieldoff_s e;
5368 e.offset = offset + foff;
5369 e.has_unknown_size = has_unknown_size;
5370 if (!has_unknown_size)
5371 e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
5372 else
5373 e.size = -1;
5374 e.must_have_pointers = must_have_pointers_p;
5375 e.may_have_pointers = true;
5376 e.only_restrict_pointers
5377 = (!has_unknown_size
5378 && POINTER_TYPE_P (TREE_TYPE (field))
5379 && TYPE_RESTRICT (TREE_TYPE (field)));
5380 fieldstack->safe_push (e);
5381 }
5382 }
5383
5384 empty_p = false;
5385 }
5386
5387 return !empty_p;
5388 }
5389
5390 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5391 if it is a varargs function. */
5392
5393 static unsigned int
5394 count_num_arguments (tree decl, bool *is_varargs)
5395 {
5396 unsigned int num = 0;
5397 tree t;
5398
5399 /* Capture named arguments for K&R functions. They do not
5400 have a prototype and thus no TYPE_ARG_TYPES. */
5401 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5402 ++num;
5403
5404 /* Check if the function has variadic arguments. */
5405 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5406 if (TREE_VALUE (t) == void_type_node)
5407 break;
5408 if (!t)
5409 *is_varargs = true;
5410
5411 return num;
5412 }
5413
/* Create the variable info (and sub-variables) representing the
   function DECL, using NAME, and return the varinfo for it.  */

static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  /* Reserve one "offset" slot per parameter past the fixed fi_*
     sub-variables; for varargs the size is unbounded.  */
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  /* Sub-variables below are chained onto PREV_VI in order of strictly
     increasing offset; the gcc_asserts check that invariant.  */
  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    asprintf (&tempname, "%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    clobbervi = new_var_info (NULL, newname);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;
    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi->id;
    prev_vi = clobbervi;

    asprintf (&tempname, "%s.use", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    usevi = new_var_info (NULL, newname);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;
    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi->id;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG may run out before NUM_ARGS for unprototyped functions;
	 fall back to DECL itself as the associated decl then.  */
      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5591
5592
5593 /* Return true if FIELDSTACK contains fields that overlap.
5594 FIELDSTACK is assumed to be sorted by offset. */
5595
5596 static bool
5597 check_for_overlaps (vec<fieldoff_s> fieldstack)
5598 {
5599 fieldoff_s *fo = NULL;
5600 unsigned int i;
5601 HOST_WIDE_INT lastoffset = -1;
5602
5603 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5604 {
5605 if (fo->offset == lastoffset)
5606 return true;
5607 lastoffset = fo->offset;
5608 }
5609 return false;
5610 }
5611
5612 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5613 This will also create any varinfo structures necessary for fields
5614 of DECL. */
5615
5616 static varinfo_t
5617 create_variable_info_for_1 (tree decl, const char *name)
5618 {
5619 varinfo_t vi, newvi;
5620 tree decl_type = TREE_TYPE (decl);
5621 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5622 vec<fieldoff_s> fieldstack = vNULL;
5623 fieldoff_s *fo;
5624 unsigned int i;
5625
5626 if (!declsize
5627 || !tree_fits_uhwi_p (declsize))
5628 {
5629 vi = new_var_info (decl, name);
5630 vi->offset = 0;
5631 vi->size = ~0;
5632 vi->fullsize = ~0;
5633 vi->is_unknown_size_var = true;
5634 vi->is_full_var = true;
5635 vi->may_have_pointers = true;
5636 return vi;
5637 }
5638
5639 /* Collect field information. */
5640 if (use_field_sensitive
5641 && var_can_have_subvars (decl)
5642 /* ??? Force us to not use subfields for global initializers
5643 in IPA mode. Else we'd have to parse arbitrary initializers. */
5644 && !(in_ipa_mode
5645 && is_global_var (decl)
5646 && DECL_INITIAL (decl)))
5647 {
5648 fieldoff_s *fo = NULL;
5649 bool notokay = false;
5650 unsigned int i;
5651
5652 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5653
5654 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5655 if (fo->has_unknown_size
5656 || fo->offset < 0)
5657 {
5658 notokay = true;
5659 break;
5660 }
5661
5662 /* We can't sort them if we have a field with a variable sized type,
5663 which will make notokay = true. In that case, we are going to return
5664 without creating varinfos for the fields anyway, so sorting them is a
5665 waste to boot. */
5666 if (!notokay)
5667 {
5668 sort_fieldstack (fieldstack);
5669 /* Due to some C++ FE issues, like PR 22488, we might end up
5670 what appear to be overlapping fields even though they,
5671 in reality, do not overlap. Until the C++ FE is fixed,
5672 we will simply disable field-sensitivity for these cases. */
5673 notokay = check_for_overlaps (fieldstack);
5674 }
5675
5676 if (notokay)
5677 fieldstack.release ();
5678 }
5679
5680 /* If we didn't end up collecting sub-variables create a full
5681 variable for the decl. */
5682 if (fieldstack.length () <= 1
5683 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5684 {
5685 vi = new_var_info (decl, name);
5686 vi->offset = 0;
5687 vi->may_have_pointers = true;
5688 vi->fullsize = TREE_INT_CST_LOW (declsize);
5689 vi->size = vi->fullsize;
5690 vi->is_full_var = true;
5691 fieldstack.release ();
5692 return vi;
5693 }
5694
5695 vi = new_var_info (decl, name);
5696 vi->fullsize = TREE_INT_CST_LOW (declsize);
5697 for (i = 0, newvi = vi;
5698 fieldstack.iterate (i, &fo);
5699 ++i, newvi = vi_next (newvi))
5700 {
5701 const char *newname = "NULL";
5702 char *tempname;
5703
5704 if (dump_file)
5705 {
5706 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5707 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5708 newname = ggc_strdup (tempname);
5709 free (tempname);
5710 }
5711 newvi->name = newname;
5712 newvi->offset = fo->offset;
5713 newvi->size = fo->size;
5714 newvi->fullsize = vi->fullsize;
5715 newvi->may_have_pointers = fo->may_have_pointers;
5716 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5717 if (i + 1 < fieldstack.length ())
5718 {
5719 varinfo_t tem = new_var_info (decl, name);
5720 newvi->next = tem->id;
5721 tem->head = vi->id;
5722 }
5723 }
5724
5725 fieldstack.release ();
5726
5727 return vi;
5728 }
5729
/* Create varinfo structures for DECL (named NAME), register them in
   the tree-to-varinfo map, generate initial constraints for globals,
   and return the id of the head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only VAR_DECLs can be global and need initial constraints.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->definition)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5803
5804 /* Print out the points-to solution for VAR to FILE. */
5805
5806 static void
5807 dump_solution_for_var (FILE *file, unsigned int var)
5808 {
5809 varinfo_t vi = get_varinfo (var);
5810 unsigned int i;
5811 bitmap_iterator bi;
5812
5813 /* Dump the solution for unified vars anyway, this avoids difficulties
5814 in scanning dumps in the testsuite. */
5815 fprintf (file, "%s = { ", vi->name);
5816 vi = get_varinfo (find (var));
5817 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5818 fprintf (file, "%s ", get_varinfo (i)->name);
5819 fprintf (file, "}");
5820
5821 /* But note when the variable was unified. */
5822 if (vi->id != var)
5823 fprintf (file, " same as %s", vi->name);
5824
5825 fprintf (file, "\n");
5826 }
5827
5828 /* Print the points-to solution for VAR to stdout. */
5829
/* Convenience wrapper for interactive (debugger) use.  */
DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
5835
5836 /* Create varinfo structures for all of the variables in the
5837 function for intraprocedural mode. */
5838
static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.
         Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  /* The fake decl stands for the object T points to; mark it
	     external since its storage is not in this function.  */
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Constrain the parameter to point to the fake object:
	     T = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* The pointed-to object itself may contain pointers that can
	     point to anything nonlocal (or are again restrict).  */
	  for (; vi; vi = vi_next (vi))
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  /* Ordinary parameters may point to anything nonlocal; restrict
	     qualified sub-fields get their own restrict tag.  */
	  for (; p; p = vi_next (p))
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
5917
5918 /* Structure used to put solution bitmaps in a hashtable so they can
5919 be shared among variables with the same points-to set. */
5920
typedef struct shared_bitmap_info
{
  bitmap pt_vars;	/* The shared points-to variable bitmap.  */
  hashval_t hashcode;	/* Cached bitmap_hash of PT_VARS.  */
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5927
5928 /* Shared_bitmap hashtable helpers. */
5929
/* hash_table traits: hash by the cached hashcode, compare by bitmap
   contents.  typed_free_remove frees entries with plain free.  */
struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
{
  typedef shared_bitmap_info value_type;
  typedef shared_bitmap_info compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
5937
5938 /* Hash function for a shared_bitmap_info_t */
5939
inline hashval_t
shared_bitmap_hasher::hash (const value_type *bi)
{
  /* The hash was computed once at insertion time; just return it.  */
  return bi->hashcode;
}
5945
5946 /* Equality function for two shared_bitmap_info_t's. */
5947
inline bool
shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
{
  /* Two entries are equal iff their bitmaps have identical contents.  */
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
5953
/* Hashtable of shared points-to bitmaps, keyed by bitmap contents.  */

static hash_table <shared_bitmap_hasher> shared_bitmap_table;
5957
5958 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5959 existing instance if there is one, NULL otherwise. */
5960
5961 static bitmap
5962 shared_bitmap_lookup (bitmap pt_vars)
5963 {
5964 shared_bitmap_info **slot;
5965 struct shared_bitmap_info sbi;
5966
5967 sbi.pt_vars = pt_vars;
5968 sbi.hashcode = bitmap_hash (pt_vars);
5969
5970 slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
5971 NO_INSERT);
5972 if (!slot)
5973 return NULL;
5974 else
5975 return (*slot)->pt_vars;
5976 }
5977
5978
5979 /* Add a bitmap to the shared bitmap hashtable. */
5980
5981 static void
5982 shared_bitmap_add (bitmap pt_vars)
5983 {
5984 shared_bitmap_info **slot;
5985 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5986
5987 sbi->pt_vars = pt_vars;
5988 sbi->hashcode = bitmap_hash (pt_vars);
5989
5990 slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
5991 gcc_assert (!*slot);
5992 *slot = sbi;
5993 }
5994
5995
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, and update the vars_contains_* flags of PT accordingly.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  /* If ESCAPED contains ANYTHING, every variable counts as escaped.  */
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap = vi->is_heap_var;
	}

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_nonlocal = true;
	}
    }
}
6042
6043
/* Compute and return the points-to solution for the variable ORIG_VI.
   Solutions are cached in FINAL_SOLUTIONS per representative.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set; register it for future sharing.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the existing identical bitmap and drop ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6124
6125 /* Given a pointer variable P, fill in its points-to set. */
6126
6127 static void
6128 find_what_p_points_to (tree p)
6129 {
6130 struct ptr_info_def *pi;
6131 tree lookup_p = p;
6132 varinfo_t vi;
6133
6134 /* For parameters, get at the points-to set for the actual parm
6135 decl. */
6136 if (TREE_CODE (p) == SSA_NAME
6137 && SSA_NAME_IS_DEFAULT_DEF (p)
6138 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6139 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6140 lookup_p = SSA_NAME_VAR (p);
6141
6142 vi = lookup_vi_for_tree (lookup_p);
6143 if (!vi)
6144 return;
6145
6146 pi = get_ptr_info (p);
6147 pi->pt = find_what_var_points_to (vi);
6148 }
6149
6150
6151 /* Query statistics for points-to solutions. */
6152
static struct {
  /* Queries answered "may alias" / "no alias" by pt_solution_includes.  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Likewise for pt_solutions_intersect.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6159
6160 void
6161 dump_pta_stats (FILE *s)
6162 {
6163 fprintf (s, "\nPTA query stats:\n");
6164 fprintf (s, " pt_solution_includes: "
6165 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6166 HOST_WIDE_INT_PRINT_DEC" queries\n",
6167 pta_stats.pt_solution_includes_no_alias,
6168 pta_stats.pt_solution_includes_no_alias
6169 + pta_stats.pt_solution_includes_may_alias);
6170 fprintf (s, " pt_solutions_intersect: "
6171 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6172 HOST_WIDE_INT_PRINT_DEC" queries\n",
6173 pta_stats.pt_solutions_intersect_no_alias,
6174 pta_stats.pt_solutions_intersect_no_alias
6175 + pta_stats.pt_solutions_intersect_may_alias);
6176 }
6177
6178
6179 /* Reset the points-to solution *PT to a conservative default
6180 (point to anything). */
6181
6182 void
6183 pt_solution_reset (struct pt_solution *pt)
6184 {
6185 memset (pt, 0, sizeof (struct pt_solution));
6186 pt->anything = true;
6187 }
6188
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether that contains
   global variables; whether the set contains escaped variables is
   computed below from the function's ESCAPED solution.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  /* VARS contains escaped variables if ESCAPED is "anything" or
     overlaps VARS.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6205
6206 /* Set the points-to solution *PT to point only to the variable VAR. */
6207
6208 void
6209 pt_solution_set_var (struct pt_solution *pt, tree var)
6210 {
6211 memset (pt, 0, sizeof (struct pt_solution));
6212 pt->vars = BITMAP_GGC_ALLOC ();
6213 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6214 pt->vars_contains_nonlocal = is_global_var (var);
6215 pt->vars_contains_escaped
6216 = (cfun->gimple_df->escaped.anything
6217 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6218 }
6219
6220 /* Computes the union of the points-to solutions *DEST and *SRC and
6221 stores the result in *DEST. This changes the points-to bitmap
6222 of *DEST and thus may not be used if that might be shared.
6223 The points-to bitmap of *SRC and *DEST will not be shared after
6224 this function if they were not before. */
6225
6226 static void
6227 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6228 {
6229 dest->anything |= src->anything;
6230 if (dest->anything)
6231 {
6232 pt_solution_reset (dest);
6233 return;
6234 }
6235
6236 dest->nonlocal |= src->nonlocal;
6237 dest->escaped |= src->escaped;
6238 dest->ipa_escaped |= src->ipa_escaped;
6239 dest->null |= src->null;
6240 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6241 dest->vars_contains_escaped |= src->vars_contains_escaped;
6242 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6243 if (!src->vars)
6244 return;
6245
6246 if (!dest->vars)
6247 dest->vars = BITMAP_GGC_ALLOC ();
6248 bitmap_ior_into (dest->vars, src->vars);
6249 }
6250
6251 /* Return true if the points-to solution *PT is empty. */
6252
6253 bool
6254 pt_solution_empty_p (struct pt_solution *pt)
6255 {
6256 if (pt->anything
6257 || pt->nonlocal)
6258 return false;
6259
6260 if (pt->vars
6261 && !bitmap_empty_p (pt->vars))
6262 return false;
6263
6264 /* If the solution includes ESCAPED, check if that is empty. */
6265 if (pt->escaped
6266 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6267 return false;
6268
6269 /* If the solution includes ESCAPED, check if that is empty. */
6270 if (pt->ipa_escaped
6271 && !pt_solution_empty_p (&ipa_escaped_pt))
6272 return false;
6273
6274 return true;
6275 }
6276
6277 /* Return true if the points-to solution *PT only point to a single var, and
6278 return the var uid in *UID. */
6279
6280 bool
6281 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6282 {
6283 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6284 || pt->null || pt->vars == NULL
6285 || !bitmap_single_bit_set_p (pt->vars))
6286 return false;
6287
6288 *uid = bitmap_first_set_bit (pt->vars);
6289 return true;
6290 }
6291
6292 /* Return true if the points-to solution *PT includes global memory. */
6293
bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ???  This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
6321
6322 /* Return true if the points-to solution *PT includes the variable
6323 declaration DECL. */
6324
6325 static bool
6326 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6327 {
6328 if (pt->anything)
6329 return true;
6330
6331 if (pt->nonlocal
6332 && is_global_var (decl))
6333 return true;
6334
6335 if (pt->vars
6336 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6337 return true;
6338
6339 /* If the solution includes ESCAPED, check it. */
6340 if (pt->escaped
6341 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6342 return true;
6343
6344 /* If the solution includes ESCAPED, check it. */
6345 if (pt->ipa_escaped
6346 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6347 return true;
6348
6349 return false;
6350 }
6351
6352 bool
6353 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6354 {
6355 bool res = pt_solution_includes_1 (pt, decl);
6356 if (res)
6357 ++pta_stats.pt_solution_includes_may_alias;
6358 else
6359 ++pta_stats.pt_solution_includes_no_alias;
6360 return res;
6361 }
6362
6363 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6364 intersection. */
6365
static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6414
6415 bool
6416 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6417 {
6418 bool res = pt_solutions_intersect_1 (pt1, pt2);
6419 if (res)
6420 ++pta_stats.pt_solutions_intersect_may_alias;
6421 else
6422 ++pta_stats.pt_solutions_intersect_no_alias;
6423 return res;
6424 }
6425
6426
6427 /* Dump points-to information to OUTFILE. */
6428
6429 static void
6430 dump_sa_points_to_info (FILE *outfile)
6431 {
6432 unsigned int i;
6433
6434 fprintf (outfile, "\nPoints-to sets\n\n");
6435
6436 if (dump_flags & TDF_STATS)
6437 {
6438 fprintf (outfile, "Stats:\n");
6439 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6440 fprintf (outfile, "Non-pointer vars: %d\n",
6441 stats.nonpointer_vars);
6442 fprintf (outfile, "Statically unified vars: %d\n",
6443 stats.unified_vars_static);
6444 fprintf (outfile, "Dynamically unified vars: %d\n",
6445 stats.unified_vars_dynamic);
6446 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6447 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6448 fprintf (outfile, "Number of implicit edges: %d\n",
6449 stats.num_implicit_edges);
6450 }
6451
6452 for (i = 1; i < varmap.length (); i++)
6453 {
6454 varinfo_t vi = get_varinfo (i);
6455 if (!vi->may_have_pointers)
6456 continue;
6457 dump_solution_for_var (outfile, i);
6458 }
6459 }
6460
6461
/* Debug points-to information to stderr.  Convenience entry point
   intended to be called from a debugger session.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6469
6470
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER,
   and install the base constraints that relate them.  Must run before
   any other variable infos are created: the gcc_asserts below pin each
   one to its well-known id.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  Note size/fullsize of ~0 (all ones) mark the extent as
     unknown/unbounded.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING. */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately. */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  Not a special var: it participates in solving like a
     normal variable.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6642
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the constraint and variable vectors, the lookup maps and the
   base constraint variables.  Must run before constraint generation.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity is only worthwhile when more than one field per
     variable is tracked.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  /* Maps from decls/trees to varinfos and from call statements to
     their use/clobber varinfos.  */
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table.create (511);
  /* Create the always-existing NULL/ANYTHING/... variables; must come
     after the pools and varmap exist.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
6672
6673 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6674 predecessor edges. */
6675
6676 static void
6677 remove_preds_and_fake_succs (constraint_graph_t graph)
6678 {
6679 unsigned int i;
6680
6681 /* Clear the implicit ref and address nodes from the successor
6682 lists. */
6683 for (i = 1; i < FIRST_REF_NODE; i++)
6684 {
6685 if (graph->succs[i])
6686 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6687 FIRST_REF_NODE * 2);
6688 }
6689
6690 /* Free the successor list for the non-ref nodes. */
6691 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6692 {
6693 if (graph->succs[i])
6694 BITMAP_FREE (graph->succs[i]);
6695 }
6696
6697 /* Now reallocate the size of the successor list as, and blow away
6698 the predecessor bitmaps. */
6699 graph->size = varmap.length ();
6700 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6701
6702 free (graph->implicit_preds);
6703 graph->implicit_preds = NULL;
6704 free (graph->preds);
6705 graph->preds = NULL;
6706 bitmap_obstack_release (&predbitmap_obstack);
6707 }
6708
/* Solve the constraint set.  Pipeline: build the predecessor graph,
   detect pointer/location equivalences, rewrite and unify, build the
   successor graph, find indirect cycles, then run the fixed-point
   solver.  The phase order below is load-bearing.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: the upper half holds the REF nodes.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point. */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  /* The actual fixed-point iteration.  */
  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6781
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  Generates
   constraints from every statement, solves them, then materializes
   the ESCAPED solution, per-SSA-name solutions and per-call
   use/clobber solutions.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry no pointer values of their own.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  /* Const calls use nothing.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Const, pure and no-vops calls clobber nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6903
6904
/* Delete created points-to sets.  Tears down everything allocated by
   init_alias_vars / solve_constraints; the per-statement solutions
   installed on the IL survive, as they live elsewhere.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  shared_bitmap_table.dispose ();
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Release the per-node complex constraint vectors before freeing
     the arrays that hold them.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);

  obstack_free (&fake_var_decl_obstack, NULL);

  pointer_map_destroy (final_solutions);
  obstack_free (&final_solutions_obstack, NULL);
}
6942
6943
6944 /* Compute points-to information for every SSA_NAME pointer in the
6945 current function and compute the transitive closure of escaped
6946 variables to re-initialize the call-clobber states of local variables. */
6947
6948 unsigned int
6949 compute_may_aliases (void)
6950 {
6951 if (cfun->gimple_df->ipa_pta)
6952 {
6953 if (dump_file)
6954 {
6955 fprintf (dump_file, "\nNot re-computing points-to information "
6956 "because IPA points-to information is available.\n\n");
6957
6958 /* But still dump what we have remaining it. */
6959 dump_alias_info (dump_file);
6960 }
6961
6962 return 0;
6963 }
6964
6965 /* For each pointer P_i, determine the sets of variables that P_i may
6966 point-to. Compute the reachability set of escaped and call-used
6967 variables. */
6968 compute_points_to_sets ();
6969
6970 /* Debugging dumps. */
6971 if (dump_file)
6972 dump_alias_info (dump_file);
6973
6974 /* Deallocate memory used by aliasing data structures and the internal
6975 points-to solution. */
6976 delete_points_to_sets ();
6977
6978 gcc_assert (!need_ssa_update_p (cfun));
6979
6980 return 0;
6981 }
6982
6983 static bool
6984 gate_tree_pta (void)
6985 {
6986 return flag_tree_pta;
6987 }
6988
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass has no execute hook; the work happens
   entirely through the finish-TODO.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_alias

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
7028
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Identical in structure to pass_build_alias;
   only the dump-file name ("ealias") differs.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_ealias

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
7068
7069
7070 /* Return true if we should execute IPA PTA. */
7071 static bool
7072 gate_ipa_pta (void)
7073 {
7074 return (optimize
7075 && flag_ipa_pta
7076 /* Don't bother doing anything if the program has errors. */
7077 && !seen_error ());
7078 }
7079
/* IPA PTA solutions for ESCAPED.  Starts out fully conservative
   (NOTE(review): the first initializer field appears to correspond to
   pt_solution's "anything" flag -- confirm against the struct
   definition) until ipa_pta_execute computes the real solution.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, false, false, NULL };
7083
7084 /* Associate node with varinfo DATA. Worker for
7085 cgraph_for_node_and_aliases. */
7086 static bool
7087 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7088 {
7089 if ((node->alias || node->thunk.thunk_p)
7090 && node->analyzed)
7091 insert_vi_for_tree (node->decl, (varinfo_t)data);
7092 return false;
7093 }
7094
/* Execute the driver for IPA PTA.  Phases: (1) create function infos
   for all defined functions and varinfos for globals, (2) generate
   constraints per function body, (3) solve, (4) materialize the IPA
   ESCAPED solution, per-SSA-name solutions and per-call use/clobber
   solutions across the whole unit.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;
      cgraph_get_body (node);

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl));
      /* Aliases and thunks of this function share its varinfo.  */
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias && var->analyzed)
	continue;

      get_vi_for_tree (var->decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  /* Remember where this function's constraints start for dumping.  */
  from = constraints.length ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", node->name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->used_from_other_partition
	  || node->externally_visible
	  || node->force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  /* ESCAPED = <result of this function>.  */
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.  */
      fi = lookup_vi_for_tree (node->decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      /* Union the use/clobber solutions of every callee
			 in the points-to set of the called pointer.  */
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Mark the function as having IPA PTA results so the local pass
	 does not recompute and clobber them.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7425
/* Pass registration for the whole-program (IPA) points-to analysis.
   Unlike the dummy alias passes above, this one has a real execute
   hook that runs ipa_pta_execute.  */

namespace {

const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_pta (); }
  unsigned int execute () { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}