1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "tree.h"
35 #include "tree-flow.h"
36 #include "tree-inline.h"
37 #include "varray.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "gimple.h"
41 #include "hashtab.h"
42 #include "function.h"
43 #include "cgraph.h"
44 #include "tree-pass.h"
45 #include "timevar.h"
46 #include "alloc-pool.h"
47 #include "splay-tree.h"
48 #include "params.h"
49 #include "cgraph.h"
50 #include "alias.h"
51 #include "pointer-set.h"
52
53 /* The idea behind this analyzer is to generate set constraints from the
54 program, then solve the resulting constraints in order to generate the
55 points-to sets.
56
57 Set constraints are a way of modeling program analysis problems that
58 involve sets. They consist of an inclusion constraint language,
59 describing the variables (each variable is a set) and operations that
60 are involved on the variables, and a set of rules that derive facts
61 from these operations. To solve a system of set constraints, you derive
62 all possible facts under the rules, which gives you the correct sets
63 as a consequence.
64
65 See "Efficient Field-sensitive pointer analysis for C" by David
66 J. Pearce, Paul H. J. Kelly and Chris Hankin, at
67 http://citeseer.ist.psu.edu/pearce04efficient.html
68
69 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
70 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
71 http://citeseer.ist.psu.edu/heintze01ultrafast.html
72
73 There are three types of real constraint expressions, DEREF,
74 ADDRESSOF, and SCALAR. Each constraint expression consists
75 of a constraint type, a variable, and an offset.
76
77 SCALAR is a constraint expression type used to represent x, whether
78 it appears on the LHS or the RHS of a statement.
79 DEREF is a constraint expression type used to represent *x, whether
80 it appears on the LHS or the RHS of a statement.
81 ADDRESSOF is a constraint expression used to represent &x, whether
82 it appears on the LHS or the RHS of a statement.
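   For example, in a statement x = *y (x and y being arbitrary pointer
   variables) the LHS is the SCALAR expression for x and the RHS is the
   DEREF expression for y, while in p = &q the RHS is the ADDRESSOF
   expression for q.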
83
84 Each pointer variable in the program is assigned an integer id, and
85 each field of a structure variable is assigned an integer id as well.
86
87 Structure variables are linked to their list of fields through a "next
88 field" in each variable that points to the next field in offset
89 order.
90 Each variable for a structure field has
91
92 1. "size", that tells the size in bits of that field.
93 2. "fullsize", that tells the size in bits of the entire structure.
94 3. "offset", that tells the offset in bits from the beginning of the
95 structure to this field.
96
97 Thus,
98 struct f
99 {
100 int a;
101 int b;
102 } foo;
103 int *bar;
104
105 looks like
106
107 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
108 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
109 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
110
111
112 In order to solve the system of set constraints, the following is
113 done:
114
115 1. Each constraint variable x has a solution set associated with it,
116 Sol(x).
117
118 2. Constraints are separated into direct, copy, and complex.
119 Direct constraints are ADDRESSOF constraints that require no extra
120 processing, such as P = &Q
121 Copy constraints are those of the form P = Q.
122 Complex constraints are all the constraints involving dereferences
123 and offsets (including offsetted copies).
124
125 3. All direct constraints of the form P = &Q are processed, such
126 that Q is added to Sol(P)
127
128 4. All complex constraints for a given constraint variable are stored in a
129 linked list attached to that variable's node.
130
131 5. A directed graph is built out of the copy constraints. Each
132 constraint variable is a node in the graph, and an edge from
133 Q to P is added for each copy constraint of the form P = Q
134
135 6. The graph is then walked, and solution sets are
136 propagated along the copy edges, such that an edge from Q to P
137 causes Sol(P) <- Sol(P) union Sol(Q).
138
139 7. As we visit each node, all complex constraints associated with
140 that node are processed by adding appropriate copy edges to the graph, or the
141 appropriate variables to the solution set.
142
143 8. The process of walking the graph is iterated until no solution
144 sets change.
145
146 Prior to walking the graph in steps 6 and 7, we perform static
147 cycle elimination on the constraint graph, as well
148 as off-line variable substitution.
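   As a small end-to-end illustration (p, t, r, a and b are made-up
   variables), the fragment

     p = &a;
     t = &b;
     *p = t;
     r = *p;

   gives rise to the constraints

     p = &a
     t = &b
     *p = t
     r = *p

   Step 3 puts a in Sol(p) and b in Sol(t); processing the complex
   constraint *p = t then unions Sol(t) into Sol(a), and r = *p unions
   Sol(a) into Sol(r), so the solver terminates with Sol(r) = { b }.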
149
150 TODO: Adding offsets to pointer-to-structures can be handled (i.e. not
151 punted on and turned into anything), but isn't. You can just see what
152 offset inside the pointed-to struct it's going to access.
153
154 TODO: Constant bounded arrays can be handled as if they were structs of the
155 same number of elements.
156
157 TODO: Modeling heap and incoming pointers becomes much better if we
158 add fields to them as we discover them, which we could do.
159
160 TODO: We could handle unions, but to be honest, it's probably not
161 worth the pain or slowdown. */
162
163 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
164 htab_t heapvar_for_stmt;
165
166 static bool use_field_sensitive = true;
167 static int in_ipa_mode = 0;
168
169 /* Used for predecessor bitmaps. */
170 static bitmap_obstack predbitmap_obstack;
171
172 /* Used for points-to sets. */
173 static bitmap_obstack pta_obstack;
174
175 /* Used for oldsolution members of variables. */
176 static bitmap_obstack oldpta_obstack;
177
178 /* Used for per-solver-iteration bitmaps. */
179 static bitmap_obstack iteration_obstack;
180
181 static unsigned int create_variable_info_for (tree, const char *);
182 typedef struct constraint_graph *constraint_graph_t;
183 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
184
185 struct constraint;
186 typedef struct constraint *constraint_t;
187
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
190
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
194
195 static struct constraint_stats
196 {
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
206
207 struct variable_info
208 {
209 /* ID of this variable */
210 unsigned int id;
211
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
215
216 /* True if this is a special variable whose solution set should not be
217 changed. */
218 unsigned int is_special_var:1;
219
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
222
223 /* True for (sub-)fields that represent a whole variable. */
224 unsigned int is_full_var : 1;
225
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
228
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
232
233 /* True if this field may contain pointers. */
234 unsigned int may_have_pointers : 1;
235
236 /* A link to the variable for the next field in this structure. */
237 struct variable_info *next;
238
239 /* Offset of this variable, in bits, from the base variable */
240 unsigned HOST_WIDE_INT offset;
241
242 /* Size of the variable, in bits. */
243 unsigned HOST_WIDE_INT size;
244
245 /* Full size of the base variable, in bits. */
246 unsigned HOST_WIDE_INT fullsize;
247
248 /* Name of this variable */
249 const char *name;
250
251 /* Tree that this variable is associated with. */
252 tree decl;
253
254 /* Points-to set for this variable. */
255 bitmap solution;
256
257 /* Old points-to set for this variable. */
258 bitmap oldsolution;
259 };
260 typedef struct variable_info *varinfo_t;
261
262 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
263 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
264 unsigned HOST_WIDE_INT);
265 static varinfo_t lookup_vi_for_tree (tree);
266
267 /* Pool of variable info structures. */
268 static alloc_pool variable_info_pool;
269
270 DEF_VEC_P(varinfo_t);
271
272 DEF_VEC_ALLOC_P(varinfo_t, heap);
273
274 /* Table of variable info structures for constraint variables.
275 Indexed directly by variable info id. */
276 static VEC(varinfo_t,heap) *varmap;
277
278 /* Return the varmap element N */
279
280 static inline varinfo_t
281 get_varinfo (unsigned int n)
282 {
283 return VEC_index (varinfo_t, varmap, n);
284 }
285
286 /* Static IDs for the special variables. */
287 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
288 escaped_id = 3, nonlocal_id = 4, callused_id = 5,
289 storedanything_id = 6, integer_id = 7 };
290
291 /* Variable that represents the unknown pointer. */
292 static varinfo_t var_anything;
293 static tree anything_tree;
294
295 /* Variable that represents the NULL pointer. */
296 static varinfo_t var_nothing;
297 static tree nothing_tree;
298
299 /* Variable that represents read only memory. */
300 static varinfo_t var_readonly;
301 static tree readonly_tree;
302
303 /* Variable that represents escaped memory. */
304 static varinfo_t var_escaped;
305 static tree escaped_tree;
306
307 /* Variable that represents nonlocal memory. */
308 static varinfo_t var_nonlocal;
309 static tree nonlocal_tree;
310
311 /* Variable that represents call-used memory. */
312 static varinfo_t var_callused;
313 static tree callused_tree;
314
315 /* Variable that represents variables that are stored to anything. */
316 static varinfo_t var_storedanything;
317 static tree storedanything_tree;
318
319 /* Variable that represents integers. This is used when people do things
320 like &0->a.b. */
321 static varinfo_t var_integer;
322 static tree integer_tree;
323
324 /* Lookup a heap var for FROM, and return it if we find one. */
325
326 static tree
327 heapvar_lookup (tree from)
328 {
329 struct tree_map *h, in;
330 in.base.from = from;
331
332 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
333 htab_hash_pointer (from));
334 if (h)
335 return h->to;
336 return NULL_TREE;
337 }
338
339 /* Insert a mapping FROM->TO in the heap var for statement
340 hashtable. */
341
342 static void
343 heapvar_insert (tree from, tree to)
344 {
345 struct tree_map *h;
346 void **loc;
347
348 h = GGC_NEW (struct tree_map);
349 h->hash = htab_hash_pointer (from);
350 h->base.from = from;
351 h->to = to;
352 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
353 *(struct tree_map **) loc = h;
354 }
355
356 /* Return a new variable info structure for a variable named NAME,
357 associated with tree T and with id ID. */
358
359 static varinfo_t
360 new_var_info (tree t, unsigned int id, const char *name)
361 {
362 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
363 tree var;
364
365 ret->id = id;
366 ret->name = name;
367 ret->decl = t;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->is_full_var = false;
373 ret->may_have_pointers = true;
374 var = t;
375 if (TREE_CODE (var) == SSA_NAME)
376 var = SSA_NAME_VAR (var);
377 ret->no_tbaa_pruning = (DECL_P (var)
378 && POINTER_TYPE_P (TREE_TYPE (var))
379 && DECL_NO_TBAA_P (var));
380 ret->solution = BITMAP_ALLOC (&pta_obstack);
381 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
382 ret->next = NULL;
383 return ret;
384 }
385
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
387
388 /* An expression that appears in a constraint. */
389
390 struct constraint_expr
391 {
392 /* Constraint type. */
393 constraint_expr_type type;
394
395 /* Variable we are referring to in the constraint. */
396 unsigned int var;
397
398 /* Offset, in bits, of this constraint from the beginning of
399 variables it ends up referring to.
400
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 HOST_WIDE_INT offset;
404 };
405
406 /* Use 0x8000... as special unknown offset. */
407 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
408
409 typedef struct constraint_expr ce_s;
410 DEF_VEC_O(ce_s);
411 DEF_VEC_ALLOC_O(ce_s, heap);
412 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
413 static void get_constraint_for (tree, VEC(ce_s, heap) **);
414 static void do_deref (VEC (ce_s, heap) **);
415
416 /* Our set constraints are made up of two constraint expressions, one
417 LHS, and one RHS.
418
419 As described in the introduction, our set constraints each represent an
420 operation between set valued variables.
421 */
422 struct constraint
423 {
424 struct constraint_expr lhs;
425 struct constraint_expr rhs;
426 };
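/* For instance (a sketch with made-up variable names), the statement
   p = &q is represented by a constraint whose lhs is the SCALAR
   expression for p with offset 0 and whose rhs is the ADDRESSOF
   expression for q with offset 0.  */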
427
428 /* List of constraints that we use to build the constraint graph from. */
429
430 static VEC(constraint_t,heap) *constraints;
431 static alloc_pool constraint_pool;
432
433
434 DEF_VEC_I(int);
435 DEF_VEC_ALLOC_I(int, heap);
436
437 /* The constraint graph is represented as an array of bitmaps
438 containing successor nodes. */
439
440 struct constraint_graph
441 {
442 /* Size of this graph, which may be different than the number of
443 nodes in the variable map. */
444 unsigned int size;
445
446 /* Explicit successors of each node. */
447 bitmap *succs;
448
449 /* Implicit predecessors of each node (Used for variable
450 substitution). */
451 bitmap *implicit_preds;
452
453 /* Explicit predecessors of each node (Used for variable substitution). */
454 bitmap *preds;
455
456 /* Indirect cycle representatives, or -1 if the node has no indirect
457 cycles. */
458 int *indirect_cycles;
459
460 /* Representative node for a node. rep[a] == a unless the node has
461 been unified. */
462 unsigned int *rep;
463
464 /* Equivalence class representative for a label. This is used for
465 variable substitution. */
466 int *eq_rep;
467
468 /* Pointer equivalence label for a node. All nodes with the same
469 pointer equivalence label can be unified together at some point
470 (either during constraint optimization or after the constraint
471 graph is built). */
472 unsigned int *pe;
473
474 /* Pointer equivalence representative for a label. This is used to
475 handle nodes that are pointer equivalent but not location
476 equivalent. We can unite these once the addressof constraints
477 are transformed into initial points-to sets. */
478 int *pe_rep;
479
480 /* Pointer equivalence label for each node, used during variable
481 substitution. */
482 unsigned int *pointer_label;
483
484 /* Location equivalence label for each node, used during location
485 equivalence finding. */
486 unsigned int *loc_label;
487
488 /* Pointed-by set for each node, used during location equivalence
489 finding. This is pointed-by rather than pointed-to, because it
490 is constructed using the predecessor graph. */
491 bitmap *pointed_by;
492
493 /* Points to sets for pointer equivalence. This is *not* the actual
494 points-to sets for nodes. */
495 bitmap *points_to;
496
497 /* Bitmap of nodes where the bit is set if the node is a direct
498 node. Used for variable substitution. */
499 sbitmap direct_nodes;
500
501 /* Bitmap of nodes where the bit is set if the node is address
502 taken. Used for variable substitution. */
503 bitmap address_taken;
504
505 /* Vector of complex constraints for each graph node. Complex
506 constraints are those involving dereferences or offsets that are
507 not 0. */
508 VEC(constraint_t,heap) **complex;
509 };
510
511 static constraint_graph_t graph;
512
513 /* During variable substitution and the offline version of indirect
514 cycle finding, we create nodes to represent dereferences and
515 address taken constraints. These represent where these start and
516 end. */
517 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
518 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
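/* For example, if varmap currently holds 10 variables (ids 0 through 9),
   then FIRST_REF_NODE is 10, the REF node standing for *i is node 10 + i,
   and LAST_REF_NODE is 19.  */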
519
520 /* Return the representative node for NODE, if NODE has been unioned
521 with another NODE.
522 This function performs path compression along the way to finding
523 the representative. */
524
525 static unsigned int
526 find (unsigned int node)
527 {
528 gcc_assert (node < graph->size);
529 if (graph->rep[node] != node)
530 return graph->rep[node] = find (graph->rep[node]);
531 return node;
532 }
533
534 /* Union the TO and FROM nodes into the TO node.
535 Note that at some point in the future, we may want to do
536 union-by-rank, in which case we are going to have to return the
537 node we unified to. */
538
539 static bool
540 unite (unsigned int to, unsigned int from)
541 {
542 gcc_assert (to < graph->size && from < graph->size);
543 if (to != from && graph->rep[from] != to)
544 {
545 graph->rep[from] = to;
546 return true;
547 }
548 return false;
549 }
550
551 /* Create a new constraint consisting of LHS and RHS expressions. */
552
553 static constraint_t
554 new_constraint (const struct constraint_expr lhs,
555 const struct constraint_expr rhs)
556 {
557 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
558 ret->lhs = lhs;
559 ret->rhs = rhs;
560 return ret;
561 }
562
563 /* Print out constraint C to FILE. */
564
565 static void
566 dump_constraint (FILE *file, constraint_t c)
567 {
568 if (c->lhs.type == ADDRESSOF)
569 fprintf (file, "&");
570 else if (c->lhs.type == DEREF)
571 fprintf (file, "*");
572 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
573 if (c->lhs.offset == UNKNOWN_OFFSET)
574 fprintf (file, " + UNKNOWN");
575 else if (c->lhs.offset != 0)
576 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
577 fprintf (file, " = ");
578 if (c->rhs.type == ADDRESSOF)
579 fprintf (file, "&");
580 else if (c->rhs.type == DEREF)
581 fprintf (file, "*");
582 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
583 if (c->rhs.offset == UNKNOWN_OFFSET)
584 fprintf (file, " + UNKNOWN");
585 else if (c->rhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
587 fprintf (file, "\n");
588 }
589
590
591 void debug_constraint (constraint_t);
592 void debug_constraints (void);
593 void debug_constraint_graph (void);
594 void debug_solution_for_var (unsigned int);
595 void debug_sa_points_to_info (void);
596
597 /* Print out constraint C to stderr. */
598
599 void
600 debug_constraint (constraint_t c)
601 {
602 dump_constraint (stderr, c);
603 }
604
605 /* Print out all constraints to FILE */
606
607 static void
608 dump_constraints (FILE *file)
609 {
610 int i;
611 constraint_t c;
612 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
613 dump_constraint (file, c);
614 }
615
616 /* Print out all constraints to stderr. */
617
618 void
619 debug_constraints (void)
620 {
621 dump_constraints (stderr);
622 }
623
624 /* Print out to FILE the edge in the constraint graph that is created by
625 constraint C. The edge may have a label, depending on the type of
626 constraint that it represents. If complex1, e.g. a = *b, then the label
627 is "=*"; if complex2, e.g. *a = b, then the label is "*="; if it is
628 complex with an offset, e.g. a = b + 8, then the label is "+".
629 Otherwise the edge has no label. */
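/* For example (made-up variable names), a plain copy constraint a = b is
   printed roughly as
     "b" -> "a" ;
   while *a = b and a = *b get the labels "*=" and "=*" respectively; in
   every case the edge runs from the RHS variable to the LHS variable.  */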
630
631 static void
632 dump_constraint_edge (FILE *file, constraint_t c)
633 {
634 if (c->rhs.type != ADDRESSOF)
635 {
636 const char *src = get_varinfo (c->rhs.var)->name;
637 const char *dst = get_varinfo (c->lhs.var)->name;
638 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
639 /* Due to preprocessing of constraints, instructions like *a = *b are
640 illegal; thus, we do not have to handle such cases. */
641 if (c->lhs.type == DEREF)
642 fprintf (file, " [ label=\"*=\" ] ;\n");
643 else if (c->rhs.type == DEREF)
644 fprintf (file, " [ label=\"=*\" ] ;\n");
645 else
646 {
647 /* We must check the case where the constraint is an offset.
648 In this case, it is treated as a complex constraint. */
649 if (c->rhs.offset != c->lhs.offset)
650 fprintf (file, " [ label=\"+\" ] ;\n");
651 else
652 fprintf (file, " ;\n");
653 }
654 }
655 }
656
657 /* Print the constraint graph in dot format. */
658
659 static void
660 dump_constraint_graph (FILE *file)
661 {
662 unsigned int i=0, size;
663 constraint_t c;
664
665 /* Only print the graph if it has already been initialized: */
666 if (!graph)
667 return;
668
669 /* Print the constraints used to produce the constraint graph. The
670 constraints will be printed as comments in the dot file: */
671 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
672 dump_constraints (file);
673 fprintf (file, "*/\n");
674
675 /* Prints the header of the dot file: */
676 fprintf (file, "\n\n// The constraint graph in dot format:\n");
677 fprintf (file, "strict digraph {\n");
678 fprintf (file, " node [\n shape = box\n ]\n");
679 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
680 fprintf (file, "\n // List of nodes in the constraint graph:\n");
681
682 /* The next lines print the nodes in the graph. In order to get the
683 number of nodes in the graph, we must choose the minimum of the length
684 of the vector VEC (varinfo_t, varmap) and graph->size. If the graph has
685 not yet been initialized, then graph->size == 0; otherwise we must only
686 read nodes that have an entry in VEC (varinfo_t, varmap). */
687 size = VEC_length (varinfo_t, varmap);
688 size = size < graph->size ? size : graph->size;
689 for (i = 0; i < size; i++)
690 {
691 const char *name = get_varinfo (graph->rep[i])->name;
692 fprintf (file, " \"%s\" ;\n", name);
693 }
694
695 /* Go over the list of constraints printing the edges in the constraint
696 graph. */
697 fprintf (file, "\n // The constraint edges:\n");
698 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
699 if (c)
700 dump_constraint_edge (file, c);
701
702 /* Prints the tail of the dot file. For now, this is just the closing brace. */
703 fprintf (file, "}\n\n\n");
704 }
705
706 /* Print out the constraint graph to stderr. */
707
708 void
709 debug_constraint_graph (void)
710 {
711 dump_constraint_graph (stderr);
712 }
713
714 /* SOLVER FUNCTIONS
715
716 The solver is a simple worklist solver, that works on the following
717 algorithm:
718
719 sbitmap changed_nodes = all zeroes;
720 changed_count = 0;
721 For each node that is not already collapsed:
722 changed_count++;
723 set bit in changed nodes
724
725 while (changed_count > 0)
726 {
727 compute topological ordering for constraint graph
728
729 find and collapse cycles in the constraint graph (updating
730 changed if necessary)
731
732 for each node (n) in the graph in topological order:
733 changed_count--;
734
735 Process each complex constraint associated with the node,
736 updating changed if necessary.
737
738 For each outgoing edge from n, propagate the solution from n to
739 the destination of the edge, updating changed as necessary.
740
741 } */
742
743 /* Return true if two constraint expressions A and B are equal. */
744
745 static bool
746 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
747 {
748 return a.type == b.type && a.var == b.var && a.offset == b.offset;
749 }
750
751 /* Return true if constraint expression A is less than constraint expression
752 B. This is just arbitrary, but consistent, in order to give them an
753 ordering. */
754
755 static bool
756 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
757 {
758 if (a.type == b.type)
759 {
760 if (a.var == b.var)
761 return a.offset < b.offset;
762 else
763 return a.var < b.var;
764 }
765 else
766 return a.type < b.type;
767 }
768
769 /* Return true if constraint A is less than constraint B. This is just
770 arbitrary, but consistent, in order to give them an ordering. */
771
772 static bool
773 constraint_less (const constraint_t a, const constraint_t b)
774 {
775 if (constraint_expr_less (a->lhs, b->lhs))
776 return true;
777 else if (constraint_expr_less (b->lhs, a->lhs))
778 return false;
779 else
780 return constraint_expr_less (a->rhs, b->rhs);
781 }
782
783 /* Return true if two constraints A and B are equal. */
784
785 static bool
786 constraint_equal (struct constraint a, struct constraint b)
787 {
788 return constraint_expr_equal (a.lhs, b.lhs)
789 && constraint_expr_equal (a.rhs, b.rhs);
790 }
791
792
793 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
794
795 static constraint_t
796 constraint_vec_find (VEC(constraint_t,heap) *vec,
797 struct constraint lookfor)
798 {
799 unsigned int place;
800 constraint_t found;
801
802 if (vec == NULL)
803 return NULL;
804
805 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
806 if (place >= VEC_length (constraint_t, vec))
807 return NULL;
808 found = VEC_index (constraint_t, vec, place);
809 if (!constraint_equal (*found, lookfor))
810 return NULL;
811 return found;
812 }
813
814 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
815
816 static void
817 constraint_set_union (VEC(constraint_t,heap) **to,
818 VEC(constraint_t,heap) **from)
819 {
820 int i;
821 constraint_t c;
822
823 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
824 {
825 if (constraint_vec_find (*to, *c) == NULL)
826 {
827 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
828 constraint_less);
829 VEC_safe_insert (constraint_t, heap, *to, place, c);
830 }
831 }
832 }
833
834 /* Expand the solution in SET to include all sub-fields of the variables
835 it contains, and union the expanded result into RESULT. */
836
837 static void
838 solution_set_expand (bitmap result, bitmap set)
839 {
840 bitmap_iterator bi;
841 bitmap vars = NULL;
842 unsigned j;
843
844 /* In a first pass record all variables we need to add all
845 sub-fields of. This avoids quadratic behavior. */
846 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
847 {
848 varinfo_t v = get_varinfo (j);
849 if (v->is_artificial_var
850 || v->is_full_var)
851 continue;
852 v = lookup_vi_for_tree (v->decl);
853 if (vars == NULL)
854 vars = BITMAP_ALLOC (NULL);
855 bitmap_set_bit (vars, v->id);
856 }
857
858 /* In the second pass now do the addition to the solution and
859 to speed up solving add it to the delta as well. */
860 if (vars != NULL)
861 {
862 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
863 {
864 varinfo_t v = get_varinfo (j);
865 for (; v != NULL; v = v->next)
866 bitmap_set_bit (result, v->id);
867 }
868 BITMAP_FREE (vars);
869 }
870 }
871
872 /* Take a solution set SET, add OFFSET to each member of the set, and
873 overwrite SET with the result when done. */
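/* For example, using the struct f layout from the file-level comment,
   adding an offset of 32 to a set containing foo.a (offset 0) yields a
   set containing foo.b (offset 32), while an offset of UNKNOWN_OFFSET
   expands the set to all sub-fields of the variables it contains.  */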
874
875 static void
876 solution_set_add (bitmap set, HOST_WIDE_INT offset)
877 {
878 bitmap result = BITMAP_ALLOC (&iteration_obstack);
879 unsigned int i;
880 bitmap_iterator bi;
881
882 /* If the offset is unknown we have to expand the solution to
883 all subfields. */
884 if (offset == UNKNOWN_OFFSET)
885 {
886 solution_set_expand (set, set);
887 return;
888 }
889
890 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
891 {
892 varinfo_t vi = get_varinfo (i);
893
894 /* If this is a variable with just one field just set its bit
895 in the result. */
896 if (vi->is_artificial_var
897 || vi->is_unknown_size_var
898 || vi->is_full_var)
899 bitmap_set_bit (result, i);
900 else
901 {
902 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
903
904 /* If the offset makes the pointer point before the
905 variable, use offset zero for the field lookup. */
906 if (offset < 0
907 && fieldoffset > vi->offset)
908 fieldoffset = 0;
909
910 if (offset != 0)
911 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
912
913 bitmap_set_bit (result, vi->id);
914 /* If the result is not exactly at fieldoffset include the next
915 field as well. See get_constraint_for_ptr_offset for more
916 rationale. */
917 if (vi->offset != fieldoffset
918 && vi->next != NULL)
919 bitmap_set_bit (result, vi->next->id);
920 }
921 }
922
923 bitmap_copy (set, result);
924 BITMAP_FREE (result);
925 }
926
927 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
928 process. */
929
930 static bool
931 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
932 {
933 if (inc == 0)
934 return bitmap_ior_into (to, from);
935 else
936 {
937 bitmap tmp;
938 bool res;
939
940 tmp = BITMAP_ALLOC (&iteration_obstack);
941 bitmap_copy (tmp, from);
942 solution_set_add (tmp, inc);
943 res = bitmap_ior_into (to, tmp);
944 BITMAP_FREE (tmp);
945 return res;
946 }
947 }
948
949 /* Insert constraint C into the list of complex constraints for graph
950 node VAR. */
951
952 static void
953 insert_into_complex (constraint_graph_t graph,
954 unsigned int var, constraint_t c)
955 {
956 VEC (constraint_t, heap) *complex = graph->complex[var];
957 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
958 constraint_less);
959
960 /* Only insert constraints that do not already exist. */
961 if (place >= VEC_length (constraint_t, complex)
962 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
963 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
964 }
965
966
967 /* Condense two variable nodes into a single variable node, by moving
968 all associated info from FROM to TO. */
969
970 static void
971 merge_node_constraints (constraint_graph_t graph, unsigned int to,
972 unsigned int from)
973 {
974 unsigned int i;
975 constraint_t c;
976
977 gcc_assert (find (from) == to);
978
979 /* Move all complex constraints from src node into to node */
980 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
981 {
982 /* In complex constraints for node src, we may have either
983 a = *src, *src = a, or an offsetted constraint, which are
984 always added to the rhs node's constraints. */
985
986 if (c->rhs.type == DEREF)
987 c->rhs.var = to;
988 else if (c->lhs.type == DEREF)
989 c->lhs.var = to;
990 else
991 c->rhs.var = to;
992 }
993 constraint_set_union (&graph->complex[to], &graph->complex[from]);
994 VEC_free (constraint_t, heap, graph->complex[from]);
995 graph->complex[from] = NULL;
996 }
997
998
999 /* Remove edges involving NODE from GRAPH. */
1000
1001 static void
1002 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1003 {
1004 if (graph->succs[node])
1005 BITMAP_FREE (graph->succs[node]);
1006 }
1007
1008 /* Merge GRAPH nodes FROM and TO into node TO. */
1009
1010 static void
1011 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1012 unsigned int from)
1013 {
1014 if (graph->indirect_cycles[from] != -1)
1015 {
1016 /* If we have indirect cycles with the from node, and we have
1017 none on the to node, the to node has indirect cycles from the
1018 from node now that they are unified.
1019 If indirect cycles exist on both, unify the nodes that they
1020 are in a cycle with, since we know they are in a cycle with
1021 each other. */
1022 if (graph->indirect_cycles[to] == -1)
1023 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1024 }
1025
1026 /* Merge all the successor edges. */
1027 if (graph->succs[from])
1028 {
1029 if (!graph->succs[to])
1030 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1031 bitmap_ior_into (graph->succs[to],
1032 graph->succs[from]);
1033 }
1034
1035 clear_edges_for_node (graph, from);
1036 }
1037
1038
1039 /* Add an implicit graph edge to GRAPH, going from TO to FROM if
1040 it doesn't exist in the graph already. */
1041
1042 static void
1043 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1044 unsigned int from)
1045 {
1046 if (to == from)
1047 return;
1048
1049 if (!graph->implicit_preds[to])
1050 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1051
1052 if (bitmap_set_bit (graph->implicit_preds[to], from))
1053 stats.num_implicit_edges++;
1054 }
1055
1056 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1057 it doesn't exist in the graph already. */
1059
1060 static void
1061 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1062 unsigned int from)
1063 {
1064 if (!graph->preds[to])
1065 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1066 bitmap_set_bit (graph->preds[to], from);
1067 }
1068
1069 /* Add a graph edge to GRAPH, going from FROM to TO if
1070 it doesn't exist in the graph already.
1071 Return false if the edge already existed, true otherwise. */
1072
1073 static bool
1074 add_graph_edge (constraint_graph_t graph, unsigned int to,
1075 unsigned int from)
1076 {
1077 if (to == from)
1078 {
1079 return false;
1080 }
1081 else
1082 {
1083 bool r = false;
1084
1085 if (!graph->succs[from])
1086 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1087 if (bitmap_set_bit (graph->succs[from], to))
1088 {
1089 r = true;
1090 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1091 stats.num_edges++;
1092 }
1093 return r;
1094 }
1095 }
1096
1097
1098 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1099
1100 static bool
1101 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1102 unsigned int dest)
1103 {
1104 return (graph->succs[dest]
1105 && bitmap_bit_p (graph->succs[dest], src));
1106 }
1107
1108 /* Initialize the constraint graph structure to contain SIZE nodes. */
1109
1110 static void
1111 init_graph (unsigned int size)
1112 {
1113 unsigned int j;
1114
1115 graph = XCNEW (struct constraint_graph);
1116 graph->size = size;
1117 graph->succs = XCNEWVEC (bitmap, graph->size);
1118 graph->indirect_cycles = XNEWVEC (int, graph->size);
1119 graph->rep = XNEWVEC (unsigned int, graph->size);
1120 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1121 graph->pe = XCNEWVEC (unsigned int, graph->size);
1122 graph->pe_rep = XNEWVEC (int, graph->size);
1123
1124 for (j = 0; j < graph->size; j++)
1125 {
1126 graph->rep[j] = j;
1127 graph->pe_rep[j] = -1;
1128 graph->indirect_cycles[j] = -1;
1129 }
1130 }
1131
1132 /* Build the constraint graph, adding only predecessor edges right now. */
1133
1134 static void
1135 build_pred_graph (void)
1136 {
1137 int i;
1138 constraint_t c;
1139 unsigned int j;
1140
1141 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1142 graph->preds = XCNEWVEC (bitmap, graph->size);
1143 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1144 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1145 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1146 graph->points_to = XCNEWVEC (bitmap, graph->size);
1147 graph->eq_rep = XNEWVEC (int, graph->size);
1148 graph->direct_nodes = sbitmap_alloc (graph->size);
1149 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1150 sbitmap_zero (graph->direct_nodes);
1151
1152 for (j = 0; j < FIRST_REF_NODE; j++)
1153 {
1154 if (!get_varinfo (j)->is_special_var)
1155 SET_BIT (graph->direct_nodes, j);
1156 }
1157
1158 for (j = 0; j < graph->size; j++)
1159 graph->eq_rep[j] = -1;
1160
1161 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1162 graph->indirect_cycles[j] = -1;
1163
1164 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1165 {
1166 struct constraint_expr lhs = c->lhs;
1167 struct constraint_expr rhs = c->rhs;
1168 unsigned int lhsvar = lhs.var;
1169 unsigned int rhsvar = rhs.var;
1170
1171 if (lhs.type == DEREF)
1172 {
1173 /* *x = y. */
1174 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1175 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1176 }
1177 else if (rhs.type == DEREF)
1178 {
1179 /* x = *y */
1180 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1181 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1182 else
1183 RESET_BIT (graph->direct_nodes, lhsvar);
1184 }
1185 else if (rhs.type == ADDRESSOF)
1186 {
1187 varinfo_t v;
1188
1189 /* x = &y */
1190 if (graph->points_to[lhsvar] == NULL)
1191 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1192 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1193
1194 if (graph->pointed_by[rhsvar] == NULL)
1195 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1196 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1197
1198 /* Implicitly, *x = y */
1199 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1200
1201 /* All related variables are no longer direct nodes. */
1202 RESET_BIT (graph->direct_nodes, rhsvar);
1203 v = get_varinfo (rhsvar);
1204 if (!v->is_full_var)
1205 {
1206 v = lookup_vi_for_tree (v->decl);
1207 do
1208 {
1209 RESET_BIT (graph->direct_nodes, v->id);
1210 v = v->next;
1211 }
1212 while (v != NULL);
1213 }
1214 bitmap_set_bit (graph->address_taken, rhsvar);
1215 }
1216 else if (lhsvar > anything_id
1217 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1218 {
1219 /* x = y */
1220 add_pred_graph_edge (graph, lhsvar, rhsvar);
1221 /* Implicitly, *x = *y */
1222 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1223 FIRST_REF_NODE + rhsvar);
1224 }
1225 else if (lhs.offset != 0 || rhs.offset != 0)
1226 {
1227 if (rhs.offset != 0)
1228 RESET_BIT (graph->direct_nodes, lhs.var);
1229 else if (lhs.offset != 0)
1230 RESET_BIT (graph->direct_nodes, rhs.var);
1231 }
1232 }
1233 }
1234
1235 /* Build the constraint graph, adding successor edges. */
1236
1237 static void
1238 build_succ_graph (void)
1239 {
1240 unsigned i, t;
1241 constraint_t c;
1242
1243 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1244 {
1245 struct constraint_expr lhs;
1246 struct constraint_expr rhs;
1247 unsigned int lhsvar;
1248 unsigned int rhsvar;
1249
1250 if (!c)
1251 continue;
1252
1253 lhs = c->lhs;
1254 rhs = c->rhs;
1255 lhsvar = find (lhs.var);
1256 rhsvar = find (rhs.var);
1257
1258 if (lhs.type == DEREF)
1259 {
1260 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1261 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1262 }
1263 else if (rhs.type == DEREF)
1264 {
1265 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1266 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1267 }
1268 else if (rhs.type == ADDRESSOF)
1269 {
1270 /* x = &y */
1271 gcc_assert (find (rhs.var) == rhs.var);
1272 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1273 }
1274 else if (lhsvar > anything_id
1275 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1276 {
1277 add_graph_edge (graph, lhsvar, rhsvar);
1278 }
1279 }
1280
1281 /* Add edges from STOREDANYTHING to all non-direct nodes. */
1282 t = find (storedanything_id);
1283 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1284 {
1285 if (!TEST_BIT (graph->direct_nodes, i))
1286 add_graph_edge (graph, find (i), t);
1287 }
1288 }
1289
1290
1291 /* Changed variables on the last iteration. */
1292 static unsigned int changed_count;
1293 static sbitmap changed;
1294
1295 DEF_VEC_I(unsigned);
1296 DEF_VEC_ALLOC_I(unsigned,heap);
1297
1298
1299 /* Strongly Connected Component visitation info. */
1300
1301 struct scc_info
1302 {
1303 sbitmap visited;
1304 sbitmap deleted;
1305 unsigned int *dfs;
1306 unsigned int *node_mapping;
1307 int current_index;
1308 VEC(unsigned,heap) *scc_stack;
1309 };
1310
1311
1312 /* Recursive routine to find strongly connected components in GRAPH.
1313 SI is the SCC info to store the information in, and N is the id of current
1314 graph node we are processing.
1315
1316 This is Tarjan's strongly connected component finding algorithm, as
1317 modified by Nuutila to keep only non-root nodes on the stack.
1318 The algorithm can be found in "On finding the strongly connected
1319 components in a directed graph" by Esko Nuutila and Eljas
1320 Soisalon-Soininen, in Information Processing Letters volume 49,
1321 number 1, pages 9-14. */
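/* As a rough illustration (made-up variables): the copy constraints
   a = b and b = a produce the successor edges b -> a and a -> b, so a
   and b form one SCC; the code below collapses such an SCC onto its
   lowest-numbered member, and for SCC members that are REF nodes the
   representative is additionally recorded in indirect_cycles.  */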
1322
1323 static void
1324 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1325 {
1326 unsigned int i;
1327 bitmap_iterator bi;
1328 unsigned int my_dfs;
1329
1330 SET_BIT (si->visited, n);
1331 si->dfs[n] = si->current_index ++;
1332 my_dfs = si->dfs[n];
1333
1334 /* Visit all the successors. */
1335 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1336 {
1337 unsigned int w;
1338
1339 if (i > LAST_REF_NODE)
1340 break;
1341
1342 w = find (i);
1343 if (TEST_BIT (si->deleted, w))
1344 continue;
1345
1346 if (!TEST_BIT (si->visited, w))
1347 scc_visit (graph, si, w);
1348 {
1349 unsigned int t = find (w);
1350 unsigned int nnode = find (n);
1351 gcc_assert (nnode == n);
1352
1353 if (si->dfs[t] < si->dfs[nnode])
1354 si->dfs[n] = si->dfs[t];
1355 }
1356 }
1357
1358 /* See if any components have been identified. */
1359 if (si->dfs[n] == my_dfs)
1360 {
1361 if (VEC_length (unsigned, si->scc_stack) > 0
1362 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1363 {
1364 bitmap scc = BITMAP_ALLOC (NULL);
1365 bool have_ref_node = n >= FIRST_REF_NODE;
1366 unsigned int lowest_node;
1367 bitmap_iterator bi;
1368
1369 bitmap_set_bit (scc, n);
1370
1371 while (VEC_length (unsigned, si->scc_stack) != 0
1372 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1373 {
1374 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1375
1376 bitmap_set_bit (scc, w);
1377 if (w >= FIRST_REF_NODE)
1378 have_ref_node = true;
1379 }
1380
1381 lowest_node = bitmap_first_set_bit (scc);
1382 gcc_assert (lowest_node < FIRST_REF_NODE);
1383
1384 /* Collapse the SCC nodes into a single node, and mark the
1385 indirect cycles. */
1386 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1387 {
1388 if (i < FIRST_REF_NODE)
1389 {
1390 if (unite (lowest_node, i))
1391 unify_nodes (graph, lowest_node, i, false);
1392 }
1393 else
1394 {
1395 unite (lowest_node, i);
1396 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1397 }
1398 }
1399 }
1400 SET_BIT (si->deleted, n);
1401 }
1402 else
1403 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1404 }
1405
1406 /* Unify node FROM into node TO, updating the changed count if
1407 necessary when UPDATE_CHANGED is true. */
1408
1409 static void
1410 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1411 bool update_changed)
1412 {
1413
1414 gcc_assert (to != from && find (to) == to);
1415 if (dump_file && (dump_flags & TDF_DETAILS))
1416 fprintf (dump_file, "Unifying %s to %s\n",
1417 get_varinfo (from)->name,
1418 get_varinfo (to)->name);
1419
1420 if (update_changed)
1421 stats.unified_vars_dynamic++;
1422 else
1423 stats.unified_vars_static++;
1424
1425 merge_graph_nodes (graph, to, from);
1426 merge_node_constraints (graph, to, from);
1427
1428 if (get_varinfo (from)->no_tbaa_pruning)
1429 get_varinfo (to)->no_tbaa_pruning = true;
1430
1431 /* Mark TO as changed if FROM was changed. If TO was already marked
1432 as changed, decrease the changed count. */
1433
1434 if (update_changed && TEST_BIT (changed, from))
1435 {
1436 RESET_BIT (changed, from);
1437 if (!TEST_BIT (changed, to))
1438 SET_BIT (changed, to);
1439 else
1440 {
1441 gcc_assert (changed_count > 0);
1442 changed_count--;
1443 }
1444 }
1445 if (get_varinfo (from)->solution)
1446 {
1447 /* If the solution changes because of the merging, we need to mark
1448 the variable as changed. */
1449 if (bitmap_ior_into (get_varinfo (to)->solution,
1450 get_varinfo (from)->solution))
1451 {
1452 if (update_changed && !TEST_BIT (changed, to))
1453 {
1454 SET_BIT (changed, to);
1455 changed_count++;
1456 }
1457 }
1458
1459 BITMAP_FREE (get_varinfo (from)->solution);
1460 BITMAP_FREE (get_varinfo (from)->oldsolution);
1461
1462 if (stats.iterations > 0)
1463 {
1464 BITMAP_FREE (get_varinfo (to)->oldsolution);
1465 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1466 }
1467 }
1468 if (valid_graph_edge (graph, to, to))
1469 {
1470 if (graph->succs[to])
1471 bitmap_clear_bit (graph->succs[to], to);
1472 }
1473 }
1474
1475 /* Information needed to compute the topological ordering of a graph. */
1476
1477 struct topo_info
1478 {
1479 /* sbitmap of visited nodes. */
1480 sbitmap visited;
1481 /* Array that stores the topological order of the graph, *in
1482 reverse*. */
1483 VEC(unsigned,heap) *topo_order;
1484 };
1485
1486
1487 /* Initialize and return a topological info structure. */
1488
1489 static struct topo_info *
1490 init_topo_info (void)
1491 {
1492 size_t size = graph->size;
1493 struct topo_info *ti = XNEW (struct topo_info);
1494 ti->visited = sbitmap_alloc (size);
1495 sbitmap_zero (ti->visited);
1496 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1497 return ti;
1498 }
1499
1500
1501 /* Free the topological sort info pointed to by TI. */
1502
1503 static void
1504 free_topo_info (struct topo_info *ti)
1505 {
1506 sbitmap_free (ti->visited);
1507 VEC_free (unsigned, heap, ti->topo_order);
1508 free (ti);
1509 }
1510
1511 /* Visit the graph in topological order, and store the order in the
1512 topo_info structure. */
1513
1514 static void
1515 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1516 unsigned int n)
1517 {
1518 bitmap_iterator bi;
1519 unsigned int j;
1520
1521 SET_BIT (ti->visited, n);
1522
1523 if (graph->succs[n])
1524 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1525 {
1526 if (!TEST_BIT (ti->visited, j))
1527 topo_visit (graph, ti, j);
1528 }
1529
1530 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1531 }
1532
1533 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1534 starting solution for y. */
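/* For instance (a hypothetical case), if DELTA = Sol(y) contains foo.a
   from the struct f layout in the file-level comment and the offset is
   32, the lookup below lands on foo.b, so an edge from foo.b to x is
   added and Sol(foo.b) is unioned into Sol(x).  */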
1535
1536 static void
1537 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1538 bitmap delta)
1539 {
1540 unsigned int lhs = c->lhs.var;
1541 bool flag = false;
1542 bitmap sol = get_varinfo (lhs)->solution;
1543 unsigned int j;
1544 bitmap_iterator bi;
1545 HOST_WIDE_INT roffset = c->rhs.offset;
1546
1547 /* Our IL does not allow this. */
1548 gcc_assert (c->lhs.offset == 0);
1549
1550 /* If the solution of Y contains ANYTHING it is good enough to transfer
1551 this to the LHS. */
1552 if (bitmap_bit_p (delta, anything_id))
1553 {
1554 flag |= bitmap_set_bit (sol, anything_id);
1555 goto done;
1556 }
1557
1558 /* If we do not know at which offset the rhs is dereferenced, compute
1559 the reachability set of DELTA, conservatively assuming it is
1560 dereferenced at all valid offsets. */
1561 if (roffset == UNKNOWN_OFFSET)
1562 {
1563 solution_set_expand (delta, delta);
1564 /* No further offset processing is necessary. */
1565 roffset = 0;
1566 }
1567
1568 /* For each variable j in delta (Sol(y)), add
1569 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1570 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1571 {
1572 varinfo_t v = get_varinfo (j);
1573 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1574 unsigned int t;
1575
1576 if (v->is_full_var)
1577 fieldoffset = v->offset;
1578 else if (roffset != 0)
1579 v = first_vi_for_offset (v, fieldoffset);
1580 /* If the access is outside of the variable we can ignore it. */
1581 if (!v)
1582 continue;
1583
1584 do
1585 {
1586 t = find (v->id);
1587
1588 /* Adding edges from the special vars is pointless.
1589 They don't have sets that can change. */
1590 if (get_varinfo (t)->is_special_var)
1591 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1592 /* Merging the solution from ESCAPED needlessly increases
1593 the set. Use ESCAPED as representative instead. */
1594 else if (v->id == escaped_id)
1595 flag |= bitmap_set_bit (sol, escaped_id);
1596 else if (add_graph_edge (graph, lhs, t))
1597 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1598
1599 /* If the variable is not exactly at the requested offset
1600 we have to include the next one. */
1601 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1602 || v->next == NULL)
1603 break;
1604
1605 v = v->next;
1606 fieldoffset = v->offset;
1607 }
1608 while (1);
1609 }
1610
1611 done:
1612 /* If the LHS solution changed, mark the var as changed. */
1613 if (flag)
1614 {
1615 get_varinfo (lhs)->solution = sol;
1616 if (!TEST_BIT (changed, lhs))
1617 {
1618 SET_BIT (changed, lhs);
1619 changed_count++;
1620 }
1621 }
1622 }
1623
1624 /* Process a constraint C that represents *(x + off) = y using DELTA
1625 as the starting solution for x. */
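/* For instance (a hypothetical case), for *(x + 32) = y with DELTA =
   Sol(x) containing foo.a from the struct f layout in the file-level
   comment, the field lookup below lands on foo.b, so an edge from y to
   foo.b is added and Sol(y) is unioned into Sol(foo.b).  */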
1626
1627 static void
1628 do_ds_constraint (constraint_t c, bitmap delta)
1629 {
1630 unsigned int rhs = c->rhs.var;
1631 bitmap sol = get_varinfo (rhs)->solution;
1632 unsigned int j;
1633 bitmap_iterator bi;
1634 HOST_WIDE_INT loff = c->lhs.offset;
1635
1636 /* Our IL does not allow this. */
1637 gcc_assert (c->rhs.offset == 0);
1638
1639 /* If the solution of y contains ANYTHING simply use the ANYTHING
1640 solution. This avoids needlessly increasing the points-to sets. */
1641 if (bitmap_bit_p (sol, anything_id))
1642 sol = get_varinfo (find (anything_id))->solution;
1643
1644 /* If the solution for x contains ANYTHING we have to merge the
1645 solution of y into all pointer variables which we do via
1646 STOREDANYTHING. */
1647 if (bitmap_bit_p (delta, anything_id))
1648 {
1649 unsigned t = find (storedanything_id);
1650 if (add_graph_edge (graph, t, rhs))
1651 {
1652 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1653 {
1654 if (!TEST_BIT (changed, t))
1655 {
1656 SET_BIT (changed, t);
1657 changed_count++;
1658 }
1659 }
1660 }
1661 return;
1662 }
1663
1664 /* If we do not know at which offset the rhs is dereferenced, compute
1665 the reachability set of DELTA, conservatively assuming it is
1666 dereferenced at all valid offsets. */
1667 if (loff == UNKNOWN_OFFSET)
1668 {
1669 solution_set_expand (delta, delta);
1670 loff = 0;
1671 }
1672
1673 /* For each member j of delta (Sol(x)), add an edge from y to j and
1674 union Sol(y) into Sol(j) */
1675 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1676 {
1677 varinfo_t v = get_varinfo (j);
1678 unsigned int t;
1679 HOST_WIDE_INT fieldoffset = v->offset + loff;
1680
1681 if (v->is_special_var)
1682 continue;
1683
1684 if (v->is_full_var)
1685 fieldoffset = v->offset;
1686 else if (loff != 0)
1687 v = first_vi_for_offset (v, fieldoffset);
1688 /* If the access is outside of the variable we can ignore it. */
1689 if (!v)
1690 continue;
1691
1692 do
1693 {
1694 if (v->may_have_pointers)
1695 {
1696 t = find (v->id);
1697 if (add_graph_edge (graph, t, rhs))
1698 {
1699 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1700 {
1701 if (t == rhs)
1702 sol = get_varinfo (rhs)->solution;
1703 if (!TEST_BIT (changed, t))
1704 {
1705 SET_BIT (changed, t);
1706 changed_count++;
1707 }
1708 }
1709 }
1710 }
1711
1712 /* If the variable is not exactly at the requested offset
1713 we have to include the next one. */
1714 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1715 || v->next == NULL)
1716 break;
1717
1718 v = v->next;
1719 fieldoffset = v->offset;
1720 }
1721 while (1);
1722 }
1723 }
1724
1725 /* Handle a non-simple constraint (simple meaning one that requires no
1726 iteration), i.e. *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1727
1728 static void
1729 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1730 {
1731 if (c->lhs.type == DEREF)
1732 {
1733 if (c->rhs.type == ADDRESSOF)
1734 {
1735 gcc_unreachable();
1736 }
1737 else
1738 {
1739 /* *x = y */
1740 do_ds_constraint (c, delta);
1741 }
1742 }
1743 else if (c->rhs.type == DEREF)
1744 {
1745 /* x = *y */
1746 if (!(get_varinfo (c->lhs.var)->is_special_var))
1747 do_sd_constraint (graph, c, delta);
1748 }
1749 else
1750 {
1751 bitmap tmp;
1752 bitmap solution;
1753 bool flag = false;
1754
1755 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1756 solution = get_varinfo (c->rhs.var)->solution;
1757 tmp = get_varinfo (c->lhs.var)->solution;
1758
1759 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1760
1761 if (flag)
1762 {
1763 get_varinfo (c->lhs.var)->solution = tmp;
1764 if (!TEST_BIT (changed, c->lhs.var))
1765 {
1766 SET_BIT (changed, c->lhs.var);
1767 changed_count++;
1768 }
1769 }
1770 }
1771 }
1772
1773 /* Initialize and return a new SCC info structure. */
1774
1775 static struct scc_info *
1776 init_scc_info (size_t size)
1777 {
1778 struct scc_info *si = XNEW (struct scc_info);
1779 size_t i;
1780
1781 si->current_index = 0;
1782 si->visited = sbitmap_alloc (size);
1783 sbitmap_zero (si->visited);
1784 si->deleted = sbitmap_alloc (size);
1785 sbitmap_zero (si->deleted);
1786 si->node_mapping = XNEWVEC (unsigned int, size);
1787 si->dfs = XCNEWVEC (unsigned int, size);
1788
1789 for (i = 0; i < size; i++)
1790 si->node_mapping[i] = i;
1791
1792 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1793 return si;
1794 }
1795
1796 /* Free an SCC info structure pointed to by SI */
1797
1798 static void
1799 free_scc_info (struct scc_info *si)
1800 {
1801 sbitmap_free (si->visited);
1802 sbitmap_free (si->deleted);
1803 free (si->node_mapping);
1804 free (si->dfs);
1805 VEC_free (unsigned, heap, si->scc_stack);
1806 free (si);
1807 }
1808
1809
1810 /* Find the indirect cycles that occur in GRAPH, using strongly connected
1811 components, and note them in the indirect cycles map.
1812
1813 This technique comes from Ben Hardekopf and Calvin Lin,
1814 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1815 Lines of Code", submitted to PLDI 2007. */
1816
1817 static void
1818 find_indirect_cycles (constraint_graph_t graph)
1819 {
1820 unsigned int i;
1821 unsigned int size = graph->size;
1822 struct scc_info *si = init_scc_info (size);
1823
1824 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1825 if (!TEST_BIT (si->visited, i) && find (i) == i)
1826 scc_visit (graph, si, i);
1827
1828 free_scc_info (si);
1829 }
1830
1831 /* Compute a topological ordering for GRAPH, and store the result in the
1832 topo_info structure TI. */
1833
1834 static void
1835 compute_topo_order (constraint_graph_t graph,
1836 struct topo_info *ti)
1837 {
1838 unsigned int i;
1839 unsigned int size = graph->size;
1840
1841 for (i = 0; i != size; ++i)
1842 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1843 topo_visit (graph, ti, i);
1844 }
1845
1846 /* Structure used for hash value numbering of pointer equivalence
1847 classes. */
1848
1849 typedef struct equiv_class_label
1850 {
1851 hashval_t hashcode;
1852 unsigned int equivalence_class;
1853 bitmap labels;
1854 } *equiv_class_label_t;
1855 typedef const struct equiv_class_label *const_equiv_class_label_t;
1856
1857 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1858 classes. */
1859 static htab_t pointer_equiv_class_table;
1860
1861 /* A hashtable for mapping a bitmap of labels->location equivalence
1862 classes. */
1863 static htab_t location_equiv_class_table;
1864
1865 /* Hash function for an equiv_class_label_t. */
1866
1867 static hashval_t
1868 equiv_class_label_hash (const void *p)
1869 {
1870 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1871 return ecl->hashcode;
1872 }
1873
1874 /* Equality function for two equiv_class_label_t's. */
1875
1876 static int
1877 equiv_class_label_eq (const void *p1, const void *p2)
1878 {
1879 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1880 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1881 return bitmap_equal_p (eql1->labels, eql2->labels);
1882 }
1883
1884 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1885 contains. */
1886
1887 static unsigned int
1888 equiv_class_lookup (htab_t table, bitmap labels)
1889 {
1890 void **slot;
1891 struct equiv_class_label ecl;
1892
1893 ecl.labels = labels;
1894 ecl.hashcode = bitmap_hash (labels);
1895
1896 slot = htab_find_slot_with_hash (table, &ecl,
1897 ecl.hashcode, NO_INSERT);
1898 if (!slot)
1899 return 0;
1900 else
1901 return ((equiv_class_label_t) *slot)->equivalence_class;
1902 }
1903
1904
1905 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1906 to TABLE. */
1907
1908 static void
1909 equiv_class_add (htab_t table, unsigned int equivalence_class,
1910 bitmap labels)
1911 {
1912 void **slot;
1913 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1914
1915 ecl->labels = labels;
1916 ecl->equivalence_class = equivalence_class;
1917 ecl->hashcode = bitmap_hash (labels);
1918
1919 slot = htab_find_slot_with_hash (table, ecl,
1920 ecl->hashcode, INSERT);
1921 gcc_assert (!*slot);
1922 *slot = (void *) ecl;
1923 }
1924
1925 /* Perform offline variable substitution.
1926
1927 This is a worst-case quadratic-time way of identifying variables
1928 that must have equivalent points-to sets, including those caused by
1929 static cycles and single-entry subgraphs in the constraint graph.
1930
1931 The technique is described in "Exploiting Pointer and Location
1932 Equivalence to Optimize Pointer Analysis", 14th International
1933 Static Analysis Symposium (SAS), August 2007. It is known as the
1934 "HU" algorithm, and is equivalent to value numbering the collapsed
1935 constraint graph including evaluating unions.
1936
1937 The general method of finding equivalence classes is as follows:
1938 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1939 Initialize all non-REF nodes to be direct nodes.
1940 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1941 variable}
1942 For each constraint containing the dereference, we also do the same
1943 thing.
1944
1945 We then compute SCC's in the graph and unify nodes in the same SCC,
1946 including pts sets.
1947
1948 For each non-collapsed node x:
1949 Visit all unvisited explicit incoming edges.
1950 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1951 where y->x.
1952 Lookup the equivalence class for pts(x).
1953 If we found one, equivalence_class(x) = found class.
1954 Otherwise, equivalence_class(x) = new class, and new_class is
1955 added to the lookup table.
1956
1957 All direct nodes with the same equivalence class can be replaced
1958 with a single representative node.
1959 All unlabeled nodes (label == 0) are not pointers and all edges
1960 involving them can be eliminated.
1961 We perform these optimizations during rewrite_constraints.
1962
1963 In addition to pointer equivalence class finding, we also perform
1964 location equivalence class finding. This is the set of variables
1965 that always appear together in points-to sets. We use this to
1966 compress the size of the points-to sets. */
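
/* A minimal illustration of the above (variable names invented, not taken
   from any particular testcase): given the constraints

     p = &x      a = p      b = p      i = j

   the address-of constraint gives p a fresh points-to bit, the copy edges
   propagate it so that pts(a) == pts(b) == pts(p), and a and b therefore
   receive the same pointer equivalence label and can be collapsed to one
   node.  i and j never acquire a points-to bit, keep label 0, and are
   treated as non-pointers whose edges are removed in
   rewrite_constraints.  */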
1967
1968 /* Current maximum pointer equivalence class id. */
1969 static int pointer_equiv_class;
1970
1971 /* Current maximum location equivalence class id. */
1972 static int location_equiv_class;
1973
1974 /* Recursive routine to find strongly connected components in GRAPH,
1975 and label its nodes with DFS numbers. */
1976
1977 static void
1978 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1979 {
1980 unsigned int i;
1981 bitmap_iterator bi;
1982 unsigned int my_dfs;
1983
1984 gcc_assert (si->node_mapping[n] == n);
1985 SET_BIT (si->visited, n);
1986 si->dfs[n] = si->current_index ++;
1987 my_dfs = si->dfs[n];
1988
1989 /* Visit all the explicit predecessors. */
1990 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1991 {
1992 unsigned int w = si->node_mapping[i];
1993
1994 if (TEST_BIT (si->deleted, w))
1995 continue;
1996
1997 if (!TEST_BIT (si->visited, w))
1998 condense_visit (graph, si, w);
1999 {
2000 unsigned int t = si->node_mapping[w];
2001 unsigned int nnode = si->node_mapping[n];
2002 gcc_assert (nnode == n);
2003
2004 if (si->dfs[t] < si->dfs[nnode])
2005 si->dfs[n] = si->dfs[t];
2006 }
2007 }
2008
2009 /* Visit all the implicit predecessors. */
2010 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2011 {
2012 unsigned int w = si->node_mapping[i];
2013
2014 if (TEST_BIT (si->deleted, w))
2015 continue;
2016
2017 if (!TEST_BIT (si->visited, w))
2018 condense_visit (graph, si, w);
2019 {
2020 unsigned int t = si->node_mapping[w];
2021 unsigned int nnode = si->node_mapping[n];
2022 gcc_assert (nnode == n);
2023
2024 if (si->dfs[t] < si->dfs[nnode])
2025 si->dfs[n] = si->dfs[t];
2026 }
2027 }
2028
2029 /* See if any components have been identified. */
2030 if (si->dfs[n] == my_dfs)
2031 {
2032 while (VEC_length (unsigned, si->scc_stack) != 0
2033 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2034 {
2035 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2036 si->node_mapping[w] = n;
2037
2038 if (!TEST_BIT (graph->direct_nodes, w))
2039 RESET_BIT (graph->direct_nodes, n);
2040
2041 /* Unify our nodes. */
2042 if (graph->preds[w])
2043 {
2044 if (!graph->preds[n])
2045 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2046 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2047 }
2048 if (graph->implicit_preds[w])
2049 {
2050 if (!graph->implicit_preds[n])
2051 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2052 bitmap_ior_into (graph->implicit_preds[n],
2053 graph->implicit_preds[w]);
2054 }
2055 if (graph->points_to[w])
2056 {
2057 if (!graph->points_to[n])
2058 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2059 bitmap_ior_into (graph->points_to[n],
2060 graph->points_to[w]);
2061 }
2062 }
2063 SET_BIT (si->deleted, n);
2064 }
2065 else
2066 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2067 }
2068
2069 /* Label pointer equivalences. */
2070
2071 static void
2072 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2073 {
2074 unsigned int i;
2075 bitmap_iterator bi;
2076 SET_BIT (si->visited, n);
2077
2078 if (!graph->points_to[n])
2079 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2080
2081 /* Label and union our incoming edges' points-to sets. */
2082 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2083 {
2084 unsigned int w = si->node_mapping[i];
2085 if (!TEST_BIT (si->visited, w))
2086 label_visit (graph, si, w);
2087
2088 /* Skip unused edges */
2089 if (w == n || graph->pointer_label[w] == 0)
2090 continue;
2091
2092 if (graph->points_to[w])
2093 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2094 }
2095 /* Indirect nodes get fresh variables. */
2096 if (!TEST_BIT (graph->direct_nodes, n))
2097 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2098
2099 if (!bitmap_empty_p (graph->points_to[n]))
2100 {
2101 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2102 graph->points_to[n]);
2103 if (!label)
2104 {
2105 label = pointer_equiv_class++;
2106 equiv_class_add (pointer_equiv_class_table,
2107 label, graph->points_to[n]);
2108 }
2109 graph->pointer_label[n] = label;
2110 }
2111 }
2112
2113 /* Perform offline variable substitution, discovering equivalence
2114 classes, and eliminating non-pointer variables. */
2115
2116 static struct scc_info *
2117 perform_var_substitution (constraint_graph_t graph)
2118 {
2119 unsigned int i;
2120 unsigned int size = graph->size;
2121 struct scc_info *si = init_scc_info (size);
2122
2123 bitmap_obstack_initialize (&iteration_obstack);
2124 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2125 equiv_class_label_eq, free);
2126 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2127 equiv_class_label_eq, free);
2128 pointer_equiv_class = 1;
2129 location_equiv_class = 1;
2130
2131 /* Condense the nodes, which means finding SCC's, counting incoming
2132 predecessors, and uniting nodes in SCC's. */
2133 for (i = 0; i < FIRST_REF_NODE; i++)
2134 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2135 condense_visit (graph, si, si->node_mapping[i]);
2136
2137 sbitmap_zero (si->visited);
2138 /* Actually label the nodes for pointer equivalences. */
2139 for (i = 0; i < FIRST_REF_NODE; i++)
2140 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2141 label_visit (graph, si, si->node_mapping[i]);
2142
2143 /* Calculate location equivalence labels. */
2144 for (i = 0; i < FIRST_REF_NODE; i++)
2145 {
2146 bitmap pointed_by;
2147 bitmap_iterator bi;
2148 unsigned int j;
2149 unsigned int label;
2150
2151 if (!graph->pointed_by[i])
2152 continue;
2153 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2154
2155 /* Translate the pointed-by mapping for pointer equivalence
2156 labels. */
2157 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2158 {
2159 bitmap_set_bit (pointed_by,
2160 graph->pointer_label[si->node_mapping[j]]);
2161 }
2162 /* The original pointed_by is now dead. */
2163 BITMAP_FREE (graph->pointed_by[i]);
2164
2165 /* Look up the location equivalence label if one exists, or make
2166 one otherwise. */
2167 label = equiv_class_lookup (location_equiv_class_table,
2168 pointed_by);
2169 if (label == 0)
2170 {
2171 label = location_equiv_class++;
2172 equiv_class_add (location_equiv_class_table,
2173 label, pointed_by);
2174 }
2175 else
2176 {
2177 if (dump_file && (dump_flags & TDF_DETAILS))
2178 fprintf (dump_file, "Found location equivalence for node %s\n",
2179 get_varinfo (i)->name);
2180 BITMAP_FREE (pointed_by);
2181 }
2182 graph->loc_label[i] = label;
2183
2184 }
2185
2186 if (dump_file && (dump_flags & TDF_DETAILS))
2187 for (i = 0; i < FIRST_REF_NODE; i++)
2188 {
2189 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2190 fprintf (dump_file,
2191 "Equivalence classes for %s node id %d:%s are pointer: %d"
2192 ", location:%d\n",
2193 direct_node ? "Direct node" : "Indirect node", i,
2194 get_varinfo (i)->name,
2195 graph->pointer_label[si->node_mapping[i]],
2196 graph->loc_label[si->node_mapping[i]]);
2197 }
2198
2199 /* Quickly eliminate our non-pointer variables. */
2200
2201 for (i = 0; i < FIRST_REF_NODE; i++)
2202 {
2203 unsigned int node = si->node_mapping[i];
2204
2205 if (graph->pointer_label[node] == 0)
2206 {
2207 if (dump_file && (dump_flags & TDF_DETAILS))
2208 fprintf (dump_file,
2209 "%s is a non-pointer variable, eliminating edges.\n",
2210 get_varinfo (node)->name);
2211 stats.nonpointer_vars++;
2212 clear_edges_for_node (graph, node);
2213 }
2214 }
2215
2216 return si;
2217 }
2218
2219 /* Free information that was only necessary for variable
2220 substitution. */
2221
2222 static void
2223 free_var_substitution_info (struct scc_info *si)
2224 {
2225 free_scc_info (si);
2226 free (graph->pointer_label);
2227 free (graph->loc_label);
2228 free (graph->pointed_by);
2229 free (graph->points_to);
2230 free (graph->eq_rep);
2231 sbitmap_free (graph->direct_nodes);
2232 htab_delete (pointer_equiv_class_table);
2233 htab_delete (location_equiv_class_table);
2234 bitmap_obstack_release (&iteration_obstack);
2235 }
2236
2237 /* Return an existing node that is equivalent to NODE, which has
2238 equivalence class LABEL, if one exists. Return NODE otherwise. */
2239
2240 static unsigned int
2241 find_equivalent_node (constraint_graph_t graph,
2242 unsigned int node, unsigned int label)
2243 {
2244 /* If the address version of this variable is unused, we can
2245 substitute it for anything else with the same label.
2246 Otherwise, we know the pointers are equivalent, but not the
2247 locations, and we can unite them later. */
2248
2249 if (!bitmap_bit_p (graph->address_taken, node))
2250 {
2251 gcc_assert (label < graph->size);
2252
2253 if (graph->eq_rep[label] != -1)
2254 {
2255 /* Unify the two variables since we know they are equivalent. */
2256 if (unite (graph->eq_rep[label], node))
2257 unify_nodes (graph, graph->eq_rep[label], node, false);
2258 return graph->eq_rep[label];
2259 }
2260 else
2261 {
2262 graph->eq_rep[label] = node;
2263 graph->pe_rep[label] = node;
2264 }
2265 }
2266 else
2267 {
2268 gcc_assert (label < graph->size);
2269 graph->pe[node] = label;
2270 if (graph->pe_rep[label] == -1)
2271 graph->pe_rep[label] = node;
2272 }
2273
2274 return node;
2275 }
2276
2277 /* Unite pointer equivalent but not location equivalent nodes in
2278 GRAPH. This may only be performed once variable substitution is
2279 finished. */
2280
2281 static void
2282 unite_pointer_equivalences (constraint_graph_t graph)
2283 {
2284 unsigned int i;
2285
2286 /* Go through the pointer equivalences and unite them to their
2287 representative, if they aren't already. */
2288 for (i = 0; i < FIRST_REF_NODE; i++)
2289 {
2290 unsigned int label = graph->pe[i];
2291 if (label)
2292 {
2293 int label_rep = graph->pe_rep[label];
2294
2295 if (label_rep == -1)
2296 continue;
2297
2298 label_rep = find (label_rep);
2299 if (label_rep >= 0 && unite (label_rep, find (i)))
2300 unify_nodes (graph, label_rep, i, false);
2301 }
2302 }
2303 }
2304
2305 /* Move complex constraints to the GRAPH nodes they belong to. */
2306
2307 static void
2308 move_complex_constraints (constraint_graph_t graph)
2309 {
2310 int i;
2311 constraint_t c;
2312
2313 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2314 {
2315 if (c)
2316 {
2317 struct constraint_expr lhs = c->lhs;
2318 struct constraint_expr rhs = c->rhs;
2319
2320 if (lhs.type == DEREF)
2321 {
2322 insert_into_complex (graph, lhs.var, c);
2323 }
2324 else if (rhs.type == DEREF)
2325 {
2326 if (!(get_varinfo (lhs.var)->is_special_var))
2327 insert_into_complex (graph, rhs.var, c);
2328 }
2329 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2330 && (lhs.offset != 0 || rhs.offset != 0))
2331 {
2332 insert_into_complex (graph, rhs.var, c);
2333 }
2334 }
2335 }
2336 }
2337
2338
2339 /* Optimize and rewrite complex constraints while performing
2340 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2341 result of perform_var_substitution. */
2342
2343 static void
2344 rewrite_constraints (constraint_graph_t graph,
2345 struct scc_info *si)
2346 {
2347 int i;
2348 unsigned int j;
2349 constraint_t c;
2350
2351 for (j = 0; j < graph->size; j++)
2352 gcc_assert (find (j) == j);
2353
2354 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2355 {
2356 struct constraint_expr lhs = c->lhs;
2357 struct constraint_expr rhs = c->rhs;
2358 unsigned int lhsvar = find (lhs.var);
2359 unsigned int rhsvar = find (rhs.var);
2360 unsigned int lhsnode, rhsnode;
2361 unsigned int lhslabel, rhslabel;
2362
2363 lhsnode = si->node_mapping[lhsvar];
2364 rhsnode = si->node_mapping[rhsvar];
2365 lhslabel = graph->pointer_label[lhsnode];
2366 rhslabel = graph->pointer_label[rhsnode];
2367
2368 /* See if it is really a non-pointer variable, and if so, ignore
2369 the constraint. */
2370 if (lhslabel == 0)
2371 {
2372 if (dump_file && (dump_flags & TDF_DETAILS))
2373 {
2374
2375 fprintf (dump_file, "%s is a non-pointer variable,"
2376 " ignoring constraint:",
2377 get_varinfo (lhs.var)->name);
2378 dump_constraint (dump_file, c);
2379 }
2380 VEC_replace (constraint_t, constraints, i, NULL);
2381 continue;
2382 }
2383
2384 if (rhslabel == 0)
2385 {
2386 if (dump_file && (dump_flags & TDF_DETAILS))
2387 {
2388
2389 fprintf (dump_file, "%s is a non-pointer variable,"
2390 " ignoring constraint:",
2391 get_varinfo (rhs.var)->name);
2392 dump_constraint (dump_file, c);
2393 }
2394 VEC_replace (constraint_t, constraints, i, NULL);
2395 continue;
2396 }
2397
2398 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2399 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2400 c->lhs.var = lhsvar;
2401 c->rhs.var = rhsvar;
2402
2403 }
2404 }
2405
2406 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2407 part of an SCC, false otherwise. */
2408
2409 static bool
2410 eliminate_indirect_cycles (unsigned int node)
2411 {
2412 if (graph->indirect_cycles[node] != -1
2413 && !bitmap_empty_p (get_varinfo (node)->solution))
2414 {
2415 unsigned int i;
2416 VEC(unsigned,heap) *queue = NULL;
2417 int queuepos;
2418 unsigned int to = find (graph->indirect_cycles[node]);
2419 bitmap_iterator bi;
2420
2421 /* We can't touch the solution set and call unify_nodes
2422 at the same time, because unify_nodes is going to do
2423 bitmap unions into it. */
2424
2425 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2426 {
2427 if (find (i) == i && i != to)
2428 {
2429 if (unite (to, i))
2430 VEC_safe_push (unsigned, heap, queue, i);
2431 }
2432 }
2433
2434 for (queuepos = 0;
2435 VEC_iterate (unsigned, queue, queuepos, i);
2436 queuepos++)
2437 {
2438 unify_nodes (graph, to, i, true);
2439 }
2440 VEC_free (unsigned, heap, queue);
2441 return true;
2442 }
2443 return false;
2444 }
2445
2446 /* Solve the constraint graph GRAPH using our worklist solver.
2447 This is based on the PW* family of solvers from the "Efficient Field
2448 Sensitive Pointer Analysis for C" paper.
2449 It works by iterating over all the graph nodes, processing the complex
2450 constraints and propagating the copy constraints, until everything stops
2451 changing. This corresponds to steps 6-8 in the solving list given above. */
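
/* Rough sketch of one iteration (names invented): if the solution of node a
   grew to { x, y } and a has a successor edge a -> b, the new bits are
   unioned into the solution of b and b is re-queued; a complex constraint
   *a = c attached to a additionally introduces copy edges from c to x and
   from c to y, so x and y will later receive the solution of c.  */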
2452
2453 static void
2454 solve_graph (constraint_graph_t graph)
2455 {
2456 unsigned int size = graph->size;
2457 unsigned int i;
2458 bitmap pts;
2459
2460 changed_count = 0;
2461 changed = sbitmap_alloc (size);
2462 sbitmap_zero (changed);
2463
2464 /* Mark all initial non-collapsed nodes as changed. */
2465 for (i = 0; i < size; i++)
2466 {
2467 varinfo_t ivi = get_varinfo (i);
2468 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2469 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2470 || VEC_length (constraint_t, graph->complex[i]) > 0))
2471 {
2472 SET_BIT (changed, i);
2473 changed_count++;
2474 }
2475 }
2476
2477 /* Allocate a bitmap to be used to store the changed bits. */
2478 pts = BITMAP_ALLOC (&pta_obstack);
2479
2480 while (changed_count > 0)
2481 {
2482 unsigned int i;
2483 struct topo_info *ti = init_topo_info ();
2484 stats.iterations++;
2485
2486 bitmap_obstack_initialize (&iteration_obstack);
2487
2488 compute_topo_order (graph, ti);
2489
2490 while (VEC_length (unsigned, ti->topo_order) != 0)
2491 {
2492
2493 i = VEC_pop (unsigned, ti->topo_order);
2494
2495 /* If this variable is not a representative, skip it. */
2496 if (find (i) != i)
2497 continue;
2498
2499 /* In certain indirect cycle cases, we may merge this
2500 variable to another. */
2501 if (eliminate_indirect_cycles (i) && find (i) != i)
2502 continue;
2503
2504 /* If the node has changed, we need to process the
2505 complex constraints and outgoing edges again. */
2506 if (TEST_BIT (changed, i))
2507 {
2508 unsigned int j;
2509 constraint_t c;
2510 bitmap solution;
2511 VEC(constraint_t,heap) *complex = graph->complex[i];
2512 bool solution_empty;
2513
2514 RESET_BIT (changed, i);
2515 changed_count--;
2516
2517 /* Compute the changed set of solution bits. */
2518 bitmap_and_compl (pts, get_varinfo (i)->solution,
2519 get_varinfo (i)->oldsolution);
2520
2521 if (bitmap_empty_p (pts))
2522 continue;
2523
2524 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2525
2526 solution = get_varinfo (i)->solution;
2527 solution_empty = bitmap_empty_p (solution);
2528
2529 /* Process the complex constraints */
2530 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2531 {
2532 /* XXX: This is going to unsort the constraints in
2533 some cases, which will occasionally add duplicate
2534 constraints during unification. This does not
2535 affect correctness. */
2536 c->lhs.var = find (c->lhs.var);
2537 c->rhs.var = find (c->rhs.var);
2538
2539 /* The only complex constraint that can change our
2540 solution to non-empty, given an empty solution,
2541 is a constraint where the lhs side is receiving
2542 some set from elsewhere. */
2543 if (!solution_empty || c->lhs.type != DEREF)
2544 do_complex_constraint (graph, c, pts);
2545 }
2546
2547 solution_empty = bitmap_empty_p (solution);
2548
2549 if (!solution_empty)
2550 {
2551 bitmap_iterator bi;
2552 unsigned eff_escaped_id = find (escaped_id);
2553
2554 /* Propagate solution to all successors. */
2555 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2556 0, j, bi)
2557 {
2558 bitmap tmp;
2559 bool flag;
2560
2561 unsigned int to = find (j);
2562 tmp = get_varinfo (to)->solution;
2563 flag = false;
2564
2565 /* Don't try to propagate to ourselves. */
2566 if (to == i)
2567 continue;
2568
2569 /* If we propagate from ESCAPED use ESCAPED as
2570 placeholder. */
2571 if (i == eff_escaped_id)
2572 flag = bitmap_set_bit (tmp, escaped_id);
2573 else
2574 flag = set_union_with_increment (tmp, pts, 0);
2575
2576 if (flag)
2577 {
2578 get_varinfo (to)->solution = tmp;
2579 if (!TEST_BIT (changed, to))
2580 {
2581 SET_BIT (changed, to);
2582 changed_count++;
2583 }
2584 }
2585 }
2586 }
2587 }
2588 }
2589 free_topo_info (ti);
2590 bitmap_obstack_release (&iteration_obstack);
2591 }
2592
2593 BITMAP_FREE (pts);
2594 sbitmap_free (changed);
2595 bitmap_obstack_release (&oldpta_obstack);
2596 }
2597
2598 /* Map from trees to variable infos. */
2599 static struct pointer_map_t *vi_for_tree;
2600
2601
2602 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2603
2604 static void
2605 insert_vi_for_tree (tree t, varinfo_t vi)
2606 {
2607 void **slot = pointer_map_insert (vi_for_tree, t);
2608 gcc_assert (vi);
2609 gcc_assert (*slot == NULL);
2610 *slot = vi;
2611 }
2612
2613 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2614 exist in the map, return NULL; otherwise return the varinfo we found. */
2615
2616 static varinfo_t
2617 lookup_vi_for_tree (tree t)
2618 {
2619 void **slot = pointer_map_contains (vi_for_tree, t);
2620 if (slot == NULL)
2621 return NULL;
2622
2623 return (varinfo_t) *slot;
2624 }
2625
2626 /* Return a printable name for DECL */
2627
2628 static const char *
2629 alias_get_name (tree decl)
2630 {
2631 const char *res = get_name (decl);
2632 char *temp;
2633 int num_printed = 0;
2634
2635 if (res != NULL)
2636 return res;
2637
2638 res = "NULL";
2639 if (!dump_file)
2640 return res;
2641
2642 if (TREE_CODE (decl) == SSA_NAME)
2643 {
2644 num_printed = asprintf (&temp, "%s_%u",
2645 alias_get_name (SSA_NAME_VAR (decl)),
2646 SSA_NAME_VERSION (decl));
2647 }
2648 else if (DECL_P (decl))
2649 {
2650 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2651 }
2652 if (num_printed > 0)
2653 {
2654 res = ggc_strdup (temp);
2655 free (temp);
2656 }
2657 return res;
2658 }
2659
2660 /* Find the variable info for tree T in the map.
2661 If T doesn't exist in the map, create an entry for it and return it. */
2662
2663 static varinfo_t
2664 get_vi_for_tree (tree t)
2665 {
2666 void **slot = pointer_map_contains (vi_for_tree, t);
2667 if (slot == NULL)
2668 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2669
2670 return (varinfo_t) *slot;
2671 }
2672
2673 /* Get a constraint expression for a new temporary variable. */
2674
2675 static struct constraint_expr
2676 get_constraint_exp_for_temp (tree t)
2677 {
2678 struct constraint_expr cexpr;
2679
2680 gcc_assert (SSA_VAR_P (t));
2681
2682 cexpr.type = SCALAR;
2683 cexpr.var = get_vi_for_tree (t)->id;
2684 cexpr.offset = 0;
2685
2686 return cexpr;
2687 }
2688
2689 /* Get a constraint expression vector from an SSA_VAR_P node.
2690 If ADDRESS_P is true, the result will have its address taken. */
2691
2692 static void
2693 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2694 {
2695 struct constraint_expr cexpr;
2696 varinfo_t vi;
2697
2698 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2699 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2700
2701 /* For parameters, get at the points-to set for the actual parm
2702 decl. */
2703 if (TREE_CODE (t) == SSA_NAME
2704 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2705 && SSA_NAME_IS_DEFAULT_DEF (t))
2706 {
2707 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2708 return;
2709 }
2710
2711 vi = get_vi_for_tree (t);
2712 cexpr.var = vi->id;
2713 cexpr.type = SCALAR;
2714 cexpr.offset = 0;
2715 /* If we determine the result is "anything", and we know this is readonly,
2716 say it points to readonly memory instead. */
2717 if (cexpr.var == anything_id && TREE_READONLY (t))
2718 {
2719 gcc_unreachable ();
2720 cexpr.type = ADDRESSOF;
2721 cexpr.var = readonly_id;
2722 }
2723
2724 /* If we are not taking the address of the constraint expr, add all
2725 sub-fields of the variable as well. */
2726 if (!address_p)
2727 {
2728 for (; vi; vi = vi->next)
2729 {
2730 cexpr.var = vi->id;
2731 VEC_safe_push (ce_s, heap, *results, &cexpr);
2732 }
2733 return;
2734 }
2735
2736 VEC_safe_push (ce_s, heap, *results, &cexpr);
2737 }
2738
2739 /* Process constraint T, performing various simplifications and then
2740 adding it to our list of overall constraints. */
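
/* For illustration (names invented): a statement like *p = *q arrives here
   as the constraint *p = *q and is split into

     doubledereftmp = *q
     *p = doubledereftmp

   and *p = &x is likewise split via a "derefaddrtmp" temporary, so that
   every constraint we keep has at most one complex side.  */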
2741
2742 static void
2743 process_constraint (constraint_t t)
2744 {
2745 struct constraint_expr rhs = t->rhs;
2746 struct constraint_expr lhs = t->lhs;
2747
2748 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2749 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2750
2751 /* If we didn't get any useful constraint from the lhs we get
2752 &ANYTHING as fallback from get_constraint_for. Deal with
2753 it here by turning it into *ANYTHING. */
2754 if (lhs.type == ADDRESSOF
2755 && lhs.var == anything_id)
2756 lhs.type = DEREF;
2757
2758 /* ADDRESSOF on the lhs is invalid. */
2759 gcc_assert (lhs.type != ADDRESSOF);
2760
2761 /* This can happen in our IR with things like n->a = *p */
2762 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2763 {
2764 /* Split into tmp = *rhs, *lhs = tmp */
2765 tree rhsdecl = get_varinfo (rhs.var)->decl;
2766 tree pointertype = TREE_TYPE (rhsdecl);
2767 tree pointedtotype = TREE_TYPE (pointertype);
2768 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2769 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2770
2771 process_constraint (new_constraint (tmplhs, rhs));
2772 process_constraint (new_constraint (lhs, tmplhs));
2773 }
2774 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2775 {
2776 /* Split into tmp = &rhs, *lhs = tmp */
2777 tree rhsdecl = get_varinfo (rhs.var)->decl;
2778 tree pointertype = TREE_TYPE (rhsdecl);
2779 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2780 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2781
2782 process_constraint (new_constraint (tmplhs, rhs));
2783 process_constraint (new_constraint (lhs, tmplhs));
2784 }
2785 else
2786 {
2787 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2788 VEC_safe_push (constraint_t, heap, constraints, t);
2789 }
2790 }
2791
2792 /* Return true if TYPE is a type that could contain pointers. */
2793
2794 static bool
2795 type_could_have_pointers (tree type)
2796 {
2797 if (POINTER_TYPE_P (type))
2798 return true;
2799
2800 if (TREE_CODE (type) == ARRAY_TYPE)
2801 return type_could_have_pointers (TREE_TYPE (type));
2802
2803 return AGGREGATE_TYPE_P (type);
2804 }
2805
2806 /* Return true if T is a variable of a type that could contain
2807 pointers. */
2808
2809 static bool
2810 could_have_pointers (tree t)
2811 {
2812 return type_could_have_pointers (TREE_TYPE (t));
2813 }
2814
2815 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2816 structure. */
2817
2818 static HOST_WIDE_INT
2819 bitpos_of_field (const tree fdecl)
2820 {
2821
2822 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2823 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2824 return -1;
2825
2826 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2827 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2828 }
2829
2830
2831 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2832 resulting constraint expressions in *RESULTS. */
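
/* Sketch (layout invented): for q = p + 8 with field-sensitive PTA, an
   ADDRESSOF element &s in the solution of p is rewritten to the sub-field
   of s that overlaps bit offset 8 * BITS_PER_UNIT (and possibly the
   following sub-field, see below); a non-constant or overflowing offset
   degrades to UNKNOWN_OFFSET, which conservatively adds all
   sub-fields.  */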
2833
2834 static void
2835 get_constraint_for_ptr_offset (tree ptr, tree offset,
2836 VEC (ce_s, heap) **results)
2837 {
2838 struct constraint_expr *c;
2839 unsigned int j, n;
2840 HOST_WIDE_INT rhsunitoffset, rhsoffset;
2841
2842 /* If we do not do field-sensitive PTA, adding offsets to pointers
2843 does not change the points-to solution. */
2844 if (!use_field_sensitive)
2845 {
2846 get_constraint_for (ptr, results);
2847 return;
2848 }
2849
2850 /* If the offset is not a non-negative integer constant that fits
2851 in a HOST_WIDE_INT, we have to fall back to a conservative
2852 solution which includes all sub-fields of all pointed-to
2853 variables of ptr. */
2854 if (!host_integerp (offset, 0))
2855 rhsoffset = UNKNOWN_OFFSET;
2856 else
2857 {
2858 /* Make sure the bit-offset also fits. */
2859 rhsunitoffset = TREE_INT_CST_LOW (offset);
2860 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2861 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2862 rhsoffset = UNKNOWN_OFFSET;
2863 }
2864
2865 get_constraint_for (ptr, results);
2866 if (rhsoffset == 0)
2867 return;
2868
2869 /* As we are eventually appending to *RESULTS, do not use
2870 VEC_iterate here. */
2871 n = VEC_length (ce_s, *results);
2872 for (j = 0; j < n; j++)
2873 {
2874 varinfo_t curr;
2875 c = VEC_index (ce_s, *results, j);
2876 curr = get_varinfo (c->var);
2877
2878 if (c->type == ADDRESSOF
2879 /* If this varinfo represents a full variable just use it. */
2880 && curr->is_full_var)
2881 c->offset = 0;
2882 else if (c->type == ADDRESSOF
2883 /* If we do not know the offset add all subfields. */
2884 && rhsoffset == UNKNOWN_OFFSET)
2885 {
2886 varinfo_t temp = lookup_vi_for_tree (curr->decl);
2887 do
2888 {
2889 struct constraint_expr c2;
2890 c2.var = temp->id;
2891 c2.type = ADDRESSOF;
2892 c2.offset = 0;
2893 VEC_safe_push (ce_s, heap, *results, &c2);
2894 temp = temp->next;
2895 }
2896 while (temp);
2897 }
2898 else if (c->type == ADDRESSOF)
2899 {
2900 varinfo_t temp;
2901 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
2902
2903 /* Search the sub-field which overlaps with the
2904 pointed-to offset. If the result is outside of the variable
2905 we have to provide a conservative result, as the variable is
2906 still reachable from the resulting pointer (even though it
2907 technically cannot point to anything). The last and first
2908 sub-fields are such conservative results.
2909 ??? If we always had a sub-field for &object + 1 then
2910 we could represent this in a more precise way. */
2911 if (rhsoffset < 0
2912 && curr->offset < offset)
2913 offset = 0;
2914 temp = first_or_preceding_vi_for_offset (curr, offset);
2915
2916 /* If the found variable is not exactly at the pointed to
2917 result, we have to include the next variable in the
2918 solution as well. Otherwise two increments by offset / 2
2919 do not result in the same or a conservative superset
2920 solution. */
2921 if (temp->offset != offset
2922 && temp->next != NULL)
2923 {
2924 struct constraint_expr c2;
2925 c2.var = temp->next->id;
2926 c2.type = ADDRESSOF;
2927 c2.offset = 0;
2928 VEC_safe_push (ce_s, heap, *results, &c2);
2929 }
2930 c->var = temp->id;
2931 c->offset = 0;
2932 }
2933 else
2934 c->offset = rhsoffset;
2935 }
2936 }
2937
2938
2939 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2940 If ADDRESS_P is true, the result will have its address taken. */
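
/* Sketch (layout invented): for an access like x.b, where the varinfo chain
   for x has sub-fields at bit offsets 0 and 32, get_ref_base_and_extent
   reports bitpos 32 and the SCALAR result for the base is narrowed to the
   sub-field(s) overlapping [32, 32 + size); for a DEREF base the bit
   position simply becomes the constraint offset (or UNKNOWN_OFFSET if it is
   not exactly known).  */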
2941
2942 static void
2943 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2944 bool address_p)
2945 {
2946 tree orig_t = t;
2947 HOST_WIDE_INT bitsize = -1;
2948 HOST_WIDE_INT bitmaxsize = -1;
2949 HOST_WIDE_INT bitpos;
2950 tree forzero;
2951 struct constraint_expr *result;
2952
2953 /* Some people like to do cute things like take the address of
2954 &0->a.b */
2955 forzero = t;
2956 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2957 forzero = TREE_OPERAND (forzero, 0);
2958
2959 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2960 {
2961 struct constraint_expr temp;
2962
2963 temp.offset = 0;
2964 temp.var = integer_id;
2965 temp.type = SCALAR;
2966 VEC_safe_push (ce_s, heap, *results, &temp);
2967 return;
2968 }
2969
2970 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2971
2972 /* Pretend to take the address of the base, we'll take care of
2973 adding the required subset of sub-fields below. */
2974 get_constraint_for_1 (t, results, true);
2975 gcc_assert (VEC_length (ce_s, *results) == 1);
2976 result = VEC_last (ce_s, *results);
2977
2978 if (result->type == SCALAR
2979 && get_varinfo (result->var)->is_full_var)
2980 /* For single-field vars do not bother about the offset. */
2981 result->offset = 0;
2982 else if (result->type == SCALAR)
2983 {
2984 /* In languages like C, you can access one past the end of an
2985 array. You aren't allowed to dereference it, so we can
2986 ignore this constraint. When we handle pointer subtraction,
2987 we may have to do something cute here. */
2988
2989 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2990 && bitmaxsize != 0)
2991 {
2992 /* It's also not true that the constraint will actually start at the
2993 right offset; it may start in some padding. We only care about
2994 setting the constraint to the first actual field it touches, so
2995 walk to find it. */
2996 struct constraint_expr cexpr = *result;
2997 varinfo_t curr;
2998 VEC_pop (ce_s, *results);
2999 cexpr.offset = 0;
3000 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3001 {
3002 if (ranges_overlap_p (curr->offset, curr->size,
3003 bitpos, bitmaxsize))
3004 {
3005 cexpr.var = curr->id;
3006 VEC_safe_push (ce_s, heap, *results, &cexpr);
3007 if (address_p)
3008 break;
3009 }
3010 }
3011 /* If we are going to take the address of this field then
3012 to be able to compute reachability correctly add at least
3013 the last field of the variable. */
3014 if (address_p
3015 && VEC_length (ce_s, *results) == 0)
3016 {
3017 curr = get_varinfo (cexpr.var);
3018 while (curr->next != NULL)
3019 curr = curr->next;
3020 cexpr.var = curr->id;
3021 VEC_safe_push (ce_s, heap, *results, &cexpr);
3022 }
3023 else
3024 /* Assert that we found *some* field there. The user couldn't be
3025 accessing *only* padding. The one exception is accessing one past
3026 the end of an array embedded in a struct, which can land entirely
3027 in padding, hence the ref_contains_array_ref escape below. */
3028 gcc_assert (VEC_length (ce_s, *results) >= 1
3029 || ref_contains_array_ref (orig_t));
3030 }
3031 else if (bitmaxsize == 0)
3032 {
3033 if (dump_file && (dump_flags & TDF_DETAILS))
3034 fprintf (dump_file, "Access to zero-sized part of variable, "
3035 "ignoring\n");
3036 }
3037 else
3038 if (dump_file && (dump_flags & TDF_DETAILS))
3039 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3040 }
3041 else if (result->type == DEREF)
3042 {
3043 /* If we do not know exactly where the access goes say so. Note
3044 that only for non-structure accesses do we know that we access
3045 at most one subfield of any variable. */
3046 if (bitpos == -1
3047 || bitsize != bitmaxsize
3048 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
3049 result->offset = UNKNOWN_OFFSET;
3050 else
3051 result->offset = bitpos;
3052 }
3053 else if (result->type == ADDRESSOF)
3054 {
3055 /* We can end up here for component references on a
3056 VIEW_CONVERT_EXPR <>(&foobar). */
3057 result->type = SCALAR;
3058 result->var = anything_id;
3059 result->offset = 0;
3060 }
3061 else
3062 gcc_unreachable ();
3063 }
3064
3065
3066 /* Dereference the constraint expression CONS, and return the result.
3067 DEREF (ADDRESSOF) = SCALAR
3068 DEREF (SCALAR) = DEREF
3069 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3070 This is needed so that we can handle dereferencing DEREF constraints. */
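
/* For example (illustrative): applied to the three expression kinds, &x
   becomes x, x becomes *x, and *x becomes "dereftmp = *x" followed by
   *dereftmp, i.e. a temporary is materialised so we never build a
   doubly-dereferenced constraint expression.  */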
3071
3072 static void
3073 do_deref (VEC (ce_s, heap) **constraints)
3074 {
3075 struct constraint_expr *c;
3076 unsigned int i = 0;
3077
3078 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3079 {
3080 if (c->type == SCALAR)
3081 c->type = DEREF;
3082 else if (c->type == ADDRESSOF)
3083 c->type = SCALAR;
3084 else if (c->type == DEREF)
3085 {
3086 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
3087 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
3088 process_constraint (new_constraint (tmplhs, *c));
3089 c->var = tmplhs.var;
3090 }
3091 else
3092 gcc_unreachable ();
3093 }
3094 }
3095
3096 /* Given a tree T, return the constraint expression for it. */
3097
3098 static void
3099 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3100 {
3101 struct constraint_expr temp;
3102
3103 /* x = integer is all glommed to a single variable, which doesn't
3104 point to anything by itself. That is, of course, unless it is an
3105 integer constant being treated as a pointer, in which case, we
3106 will return that this is really the addressof anything. This
3107 happens below, since it will fall into the default case. The only
3108 case we know something about an integer treated like a pointer is
3109 when it is the NULL pointer, and then we just say it points to
3110 NULL.
3111
3112 Do not do that if -fno-delete-null-pointer-checks though, because
3113 in that case *NULL does not fail, so it _should_ alias *anything.
3114 It is not worth adding a new option or renaming the existing one,
3115 since this case is relatively obscure. */
3116 if (flag_delete_null_pointer_checks
3117 && ((TREE_CODE (t) == INTEGER_CST
3118 && integer_zerop (t))
3119 /* The only valid CONSTRUCTORs in gimple with pointer typed
3120 elements are zero-initializers. */
3121 || TREE_CODE (t) == CONSTRUCTOR))
3122 {
3123 temp.var = nothing_id;
3124 temp.type = ADDRESSOF;
3125 temp.offset = 0;
3126 VEC_safe_push (ce_s, heap, *results, &temp);
3127 return;
3128 }
3129
3130 /* String constants are read-only. */
3131 if (TREE_CODE (t) == STRING_CST)
3132 {
3133 temp.var = readonly_id;
3134 temp.type = SCALAR;
3135 temp.offset = 0;
3136 VEC_safe_push (ce_s, heap, *results, &temp);
3137 return;
3138 }
3139
3140 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3141 {
3142 case tcc_expression:
3143 {
3144 switch (TREE_CODE (t))
3145 {
3146 case ADDR_EXPR:
3147 {
3148 struct constraint_expr *c;
3149 unsigned int i;
3150 tree exp = TREE_OPERAND (t, 0);
3151
3152 get_constraint_for_1 (exp, results, true);
3153
3154 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3155 {
3156 if (c->type == DEREF)
3157 c->type = SCALAR;
3158 else
3159 c->type = ADDRESSOF;
3160 }
3161 return;
3162 }
3163 break;
3164 default:;
3165 }
3166 break;
3167 }
3168 case tcc_reference:
3169 {
3170 switch (TREE_CODE (t))
3171 {
3172 case INDIRECT_REF:
3173 {
3174 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3175 do_deref (results);
3176 return;
3177 }
3178 case ARRAY_REF:
3179 case ARRAY_RANGE_REF:
3180 case COMPONENT_REF:
3181 get_constraint_for_component_ref (t, results, address_p);
3182 return;
3183 case VIEW_CONVERT_EXPR:
3184 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3185 return;
3186 /* We are missing handling for TARGET_MEM_REF here. */
3187 default:;
3188 }
3189 break;
3190 }
3191 case tcc_exceptional:
3192 {
3193 switch (TREE_CODE (t))
3194 {
3195 case SSA_NAME:
3196 {
3197 get_constraint_for_ssa_var (t, results, address_p);
3198 return;
3199 }
3200 default:;
3201 }
3202 break;
3203 }
3204 case tcc_declaration:
3205 {
3206 get_constraint_for_ssa_var (t, results, address_p);
3207 return;
3208 }
3209 default:;
3210 }
3211
3212 /* The default fallback is a constraint from anything. */
3213 temp.type = ADDRESSOF;
3214 temp.var = anything_id;
3215 temp.offset = 0;
3216 VEC_safe_push (ce_s, heap, *results, &temp);
3217 }
3218
3219 /* Given a gimple tree T, return the constraint expression vector for it. */
3220
3221 static void
3222 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3223 {
3224 gcc_assert (VEC_length (ce_s, *results) == 0);
3225
3226 get_constraint_for_1 (t, results, false);
3227 }
3228
3229 /* Handle aggregate copies by expanding into copies of the respective
3230 fields of the structures. */
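
/* Sketch (struct invented):

     struct S { int *a; int *b; } x, y;
     x = y;

   is expanded field-wise into the constraints x.a = y.a and x.b = y.b;
   if either side is a DEREF (or the lhs is &ANYTHING) a single
   "structcopydereftmp" temporary is routed through instead.  */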
3231
3232 static void
3233 do_structure_copy (tree lhsop, tree rhsop)
3234 {
3235 struct constraint_expr *lhsp, *rhsp;
3236 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3237 unsigned j;
3238
3239 get_constraint_for (lhsop, &lhsc);
3240 get_constraint_for (rhsop, &rhsc);
3241 lhsp = VEC_index (ce_s, lhsc, 0);
3242 rhsp = VEC_index (ce_s, rhsc, 0);
3243 if (lhsp->type == DEREF
3244 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3245 || rhsp->type == DEREF)
3246 {
3247 struct constraint_expr tmp;
3248 tree tmpvar = create_tmp_var_raw (ptr_type_node,
3249 "structcopydereftmp");
3250 tmp.var = get_vi_for_tree (tmpvar)->id;
3251 tmp.type = SCALAR;
3252 tmp.offset = 0;
3253 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
3254 process_constraint (new_constraint (tmp, *rhsp));
3255 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); ++j)
3256 process_constraint (new_constraint (*lhsp, tmp));
3257 }
3258 else if (lhsp->type == SCALAR
3259 && (rhsp->type == SCALAR
3260 || rhsp->type == ADDRESSOF))
3261 {
3262 tree lhsbase, rhsbase;
3263 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3264 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3265 unsigned k = 0;
3266 lhsbase = get_ref_base_and_extent (lhsop, &lhsoffset,
3267 &lhssize, &lhsmaxsize);
3268 rhsbase = get_ref_base_and_extent (rhsop, &rhsoffset,
3269 &rhssize, &rhsmaxsize);
3270 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3271 {
3272 varinfo_t lhsv, rhsv;
3273 rhsp = VEC_index (ce_s, rhsc, k);
3274 lhsv = get_varinfo (lhsp->var);
3275 rhsv = get_varinfo (rhsp->var);
3276 if (lhsv->may_have_pointers
3277 && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3278 rhsv->offset + lhsoffset, rhsv->size))
3279 process_constraint (new_constraint (*lhsp, *rhsp));
3280 if (lhsv->offset + rhsoffset + lhsv->size
3281 > rhsv->offset + lhsoffset + rhsv->size)
3282 {
3283 ++k;
3284 if (k >= VEC_length (ce_s, rhsc))
3285 break;
3286 }
3287 else
3288 ++j;
3289 }
3290 }
3291 else
3292 gcc_unreachable ();
3293
3294 VEC_free (ce_s, heap, lhsc);
3295 VEC_free (ce_s, heap, rhsc);
3296 }
3297
3298 /* Create a constraint ID = OP. */
3299
3300 static void
3301 make_constraint_to (unsigned id, tree op)
3302 {
3303 VEC(ce_s, heap) *rhsc = NULL;
3304 struct constraint_expr *c;
3305 struct constraint_expr includes;
3306 unsigned int j;
3307
3308 includes.var = id;
3309 includes.offset = 0;
3310 includes.type = SCALAR;
3311
3312 get_constraint_for (op, &rhsc);
3313 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3314 process_constraint (new_constraint (includes, *c));
3315 VEC_free (ce_s, heap, rhsc);
3316 }
3317
3318 /* Make constraints necessary to make OP escape. */
3319
3320 static void
3321 make_escape_constraint (tree op)
3322 {
3323 make_constraint_to (escaped_id, op);
3324 }
3325
3326 /* For non-IPA mode, generate constraints necessary for a call on the
3327 RHS. */
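
/* Illustration (call invented): in non-IPA mode a call foo (p, q) to an
   ordinary function makes every pointer argument escape (ESCAPED = p,
   ESCAPED = q), makes the static chain escape if there is one, and treats
   the returned value as nonlocal memory.  */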
3328
3329 static void
3330 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3331 {
3332 struct constraint_expr rhsc;
3333 unsigned i;
3334
3335 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3336 {
3337 tree arg = gimple_call_arg (stmt, i);
3338
3339 /* Find those pointers being passed, and make sure they end up
3340 pointing to anything. */
3341 if (could_have_pointers (arg))
3342 make_escape_constraint (arg);
3343 }
3344
3345 /* The static chain escapes as well. */
3346 if (gimple_call_chain (stmt))
3347 make_escape_constraint (gimple_call_chain (stmt));
3348
3349 /* Regular functions return nonlocal memory. */
3350 rhsc.var = nonlocal_id;
3351 rhsc.offset = 0;
3352 rhsc.type = SCALAR;
3353 VEC_safe_push (ce_s, heap, *results, &rhsc);
3354 }
3355
3356 /* For non-IPA mode, generate constraints necessary for a call
3357 that returns a pointer and assigns it to LHS. This simply makes
3358 the LHS point to global and escaped variables. */
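
/* Sketch: for p = malloc (n) (an ECF_MALLOC function) a fresh HEAP variable
   is created and the constraint p = &HEAP is added; for other calls each
   element of the passed RHSC vector is assigned to the LHS, with ESCAPED
   added as an extra destination when the LHS is a global decl.  */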
3359
3360 static void
3361 handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc)
3362 {
3363 VEC(ce_s, heap) *lhsc = NULL;
3364 unsigned int j;
3365 struct constraint_expr *lhsp;
3366
3367 get_constraint_for (lhs, &lhsc);
3368
3369 if (flags & ECF_MALLOC)
3370 {
3371 struct constraint_expr rhsc;
3372 tree heapvar = heapvar_lookup (lhs);
3373 varinfo_t vi;
3374
3375 if (heapvar == NULL)
3376 {
3377 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3378 DECL_EXTERNAL (heapvar) = 1;
3379 get_var_ann (heapvar)->is_heapvar = 1;
3380 if (gimple_referenced_vars (cfun))
3381 add_referenced_var (heapvar);
3382 heapvar_insert (lhs, heapvar);
3383 }
3384
3385 rhsc.var = create_variable_info_for (heapvar,
3386 alias_get_name (heapvar));
3387 vi = get_varinfo (rhsc.var);
3388 vi->is_artificial_var = 1;
3389 vi->is_heap_var = 1;
3390 vi->is_unknown_size_var = true;
3391 vi->fullsize = ~0;
3392 vi->size = ~0;
3393 rhsc.type = ADDRESSOF;
3394 rhsc.offset = 0;
3395 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3396 process_constraint (new_constraint (*lhsp, rhsc));
3397 }
3398 else if (VEC_length (ce_s, rhsc) > 0)
3399 {
3400 struct constraint_expr *lhsp, *rhsp;
3401 unsigned int i, j;
3402 /* If the store is to a global decl make sure to
3403 add proper escape constraints. */
3404 lhs = get_base_address (lhs);
3405 if (lhs
3406 && DECL_P (lhs)
3407 && is_global_var (lhs))
3408 {
3409 struct constraint_expr tmpc;
3410 tmpc.var = escaped_id;
3411 tmpc.offset = 0;
3412 tmpc.type = SCALAR;
3413 VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3414 }
3415 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3416 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
3417 process_constraint (new_constraint (*lhsp, *rhsp));
3418 }
3419 VEC_free (ce_s, heap, lhsc);
3420 }
3421
3422 /* For non-IPA mode, generate constraints necessary for a call of a
3423 const function that returns a pointer in the statement STMT. */
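
/* Illustration (call invented): for p = cfoo (q) where cfoo is const, the
   result may point to what q points to (routed through a shared "consttmp"
   temporary) and to addresses of global memory (&NONLOCAL); q itself is not
   made to escape.  */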
3424
3425 static void
3426 handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3427 {
3428 struct constraint_expr rhsc, tmpc = {SCALAR, 0, 0};
3429 tree tmpvar = NULL_TREE;
3430 unsigned int k;
3431
3432 /* Treat nested const functions the same as pure functions as far
3433 as the static chain is concerned. */
3434 if (gimple_call_chain (stmt))
3435 {
3436 make_constraint_to (callused_id, gimple_call_chain (stmt));
3437 rhsc.var = callused_id;
3438 rhsc.offset = 0;
3439 rhsc.type = SCALAR;
3440 VEC_safe_push (ce_s, heap, *results, &rhsc);
3441 }
3442
3443 /* May return arguments. */
3444 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3445 {
3446 tree arg = gimple_call_arg (stmt, k);
3447
3448 if (could_have_pointers (arg))
3449 {
3450 VEC(ce_s, heap) *argc = NULL;
3451 struct constraint_expr *argp;
3452 int i;
3453
3454 /* We always use a temporary here, otherwise we end up with
3455 a quadratic amount of constraints for
3456 large_struct = const_call (large_struct);
3457 with field-sensitive PTA. */
3458 if (tmpvar == NULL_TREE)
3459 {
3460 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3461 tmpc = get_constraint_exp_for_temp (tmpvar);
3462 }
3463
3464 get_constraint_for (arg, &argc);
3465 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3466 process_constraint (new_constraint (tmpc, *argp));
3467 VEC_free (ce_s, heap, argc);
3468 }
3469 }
3470 if (tmpvar != NULL_TREE)
3471 VEC_safe_push (ce_s, heap, *results, &tmpc);
3472
3473 /* May return addresses of globals. */
3474 rhsc.var = nonlocal_id;
3475 rhsc.offset = 0;
3476 rhsc.type = ADDRESSOF;
3477 VEC_safe_push (ce_s, heap, *results, &rhsc);
3478 }
3479
3480 /* For non-IPA mode, generate constraints necessary for a call to a
3481 pure function in statement STMT. */
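
/* Illustration (call invented): for p = pfoo (q) where pfoo is pure, memory
   reachable from q becomes call-used (CALLUSED = q) and the result may
   point to call-used and nonlocal memory; nothing is forced to escape.  */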
3482
3483 static void
3484 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3485 {
3486 struct constraint_expr rhsc;
3487 unsigned i;
3488 bool need_callused = false;
3489
3490 /* Memory reached from pointer arguments is call-used. */
3491 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3492 {
3493 tree arg = gimple_call_arg (stmt, i);
3494
3495 if (could_have_pointers (arg))
3496 {
3497 make_constraint_to (callused_id, arg);
3498 need_callused = true;
3499 }
3500 }
3501
3502 /* The static chain is used as well. */
3503 if (gimple_call_chain (stmt))
3504 {
3505 make_constraint_to (callused_id, gimple_call_chain (stmt));
3506 need_callused = true;
3507 }
3508
3509 /* Pure functions may return callused and nonlocal memory. */
3510 if (need_callused)
3511 {
3512 rhsc.var = callused_id;
3513 rhsc.offset = 0;
3514 rhsc.type = SCALAR;
3515 VEC_safe_push (ce_s, heap, *results, &rhsc);
3516 }
3517 rhsc.var = nonlocal_id;
3518 rhsc.offset = 0;
3519 rhsc.type = SCALAR;
3520 VEC_safe_push (ce_s, heap, *results, &rhsc);
3521 }
3522
3523 /* Walk statement T setting up aliasing constraints according to the
3524 references found in T. This function is the main part of the
3525 constraint builder; it handles PHI nodes, assignments, calls, asm
3526 statements and dynamic type changes. */
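
/* A compressed example of the constraints generated here (all names
   invented, non-IPA mode):

     p_1 = &x;        ->   p_1 = &x
     q_2 = p_1;       ->   q_2 = p_1
     *q_2 = r_3;      ->   *q_2 = r_3
     foo (q_2);       ->   ESCAPED = q_2

   PHI nodes assign every argument to the result, aggregate copies go
   through do_structure_copy, and asm statements make their memory operands
   escape.  */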
3527
3528 static void
3529 find_func_aliases (gimple origt)
3530 {
3531 gimple t = origt;
3532 VEC(ce_s, heap) *lhsc = NULL;
3533 VEC(ce_s, heap) *rhsc = NULL;
3534 struct constraint_expr *c;
3535 enum escape_type stmt_escape_type;
3536
3537 /* Now build constraints expressions. */
3538 if (gimple_code (t) == GIMPLE_PHI)
3539 {
3540 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3541
3542 /* Only care about pointers and structures containing
3543 pointers. */
3544 if (could_have_pointers (gimple_phi_result (t)))
3545 {
3546 size_t i;
3547 unsigned int j;
3548
3549 /* For a phi node, assign all the arguments to
3550 the result. */
3551 get_constraint_for (gimple_phi_result (t), &lhsc);
3552 for (i = 0; i < gimple_phi_num_args (t); i++)
3553 {
3554 tree rhstype;
3555 tree strippedrhs = PHI_ARG_DEF (t, i);
3556
3557 STRIP_NOPS (strippedrhs);
3558 rhstype = TREE_TYPE (strippedrhs);
3559 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3560
3561 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3562 {
3563 struct constraint_expr *c2;
3564 while (VEC_length (ce_s, rhsc) > 0)
3565 {
3566 c2 = VEC_last (ce_s, rhsc);
3567 process_constraint (new_constraint (*c, *c2));
3568 VEC_pop (ce_s, rhsc);
3569 }
3570 }
3571 }
3572 }
3573 }
3574 /* In IPA mode, we need to generate constraints to pass call
3575 arguments through their calls. There are two cases,
3576 either a GIMPLE_CALL returning a value, or just a plain
3577 GIMPLE_CALL when it does not.
3578
3579 In non-ipa mode, we need to generate constraints for each
3580 pointer passed by address. */
3581 else if (is_gimple_call (t))
3582 {
3583 if (!in_ipa_mode)
3584 {
3585 VEC(ce_s, heap) *rhsc = NULL;
3586 int flags = gimple_call_flags (t);
3587
3588 /* Const functions can return their arguments and addresses
3589 of global memory but not of escaped memory. */
3590 if (flags & (ECF_CONST|ECF_NOVOPS))
3591 {
3592 if (gimple_call_lhs (t)
3593 && could_have_pointers (gimple_call_lhs (t)))
3594 handle_const_call (t, &rhsc);
3595 }
3596 /* Pure functions can return addresses in and of memory
3597 reachable from their arguments, but they are not an escape
3598 point for reachable memory of their arguments. */
3599 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
3600 handle_pure_call (t, &rhsc);
3601 else
3602 handle_rhs_call (t, &rhsc);
3603 if (gimple_call_lhs (t)
3604 && could_have_pointers (gimple_call_lhs (t)))
3605 handle_lhs_call (gimple_call_lhs (t), flags, rhsc);
3606 VEC_free (ce_s, heap, rhsc);
3607 }
3608 else
3609 {
3610 tree lhsop;
3611 varinfo_t fi;
3612 int i = 1;
3613 size_t j;
3614 tree decl;
3615
3616 lhsop = gimple_call_lhs (t);
3617 decl = gimple_call_fndecl (t);
3618
3619 /* If we can directly resolve the function being called, do so.
3620 Otherwise, it must be some sort of indirect expression that
3621 we should still be able to handle. */
3622 if (decl)
3623 fi = get_vi_for_tree (decl);
3624 else
3625 {
3626 decl = gimple_call_fn (t);
3627 fi = get_vi_for_tree (decl);
3628 }
3629
3630 /* Assign all the passed arguments to the appropriate incoming
3631 parameters of the function. */
3632 for (j = 0; j < gimple_call_num_args (t); j++)
3633 {
3634 struct constraint_expr lhs;
3635 struct constraint_expr *rhsp;
3636 tree arg = gimple_call_arg (t, j);
3637
3638 get_constraint_for (arg, &rhsc);
3639 if (TREE_CODE (decl) != FUNCTION_DECL)
3640 {
3641 lhs.type = DEREF;
3642 lhs.var = fi->id;
3643 lhs.offset = i;
3644 }
3645 else
3646 {
3647 lhs.type = SCALAR;
3648 lhs.var = first_vi_for_offset (fi, i)->id;
3649 lhs.offset = 0;
3650 }
3651 while (VEC_length (ce_s, rhsc) != 0)
3652 {
3653 rhsp = VEC_last (ce_s, rhsc);
3654 process_constraint (new_constraint (lhs, *rhsp));
3655 VEC_pop (ce_s, rhsc);
3656 }
3657 i++;
3658 }
3659
3660 /* If we are returning a value, assign it to the result. */
3661 if (lhsop)
3662 {
3663 struct constraint_expr rhs;
3664 struct constraint_expr *lhsp;
3665 unsigned int j = 0;
3666
3667 get_constraint_for (lhsop, &lhsc);
3668 if (TREE_CODE (decl) != FUNCTION_DECL)
3669 {
3670 rhs.type = DEREF;
3671 rhs.var = fi->id;
3672 rhs.offset = i;
3673 }
3674 else
3675 {
3676 rhs.type = SCALAR;
3677 rhs.var = first_vi_for_offset (fi, i)->id;
3678 rhs.offset = 0;
3679 }
3680 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3681 process_constraint (new_constraint (*lhsp, rhs));
3682 }
3683 }
3684 }
3685 /* Otherwise, just a regular assignment statement. Only care about
3686 operations with pointer result, others are dealt with as escape
3687 points if they have pointer operands. */
3688 else if (is_gimple_assign (t)
3689 && could_have_pointers (gimple_assign_lhs (t)))
3690 {
3692 tree lhsop = gimple_assign_lhs (t);
3693 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
3694
3695 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
3696 do_structure_copy (lhsop, rhsop);
3697 else
3698 {
3699 unsigned int j;
3700 struct constraint_expr temp;
3701 get_constraint_for (lhsop, &lhsc);
3702
3703 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
3704 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
3705 gimple_assign_rhs2 (t), &rhsc);
3706 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3707 && !(POINTER_TYPE_P (gimple_expr_type (t))
3708 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
3709 || gimple_assign_single_p (t))
3710 get_constraint_for (rhsop, &rhsc);
3711 else
3712 {
3713 temp.type = ADDRESSOF;
3714 temp.var = anything_id;
3715 temp.offset = 0;
3716 VEC_safe_push (ce_s, heap, rhsc, &temp);
3717 }
3718 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3719 {
3720 struct constraint_expr *c2;
3721 unsigned int k;
3722
3723 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3724 process_constraint (new_constraint (*c, *c2));
3725 }
3726 }
3727 }
3728 else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
3729 {
3730 unsigned int j;
3731
3732 get_constraint_for (gimple_cdt_location (t), &lhsc);
3733 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3734 get_varinfo (c->var)->no_tbaa_pruning = true;
3735 }
3736
3737 stmt_escape_type = is_escape_site (t);
3738 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3739 {
3740 gcc_assert (is_gimple_assign (t));
3741 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
3742 {
3743 tree rhs = gimple_assign_rhs1 (t);
3744 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3745 if (base
3746 && (!DECL_P (base)
3747 || !is_global_var (base)))
3748 make_escape_constraint (rhs);
3749 }
3750 else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
3751 == GIMPLE_SINGLE_RHS)
3752 {
3753 if (could_have_pointers (gimple_assign_rhs1 (t)))
3754 make_escape_constraint (gimple_assign_rhs1 (t));
3755 }
3756 else
3757 gcc_unreachable ();
3758 }
3759 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3760 {
3761 gcc_assert (is_gimple_assign (t));
3762 gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3763 || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
3764 make_escape_constraint (gimple_assign_rhs1 (t));
3765 }
3766 else if (stmt_escape_type == ESCAPE_TO_ASM)
3767 {
3768 unsigned i, noutputs;
3769 const char **oconstraints;
3770 const char *constraint;
3771 bool allows_mem, allows_reg, is_inout;
3772
3773 noutputs = gimple_asm_noutputs (t);
3774 oconstraints = XALLOCAVEC (const char *, noutputs);
3775
3776 for (i = 0; i < noutputs; ++i)
3777 {
3778 tree link = gimple_asm_output_op (t, i);
3779 tree op = TREE_VALUE (link);
3780
3781 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3782 oconstraints[i] = constraint;
3783 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3784 &allows_reg, &is_inout);
3785
3786 /* A memory constraint makes the address of the operand escape. */
3787 if (!allows_reg && allows_mem)
3788 make_escape_constraint (build_fold_addr_expr (op));
3789
3790 /* The asm may read global memory, so outputs may point to
3791 any global memory. */
3792 if (op && could_have_pointers (op))
3793 {
3794 VEC(ce_s, heap) *lhsc = NULL;
3795 struct constraint_expr rhsc, *lhsp;
3796 unsigned j;
3797 get_constraint_for (op, &lhsc);
3798 rhsc.var = nonlocal_id;
3799 rhsc.offset = 0;
3800 rhsc.type = SCALAR;
3801 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3802 process_constraint (new_constraint (*lhsp, rhsc));
3803 VEC_free (ce_s, heap, lhsc);
3804 }
3805 }
3806 for (i = 0; i < gimple_asm_ninputs (t); ++i)
3807 {
3808 tree link = gimple_asm_input_op (t, i);
3809 tree op = TREE_VALUE (link);
3810
3811 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3812
3813 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
3814 &allows_mem, &allows_reg);
3815
3816 /* A memory constraint makes the address of the operand escape. */
3817 if (!allows_reg && allows_mem)
3818 make_escape_constraint (build_fold_addr_expr (op));
3819 /* Strictly we'd only need the constraint to ESCAPED if
3820 the asm clobbers memory; otherwise using CALLUSED
3821 would be enough. */
3822 else if (op && could_have_pointers (op))
3823 make_escape_constraint (op);
3824 }
3825 }
3826
3827 VEC_free (ce_s, heap, rhsc);
3828 VEC_free (ce_s, heap, lhsc);
3829 }
3830
3831
3832 /* Find the first varinfo in the same variable as START that overlaps with
3833 OFFSET. Return NULL if we can't find one. */
3834
3835 static varinfo_t
3836 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3837 {
3838 /* If the offset is outside of the variable, bail out. */
3839 if (offset >= start->fullsize)
3840 return NULL;
3841
3842 /* If we cannot reach OFFSET from START, look up the first field
3843 and start from there. */
3844 if (start->offset > offset)
3845 start = lookup_vi_for_tree (start->decl);
3846
3847 while (start)
3848 {
3849 /* We may not find a variable in the field list with the actual
3850 offset when we have glommed a structure to a variable.
3851 In that case, however, offset should still be within the size
3852 of the variable. */
3853 if (offset >= start->offset
3854 && offset < (start->offset + start->size))
3855 return start;
3856
3857 start = start->next;
3858 }
3859
3860 return NULL;
3861 }
3862
3863 /* Find the first varinfo in the same variable as START that overlaps with
3864 OFFSET. If there is no such varinfo the varinfo directly preceding
3865 OFFSET is returned. */
3866
3867 static varinfo_t
3868 first_or_preceding_vi_for_offset (varinfo_t start,
3869 unsigned HOST_WIDE_INT offset)
3870 {
3871 /* If we cannot reach OFFSET from START, look up the first field
3872 and start from there. */
3873 if (start->offset > offset)
3874 start = lookup_vi_for_tree (start->decl);
3875
3876 /* We may not find a variable in the field list with the actual
3877 offset when we have glommed a structure to a variable.
3878 In that case, however, offset should still be within the size
3879 of the variable.
3880 If we get beyond the offset we are looking for, return the field
3881 directly preceding it, which may be the last field. */
3882 while (start->next
3883 && offset >= start->offset
3884 && !(offset < (start->offset + start->size)))
3885 start = start->next;
3886
3887 return start;
3888 }
3889
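/* Editor's note: an illustrative usage sketch, not part of the original
   pass, contrasting the two offset lookups above.  Assume VI is the base
   varinfo of a structure variable with two 32-bit fields at bit offsets 0
   and 32 (fullsize 64); SOME_STRUCT_DECL is a hypothetical decl.  */
#if 0
{
  varinfo_t vi = lookup_vi_for_tree (some_struct_decl);

  /* Offset 32 lies inside the second field, so it is returned.  */
  varinfo_t f1 = first_vi_for_offset (vi, 32);

  /* Offset 70 is past the end of the variable: first_vi_for_offset gives
     up, while the "or preceding" variant falls back to the last field.  */
  varinfo_t f2 = first_vi_for_offset (vi, 70);              /* NULL */
  varinfo_t f3 = first_or_preceding_vi_for_offset (vi, 70); /* field at 32 */
}
#endif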
3890
3891 /* Insert the varinfo FIELD into the field list for BASE, at the front
3892 of the list. */
3893
3894 static void
3895 insert_into_field_list (varinfo_t base, varinfo_t field)
3896 {
3897 varinfo_t prev = base;
3898 varinfo_t curr = base->next;
3899
3900 field->next = curr;
3901 prev->next = field;
3902 }
3903
3904 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3905 offset. */
3906
3907 static void
3908 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3909 {
3910 varinfo_t prev = base;
3911 varinfo_t curr = base->next;
3912
3913 if (curr == NULL)
3914 {
3915 prev->next = field;
3916 field->next = NULL;
3917 }
3918 else
3919 {
3920 while (curr)
3921 {
3922 if (field->offset <= curr->offset)
3923 break;
3924 prev = curr;
3925 curr = curr->next;
3926 }
3927 field->next = prev->next;
3928 prev->next = field;
3929 }
3930 }
3931
3932 /* This structure is used during pushing fields onto the fieldstack
3933 to track the offset of the field, since bitpos_of_field gives it
3934 relative to its immediate containing type, and we want it relative
3935 to the ultimate containing object. */
3936
3937 struct fieldoff
3938 {
3939 /* Offset from the base of the base containing object to this field. */
3940 HOST_WIDE_INT offset;
3941
3942 /* Size, in bits, of the field. */
3943 unsigned HOST_WIDE_INT size;
3944
3945 unsigned has_unknown_size : 1;
3946
3947 unsigned may_have_pointers : 1;
3948 };
3949 typedef struct fieldoff fieldoff_s;
3950
3951 DEF_VEC_O(fieldoff_s);
3952 DEF_VEC_ALLOC_O(fieldoff_s,heap);
3953
3954 /* qsort comparison function for two fieldoff's PA and PB */
3955
3956 static int
3957 fieldoff_compare (const void *pa, const void *pb)
3958 {
3959 const fieldoff_s *foa = (const fieldoff_s *)pa;
3960 const fieldoff_s *fob = (const fieldoff_s *)pb;
3961 unsigned HOST_WIDE_INT foasize, fobsize;
3962
3963 if (foa->offset < fob->offset)
3964 return -1;
3965 else if (foa->offset > fob->offset)
3966 return 1;
3967
3968 foasize = foa->size;
3969 fobsize = fob->size;
3970 if (foasize < fobsize)
3971 return -1;
3972 else if (foasize > fobsize)
3973 return 1;
3974 return 0;
3975 }
3976
3977 /* Sort a fieldstack according to the field offset and sizes. */
3978 static void
3979 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3980 {
3981 qsort (VEC_address (fieldoff_s, fieldstack),
3982 VEC_length (fieldoff_s, fieldstack),
3983 sizeof (fieldoff_s),
3984 fieldoff_compare);
3985 }
3986
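/* Editor's note (illustrative, not from the original source): the
   comparator above orders fieldoffs by offset first and by size second,
   so for example {offset 0, size 8}, {offset 0, size 32} and
   {offset 32, size 32} already form a sorted fieldstack.  Sorting groups
   equal offsets next to each other, which is what lets check_for_overlaps
   below detect duplicates by looking only at adjacent entries.  */
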
3987 /* Return true if V is a tree that we can have subvars for.
3988 Normally, this is any aggregate type without overlapping fields;
3989 currently only RECORD_TYPE variables qualify. */
3990
3991 static inline bool
3992 var_can_have_subvars (const_tree v)
3993 {
3994 /* Volatile variables should never have subvars. */
3995 if (TREE_THIS_VOLATILE (v))
3996 return false;
3997
3998 /* Non decls or memory tags can never have subvars. */
3999 if (!DECL_P (v))
4000 return false;
4001
4002 /* Aggregates without overlapping fields can have subvars. */
4003 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4004 return true;
4005
4006 return false;
4007 }
4008
4009 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4010 the fields of TYPE onto fieldstack, recording their offsets along
4011 the way.
4012
4013 OFFSET is used to keep track of the offset in this entire
4014 structure, rather than just the immediately containing structure.
4015 Returns the number of fields pushed. */
4016
4017 static int
4018 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4019 HOST_WIDE_INT offset)
4020 {
4021 tree field;
4022 int count = 0;
4023
4024 if (TREE_CODE (type) != RECORD_TYPE)
4025 return 0;
4026
4027 /* If the vector of fields is growing too big, bail out early.
4028 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
4029 make sure this check fails. */
4030 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4031 return 0;
4032
4033 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4034 if (TREE_CODE (field) == FIELD_DECL)
4035 {
4036 bool push = false;
4037 int pushed = 0;
4038 HOST_WIDE_INT foff = bitpos_of_field (field);
4039
4040 if (!var_can_have_subvars (field)
4041 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4042 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4043 push = true;
4044 else if (!(pushed = push_fields_onto_fieldstack
4045 (TREE_TYPE (field), fieldstack, offset + foff))
4046 && (DECL_SIZE (field)
4047 && !integer_zerop (DECL_SIZE (field))))
4048 /* Empty structures may have actual size, like in C++. So, if we
4049 did not push any subfields and the size is nonzero, push the
4050 field itself onto the stack. */
4051 push = true;
4052
4053 if (push)
4054 {
4055 fieldoff_s *pair = NULL;
4056 bool has_unknown_size = false;
4057
4058 if (!VEC_empty (fieldoff_s, *fieldstack))
4059 pair = VEC_last (fieldoff_s, *fieldstack);
4060
4061 if (!DECL_SIZE (field)
4062 || !host_integerp (DECL_SIZE (field), 1))
4063 has_unknown_size = true;
4064
4065 /* If adjacent fields do not contain pointers, merge them. */
4066 if (pair
4067 && !pair->may_have_pointers
4068 && !could_have_pointers (field)
4069 && !pair->has_unknown_size
4070 && !has_unknown_size
4071 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4072 {
4073 pair = VEC_last (fieldoff_s, *fieldstack);
4074 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4075 }
4076 else
4077 {
4078 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4079 pair->offset = offset + foff;
4080 pair->has_unknown_size = has_unknown_size;
4081 if (!has_unknown_size)
4082 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4083 else
4084 pair->size = -1;
4085 pair->may_have_pointers = could_have_pointers (field);
4086 count++;
4087 }
4088 }
4089 else
4090 count += pushed;
4091 }
4092
4093 return count;
4094 }
4095
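/* Editor's note: an illustrative walk-through of the function above, not
   part of the original source.  Assuming 32-bit int and 64-bit pointers,
   for

     struct S { struct { int a; int b; } inner; char *p; };

   the fields are recorded with bit offsets relative to S itself.  Since
   inner.a and inner.b contain no pointers and are adjacent, they are
   merged into a single fieldoff {offset 0, size 64, may_have_pointers 0},
   while p becomes {offset 64, size 64, may_have_pointers 1}; the function
   returns 2.  */
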
4096 /* Create a constraint ID = &FROM. */
4097
4098 static void
4099 make_constraint_from (varinfo_t vi, int from)
4100 {
4101 struct constraint_expr lhs, rhs;
4102
4103 lhs.var = vi->id;
4104 lhs.offset = 0;
4105 lhs.type = SCALAR;
4106
4107 rhs.var = from;
4108 rhs.offset = 0;
4109 rhs.type = ADDRESSOF;
4110 process_constraint (new_constraint (lhs, rhs));
4111 }
4112
4113 /* Create a constraint ID = FROM. */
4114
4115 static void
4116 make_copy_constraint (varinfo_t vi, int from)
4117 {
4118 struct constraint_expr lhs, rhs;
4119
4120 lhs.var = vi->id;
4121 lhs.offset = 0;
4122 lhs.type = SCALAR;
4123
4124 rhs.var = from;
4125 rhs.offset = 0;
4126 rhs.type = SCALAR;
4127 process_constraint (new_constraint (lhs, rhs));
4128 }
4129
4130 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4131 if it is a varargs function. */
4132
4133 static unsigned int
4134 count_num_arguments (tree decl, bool *is_varargs)
4135 {
4136 unsigned int i = 0;
4137 tree t;
4138
4139 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4140 t;
4141 t = TREE_CHAIN (t))
4142 {
4143 if (TREE_VALUE (t) == void_type_node)
4144 break;
4145 i++;
4146 }
4147
4148 if (!t)
4149 *is_varargs = true;
4150 return i;
4151 }
4152
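/* Editor's note: illustrative only.  For a prototype "int f (int, char *)"
   the argument-type list ends in void_type_node, so the helper above
   returns 2 and leaves *IS_VARARGS untouched; for "int g (int, ...)" there
   is no terminating void_type_node, so it returns the number of named
   arguments and sets *IS_VARARGS to true.  */
#if 0
{
  /* f_decl and g_decl are hypothetical FUNCTION_DECLs.  */
  bool is_varargs = false;
  unsigned n1 = count_num_arguments (f_decl, &is_varargs); /* 2, false */
  unsigned n2 = count_num_arguments (g_decl, &is_varargs); /* 1, true  */
}
#endif
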
4153 /* Create the function info for DECL, using NAME, and return the index
4154 of the variable we've created for the function. */
4155
4156 static unsigned int
4157 create_function_info_for (tree decl, const char *name)
4158 {
4159 unsigned int index = VEC_length (varinfo_t, varmap);
4160 varinfo_t vi;
4161 tree arg;
4162 unsigned int i;
4163 bool is_varargs = false;
4164
4165 /* Create the variable info. */
4166
4167 vi = new_var_info (decl, index, name);
4168 vi->decl = decl;
4169 vi->offset = 0;
4170 vi->size = 1;
4171 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4172 insert_vi_for_tree (vi->decl, vi);
4173 VEC_safe_push (varinfo_t, heap, varmap, vi);
4174
4175 stats.total_vars++;
4176
4177 /* If it's varargs, we don't know how many arguments it has, so we
4178 can't do much. */
4179 if (is_varargs)
4180 {
4181 vi->fullsize = ~0;
4182 vi->size = ~0;
4183 vi->is_unknown_size_var = true;
4184 return index;
4185 }
4186
4187
4188 arg = DECL_ARGUMENTS (decl);
4189
4190 /* Set up variables for each argument. */
4191 for (i = 1; i < vi->fullsize; i++)
4192 {
4193 varinfo_t argvi;
4194 const char *newname;
4195 char *tempname;
4196 unsigned int newindex;
4197 tree argdecl = decl;
4198
4199 if (arg)
4200 argdecl = arg;
4201
4202 newindex = VEC_length (varinfo_t, varmap);
4203 asprintf (&tempname, "%s.arg%d", name, i-1);
4204 newname = ggc_strdup (tempname);
4205 free (tempname);
4206
4207 argvi = new_var_info (argdecl, newindex, newname);
4208 argvi->decl = argdecl;
4209 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4210 argvi->offset = i;
4211 argvi->size = 1;
4212 argvi->is_full_var = true;
4213 argvi->fullsize = vi->fullsize;
4214 insert_into_field_list_sorted (vi, argvi);
4215 stats.total_vars ++;
4216 if (arg)
4217 {
4218 insert_vi_for_tree (arg, argvi);
4219 arg = TREE_CHAIN (arg);
4220 }
4221 }
4222
4223 /* Create a variable for the return var. */
4224 if (DECL_RESULT (decl) != NULL
4225 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4226 {
4227 varinfo_t resultvi;
4228 const char *newname;
4229 char *tempname;
4230 unsigned int newindex;
4231 tree resultdecl = decl;
4232
4233 vi->fullsize ++;
4234
4235 if (DECL_RESULT (decl))
4236 resultdecl = DECL_RESULT (decl);
4237
4238 newindex = VEC_length (varinfo_t, varmap);
4239 asprintf (&tempname, "%s.result", name);
4240 newname = ggc_strdup (tempname);
4241 free (tempname);
4242
4243 resultvi = new_var_info (resultdecl, newindex, newname);
4244 resultvi->decl = resultdecl;
4245 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4246 resultvi->offset = i;
4247 resultvi->size = 1;
4248 resultvi->fullsize = vi->fullsize;
4249 resultvi->is_full_var = true;
4250 insert_into_field_list_sorted (vi, resultvi);
4251 stats.total_vars ++;
4252 if (DECL_RESULT (decl))
4253 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4254 }
4255 return index;
4256 }
4257
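/* Editor's note: an illustrative layout, not part of the original source.
   For "int foo (int *p, int *q)" the function above builds one varinfo
   chain

     foo         offset 0   (the function itself)
     foo.arg0    offset 1   (p)
     foo.arg1    offset 2   (q)
     foo.result  offset 3

   with fullsize 4, so constraints generated in IPA mode can refer to
   individual parameters and the return slot by offset from the function
   variable.  */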
4258
4259 /* Return true if FIELDSTACK contains fields that overlap.
4260 FIELDSTACK is assumed to be sorted by offset. */
4261
4262 static bool
4263 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4264 {
4265 fieldoff_s *fo = NULL;
4266 unsigned int i;
4267 HOST_WIDE_INT lastoffset = -1;
4268
4269 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4270 {
4271 if (fo->offset == lastoffset)
4272 return true;
4273 lastoffset = fo->offset;
4274 }
4275 return false;
4276 }
4277
4278 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4279 This will also create any varinfo structures necessary for fields
4280 of DECL. */
4281
4282 static unsigned int
4283 create_variable_info_for (tree decl, const char *name)
4284 {
4285 unsigned int index = VEC_length (varinfo_t, varmap);
4286 varinfo_t vi;
4287 tree decl_type = TREE_TYPE (decl);
4288 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
4289 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4290 VEC (fieldoff_s,heap) *fieldstack = NULL;
4291
4292 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4293 return create_function_info_for (decl, name);
4294
4295 if (var_can_have_subvars (decl) && use_field_sensitive
4296 && (!var_ann (decl)
4297 || var_ann (decl)->noalias_state == 0)
4298 && (!var_ann (decl)
4299 || !var_ann (decl)->is_heapvar))
4300 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
4301
4302 /* If the variable doesn't have subvars, we may end up needing to
4303 sort the field list and create fake variables for all the
4304 fields. */
4305 vi = new_var_info (decl, index, name);
4306 vi->decl = decl;
4307 vi->offset = 0;
4308 vi->may_have_pointers = could_have_pointers (decl);
4309 if (!declsize
4310 || !host_integerp (declsize, 1))
4311 {
4312 vi->is_unknown_size_var = true;
4313 vi->fullsize = ~0;
4314 vi->size = ~0;
4315 }
4316 else
4317 {
4318 vi->fullsize = TREE_INT_CST_LOW (declsize);
4319 vi->size = vi->fullsize;
4320 }
4321
4322 insert_vi_for_tree (vi->decl, vi);
4323 VEC_safe_push (varinfo_t, heap, varmap, vi);
4324 if (is_global && (!flag_whole_program || !in_ipa_mode)
4325 && vi->may_have_pointers)
4326 {
4327 if (var_ann (decl)
4328 && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
4329 make_constraint_from (vi, vi->id);
4330 else
4331 make_copy_constraint (vi, nonlocal_id);
4332 }
4333
4334 stats.total_vars++;
4335 if (use_field_sensitive
4336 && !vi->is_unknown_size_var
4337 && var_can_have_subvars (decl)
4338 && VEC_length (fieldoff_s, fieldstack) > 1
4339 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4340 {
4341 unsigned int newindex = VEC_length (varinfo_t, varmap);
4342 fieldoff_s *fo = NULL;
4343 bool notokay = false;
4344 unsigned int i;
4345
4346 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4347 {
4348 if (fo->has_unknown_size
4349 || fo->offset < 0)
4350 {
4351 notokay = true;
4352 break;
4353 }
4354 }
4355
4356 /* We can't sort them if we have a field with a variable-sized type,
4357 which will make notokay = true. In that case, we are going to return
4358 without creating varinfos for the fields anyway, so sorting them is a
4359 waste to boot. */
4360 if (!notokay)
4361 {
4362 sort_fieldstack (fieldstack);
4363 /* Due to some C++ FE issues, like PR 22488, we might end up
4364 with what appear to be overlapping fields even though they,
4365 in reality, do not overlap. Until the C++ FE is fixed,
4366 we will simply disable field-sensitivity for these cases. */
4367 notokay = check_for_overlaps (fieldstack);
4368 }
4369
4370
4371 if (VEC_length (fieldoff_s, fieldstack) != 0)
4372 fo = VEC_index (fieldoff_s, fieldstack, 0);
4373
4374 if (fo == NULL || notokay)
4375 {
4376 vi->is_unknown_size_var = 1;
4377 vi->fullsize = ~0;
4378 vi->size = ~0;
4379 vi->is_full_var = true;
4380 VEC_free (fieldoff_s, heap, fieldstack);
4381 return index;
4382 }
4383
4384 vi->size = fo->size;
4385 vi->offset = fo->offset;
4386 vi->may_have_pointers = fo->may_have_pointers;
4387 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4388 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4389 i--)
4390 {
4391 varinfo_t newvi;
4392 const char *newname = "NULL";
4393 char *tempname;
4394
4395 newindex = VEC_length (varinfo_t, varmap);
4396 if (dump_file)
4397 {
4398 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
4399 "+" HOST_WIDE_INT_PRINT_DEC,
4400 vi->name, fo->offset, fo->size);
4401 newname = ggc_strdup (tempname);
4402 free (tempname);
4403 }
4404 newvi = new_var_info (decl, newindex, newname);
4405 newvi->offset = fo->offset;
4406 newvi->size = fo->size;
4407 newvi->fullsize = vi->fullsize;
4408 newvi->may_have_pointers = fo->may_have_pointers;
4409 insert_into_field_list (vi, newvi);
4410 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4411 if (is_global && (!flag_whole_program || !in_ipa_mode)
4412 && newvi->may_have_pointers)
4413 make_copy_constraint (newvi, nonlocal_id);
4414
4415 stats.total_vars++;
4416 }
4417 }
4418 else
4419 vi->is_full_var = true;
4420
4421 VEC_free (fieldoff_s, heap, fieldstack);
4422
4423 return index;
4424 }
4425
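/* Editor's note: an illustrative example, not part of the original source.
   For a file-scope "struct { int *p; long n; } g;" on a 64-bit target the
   function above splits g into two varinfos, one for the pointer field at
   offset 0 and one for n at offset 64.  Because g is global and may
   contain pointers, the copy constraint g = NONLOCAL is also recorded so
   that g.p may point to any nonlocal memory.  */
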
4426 /* Print out the points-to solution for VAR to FILE. */
4427
4428 static void
4429 dump_solution_for_var (FILE *file, unsigned int var)
4430 {
4431 varinfo_t vi = get_varinfo (var);
4432 unsigned int i;
4433 bitmap_iterator bi;
4434
4435 if (find (var) != var)
4436 {
4437 varinfo_t vipt = get_varinfo (find (var));
4438 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4439 }
4440 else
4441 {
4442 fprintf (file, "%s = { ", vi->name);
4443 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4444 {
4445 fprintf (file, "%s ", get_varinfo (i)->name);
4446 }
4447 fprintf (file, "}");
4448 if (vi->no_tbaa_pruning)
4449 fprintf (file, " no-tbaa-pruning");
4450 fprintf (file, "\n");
4451 }
4452 }
4453
4454 /* Print the points-to solution for VAR to stdout. */
4455
4456 void
4457 debug_solution_for_var (unsigned int var)
4458 {
4459 dump_solution_for_var (stdout, var);
4460 }
4461
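/* Editor's note: a made-up sample of the dump format produced above; only
   the shape is meaningful, the names are placeholders:

     q_2 = same as q_1
     p_1 = { NONLOCAL x y } no-tbaa-pruning
*/
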
4462 /* Create varinfo structures for all of the variables in the
4463 function for intraprocedural mode. */
4464
4465 static void
4466 intra_create_variable_infos (void)
4467 {
4468 tree t;
4469 struct constraint_expr lhs, rhs;
4470
4471 /* For each incoming pointer argument arg, create the constraint ARG
4472 = &NONLOCAL, or point it to a dummy variable if flag_argument_noalias is set. */
4473 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4474 {
4475 varinfo_t p;
4476
4477 if (!could_have_pointers (t))
4478 continue;
4479
4480 /* If flag_argument_noalias is set, then the function's pointer
4481 arguments are guaranteed not to point to each other. In that
4482 case, create an artificial variable PARM_NOALIAS and the
4483 constraint ARG = &PARM_NOALIAS. */
4484 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4485 {
4486 varinfo_t vi;
4487 tree heapvar = heapvar_lookup (t);
4488
4489 lhs.offset = 0;
4490 lhs.type = SCALAR;
4491 lhs.var = get_vi_for_tree (t)->id;
4492
4493 if (heapvar == NULL_TREE)
4494 {
4495 var_ann_t ann;
4496 heapvar = create_tmp_var_raw (ptr_type_node,
4497 "PARM_NOALIAS");
4498 DECL_EXTERNAL (heapvar) = 1;
4499 if (gimple_referenced_vars (cfun))
4500 add_referenced_var (heapvar);
4501
4502 heapvar_insert (t, heapvar);
4503
4504 ann = get_var_ann (heapvar);
4505 ann->is_heapvar = 1;
4506 if (flag_argument_noalias == 1)
4507 ann->noalias_state = NO_ALIAS;
4508 else if (flag_argument_noalias == 2)
4509 ann->noalias_state = NO_ALIAS_GLOBAL;
4510 else if (flag_argument_noalias == 3)
4511 ann->noalias_state = NO_ALIAS_ANYTHING;
4512 else
4513 gcc_unreachable ();
4514 }
4515
4516 vi = get_vi_for_tree (heapvar);
4517 vi->is_artificial_var = 1;
4518 vi->is_heap_var = 1;
4519 vi->is_unknown_size_var = true;
4520 vi->fullsize = ~0;
4521 vi->size = ~0;
4522 rhs.var = vi->id;
4523 rhs.type = ADDRESSOF;
4524 rhs.offset = 0;
4525 for (p = get_varinfo (lhs.var); p; p = p->next)
4526 {
4527 struct constraint_expr temp = lhs;
4528 temp.var = p->id;
4529 process_constraint (new_constraint (temp, rhs));
4530 }
4531 }
4532 else
4533 {
4534 varinfo_t arg_vi = get_vi_for_tree (t);
4535
4536 for (p = arg_vi; p; p = p->next)
4537 make_constraint_from (p, nonlocal_id);
4538 }
4539 }
4540
4541 /* Add a constraint for a result decl that is passed by reference. */
4542 if (DECL_RESULT (cfun->decl)
4543 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
4544 {
4545 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
4546
4547 for (p = result_vi; p; p = p->next)
4548 make_constraint_from (p, nonlocal_id);
4549 }
4550
4551 /* Add a constraint for the incoming static chain parameter. */
4552 if (cfun->static_chain_decl != NULL_TREE)
4553 {
4554 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
4555
4556 for (p = chain_vi; p; p = p->next)
4557 make_constraint_from (p, nonlocal_id);
4558 }
4559 }
4560
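/* Editor's note: illustrative only.  For "int foo (int *p)" compiled
   without -fargument-noalias, the code above records p = &NONLOCAL, i.e.
   the incoming pointer may point to any memory visible outside the
   function.  With -fargument-noalias (and its stronger variants) an
   artificial PARM_NOALIAS heap variable is created instead and the
   constraint becomes p = &PARM_NOALIAS.  */
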
4561 /* Structure used to put solution bitmaps in a hashtable so they can
4562 be shared among variables with the same points-to set. */
4563
4564 typedef struct shared_bitmap_info
4565 {
4566 bitmap pt_vars;
4567 hashval_t hashcode;
4568 } *shared_bitmap_info_t;
4569 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4570
4571 static htab_t shared_bitmap_table;
4572
4573 /* Hash function for a shared_bitmap_info_t */
4574
4575 static hashval_t
4576 shared_bitmap_hash (const void *p)
4577 {
4578 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4579 return bi->hashcode;
4580 }
4581
4582 /* Equality function for two shared_bitmap_info_t's. */
4583
4584 static int
4585 shared_bitmap_eq (const void *p1, const void *p2)
4586 {
4587 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4588 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4589 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4590 }
4591
4592 /* Look up a bitmap in the shared bitmap hashtable, and return an already
4593 existing instance if there is one, NULL otherwise. */
4594
4595 static bitmap
4596 shared_bitmap_lookup (bitmap pt_vars)
4597 {
4598 void **slot;
4599 struct shared_bitmap_info sbi;
4600
4601 sbi.pt_vars = pt_vars;
4602 sbi.hashcode = bitmap_hash (pt_vars);
4603
4604 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4605 sbi.hashcode, NO_INSERT);
4606 if (!slot)
4607 return NULL;
4608 else
4609 return ((shared_bitmap_info_t) *slot)->pt_vars;
4610 }
4611
4612
4613 /* Add a bitmap to the shared bitmap hashtable. */
4614
4615 static void
4616 shared_bitmap_add (bitmap pt_vars)
4617 {
4618 void **slot;
4619 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4620
4621 sbi->pt_vars = pt_vars;
4622 sbi->hashcode = bitmap_hash (pt_vars);
4623
4624 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4625 sbi->hashcode, INSERT);
4626 gcc_assert (!*slot);
4627 *slot = (void *) sbi;
4628 }
4629
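/* Editor's note: a minimal usage sketch of the two helpers above;
   essentially the same pattern appears in find_what_var_points_to below.  */
#if 0
{
  bitmap solution = BITMAP_GGC_ALLOC ();
  bitmap shared;

  /* ... fill SOLUTION with variable ids ...  */

  shared = shared_bitmap_lookup (solution);
  if (!shared)
    shared_bitmap_add (solution);   /* First points-to set with this content.  */
  else
    bitmap_clear (solution);        /* Reuse SHARED and drop the duplicate.  */
}
#endif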
4630
4631 /* Set bits in INTO corresponding to the variable uids in solution set FROM.
4632 If MEM_ALIAS_SET is not zero, we also use type-based alias analysis to
4633 prune the points-to sets with this alias set.
4634 Returns the number of pruned variables and updates the vars_contains_global
4635 member of *PT. */
4636
4637 static unsigned
4638 set_uids_in_ptset (bitmap into, bitmap from,
4639 alias_set_type mem_alias_set, struct pt_solution *pt)
4640 {
4641 unsigned int i;
4642 bitmap_iterator bi;
4643 unsigned pruned = 0;
4644
4645 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4646 {
4647 varinfo_t vi = get_varinfo (i);
4648
4649 /* The only artificial variables that are allowed in a may-alias
4650 set are heap variables. */
4651 if (vi->is_artificial_var && !vi->is_heap_var)
4652 continue;
4653
4654 if (TREE_CODE (vi->decl) == VAR_DECL
4655 || TREE_CODE (vi->decl) == PARM_DECL
4656 || TREE_CODE (vi->decl) == RESULT_DECL)
4657 {
4658 /* Don't type prune artificial vars or points-to sets
4659 for pointers that have not been dereferenced or with
4660 type-based pruning disabled. */
4661 if (!vi->is_artificial_var
4662 && !vi->no_tbaa_pruning
4663 && mem_alias_set != 0)
4664 {
4665 alias_set_type var_alias_set = get_alias_set (vi->decl);
4666 if (mem_alias_set != var_alias_set
4667 && !alias_set_subset_of (mem_alias_set, var_alias_set))
4668 {
4669 ++pruned;
4670 continue;
4671 }
4672 }
4673
4674 /* Add the decl to the points-to set. Note that the points-to
4675 set contains global variables. */
4676 bitmap_set_bit (into, DECL_UID (vi->decl));
4677 if (is_global_var (vi->decl))
4678 pt->vars_contains_global = true;
4679 }
4680 }
4681
4682 return pruned;
4683 }
4684
4685
4686 static bool have_alias_info = false;
4687
4688 /* Emit notes for the initialization points of pointer PTR; VISITED tracks the SSA names already handled. */
4689
4690 static void
4691 emit_pointer_definition (tree ptr, bitmap visited)
4692 {
4693 gimple def = SSA_NAME_DEF_STMT (ptr);
4694 if (gimple_code (def) == GIMPLE_PHI)
4695 {
4696 use_operand_p argp;
4697 ssa_op_iter oi;
4698
4699 FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE)
4700 {
4701 tree arg = USE_FROM_PTR (argp);
4702 if (TREE_CODE (arg) == SSA_NAME)
4703 {
4704 if (bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
4705 emit_pointer_definition (arg, visited);
4706 }
4707 else
4708 inform (0, "initialized from %qE", arg);
4709 }
4710 }
4711 else if (!gimple_nop_p (def))
4712 inform (gimple_location (def), "initialized from here");
4713 }
4714
4715 /* Emit a strict aliasing warning for dereferencing the pointer PTR. */
4716
4717 static void
4718 emit_alias_warning (tree ptr)
4719 {
4720 gimple use;
4721 imm_use_iterator ui;
4722 bool warned = false;
4723
4724 FOR_EACH_IMM_USE_STMT (use, ui, ptr)
4725 {
4726 tree deref = NULL_TREE;
4727
4728 if (gimple_has_lhs (use))
4729 {
4730 tree lhs = get_base_address (gimple_get_lhs (use));
4731 if (lhs
4732 && INDIRECT_REF_P (lhs)
4733 && TREE_OPERAND (lhs, 0) == ptr)
4734 deref = lhs;
4735 }
4736 if (gimple_assign_single_p (use))
4737 {
4738 tree rhs = get_base_address (gimple_assign_rhs1 (use));
4739 if (rhs
4740 && INDIRECT_REF_P (rhs)
4741 && TREE_OPERAND (rhs, 0) == ptr)
4742 deref = rhs;
4743 }
4744 else if (is_gimple_call (use))
4745 {
4746 unsigned i;
4747 for (i = 0; i < gimple_call_num_args (use); ++i)
4748 {
4749 tree op = get_base_address (gimple_call_arg (use, i));
4750 if (op
4751 && INDIRECT_REF_P (op)
4752 && TREE_OPERAND (op, 0) == ptr)
4753 deref = op;
4754 }
4755 }
4756 if (deref
4757 && !TREE_NO_WARNING (deref))
4758 {
4759 TREE_NO_WARNING (deref) = 1;
4760 warned |= warning_at (gimple_location (use), OPT_Wstrict_aliasing,
4761 "dereferencing pointer %qD does break "
4762 "strict-aliasing rules", SSA_NAME_VAR (ptr));
4763 }
4764 }
4765 if (warned)
4766 {
4767 bitmap visited = BITMAP_ALLOC (NULL);
4768 emit_pointer_definition (ptr, visited);
4769 BITMAP_FREE (visited);
4770 }
4771 }
4772
4773 /* Compute the points-to solution *PT for the variable VI.
4774 Prunes the points-to set based on TBAA rules if DO_TBAA_PRUNING
4775 is true. Returns the number of TBAA pruned variables from the
4776 points-to set. */
4777
4778 static unsigned int
4779 find_what_var_points_to (varinfo_t vi, struct pt_solution *pt,
4780 bool do_tbaa_pruning)
4781 {
4782 unsigned int i, pruned;
4783 bitmap_iterator bi;
4784 bitmap finished_solution;
4785 bitmap result;
4786 tree ptr = vi->decl;
4787 alias_set_type mem_alias_set;
4788
4789 memset (pt, 0, sizeof (struct pt_solution));
4790
4791 /* This variable may have been collapsed, let's get the real
4792 variable. */
4793 vi = get_varinfo (find (vi->id));
4794
4795 /* Translate artificial variables into SSA_NAME_PTR_INFO
4796 attributes. */
4797 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4798 {
4799 varinfo_t vi = get_varinfo (i);
4800
4801 if (vi->is_artificial_var)
4802 {
4803 if (vi->id == nothing_id)
4804 pt->null = 1;
4805 else if (vi->id == escaped_id)
4806 pt->escaped = 1;
4807 else if (vi->id == callused_id)
4808 gcc_unreachable ();
4809 else if (vi->id == nonlocal_id)
4810 pt->nonlocal = 1;
4811 else if (vi->is_heap_var)
4812 /* We represent heapvars in the points-to set properly. */
4813 ;
4814 else if (vi->id == anything_id
4815 || vi->id == readonly_id
4816 || vi->id == integer_id)
4817 pt->anything = 1;
4818 }
4819 }
4820
4821 /* Instead of doing extra work, simply do not create
4822 elaborate points-to information for pt_anything pointers. */
4823 if (pt->anything)
4824 return 0;
4825
4826 /* Share the final set of variables when possible. */
4827 finished_solution = BITMAP_GGC_ALLOC ();
4828 stats.points_to_sets_created++;
4829
4830 if (TREE_CODE (ptr) == SSA_NAME)
4831 ptr = SSA_NAME_VAR (ptr);
4832
4833 /* If the pointer decl is marked as exempt from TBAA, do not do
4834 TBAA pruning. */
4835 if (!do_tbaa_pruning
4836 || DECL_NO_TBAA_P (ptr))
4837 mem_alias_set = 0;
4838 else
4839 mem_alias_set = get_deref_alias_set (ptr);
4840 pruned = set_uids_in_ptset (finished_solution, vi->solution,
4841 mem_alias_set, pt);
4842 result = shared_bitmap_lookup (finished_solution);
4843 if (!result)
4844 {
4845 shared_bitmap_add (finished_solution);
4846 pt->vars = finished_solution;
4847 }
4848 else
4849 {
4850 pt->vars = result;
4851 bitmap_clear (finished_solution);
4852 }
4853
4854 return pruned;
4855 }
4856
4857 /* Given a pointer variable P, fill in its points-to set. Apply
4858 type-based pruning if IS_DEREFERENCED is true. */
4859
4860 static void
4861 find_what_p_points_to (tree p, bool is_dereferenced)
4862 {
4863 struct ptr_info_def *pi;
4864 unsigned int pruned;
4865 tree lookup_p = p;
4866 varinfo_t vi;
4867
4868 /* For parameters, get at the points-to set for the actual parm
4869 decl. */
4870 if (TREE_CODE (p) == SSA_NAME
4871 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4872 && SSA_NAME_IS_DEFAULT_DEF (p))
4873 lookup_p = SSA_NAME_VAR (p);
4874
4875 vi = lookup_vi_for_tree (lookup_p);
4876 if (!vi)
4877 return;
4878
4879 pi = get_ptr_info (p);
4880 pruned = find_what_var_points_to (vi, &pi->pt, is_dereferenced);
4881
4882 if (!(pi->pt.anything || pi->pt.nonlocal || pi->pt.escaped)
4883 && bitmap_empty_p (pi->pt.vars)
4884 && pruned > 0
4885 && is_dereferenced
4886 && warn_strict_aliasing > 0
4887 && !SSA_NAME_IS_DEFAULT_DEF (p))
4888 {
4889 if (dump_file && dump_flags & TDF_DETAILS)
4890 {
4891 fprintf (dump_file, "alias warning for ");
4892 print_generic_expr (dump_file, p, 0);
4893 fprintf (dump_file, "\n");
4894 }
4895 emit_alias_warning (p);
4896 }
4897 }
4898
4899
4900 /* Query statistics for points-to solutions. */
4901
4902 static struct {
4903 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
4904 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
4905 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
4906 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
4907 } pta_stats;
4908
4909 void
4910 dump_pta_stats (FILE *s)
4911 {
4912 fprintf (s, "\nPTA query stats:\n");
4913 fprintf (s, " pt_solution_includes: "
4914 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
4915 HOST_WIDE_INT_PRINT_DEC" queries\n",
4916 pta_stats.pt_solution_includes_no_alias,
4917 pta_stats.pt_solution_includes_no_alias
4918 + pta_stats.pt_solution_includes_may_alias);
4919 fprintf (s, " pt_solutions_intersect: "
4920 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
4921 HOST_WIDE_INT_PRINT_DEC" queries\n",
4922 pta_stats.pt_solutions_intersect_no_alias,
4923 pta_stats.pt_solutions_intersect_no_alias
4924 + pta_stats.pt_solutions_intersect_may_alias);
4925 }
4926
4927
4928 /* Reset the points-to solution *PT to a conservative default
4929 (point to anything). */
4930
4931 void
4932 pt_solution_reset (struct pt_solution *pt)
4933 {
4934 memset (pt, 0, sizeof (struct pt_solution));
4935 pt->anything = true;
4936 }
4937
4938 /* Return true if the points-to solution *PT is empty. */
4939
4940 static bool
4941 pt_solution_empty_p (struct pt_solution *pt)
4942 {
4943 if (pt->anything
4944 || pt->nonlocal)
4945 return false;
4946
4947 if (pt->vars
4948 && !bitmap_empty_p (pt->vars))
4949 return false;
4950
4951 /* If the solution includes ESCAPED, check if that is empty. */
4952 if (pt->escaped
4953 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
4954 return false;
4955
4956 return true;
4957 }
4958
4959 /* Return true if the points-to solution *PT includes global memory. */
4960
4961 bool
4962 pt_solution_includes_global (struct pt_solution *pt)
4963 {
4964 if (pt->anything
4965 || pt->nonlocal
4966 || pt->vars_contains_global)
4967 return true;
4968
4969 if (pt->escaped)
4970 return pt_solution_includes_global (&cfun->gimple_df->escaped);
4971
4972 return false;
4973 }
4974
4975 /* Return true if the points-to solution *PT includes the variable
4976 declaration DECL. */
4977
4978 static bool
4979 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
4980 {
4981 if (pt->anything)
4982 return true;
4983
4984 if (pt->nonlocal
4985 && is_global_var (decl))
4986 return true;
4987
4988 if (pt->vars
4989 && bitmap_bit_p (pt->vars, DECL_UID (decl)))
4990 return true;
4991
4992 /* If the solution includes ESCAPED, check it. */
4993 if (pt->escaped
4994 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
4995 return true;
4996
4997 return false;
4998 }
4999
5000 bool
5001 pt_solution_includes (struct pt_solution *pt, const_tree decl)
5002 {
5003 bool res = pt_solution_includes_1 (pt, decl);
5004 if (res)
5005 ++pta_stats.pt_solution_includes_may_alias;
5006 else
5007 ++pta_stats.pt_solution_includes_no_alias;
5008 return res;
5009 }
5010
5011 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
5012 intersection. */
5013
5014 static bool
5015 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
5016 {
5017 if (pt1->anything || pt2->anything)
5018 return true;
5019
5020 /* If either points to unknown global memory and the other points to
5021 any global memory they alias. */
5022 if ((pt1->nonlocal
5023 && (pt2->nonlocal
5024 || pt2->vars_contains_global))
5025 || (pt2->nonlocal
5026 && pt1->vars_contains_global))
5027 return true;
5028
5029 /* Check the escaped solution if required. */
5030 if ((pt1->escaped || pt2->escaped)
5031 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5032 {
5033 /* If both point to escaped memory and that solution
5034 is not empty they alias. */
5035 if (pt1->escaped && pt2->escaped)
5036 return true;
5037
5038 /* If either points to escaped memory see if the escaped solution
5039 intersects with the other. */
5040 if ((pt1->escaped
5041 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
5042 || (pt2->escaped
5043 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
5044 return true;
5045 }
5046
5047 /* Now both pointers alias if their points-to solution intersects. */
5048 return (pt1->vars
5049 && pt2->vars
5050 && bitmap_intersect_p (pt1->vars, pt2->vars));
5051 }
5052
5053 bool
5054 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
5055 {
5056 bool res = pt_solutions_intersect_1 (pt1, pt2);
5057 if (res)
5058 ++pta_stats.pt_solutions_intersect_may_alias;
5059 else
5060 ++pta_stats.pt_solutions_intersect_no_alias;
5061 return res;
5062 }
5063
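/* Editor's note: an illustrative sketch of how an alias oracle might use
   the query API above; PTR1, PTR2 and VAR_DECL are hypothetical.  */
#if 0
{
  struct ptr_info_def *pi1 = get_ptr_info (ptr1);
  struct ptr_info_def *pi2 = get_ptr_info (ptr2);

  if (pt_solutions_intersect (&pi1->pt, &pi2->pt))
    {
      /* *ptr1 and *ptr2 may reference the same memory.  */
    }

  if (!pt_solution_includes (&pi1->pt, var_decl))
    {
      /* A load or store through ptr1 cannot touch var_decl.  */
    }
}
#endif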
5064
5065 /* Dump points-to information to OUTFILE. */
5066
5067 static void
5068 dump_sa_points_to_info (FILE *outfile)
5069 {
5070 unsigned int i;
5071
5072 fprintf (outfile, "\nPoints-to sets\n\n");
5073
5074 if (dump_flags & TDF_STATS)
5075 {
5076 fprintf (outfile, "Stats:\n");
5077 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5078 fprintf (outfile, "Non-pointer vars: %d\n",
5079 stats.nonpointer_vars);
5080 fprintf (outfile, "Statically unified vars: %d\n",
5081 stats.unified_vars_static);
5082 fprintf (outfile, "Dynamically unified vars: %d\n",
5083 stats.unified_vars_dynamic);
5084 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5085 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5086 fprintf (outfile, "Number of implicit edges: %d\n",
5087 stats.num_implicit_edges);
5088 }
5089
5090 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5091 dump_solution_for_var (outfile, i);
5092 }
5093
5094
5095 /* Debug points-to information to stderr. */
5096
5097 void
5098 debug_sa_points_to_info (void)
5099 {
5100 dump_sa_points_to_info (stderr);
5101 }
5102
5103
5104 /* Initialize the always-existing constraint variables: NULL, ANYTHING,
5105 READONLY, ESCAPED, NONLOCAL, CALLUSED, STOREDANYTHING and INTEGER. */
5106
5107 static void
5108 init_base_vars (void)
5109 {
5110 struct constraint_expr lhs, rhs;
5111
5112 /* Create the NULL variable, used to represent that a variable points
5113 to NULL. */
5114 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5115 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5116 insert_vi_for_tree (nothing_tree, var_nothing);
5117 var_nothing->is_artificial_var = 1;
5118 var_nothing->offset = 0;
5119 var_nothing->size = ~0;
5120 var_nothing->fullsize = ~0;
5121 var_nothing->is_special_var = 1;
5122 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5123
5124 /* Create the ANYTHING variable, used to represent that a variable
5125 points to some unknown piece of memory. */
5126 anything_tree = create_tmp_var_raw (ptr_type_node, "ANYTHING");
5127 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5128 insert_vi_for_tree (anything_tree, var_anything);
5129 var_anything->is_artificial_var = 1;
5130 var_anything->size = ~0;
5131 var_anything->offset = 0;
5132 var_anything->next = NULL;
5133 var_anything->fullsize = ~0;
5134 var_anything->is_special_var = 1;
5135
5136 /* Anything points to anything. This makes deref constraints just
5137 work in the presence of linked list and other p = *p type loops,
5138 by saying that *ANYTHING = ANYTHING. */
5139 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5140 lhs.type = SCALAR;
5141 lhs.var = anything_id;
5142 lhs.offset = 0;
5143 rhs.type = ADDRESSOF;
5144 rhs.var = anything_id;
5145 rhs.offset = 0;
5146
5147 /* This specifically does not use process_constraint because
5148 process_constraint ignores all anything = anything constraints, since all
5149 but this one are redundant. */
5150 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5151
5152 /* Create the READONLY variable, used to represent that a variable
5153 points to readonly memory. */
5154 readonly_tree = create_tmp_var_raw (ptr_type_node, "READONLY");
5155 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5156 var_readonly->is_artificial_var = 1;
5157 var_readonly->offset = 0;
5158 var_readonly->size = ~0;
5159 var_readonly->fullsize = ~0;
5160 var_readonly->next = NULL;
5161 var_readonly->is_special_var = 1;
5162 insert_vi_for_tree (readonly_tree, var_readonly);
5163 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5164
5165 /* readonly memory points to anything, in order to make deref
5166 easier. In reality, it points to anything the particular
5167 readonly variable can point to, but we don't track this
5168 separately. */
5169 lhs.type = SCALAR;
5170 lhs.var = readonly_id;
5171 lhs.offset = 0;
5172 rhs.type = ADDRESSOF;
5173 rhs.var = readonly_id; /* FIXME */
5174 rhs.offset = 0;
5175 process_constraint (new_constraint (lhs, rhs));
5176
5177 /* Create the ESCAPED variable, used to represent the set of escaped
5178 memory. */
5179 escaped_tree = create_tmp_var_raw (ptr_type_node, "ESCAPED");
5180 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5181 insert_vi_for_tree (escaped_tree, var_escaped);
5182 var_escaped->is_artificial_var = 1;
5183 var_escaped->offset = 0;
5184 var_escaped->size = ~0;
5185 var_escaped->fullsize = ~0;
5186 var_escaped->is_special_var = 0;
5187 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5188 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5189
5190 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5191 memory. */
5192 nonlocal_tree = create_tmp_var_raw (ptr_type_node, "NONLOCAL");
5193 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5194 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5195 var_nonlocal->is_artificial_var = 1;
5196 var_nonlocal->offset = 0;
5197 var_nonlocal->size = ~0;
5198 var_nonlocal->fullsize = ~0;
5199 var_nonlocal->is_special_var = 1;
5200 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5201
5202 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5203 lhs.type = SCALAR;
5204 lhs.var = escaped_id;
5205 lhs.offset = 0;
5206 rhs.type = DEREF;
5207 rhs.var = escaped_id;
5208 rhs.offset = 0;
5209 process_constraint (new_constraint (lhs, rhs));
5210
5211 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
5212 whole variable escapes. */
5213 lhs.type = SCALAR;
5214 lhs.var = escaped_id;
5215 lhs.offset = 0;
5216 rhs.type = SCALAR;
5217 rhs.var = escaped_id;
5218 rhs.offset = UNKNOWN_OFFSET;
5219 process_constraint (new_constraint (lhs, rhs));
5220
5221 /* *ESCAPED = NONLOCAL. This is true because we have to assume
5222 everything pointed to by escaped points to what global memory can
5223 point to. */
5224 lhs.type = DEREF;
5225 lhs.var = escaped_id;
5226 lhs.offset = 0;
5227 rhs.type = SCALAR;
5228 rhs.var = nonlocal_id;
5229 rhs.offset = 0;
5230 process_constraint (new_constraint (lhs, rhs));
5231
5232 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
5233 global memory may point to global memory and escaped memory. */
5234 lhs.type = SCALAR;
5235 lhs.var = nonlocal_id;
5236 lhs.offset = 0;
5237 rhs.type = ADDRESSOF;
5238 rhs.var = nonlocal_id;
5239 rhs.offset = 0;
5240 process_constraint (new_constraint (lhs, rhs));
5241 rhs.type = ADDRESSOF;
5242 rhs.var = escaped_id;
5243 rhs.offset = 0;
5244 process_constraint (new_constraint (lhs, rhs));
5245
5246 /* Create the CALLUSED variable, used to represent the set of call-used
5247 memory. */
5248 callused_tree = create_tmp_var_raw (ptr_type_node, "CALLUSED");
5249 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5250 insert_vi_for_tree (callused_tree, var_callused);
5251 var_callused->is_artificial_var = 1;
5252 var_callused->offset = 0;
5253 var_callused->size = ~0;
5254 var_callused->fullsize = ~0;
5255 var_callused->is_special_var = 0;
5256 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5257
5258 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5259 lhs.type = SCALAR;
5260 lhs.var = callused_id;
5261 lhs.offset = 0;
5262 rhs.type = DEREF;
5263 rhs.var = callused_id;
5264 rhs.offset = 0;
5265 process_constraint (new_constraint (lhs, rhs));
5266
5267 /* CALLUSED = CALLUSED + UNKNOWN_OFFSET, because if a sub-field is call-used the
5268 whole variable is call-used. */
5269 lhs.type = SCALAR;
5270 lhs.var = callused_id;
5271 lhs.offset = 0;
5272 rhs.type = SCALAR;
5273 rhs.var = callused_id;
5274 rhs.offset = UNKNOWN_OFFSET;
5275 process_constraint (new_constraint (lhs, rhs));
5276
5277 /* Create the STOREDANYTHING variable, used to represent the set of
5278 variables stored to *ANYTHING. */
5279 storedanything_tree = create_tmp_var_raw (ptr_type_node, "STOREDANYTHING");
5280 var_storedanything = new_var_info (storedanything_tree, storedanything_id,
5281 "STOREDANYTHING");
5282 insert_vi_for_tree (storedanything_tree, var_storedanything);
5283 var_storedanything->is_artificial_var = 1;
5284 var_storedanything->offset = 0;
5285 var_storedanything->size = ~0;
5286 var_storedanything->fullsize = ~0;
5287 var_storedanything->is_special_var = 0;
5288 VEC_safe_push (varinfo_t, heap, varmap, var_storedanything);
5289
5290 /* Create the INTEGER variable, used to represent that a variable points
5291 to what an INTEGER "points to". */
5292 integer_tree = create_tmp_var_raw (ptr_type_node, "INTEGER");
5293 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5294 insert_vi_for_tree (integer_tree, var_integer);
5295 var_integer->is_artificial_var = 1;
5296 var_integer->size = ~0;
5297 var_integer->fullsize = ~0;
5298 var_integer->offset = 0;
5299 var_integer->next = NULL;
5300 var_integer->is_special_var = 1;
5301 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5302
5303 /* INTEGER = &ANYTHING, because we don't know where a dereference of
5304 a random integer will point to. */
5305 lhs.type = SCALAR;
5306 lhs.var = integer_id;
5307 lhs.offset = 0;
5308 rhs.type = ADDRESSOF;
5309 rhs.var = anything_id;
5310 rhs.offset = 0;
5311 process_constraint (new_constraint (lhs, rhs));
5312 }
5313
5314 /* Initialize things necessary to perform PTA */
5315
5316 static void
5317 init_alias_vars (void)
5318 {
5319 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
5320
5321 bitmap_obstack_initialize (&pta_obstack);
5322 bitmap_obstack_initialize (&oldpta_obstack);
5323 bitmap_obstack_initialize (&predbitmap_obstack);
5324
5325 constraint_pool = create_alloc_pool ("Constraint pool",
5326 sizeof (struct constraint), 30);
5327 variable_info_pool = create_alloc_pool ("Variable info pool",
5328 sizeof (struct variable_info), 30);
5329 constraints = VEC_alloc (constraint_t, heap, 8);
5330 varmap = VEC_alloc (varinfo_t, heap, 8);
5331 vi_for_tree = pointer_map_create ();
5332
5333 memset (&stats, 0, sizeof (stats));
5334 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5335 shared_bitmap_eq, free);
5336 init_base_vars ();
5337 }
5338
5339 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5340 predecessor edges. */
5341
5342 static void
5343 remove_preds_and_fake_succs (constraint_graph_t graph)
5344 {
5345 unsigned int i;
5346
5347 /* Clear the implicit ref and address nodes from the successor
5348 lists. */
5349 for (i = 0; i < FIRST_REF_NODE; i++)
5350 {
5351 if (graph->succs[i])
5352 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5353 FIRST_REF_NODE * 2);
5354 }
5355
5356 /* Free the successor list for the non-ref nodes. */
5357 for (i = FIRST_REF_NODE; i < graph->size; i++)
5358 {
5359 if (graph->succs[i])
5360 BITMAP_FREE (graph->succs[i]);
5361 }
5362
5363 /* Now reallocate the successor list to its new size, and blow away
5364 the predecessor bitmaps. */
5365 graph->size = VEC_length (varinfo_t, varmap);
5366 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5367
5368 free (graph->implicit_preds);
5369 graph->implicit_preds = NULL;
5370 free (graph->preds);
5371 graph->preds = NULL;
5372 bitmap_obstack_release (&predbitmap_obstack);
5373 }
5374
5375 /* Compute the set of variables we can't TBAA prune. */
5376
5377 static void
5378 compute_tbaa_pruning (void)
5379 {
5380 unsigned int size = VEC_length (varinfo_t, varmap);
5381 unsigned int i;
5382 bool any;
5383
5384 changed_count = 0;
5385 changed = sbitmap_alloc (size);
5386 sbitmap_zero (changed);
5387
5388 /* Mark all initial no_tbaa_pruning nodes as changed. */
5389 any = false;
5390 for (i = 0; i < size; ++i)
5391 {
5392 varinfo_t ivi = get_varinfo (i);
5393
5394 if (find (i) == i && ivi->no_tbaa_pruning)
5395 {
5396 any = true;
5397 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5398 || VEC_length (constraint_t, graph->complex[i]) > 0)
5399 {
5400 SET_BIT (changed, i);
5401 ++changed_count;
5402 }
5403 }
5404 }
5405
5406 while (changed_count > 0)
5407 {
5408 struct topo_info *ti = init_topo_info ();
5409 ++stats.iterations;
5410
5411 compute_topo_order (graph, ti);
5412
5413 while (VEC_length (unsigned, ti->topo_order) != 0)
5414 {
5415 bitmap_iterator bi;
5416
5417 i = VEC_pop (unsigned, ti->topo_order);
5418
5419 /* If this variable is not a representative, skip it. */
5420 if (find (i) != i)
5421 continue;
5422
5423 /* If the node has changed, we need to process the complex
5424 constraints and outgoing edges again. */
5425 if (TEST_BIT (changed, i))
5426 {
5427 unsigned int j;
5428 constraint_t c;
5429 VEC(constraint_t,heap) *complex = graph->complex[i];
5430
5431 RESET_BIT (changed, i);
5432 --changed_count;
5433
5434 /* Process the complex copy constraints. */
5435 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5436 {
5437 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5438 {
5439 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5440
5441 if (!lhsvi->no_tbaa_pruning)
5442 {
5443 lhsvi->no_tbaa_pruning = true;
5444 if (!TEST_BIT (changed, lhsvi->id))
5445 {
5446 SET_BIT (changed, lhsvi->id);
5447 ++changed_count;
5448 }
5449 }
5450 }
5451 }
5452
5453 /* Propagate to all successors. */
5454 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5455 {
5456 unsigned int to = find (j);
5457 varinfo_t tovi = get_varinfo (to);
5458
5459 /* Don't propagate to ourselves. */
5460 if (to == i)
5461 continue;
5462
5463 if (!tovi->no_tbaa_pruning)
5464 {
5465 tovi->no_tbaa_pruning = true;
5466 if (!TEST_BIT (changed, to))
5467 {
5468 SET_BIT (changed, to);
5469 ++changed_count;
5470 }
5471 }
5472 }
5473 }
5474 }
5475
5476 free_topo_info (ti);
5477 }
5478
5479 sbitmap_free (changed);
5480
5481 if (any)
5482 {
5483 for (i = 0; i < size; ++i)
5484 {
5485 varinfo_t ivi = get_varinfo (i);
5486 varinfo_t ivip = get_varinfo (find (i));
5487
5488 if (ivip->no_tbaa_pruning)
5489 {
5490 tree var = ivi->decl;
5491
5492 if (TREE_CODE (var) == SSA_NAME)
5493 var = SSA_NAME_VAR (var);
5494
5495 if (POINTER_TYPE_P (TREE_TYPE (var)))
5496 {
5497 DECL_NO_TBAA_P (var) = 1;
5498
5499 /* Tell the RTL layer that this pointer can alias
5500 anything. */
5501 DECL_POINTER_ALIAS_SET (var) = 0;
5502 }
5503 }
5504 }
5505 }
5506 }
5507
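/* Editor's note: illustrative only.  If some pointer q was marked
   no_tbaa_pruning (as done for instance for GIMPLE_CHANGE_DYNAMIC_TYPE
   locations in find_func_aliases above), the worklist above propagates the
   flag through copy constraints and graph successors, so for
   "r = q; s = r;" both r and s also lose TBAA pruning; their pointer decls
   then get DECL_NO_TBAA_P set and DECL_POINTER_ALIAS_SET cleared to 0 for
   the RTL alias oracle.  */
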
5508 /* Initialize the heapvar for statement mapping. */
5509
5510 static void
5511 init_alias_heapvars (void)
5512 {
5513 if (!heapvar_for_stmt)
5514 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5515 NULL);
5516 }
5517
5518 /* Delete the heapvar for statement mapping. */
5519
5520 void
5521 delete_alias_heapvars (void)
5522 {
5523 if (heapvar_for_stmt)
5524 htab_delete (heapvar_for_stmt);
5525 heapvar_for_stmt = NULL;
5526 }
5527
5528 /* Create points-to sets for the current function. See the comments
5529 at the start of the file for an algorithmic overview. */
5530
5531 static void
5532 compute_points_to_sets (void)
5533 {
5534 struct scc_info *si;
5535 basic_block bb;
5536 unsigned i;
5537 sbitmap dereferenced_ptrs;
5538
5539 timevar_push (TV_TREE_PTA);
5540
5541 init_alias_vars ();
5542 init_alias_heapvars ();
5543
5544 intra_create_variable_infos ();
5545
5546 /* A bitmap of SSA_NAME pointers that are dereferenced. This is
5547 used to track which points-to sets may be TBAA pruned. */
5548 dereferenced_ptrs = sbitmap_alloc (num_ssa_names);
5549 sbitmap_zero (dereferenced_ptrs);
5550
5551 /* Now walk all statements and derive aliases. */
5552 FOR_EACH_BB (bb)
5553 {
5554 gimple_stmt_iterator gsi;
5555
5556 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5557 {
5558 gimple phi = gsi_stmt (gsi);
5559
5560 if (is_gimple_reg (gimple_phi_result (phi)))
5561 find_func_aliases (phi);
5562 }
5563
5564 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5565 {
5566 gimple stmt = gsi_stmt (gsi);
5567 use_operand_p use_p;
5568 ssa_op_iter iter;
5569
5570 /* Mark dereferenced pointers. This is used by TBAA pruning
5571 of the points-to sets and the alias warning machinery. */
5572 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5573 {
5574 unsigned num_uses, num_loads, num_stores;
5575 tree op = USE_FROM_PTR (use_p);
5576
5577 if (!POINTER_TYPE_P (TREE_TYPE (op)))
5578 continue;
5579
5580 /* Determine whether OP is a dereferenced pointer. */
5581 count_uses_and_derefs (op, stmt,
5582 &num_uses, &num_loads, &num_stores);
5583 if (num_loads + num_stores > 0)
5584 SET_BIT (dereferenced_ptrs, SSA_NAME_VERSION (op));
5585 }
5586
5587 find_func_aliases (stmt);
5588 }
5589 }
5590
5591
5592 if (dump_file)
5593 {
5594 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5595 dump_constraints (dump_file);
5596 }
5597
5598 if (dump_file)
5599 fprintf (dump_file,
5600 "\nCollapsing static cycles and doing variable "
5601 "substitution\n");
5602
5603 init_graph (VEC_length (varinfo_t, varmap) * 2);
5604
5605 if (dump_file)
5606 fprintf (dump_file, "Building predecessor graph\n");
5607 build_pred_graph ();
5608
5609 if (dump_file)
5610 fprintf (dump_file, "Detecting pointer and location "
5611 "equivalences\n");
5612 si = perform_var_substitution (graph);
5613
5614 if (dump_file)
5615 fprintf (dump_file, "Rewriting constraints and unifying "
5616 "variables\n");
5617 rewrite_constraints (graph, si);
5618
5619 build_succ_graph ();
5620 free_var_substitution_info (si);
5621
5622 if (dump_file && (dump_flags & TDF_GRAPH))
5623 dump_constraint_graph (dump_file);
5624
5625 move_complex_constraints (graph);
5626
5627 if (dump_file)
5628 fprintf (dump_file, "Uniting pointer but not location equivalent "
5629 "variables\n");
5630 unite_pointer_equivalences (graph);
5631
5632 if (dump_file)
5633 fprintf (dump_file, "Finding indirect cycles\n");
5634 find_indirect_cycles (graph);
5635
5636 /* Implicit nodes and predecessors are no longer necessary at this
5637 point. */
5638 remove_preds_and_fake_succs (graph);
5639
5640 if (dump_file)
5641 fprintf (dump_file, "Solving graph\n");
5642
5643 solve_graph (graph);
5644
5645 compute_tbaa_pruning ();
5646
5647 if (dump_file)
5648 dump_sa_points_to_info (dump_file);
5649
5650 /* Compute the points-to sets for ESCAPED and CALLUSED used for
5651 call-clobber analysis. */
5652 find_what_var_points_to (var_escaped, &cfun->gimple_df->escaped, false);
5653 find_what_var_points_to (var_callused, &cfun->gimple_df->callused, false);
5654
5655 /* Make sure the ESCAPED solution (which is used as placeholder in
5656 other solutions) does not reference itself. This simplifies
5657 points-to solution queries. */
5658 cfun->gimple_df->escaped.escaped = 0;
5659
5660 /* Compute the points-to sets for pointer SSA_NAMEs. */
5661 for (i = 0; i < num_ssa_names; ++i)
5662 {
5663 tree ptr = ssa_name (i);
5664 if (ptr
5665 && POINTER_TYPE_P (TREE_TYPE (ptr)))
5666 find_what_p_points_to (ptr, TEST_BIT (dereferenced_ptrs, i));
5667 }
5668 sbitmap_free (dereferenced_ptrs);
5669
5670 timevar_pop (TV_TREE_PTA);
5671
5672 have_alias_info = true;
5673 }
5674
5675
5676 /* Delete created points-to sets. */
5677
5678 static void
5679 delete_points_to_sets (void)
5680 {
5681 unsigned int i;
5682
5683 htab_delete (shared_bitmap_table);
5684 if (dump_file && (dump_flags & TDF_STATS))
5685 fprintf (dump_file, "Points to sets created:%d\n",
5686 stats.points_to_sets_created);
5687
5688 pointer_map_destroy (vi_for_tree);
5689 bitmap_obstack_release (&pta_obstack);
5690 VEC_free (constraint_t, heap, constraints);
5691
5692 for (i = 0; i < graph->size; i++)
5693 VEC_free (constraint_t, heap, graph->complex[i]);
5694 free (graph->complex);
5695
5696 free (graph->rep);
5697 free (graph->succs);
5698 free (graph->pe);
5699 free (graph->pe_rep);
5700 free (graph->indirect_cycles);
5701 free (graph);
5702
5703 VEC_free (varinfo_t, heap, varmap);
5704 free_alloc_pool (variable_info_pool);
5705 free_alloc_pool (constraint_pool);
5706 have_alias_info = false;
5707 }
5708
5709
5710 /* Compute points-to information for every SSA_NAME pointer in the
5711 current function and compute the transitive closure of escaped
5712 variables to re-initialize the call-clobber states of local variables. */
5713
5714 unsigned int
5715 compute_may_aliases (void)
5716 {
5717 /* For each pointer P_i, determine the sets of variables that P_i may
5718 point-to. Compute the reachability set of escaped and call-used
5719 variables. */
5720 compute_points_to_sets ();
5721
5722 /* Debugging dumps. */
5723 if (dump_file)
5724 {
5725 dump_alias_info (dump_file);
5726
5727 if (dump_flags & TDF_DETAILS)
5728 dump_referenced_vars (dump_file);
5729 }
5730
5731 /* Deallocate memory used by aliasing data structures and the internal
5732 points-to solution. */
5733 delete_points_to_sets ();
5734
5735 gcc_assert (!need_ssa_update_p (cfun));
5736
5737 return 0;
5738 }
5739
5740 static bool
5741 gate_tree_pta (void)
5742 {
5743 return flag_tree_pta;
5744 }
5745
5746 /* A dummy pass to cause points-to information to be computed via
5747 TODO_rebuild_alias. */
5748
5749 struct gimple_opt_pass pass_build_alias =
5750 {
5751 {
5752 GIMPLE_PASS,
5753 "alias", /* name */
5754 gate_tree_pta, /* gate */
5755 NULL, /* execute */
5756 NULL, /* sub */
5757 NULL, /* next */
5758 0, /* static_pass_number */
5759 TV_NONE, /* tv_id */
5760 PROP_cfg | PROP_ssa, /* properties_required */
5761 0, /* properties_provided */
5762 0, /* properties_destroyed */
5763 0, /* todo_flags_start */
5764 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
5765 }
5766 };
5767
5768
5769 /* Return true if we should execute IPA PTA. */
5770 static bool
5771 gate_ipa_pta (void)
5772 {
5773 return (flag_ipa_pta
5774 /* Don't bother doing anything if the program has errors. */
5775 && !(errorcount || sorrycount));
5776 }
5777
5778 /* Execute the driver for IPA PTA. */
5779 static unsigned int
5780 ipa_pta_execute (void)
5781 {
5782 struct cgraph_node *node;
5783 struct scc_info *si;
5784
5785 in_ipa_mode = 1;
5786 init_alias_heapvars ();
5787 init_alias_vars ();
5788
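  /* Create a variable info (with sub-variables for the result and the
     parameters) for every function in the call graph.  Functions visible
     outside this translation unit can be called with arbitrary arguments,
     so conservatively make all of their parts point to anything.  */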
5789 for (node = cgraph_nodes; node; node = node->next)
5790 {
5791 unsigned int varid;
5792
5793 varid = create_function_info_for (node->decl,
5794 cgraph_node_name (node));
5795 if (node->local.externally_visible)
5796 {
5797 varinfo_t fi = get_varinfo (varid);
5798 for (; fi; fi = fi->next)
5799 make_constraint_from (fi, anything_id);
5800 }
5801 }
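  /* Generate constraints from the body of every analyzed function, walking
     its PHI nodes and statements.  cfun and current_function_decl are
     switched temporarily because the constraint builder consults the
     function currently being processed.  */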
5802 for (node = cgraph_nodes; node; node = node->next)
5803 {
5804 if (node->analyzed)
5805 {
5806 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5807 basic_block bb;
5808 tree old_func_decl = current_function_decl;
5809 if (dump_file)
5810 fprintf (dump_file,
5811 "Generating constraints for %s\n",
5812 cgraph_node_name (node));
5813 push_cfun (func);
5814 current_function_decl = node->decl;
5815
5816 FOR_EACH_BB_FN (bb, func)
5817 {
5818 gimple_stmt_iterator gsi;
5819
5820 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5821 gsi_next (&gsi))
5822 {
5823 gimple phi = gsi_stmt (gsi);
5824
5825 if (is_gimple_reg (gimple_phi_result (phi)))
5826 find_func_aliases (phi);
5827 }
5828
5829 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5830 find_func_aliases (gsi_stmt (gsi));
5831 }
5832 current_function_decl = old_func_decl;
5833 pop_cfun ();
5834 }
5835 else
5836 {
5837 /* Make the unanalyzed function point to anything. */
5838 }
5839 }
5840
5841 if (dump_file)
5842 {
5843 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5844 dump_constraints (dump_file);
5845 }
5846
5847 if (dump_file)
5848 fprintf (dump_file,
5849 "\nCollapsing static cycles and doing variable "
5850 "substitution:\n");
5851
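  /* From here on the flow mirrors the intraprocedural driver: build the
     predecessor graph, run offline variable substitution, rewrite the
     constraints in terms of the representatives, build the successor
     graph, collapse indirect cycles and solve.  */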
5852 init_graph (VEC_length (varinfo_t, varmap) * 2);
5853 build_pred_graph ();
5854 si = perform_var_substitution (graph);
5855 rewrite_constraints (graph, si);
5856
5857 build_succ_graph ();
5858 free_var_substitution_info (si);
5859 move_complex_constraints (graph);
5860 unite_pointer_equivalences (graph);
5861 find_indirect_cycles (graph);
5862
5863 /* Implicit nodes and predecessors are no longer necessary at this
5864 point. */
5865 remove_preds_and_fake_succs (graph);
5866
5867 if (dump_file)
5868 fprintf (dump_file, "\nSolving graph\n");
5869
5870 solve_graph (graph);
5871
5872 if (dump_file)
5873 dump_sa_points_to_info (dump_file);
5874
5875 in_ipa_mode = 0;
5876 delete_alias_heapvars ();
5877 delete_points_to_sets ();
5878 return 0;
5879 }
5880
5881 struct simple_ipa_opt_pass pass_ipa_pta =
5882 {
5883 {
5884 SIMPLE_IPA_PASS,
5885 "pta", /* name */
5886 gate_ipa_pta, /* gate */
5887 ipa_pta_execute, /* execute */
5888 NULL, /* sub */
5889 NULL, /* next */
5890 0, /* static_pass_number */
5891 TV_IPA_PTA, /* tv_id */
5892 0, /* properties_required */
5893 0, /* properties_provided */
5894 0, /* properties_destroyed */
5895 0, /* todo_flags_start */
5896 TODO_update_ssa /* todo_flags_finish */
5897 }
5898 };
5899
5900
5901 #include "gt-tree-ssa-structalias.h"