13b9751288ff442dad4b4bd1bad42d1986d2d355
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
52
53 /* The idea behind this analyzer is to generate set constraints from the
54 program, then solve the resulting constraints in order to generate the
55 points-to sets.
56
57 Set constraints are a way of modeling program analysis problems that
58 involve sets. They consist of an inclusion constraint language,
59 describing the variables (each variable is a set) and operations that
60 are involved on the variables, and a set of rules that derive facts
61 from these operations. To solve a system of set constraints, you derive
62 all possible facts under the rules, which gives you the correct sets
63 as a consequence.
64
65 See "Efficient Field-sensitive pointer analysis for C" by "David
66 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
67 http://citeseer.ist.psu.edu/pearce04efficient.html
68
69 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
    70   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
71 http://citeseer.ist.psu.edu/heintze01ultrafast.html
72
73 There are three types of constraint expressions, DEREF, ADDRESSOF, and
74 SCALAR. Each constraint expression consists of a constraint type,
75 a variable, and an offset.
76
77 SCALAR is a constraint expression type used to represent x, whether
78 it appears on the LHS or the RHS of a statement.
79 DEREF is a constraint expression type used to represent *x, whether
80 it appears on the LHS or the RHS of a statement.
81 ADDRESSOF is a constraint expression used to represent &x, whether
82 it appears on the LHS or the RHS of a statement.
83
84 Each pointer variable in the program is assigned an integer id, and
85 each field of a structure variable is assigned an integer id as well.
86
87 Structure variables are linked to their list of fields through a "next
88 field" in each variable that points to the next field in offset
89 order.
90 Each variable for a structure field has
91
92 1. "size", that tells the size in bits of that field.
    93   2. "fullsize", that tells the size in bits of the entire structure.
94 3. "offset", that tells the offset in bits from the beginning of the
95 structure to this field.
96
97 Thus,
98 struct f
99 {
100 int a;
101 int b;
102 } foo;
103 int *bar;
104
105 looks like
106
107 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
108 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
109 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
110
111
112 In order to solve the system of set constraints, the following is
113 done:
114
115 1. Each constraint variable x has a solution set associated with it,
116 Sol(x).
117
118 2. Constraints are separated into direct, copy, and complex.
119 Direct constraints are ADDRESSOF constraints that require no extra
120 processing, such as P = &Q
121 Copy constraints are those of the form P = Q.
122 Complex constraints are all the constraints involving dereferences.
123
124 3. All direct constraints of the form P = &Q are processed, such
125 that Q is added to Sol(P)
126
127 4. All complex constraints for a given constraint variable are stored in a
128 linked list attached to that variable's node.
129
130 5. A directed graph is built out of the copy constraints. Each
131 constraint variable is a node in the graph, and an edge from
132 Q to P is added for each copy constraint of the form P = Q
133
134 6. The graph is then walked, and solution sets are
135 propagated along the copy edges, such that an edge from Q to P
136 causes Sol(P) <- Sol(P) union Sol(Q).
137
138 7. As we visit each node, all complex constraints associated with
139 that node are processed by adding appropriate copy edges to the graph, or the
140 appropriate variables to the solution set.
141
142 8. The process of walking the graph is iterated until no solution
143 sets change.
144
145 Prior to walking the graph in steps 6 and 7, We perform static
146 cycle elimination on the constraint graph, as well
147 as off-line variable substitution.
148
149 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
150 on and turned into anything), but isn't. You can just see what offset
151 inside the pointed-to struct it's going to access.
152
153 TODO: Constant bounded arrays can be handled as if they were structs of the
154 same number of elements.
155
156 TODO: Modeling heap and incoming pointers becomes much better if we
157 add fields to them as we discover them, which we could do.
158
159 TODO: We could handle unions, but to be honest, it's probably not
160 worth the pain or slowdown. */
161
162 static bool use_field_sensitive = true;
163 static unsigned int create_variable_info_for (tree, const char *);
164 static struct constraint_expr get_constraint_for (tree);
165 static void build_constraint_graph (void);
166
167 static bitmap_obstack ptabitmap_obstack;
168 static bitmap_obstack iteration_obstack;
169 DEF_VEC_P(constraint_t);
170 DEF_VEC_ALLOC_P(constraint_t,heap);
171
/* Statistics gathered while building and solving the constraint
   system, dumped for -fdump-* consumers.  */
static struct constraint_stats
{
  unsigned int total_vars;		/* Constraint variables created.  */
  unsigned int collapsed_vars;		/* Variables merged by collapsing.  */
  unsigned int unified_vars_static;	/* Unified before solving.  */
  unsigned int unified_vars_dynamic;	/* Unified during solving.  */
  unsigned int iterations;		/* Solver worklist iterations.  */
} stats;
180
181 struct variable_info
182 {
183 /* ID of this variable */
184 unsigned int id;
185
186 /* Name of this variable */
187 const char *name;
188
189 /* Tree that this variable is associated with. */
190 tree decl;
191
192 /* Offset of this variable, in bits, from the base variable */
193 unsigned HOST_WIDE_INT offset;
194
195 /* Size of the variable, in bits. */
196 unsigned HOST_WIDE_INT size;
197
198 /* Full size of the base variable, in bits. */
199 unsigned HOST_WIDE_INT fullsize;
200
201 /* A link to the variable for the next field in this structure. */
202 struct variable_info *next;
203
204 /* Node in the graph that represents the constraints and points-to
205 solution for the variable. */
206 unsigned int node;
207
208 /* True if the address of this variable is taken. Needed for
209 variable substitution. */
210 unsigned int address_taken:1;
211
212 /* True if this variable is the target of a dereference. Needed for
213 variable substitution. */
214 unsigned int indirect_target:1;
215
216 /* True if this is a variable created by the constraint analysis, such as
217 heap variables and constraints we had to break up. */
218 unsigned int is_artificial_var:1;
219
220 /* True if this is a special variable whose solution set should not be
221 changed. */
222 unsigned int is_special_var:1;
223
224 /* True for variables whose size is not known or variable. */
225 unsigned int is_unknown_size_var:1;
226
227 /* True for variables that have unions somewhere in them. */
228 unsigned int has_union:1;
229
230 /* True if this is a heap variable. */
231 unsigned int is_heap_var:1;
232
233 /* Points-to set for this variable. */
234 bitmap solution;
235
236 /* Variable ids represented by this node. */
237 bitmap variables;
238
239 /* Vector of complex constraints for this node. Complex
240 constraints are those involving dereferences. */
241 VEC(constraint_t,heap) *complex;
242 };
243 typedef struct variable_info *varinfo_t;
244
245 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
246
247 /* Pool of variable info structures. */
248 static alloc_pool variable_info_pool;
249
250 DEF_VEC_P(varinfo_t);
251
252 DEF_VEC_ALLOC_P(varinfo_t, heap);
253
254 /* Table of variable info structures for constraint variables. Indexed directly
255 by variable info id. */
256 static VEC(varinfo_t,heap) *varmap;
257
258 /* Return the varmap element N */
259
260 static inline varinfo_t
261 get_varinfo(unsigned int n)
262 {
263 return VEC_index(varinfo_t, varmap, n);
264 }
265
266 /* Variable that represents the unknown pointer. */
267 static varinfo_t var_anything;
268 static tree anything_tree;
269 static unsigned int anything_id;
270
271 /* Variable that represents the NULL pointer. */
272 static varinfo_t var_nothing;
273 static tree nothing_tree;
274 static unsigned int nothing_id;
275
276 /* Variable that represents read only memory. */
277 static varinfo_t var_readonly;
278 static tree readonly_tree;
279 static unsigned int readonly_id;
280
281 /* Variable that represents integers. This is used for when people do things
282 like &0->a.b. */
283 static varinfo_t var_integer;
284 static tree integer_tree;
285 static unsigned int integer_id;
286
287 /* Variable that represents arbitrary offsets into an object. Used to
288 represent pointer arithmetic, which may not legally escape the
289 bounds of an object. */
290 static varinfo_t var_anyoffset;
291 static tree anyoffset_tree;
292 static unsigned int anyoffset_id;
293
294 /* Return a new variable info structure consisting for a variable
295 named NAME, and using constraint graph node NODE. */
296
297 static varinfo_t
298 new_var_info (tree t, unsigned int id, const char *name, unsigned int node)
299 {
300 varinfo_t ret = pool_alloc (variable_info_pool);
301
302 ret->id = id;
303 ret->name = name;
304 ret->decl = t;
305 ret->node = node;
306 ret->address_taken = false;
307 ret->indirect_target = false;
308 ret->is_artificial_var = false;
309 ret->is_heap_var = false;
310 ret->is_special_var = false;
311 ret->is_unknown_size_var = false;
312 ret->has_union = false;
313 ret->solution = BITMAP_ALLOC (&ptabitmap_obstack);
314 bitmap_clear (ret->solution);
315 ret->variables = BITMAP_ALLOC (&ptabitmap_obstack);
316 bitmap_clear (ret->variables);
317 ret->complex = NULL;
318 ret->next = NULL;
319 return ret;
320 }
321
322 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
323
324 /* An expression that appears in a constraint. */
325
326 struct constraint_expr
327 {
328 /* Constraint type. */
329 constraint_expr_type type;
330
331 /* Variable we are referring to in the constraint. */
332 unsigned int var;
333
334 /* Offset, in bits, of this constraint from the beginning of
335 variables it ends up referring to.
336
337 IOW, in a deref constraint, we would deref, get the result set,
338 then add OFFSET to each member. */
339 unsigned HOST_WIDE_INT offset;
340 };
341
342 static struct constraint_expr do_deref (struct constraint_expr);
343
344 /* Our set constraints are made up of two constraint expressions, one
345 LHS, and one RHS.
346
347 As described in the introduction, our set constraints each represent an
348 operation between set valued variables.
349 */
350 struct constraint
351 {
352 struct constraint_expr lhs;
353 struct constraint_expr rhs;
354 };
355
356 /* List of constraints that we use to build the constraint graph from. */
357
358 static VEC(constraint_t,heap) *constraints;
359 static alloc_pool constraint_pool;
360
361 /* An edge in the constraint graph. We technically have no use for
362 the src, since it will always be the same node that we are indexing
363 into the pred/succ arrays with, but it's nice for checking
364 purposes. The edges are weighted, with a bit set in weights for
365 each edge from src to dest with that weight. */
366
367 struct constraint_edge
368 {
369 unsigned int src;
370 unsigned int dest;
371 bitmap weights;
372 };
373
374 typedef struct constraint_edge *constraint_edge_t;
375 static alloc_pool constraint_edge_pool;
376
377 /* Return a new constraint edge from SRC to DEST. */
378
379 static constraint_edge_t
380 new_constraint_edge (unsigned int src, unsigned int dest)
381 {
382 constraint_edge_t ret = pool_alloc (constraint_edge_pool);
383 ret->src = src;
384 ret->dest = dest;
385 ret->weights = NULL;
386 return ret;
387 }
388
389 DEF_VEC_P(constraint_edge_t);
390 DEF_VEC_ALLOC_P(constraint_edge_t,heap);
391
392
393 /* The constraint graph is simply a set of adjacency vectors, one per
394 variable. succs[x] is the vector of successors for variable x, and preds[x]
395 is the vector of predecessors for variable x.
396 IOW, all edges are "forward" edges, which is not like our CFG.
397 So remember that
398 preds[x]->src == x, and
399 succs[x]->src == x. */
400
401 struct constraint_graph
402 {
403 VEC(constraint_edge_t,heap) **succs;
404 VEC(constraint_edge_t,heap) **preds;
405 };
406
407 typedef struct constraint_graph *constraint_graph_t;
408
409 static constraint_graph_t graph;
410
411 /* Create a new constraint consisting of LHS and RHS expressions. */
412
413 static constraint_t
414 new_constraint (const struct constraint_expr lhs,
415 const struct constraint_expr rhs)
416 {
417 constraint_t ret = pool_alloc (constraint_pool);
418 ret->lhs = lhs;
419 ret->rhs = rhs;
420 return ret;
421 }
422
423 /* Print out constraint C to FILE. */
424
425 void
426 dump_constraint (FILE *file, constraint_t c)
427 {
428 if (c->lhs.type == ADDRESSOF)
429 fprintf (file, "&");
430 else if (c->lhs.type == DEREF)
431 fprintf (file, "*");
432 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
433 if (c->lhs.offset != 0)
434 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
435 fprintf (file, " = ");
436 if (c->rhs.type == ADDRESSOF)
437 fprintf (file, "&");
438 else if (c->rhs.type == DEREF)
439 fprintf (file, "*");
440 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
441 if (c->rhs.offset != 0)
442 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
443 fprintf (file, "\n");
444 }
445
446 /* Print out constraint C to stderr. */
447
448 void
449 debug_constraint (constraint_t c)
450 {
451 dump_constraint (stderr, c);
452 }
453
454 /* Print out all constraints to FILE */
455
456 void
457 dump_constraints (FILE *file)
458 {
459 int i;
460 constraint_t c;
461 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
462 dump_constraint (file, c);
463 }
464
465 /* Print out all constraints to stderr. */
466
/* Print all constraints to stderr.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
472
473 /* SOLVER FUNCTIONS
474
475 The solver is a simple worklist solver, that works on the following
476 algorithm:
477
478 sbitmap changed_nodes = all ones;
479 changed_count = number of nodes;
480 For each node that was already collapsed:
481 changed_count--;
482
483
484 while (changed_count > 0)
485 {
486 compute topological ordering for constraint graph
487
488 find and collapse cycles in the constraint graph (updating
489 changed if necessary)
490
491 for each node (n) in the graph in topological order:
492 changed_count--;
493
494 Process each complex constraint associated with the node,
495 updating changed if necessary.
496
497 For each outgoing edge from n, propagate the solution from n to
498 the destination of the edge, updating changed as necessary.
499
500 } */
501
502 /* Return true if two constraint expressions A and B are equal. */
503
504 static bool
505 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
506 {
507 return a.type == b.type
508 && a.var == b.var
509 && a.offset == b.offset;
510 }
511
512 /* Return true if constraint expression A is less than constraint expression
513 B. This is just arbitrary, but consistent, in order to give them an
514 ordering. */
515
516 static bool
517 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
518 {
519 if (a.type == b.type)
520 {
521 if (a.var == b.var)
522 return a.offset < b.offset;
523 else
524 return a.var < b.var;
525 }
526 else
527 return a.type < b.type;
528 }
529
530 /* Return true if constraint A is less than constraint B. This is just
531 arbitrary, but consistent, in order to give them an ordering. */
532
533 static bool
534 constraint_less (const constraint_t a, const constraint_t b)
535 {
536 if (constraint_expr_less (a->lhs, b->lhs))
537 return true;
538 else if (constraint_expr_less (b->lhs, a->lhs))
539 return false;
540 else
541 return constraint_expr_less (a->rhs, b->rhs);
542 }
543
544 /* Return true if two constraints A and B are equal. */
545
546 static bool
547 constraint_equal (struct constraint a, struct constraint b)
548 {
549 return constraint_expr_equal (a.lhs, b.lhs)
550 && constraint_expr_equal (a.rhs, b.rhs);
551 }
552
553
554 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
555
556 static constraint_t
557 constraint_vec_find (VEC(constraint_t,heap) *vec,
558 struct constraint lookfor)
559 {
560 unsigned int place;
561 constraint_t found;
562
563 if (vec == NULL)
564 return NULL;
565
566 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
567 if (place >= VEC_length (constraint_t, vec))
568 return NULL;
569 found = VEC_index (constraint_t, vec, place);
570 if (!constraint_equal (*found, lookfor))
571 return NULL;
572 return found;
573 }
574
575 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
576
577 static void
578 constraint_set_union (VEC(constraint_t,heap) **to,
579 VEC(constraint_t,heap) **from)
580 {
581 int i;
582 constraint_t c;
583
584 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
585 {
586 if (constraint_vec_find (*to, *c) == NULL)
587 {
588 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
589 constraint_less);
590 VEC_safe_insert (constraint_t, heap, *to, place, c);
591 }
592 }
593 }
594
595 /* Take a solution set SET, add OFFSET to each member of the set, and
596 overwrite SET with the result when done. */
597
598 static void
599 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
600 {
601 bitmap result = BITMAP_ALLOC (&iteration_obstack);
602 unsigned int i;
603 bitmap_iterator bi;
604
605 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
606 {
607 /* If this is a properly sized variable, only add offset if it's
608 less than end. Otherwise, it is globbed to a single
609 variable. */
610
611 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
612 {
613 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
614 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
615 bitmap_set_bit (result, v->id);
616 }
617 else if (get_varinfo (i)->is_artificial_var
618 || get_varinfo (i)->has_union
619 || get_varinfo (i)->is_unknown_size_var)
620 {
621 bitmap_set_bit (result, i);
622 }
623 }
624
625 bitmap_copy (set, result);
626 BITMAP_FREE (result);
627 }
628
629 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
630 process. */
631
632 static bool
633 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
634 {
635 if (inc == 0)
636 return bitmap_ior_into (to, from);
637 else
638 {
639 bitmap tmp;
640 bool res;
641
642 tmp = BITMAP_ALLOC (&iteration_obstack);
643 bitmap_copy (tmp, from);
644 solution_set_add (tmp, inc);
645 res = bitmap_ior_into (to, tmp);
646 BITMAP_FREE (tmp);
647 return res;
648 }
649 }
650
651 /* Insert constraint C into the list of complex constraints for VAR. */
652
653 static void
654 insert_into_complex (unsigned int var, constraint_t c)
655 {
656 varinfo_t vi = get_varinfo (var);
657 unsigned int place = VEC_lower_bound (constraint_t, vi->complex, c,
658 constraint_less);
659 VEC_safe_insert (constraint_t, heap, vi->complex, place, c);
660 }
661
662
663 /* Compare two constraint edges A and B, return true if they are equal. */
664
665 static bool
666 constraint_edge_equal (struct constraint_edge a, struct constraint_edge b)
667 {
668 return a.src == b.src && a.dest == b.dest;
669 }
670
671 /* Compare two constraint edges, return true if A is less than B */
672
673 static bool
674 constraint_edge_less (const constraint_edge_t a, const constraint_edge_t b)
675 {
676 if (a->dest < b->dest)
677 return true;
678 else if (a->dest == b->dest)
679 return a->src < b->src;
680 else
681 return false;
682 }
683
684 /* Find the constraint edge that matches LOOKFOR, in VEC.
685 Return the edge, if found, NULL otherwise. */
686
687 static constraint_edge_t
688 constraint_edge_vec_find (VEC(constraint_edge_t,heap) *vec,
689 struct constraint_edge lookfor)
690 {
691 unsigned int place;
692 constraint_edge_t edge;
693
694 place = VEC_lower_bound (constraint_edge_t, vec, &lookfor,
695 constraint_edge_less);
696 edge = VEC_index (constraint_edge_t, vec, place);
697 if (!constraint_edge_equal (*edge, lookfor))
698 return NULL;
699 return edge;
700 }
701
702 /* Condense two variable nodes into a single variable node, by moving
703 all associated info from SRC to TO. */
704
705 static void
706 condense_varmap_nodes (unsigned int to, unsigned int src)
707 {
708 varinfo_t tovi = get_varinfo (to);
709 varinfo_t srcvi = get_varinfo (src);
710 unsigned int i;
711 constraint_t c;
712 bitmap_iterator bi;
713
714 /* the src node, and all its variables, are now the to node. */
715 srcvi->node = to;
716 EXECUTE_IF_SET_IN_BITMAP (srcvi->variables, 0, i, bi)
717 get_varinfo (i)->node = to;
718
719 /* Merge the src node variables and the to node variables. */
720 bitmap_set_bit (tovi->variables, src);
721 bitmap_ior_into (tovi->variables, srcvi->variables);
722 bitmap_clear (srcvi->variables);
723
724 /* Move all complex constraints from src node into to node */
725 for (i = 0; VEC_iterate (constraint_t, srcvi->complex, i, c); i++)
726 {
727 /* In complex constraints for node src, we may have either
728 a = *src, and *src = a. */
729
730 if (c->rhs.type == DEREF)
731 c->rhs.var = to;
732 else
733 c->lhs.var = to;
734 }
735 constraint_set_union (&tovi->complex, &srcvi->complex);
736 VEC_free (constraint_t, heap, srcvi->complex);
737 srcvi->complex = NULL;
738 }
739
740 /* Erase EDGE from GRAPH. This routine only handles self-edges
741 (e.g. an edge from a to a). */
742
743 static void
744 erase_graph_self_edge (constraint_graph_t graph, struct constraint_edge edge)
745 {
746 VEC(constraint_edge_t,heap) *predvec = graph->preds[edge.src];
747 VEC(constraint_edge_t,heap) *succvec = graph->succs[edge.dest];
748 unsigned int place;
749 gcc_assert (edge.src == edge.dest);
750
751 /* Remove from the successors. */
752 place = VEC_lower_bound (constraint_edge_t, succvec, &edge,
753 constraint_edge_less);
754
755 /* Make sure we found the edge. */
756 #ifdef ENABLE_CHECKING
757 {
758 constraint_edge_t tmp = VEC_index (constraint_edge_t, succvec, place);
759 gcc_assert (constraint_edge_equal (*tmp, edge));
760 }
761 #endif
762 VEC_ordered_remove (constraint_edge_t, succvec, place);
763
764 /* Remove from the predecessors. */
765 place = VEC_lower_bound (constraint_edge_t, predvec, &edge,
766 constraint_edge_less);
767
768 /* Make sure we found the edge. */
769 #ifdef ENABLE_CHECKING
770 {
771 constraint_edge_t tmp = VEC_index (constraint_edge_t, predvec, place);
772 gcc_assert (constraint_edge_equal (*tmp, edge));
773 }
774 #endif
775 VEC_ordered_remove (constraint_edge_t, predvec, place);
776 }
777
778 /* Remove edges involving NODE from GRAPH. */
779
780 static void
781 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
782 {
783 VEC(constraint_edge_t,heap) *succvec = graph->succs[node];
784 VEC(constraint_edge_t,heap) *predvec = graph->preds[node];
785 constraint_edge_t c;
786 int i;
787
788 /* Walk the successors, erase the associated preds. */
789 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
790 if (c->dest != node)
791 {
792 unsigned int place;
793 struct constraint_edge lookfor;
794 lookfor.src = c->dest;
795 lookfor.dest = node;
796 place = VEC_lower_bound (constraint_edge_t, graph->preds[c->dest],
797 &lookfor, constraint_edge_less);
798 VEC_ordered_remove (constraint_edge_t, graph->preds[c->dest], place);
799 }
800 /* Walk the preds, erase the associated succs. */
801 for (i =0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
802 if (c->dest != node)
803 {
804 unsigned int place;
805 struct constraint_edge lookfor;
806 lookfor.src = c->dest;
807 lookfor.dest = node;
808 place = VEC_lower_bound (constraint_edge_t, graph->succs[c->dest],
809 &lookfor, constraint_edge_less);
810 VEC_ordered_remove (constraint_edge_t, graph->succs[c->dest], place);
811 }
812
813 VEC_free (constraint_edge_t, heap, graph->preds[node]);
814 VEC_free (constraint_edge_t, heap, graph->succs[node]);
815 graph->preds[node] = NULL;
816 graph->succs[node] = NULL;
817 }
818
819 static bool edge_added = false;
820
821 /* Add edge NEWE to the graph. */
822
823 static bool
824 add_graph_edge (constraint_graph_t graph, struct constraint_edge newe)
825 {
826 unsigned int place;
827 unsigned int src = newe.src;
828 unsigned int dest = newe.dest;
829 VEC(constraint_edge_t,heap) *vec;
830
831 vec = graph->preds[src];
832 place = VEC_lower_bound (constraint_edge_t, vec, &newe,
833 constraint_edge_less);
834 if (place == VEC_length (constraint_edge_t, vec)
835 || VEC_index (constraint_edge_t, vec, place)->dest != dest)
836 {
837 constraint_edge_t edge = new_constraint_edge (src, dest);
838 bitmap weightbitmap;
839
840 weightbitmap = BITMAP_ALLOC (&ptabitmap_obstack);
841 edge->weights = weightbitmap;
842 VEC_safe_insert (constraint_edge_t, heap, graph->preds[edge->src],
843 place, edge);
844 edge = new_constraint_edge (dest, src);
845 edge->weights = weightbitmap;
846 place = VEC_lower_bound (constraint_edge_t, graph->succs[edge->src],
847 edge, constraint_edge_less);
848 VEC_safe_insert (constraint_edge_t, heap, graph->succs[edge->src],
849 place, edge);
850 edge_added = true;
851 return true;
852 }
853 else
854 return false;
855 }
856
857
858 /* Return the bitmap representing the weights of edge LOOKFOR */
859
860 static bitmap
861 get_graph_weights (constraint_graph_t graph, struct constraint_edge lookfor)
862 {
863 constraint_edge_t edge;
864 unsigned int src = lookfor.src;
865 VEC(constraint_edge_t,heap) *vec;
866 vec = graph->preds[src];
867 edge = constraint_edge_vec_find (vec, lookfor);
868 gcc_assert (edge != NULL);
869 return edge->weights;
870 }
871
872
873 /* Merge GRAPH nodes FROM and TO into node TO. */
874
875 static void
876 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
877 unsigned int from)
878 {
879 VEC(constraint_edge_t,heap) *succvec = graph->succs[from];
880 VEC(constraint_edge_t,heap) *predvec = graph->preds[from];
881 int i;
882 constraint_edge_t c;
883
884 /* Merge all the predecessor edges. */
885
886 for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
887 {
888 unsigned int d = c->dest;
889 struct constraint_edge olde;
890 struct constraint_edge newe;
891 bitmap temp;
892 bitmap weights;
893 if (c->dest == from)
894 d = to;
895 newe.src = to;
896 newe.dest = d;
897 add_graph_edge (graph, newe);
898 olde.src = from;
899 olde.dest = c->dest;
900 olde.weights = NULL;
901 temp = get_graph_weights (graph, olde);
902 weights = get_graph_weights (graph, newe);
903 bitmap_ior_into (weights, temp);
904 }
905
906 /* Merge all the successor edges. */
907 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
908 {
909 unsigned int d = c->dest;
910 struct constraint_edge olde;
911 struct constraint_edge newe;
912 bitmap temp;
913 bitmap weights;
914 if (c->dest == from)
915 d = to;
916 newe.src = d;
917 newe.dest = to;
918 add_graph_edge (graph, newe);
919 olde.src = c->dest;
920 olde.dest = from;
921 olde.weights = NULL;
922 temp = get_graph_weights (graph, olde);
923 weights = get_graph_weights (graph, newe);
924 bitmap_ior_into (weights, temp);
925 }
926 clear_edges_for_node (graph, from);
927 }
928
929 /* Add a graph edge to GRAPH, going from TO to FROM, with WEIGHT, if
930 it doesn't exist in the graph already.
931 Return false if the edge already existed, true otherwise. */
932
933 static bool
934 int_add_graph_edge (constraint_graph_t graph, unsigned int to,
935 unsigned int from, unsigned HOST_WIDE_INT weight)
936 {
937 if (to == from && weight == 0)
938 {
939 return false;
940 }
941 else
942 {
943 bool r;
944 struct constraint_edge edge;
945 edge.src = to;
946 edge.dest = from;
947 edge.weights = NULL;
948 r = add_graph_edge (graph, edge);
949 r |= !bitmap_bit_p (get_graph_weights (graph, edge), weight);
950 bitmap_set_bit (get_graph_weights (graph, edge), weight);
951 return r;
952 }
953 }
954
955
956 /* Return true if LOOKFOR is an existing graph edge. */
957
958 static bool
959 valid_graph_edge (constraint_graph_t graph, struct constraint_edge lookfor)
960 {
961 return constraint_edge_vec_find (graph->preds[lookfor.src], lookfor) != NULL;
962 }
963
964
965 /* Build the constraint graph. */
966
967 static void
968 build_constraint_graph (void)
969 {
970 int i = 0;
971 constraint_t c;
972
973 graph = xmalloc (sizeof (struct constraint_graph));
974 graph->succs = xcalloc (VEC_length (varinfo_t, varmap),
975 sizeof (*graph->succs));
976 graph->preds = xcalloc (VEC_length (varinfo_t, varmap),
977 sizeof (*graph->preds));
978
979 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
980 {
981 struct constraint_expr lhs = c->lhs;
982 struct constraint_expr rhs = c->rhs;
983 if (lhs.type == DEREF)
984 {
985 /* *x = y or *x = &y (complex) */
986 if (rhs.type == ADDRESSOF || rhs.var > anything_id)
987 insert_into_complex (lhs.var, c);
988 }
989 else if (rhs.type == DEREF)
990 {
991 /* !special var= *y */
992 if (!(get_varinfo (lhs.var)->is_special_var))
993 insert_into_complex (rhs.var, c);
994 }
995 else if (rhs.type == ADDRESSOF)
996 {
997 /* x = &y */
998 bitmap_set_bit (get_varinfo (lhs.var)->solution, rhs.var);
999 }
1000 else if (rhs.var > anything_id && lhs.var > anything_id)
1001 {
1002 /* Ignore 0 weighted self edges, as they can't possibly contribute
1003 anything */
1004 if (lhs.var != rhs.var || rhs.offset != 0 || lhs.offset != 0)
1005 {
1006
1007 struct constraint_edge edge;
1008 edge.src = lhs.var;
1009 edge.dest = rhs.var;
1010 /* x = y (simple) */
1011 add_graph_edge (graph, edge);
1012 bitmap_set_bit (get_graph_weights (graph, edge),
1013 rhs.offset);
1014 }
1015
1016 }
1017 }
1018 }
1019
1020
1021 /* Changed variables on the last iteration. */
1022 static unsigned int changed_count;
1023 static sbitmap changed;
1024
1025 DEF_VEC_I(unsigned);
1026 DEF_VEC_ALLOC_I(unsigned,heap);
1027
1028
1029 /* Strongly Connected Component visitation info. */
1030
1031 struct scc_info
1032 {
1033 sbitmap visited;
1034 sbitmap in_component;
1035 int current_index;
1036 unsigned int *visited_index;
1037 VEC(unsigned,heap) *scc_stack;
1038 VEC(unsigned,heap) *unification_queue;
1039 };
1040
1041
1042 /* Recursive routine to find strongly connected components in GRAPH.
1043 SI is the SCC info to store the information in, and N is the id of current
1044 graph node we are processing.
1045
1046 This is Tarjan's strongly connected component finding algorithm, as
1047 modified by Nuutila to keep only non-root nodes on the stack.
1048 The algorithm can be found in "On finding the strongly connected
1049 connected components in a directed graph" by Esko Nuutila and Eljas
1050 Soisalon-Soininen, in Information Processing Letters volume 49,
1051 number 1, pages 9-14. */
1052
static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  constraint_edge_t c;
  int i;

  /* Only representative nodes should ever be visited.  */
  gcc_assert (get_varinfo (n)->node == n);
  SET_BIT (si->visited, n);
  /* Clearing in_component marks N as "component not yet known".  */
  RESET_BIT (si->in_component, n);
  si->visited_index[n] = si->current_index ++;

  /* Visit all the successors.  */
  for (i = 0; VEC_iterate (constraint_edge_t, graph->succs[n], i, c); i++)
    {
      /* We only want to find and collapse the zero weight edges.  */
      if (bitmap_bit_p (c->weights, 0))
	{
	  unsigned int w = c->dest;
	  if (!TEST_BIT (si->visited, w))
	    scc_visit (graph, si, w);
	  if (!TEST_BIT (si->in_component, w))
	    {
	      /* W's component is still open; N's candidate root
		 becomes the earlier-visited of the two roots.  */
	      unsigned int t = get_varinfo (w)->node;
	      unsigned int nnode = get_varinfo (n)->node;
	      if (si->visited_index[t] < si->visited_index[nnode])
		get_varinfo (n)->node = t;
	    }
	}
    }

  /* See if any components have been identified.  */
  if (get_varinfo (n)->node == n)
    {
      /* N is a component root: pop every stacked node visited after N
	 and make N its representative.  */
      unsigned int t = si->visited_index[n];
      SET_BIT (si->in_component, n);
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && t < si->visited_index[VEC_last (unsigned, si->scc_stack)])
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  get_varinfo (w)->node = n;
	  SET_BIT (si->in_component, w);
	  /* Mark this node for collapsing.  */
	  VEC_safe_push (unsigned, heap, si->unification_queue, w);
	}
    }
  else
    /* Non-root member of some component; keep it on the stack until
       its root is found.  */
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1101
1102
/* Collapse two variables into one variable: merge FROM's solution,
   graph edges and flags into TO, leaving TO as the representative.  */

static void
collapse_nodes (constraint_graph_t graph, unsigned int to, unsigned int from)
{
  bitmap tosol, fromsol;
  struct constraint_edge edge;


  condense_varmap_nodes (to, from);
  tosol = get_varinfo (to)->solution;
  fromsol = get_varinfo (from)->solution;
  bitmap_ior_into (tosol, fromsol);
  merge_graph_nodes (graph, to, from);
  /* Merging may have produced a TO -> TO self edge; a zero weight on
     it contributes nothing, so drop it (and the whole edge if no
     other weights remain).  */
  edge.src = to;
  edge.dest = to;
  edge.weights = NULL;
  if (valid_graph_edge (graph, edge))
    {
      bitmap weights = get_graph_weights (graph, edge);
      bitmap_clear_bit (weights, 0);
      if (bitmap_empty_p (weights))
	erase_graph_self_edge (graph, edge);
    }
  bitmap_clear (fromsol);
  /* TO inherits FROM's addressability and indirect-target flags.  */
  get_varinfo (to)->address_taken |= get_varinfo (from)->address_taken;
  get_varinfo (to)->indirect_target |= get_varinfo (from)->indirect_target;
}
1131
1132
1133 /* Unify nodes in GRAPH that we have found to be part of a cycle.
1134 SI is the Strongly Connected Components information structure that tells us
1135 what components to unify.
1136 UPDATE_CHANGED should be set to true if the changed sbitmap and changed
1137 count should be updated to reflect the unification. */
1138
static void
process_unification_queue (constraint_graph_t graph, struct scc_info *si,
			   bool update_changed)
{
  size_t i = 0;
  bitmap tmp = BITMAP_ALLOC (update_changed ? &iteration_obstack : NULL);
  bitmap_clear (tmp);

  /* We proceed as follows:

     For each component in the queue (components are delineated by
     when current_queue_element->node != next_queue_element->node):

     rep = representative node for component

     For each node (tounify) to be unified in the component,
     merge the solution for tounify into tmp bitmap

     clear solution for tounify

     merge edges from tounify into rep

     merge complex constraints from tounify into rep

     update changed count to note that tounify will never change
     again

     Merge tmp into solution for rep, marking rep changed if this
     changed rep's solution.

     Delete any 0 weighted self-edges we now have for rep.  */
  while (i != VEC_length (unsigned, si->unification_queue))
    {
      unsigned int tounify = VEC_index (unsigned, si->unification_queue, i);
      unsigned int n = get_varinfo (tounify)->node;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Unifying %s to %s\n",
		 get_varinfo (tounify)->name,
		 get_varinfo (n)->name);
      if (update_changed)
	stats.unified_vars_dynamic++;
      else
	stats.unified_vars_static++;
      bitmap_ior_into (tmp, get_varinfo (tounify)->solution);
      merge_graph_nodes (graph, n, tounify);
      condense_varmap_nodes (n, tounify);

      /* TOUNIFY is subsumed by N; transfer its changed flag to N,
	 taking care not to double-count if N was already marked.  */
      if (update_changed && TEST_BIT (changed, tounify))
	{
	  RESET_BIT (changed, tounify);
	  if (!TEST_BIT (changed, n))
	    SET_BIT (changed, n);
	  else
	    {
	      gcc_assert (changed_count > 0);
	      changed_count--;
	    }
	}

      bitmap_clear (get_varinfo (tounify)->solution);
      ++i;

      /* If we've either finished processing the entire queue, or
	 finished processing all nodes for component n, update the solution for
	 n.  */
      if (i == VEC_length (unsigned, si->unification_queue)
	  || get_varinfo (VEC_index (unsigned, si->unification_queue, i))->node != n)
	{
	  struct constraint_edge edge;

	  /* If the solution changes because of the merging, we need to mark
	     the variable as changed.  */
	  if (bitmap_ior_into (get_varinfo (n)->solution, tmp))
	    {
	      if (update_changed && !TEST_BIT (changed, n))
		{
		  SET_BIT (changed, n);
		  changed_count++;
		}
	    }
	  bitmap_clear (tmp);
	  /* Edge merging may have created a zero-weight self edge on
	     N; it is a no-op, so remove it.  */
	  edge.src = n;
	  edge.dest = n;
	  edge.weights = NULL;
	  if (valid_graph_edge (graph, edge))
	    {
	      bitmap weights = get_graph_weights (graph, edge);
	      bitmap_clear_bit (weights, 0);
	      if (bitmap_empty_p (weights))
		erase_graph_self_edge (graph, edge);
	    }
	}
    }
  BITMAP_FREE (tmp);
}
1235
1236
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  Consumers pop from the end to walk the graph in
     forward topological order.  */
  VEC(unsigned,heap) *topo_order;
};
1247
1248
1249 /* Initialize and return a topological info structure. */
1250
1251 static struct topo_info *
1252 init_topo_info (void)
1253 {
1254 size_t size = VEC_length (varinfo_t, varmap);
1255 struct topo_info *ti = xmalloc (sizeof (struct topo_info));
1256 ti->visited = sbitmap_alloc (size);
1257 sbitmap_zero (ti->visited);
1258 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1259 return ti;
1260 }
1261
1262
1263 /* Free the topological sort info pointed to by TI. */
1264
1265 static void
1266 free_topo_info (struct topo_info *ti)
1267 {
1268 sbitmap_free (ti->visited);
1269 VEC_free (unsigned, heap, ti->topo_order);
1270 free (ti);
1271 }
1272
1273 /* Visit the graph in topological order, and store the order in the
1274 topo_info structure. */
1275
1276 static void
1277 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1278 unsigned int n)
1279 {
1280 VEC(constraint_edge_t,heap) *succs = graph->succs[n];
1281 constraint_edge_t c;
1282 int i;
1283 SET_BIT (ti->visited, n);
1284 for (i = 0; VEC_iterate (constraint_edge_t, succs, i, c); i++)
1285 {
1286 if (!TEST_BIT (ti->visited, c->dest))
1287 topo_visit (graph, ti, c->dest);
1288 }
1289 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1290 }
1291
1292 /* Return true if variable N + OFFSET is a legal field of N. */
1293
1294 static bool
1295 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1296 {
1297 varinfo_t ninfo = get_varinfo (n);
1298
1299 /* For things we've globbed to single variables, any offset into the
1300 variable acts like the entire variable, so that it becomes offset
1301 0. */
1302 if (ninfo->is_special_var
1303 || ninfo->is_artificial_var
1304 || ninfo->is_unknown_size_var)
1305 {
1306 *offset = 0;
1307 return true;
1308 }
1309 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1310 }
1311
/* Process a constraint C that represents *x = &y, using DELTA as the
   current solution for x.  */

static void
do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
		  constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of Delta (Sol(x)), add x to Sol(j)  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT offset = c->lhs.offset;
      if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
	{
	  /* *x != NULL && *x != ANYTHING*/
	  varinfo_t v;
	  unsigned int t;
	  bitmap sol;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;

	  /* Resolve the offset to the field variable it lands in, and
	     operate on that field's representative node.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  t = v->node;
	  sol = get_varinfo (t)->solution;
	  if (!bitmap_bit_p (sol, rhs))
	    {
	      bitmap_set_bit (sol, rhs);
	      /* T's solution grew; queue it for reprocessing.  */
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      else if (dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");

    }
}
1352
/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = get_varinfo (c->lhs.var)->node;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
	  unsigned int t;

	  /* Resolve the offset to the field it lands in and use that
	     field's representative node.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  t = v->node;
	  /* The new copy edge also lets future growth of Sol(t) flow
	     to LHS; fold in its current solution now.  */
	  if (int_add_graph_edge (graph, lhs, t, 0))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
      else if (dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");

    }

  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
1398
/* Process a constraint C that represents *x = y, using DELTA as the
   starting solution for x.  */

static void
do_ds_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  unsigned int rhs = get_varinfo (c->rhs.var)->node;
  unsigned HOST_WIDE_INT roff = c->rhs.offset;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo(j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  t = v->node;
	  if (int_add_graph_edge (graph, t, rhs, roff))
	    {
	      bitmap tmp = get_varinfo (t)->solution;
	      if (set_union_with_increment (tmp, sol, roff))
		{
		  get_varinfo (t)->solution = tmp;
		  /* If T is RHS itself, RHS's solution just changed;
		     reload SOL so later members see the update.  */
		  if (t == rhs)
		    {
		      sol = get_varinfo (rhs)->solution;
		    }
		  /* T's solution grew; queue it for reprocessing.  */
		  if (!TEST_BIT (changed, t))
		    {
		      SET_BIT (changed, t);
		      changed_count++;
		    }
		}
	    }
	}
      else if (dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
    }
}
1445
1446 /* Handle a non-simple (simple meaning requires no iteration), non-copy
1447 constraint (IE *x = &y, x = *y, and *x = y). */
1448
1449 static void
1450 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1451 {
1452 if (c->lhs.type == DEREF)
1453 {
1454 if (c->rhs.type == ADDRESSOF)
1455 {
1456 /* *x = &y */
1457 do_da_constraint (graph, c, delta);
1458 }
1459 else
1460 {
1461 /* *x = y */
1462 do_ds_constraint (graph, c, delta);
1463 }
1464 }
1465 else
1466 {
1467 /* x = *y */
1468 if (!(get_varinfo (c->lhs.var)->is_special_var))
1469 do_sd_constraint (graph, c, delta);
1470 }
1471 }
1472
1473 /* Initialize and return a new SCC info structure. */
1474
1475 static struct scc_info *
1476 init_scc_info (void)
1477 {
1478 struct scc_info *si = xmalloc (sizeof (struct scc_info));
1479 size_t size = VEC_length (varinfo_t, varmap);
1480
1481 si->current_index = 0;
1482 si->visited = sbitmap_alloc (size);
1483 sbitmap_zero (si->visited);
1484 si->in_component = sbitmap_alloc (size);
1485 sbitmap_ones (si->in_component);
1486 si->visited_index = xcalloc (sizeof (unsigned int), size + 1);
1487 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1488 si->unification_queue = VEC_alloc (unsigned, heap, 1);
1489 return si;
1490 }
1491
1492 /* Free an SCC info structure pointed to by SI */
1493
1494 static void
1495 free_scc_info (struct scc_info *si)
1496 {
1497 sbitmap_free (si->visited);
1498 sbitmap_free (si->in_component);
1499 free (si->visited_index);
1500 VEC_free (unsigned, heap, si->scc_stack);
1501 VEC_free (unsigned, heap, si->unification_queue);
1502 free(si);
1503 }
1504
1505
1506 /* Find cycles in GRAPH that occur, using strongly connected components, and
1507 collapse the cycles into a single representative node. if UPDATE_CHANGED
1508 is true, then update the changed sbitmap to note those nodes whose
1509 solutions have changed as a result of collapsing. */
1510
1511 static void
1512 find_and_collapse_graph_cycles (constraint_graph_t graph, bool update_changed)
1513 {
1514 unsigned int i;
1515 unsigned int size = VEC_length (varinfo_t, varmap);
1516 struct scc_info *si = init_scc_info ();
1517
1518 for (i = 0; i != size; ++i)
1519 if (!TEST_BIT (si->visited, i) && get_varinfo (i)->node == i)
1520 scc_visit (graph, si, i);
1521 process_unification_queue (graph, si, update_changed);
1522 free_scc_info (si);
1523 }
1524
1525 /* Compute a topological ordering for GRAPH, and store the result in the
1526 topo_info structure TI. */
1527
1528 static void
1529 compute_topo_order (constraint_graph_t graph,
1530 struct topo_info *ti)
1531 {
1532 unsigned int i;
1533 unsigned int size = VEC_length (varinfo_t, varmap);
1534
1535 for (i = 0; i != size; ++i)
1536 if (!TEST_BIT (ti->visited, i) && get_varinfo (i)->node == i)
1537 topo_visit (graph, ti, i);
1538 }
1539
/* Return true if bitmap B has some bit other than bit 0 set; return
   false if B is empty or only bit 0 is set.  */

static bool
bitmap_other_than_zero_bit_set (bitmap b)
{
  unsigned int i;
  bitmap_iterator bi;

  if (bitmap_empty_p (b))
    return false;
  /* Iterate starting at bit 1; reaching the loop body at all means a
     nonzero bit is set.  */
  EXECUTE_IF_SET_IN_BITMAP (b, 1, i, bi)
    return true;
  return false;
}
1554
1555 /* Perform offline variable substitution.
1556
1557 This is a linear time way of identifying variables that must have
1558 equivalent points-to sets, including those caused by static cycles,
1559 and single entry subgraphs, in the constraint graph.
1560
1561 The technique is described in "Off-line variable substitution for
1562 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1563 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56. */
1564
static void
perform_var_substitution (constraint_graph_t graph)
{
  struct topo_info *ti = init_topo_info ();

  /* Compute the topological ordering of the graph, then visit each
     node in topological order.  */
  compute_topo_order (graph, ti);

  while (VEC_length (unsigned, ti->topo_order) != 0)
    {
      unsigned int i = VEC_pop (unsigned, ti->topo_order);
      unsigned int pred;
      varinfo_t vi = get_varinfo (i);
      bool okay_to_elim = false;
      /* ROOT starts as an impossible node id (one past the end of
	 varmap) until the first eligible predecessor sets it.  */
      unsigned int root = VEC_length (varinfo_t, varmap);
      VEC(constraint_edge_t,heap) *predvec = graph->preds[i];
      constraint_edge_t ce;
      bitmap tmp;

      /* We can't eliminate things whose address is taken, or which is
	 the target of a dereference.  */
      if (vi->address_taken || vi->indirect_target)
	continue;

      /* See if all predecessors of I are ripe for elimination */
      for (pred = 0; VEC_iterate (constraint_edge_t, predvec, pred, ce); pred++)
	{
	  bitmap weight;
	  unsigned int w;
	  weight = get_graph_weights (graph, *ce);

	  /* We can't eliminate variables that have nonzero weighted
	     edges between them.  */
	  if (bitmap_other_than_zero_bit_set (weight))
	    {
	      okay_to_elim = false;
	      break;
	    }
	  w = get_varinfo (ce->dest)->node;

	  /* We can't eliminate the node if one of the predecessors is
	     part of a different strongly connected component.  */
	  if (!okay_to_elim)
	    {
	      root = w;
	      okay_to_elim = true;
	    }
	  else if (w != root)
	    {
	      okay_to_elim = false;
	      break;
	    }

	  /* Theorem 4 in Rountev and Chandra: If i is a direct node,
	     then Solution(i) is a subset of Solution (w), where w is a
	     predecessor in the graph.
	     Corollary: If all predecessors of i have the same
	     points-to set, then i has that same points-to set as
	     those predecessors.  */
	  tmp = BITMAP_ALLOC (NULL);
	  /* TMP = Solution(i) - Solution(w); anything left means I's
	     solution is not a subset of W's, so I cannot be folded
	     into W.  */
	  bitmap_and_compl (tmp, get_varinfo (i)->solution,
			    get_varinfo (w)->solution);
	  if (!bitmap_empty_p (tmp))
	    {
	      okay_to_elim = false;
	      BITMAP_FREE (tmp);
	      break;
	    }
	  BITMAP_FREE (tmp);
	}

      /* See if the root is different than the original node.
	 If so, we've found an equivalence.  */
      if (root != get_varinfo (i)->node && okay_to_elim)
	{
	  /* Found an equivalence */
	  get_varinfo (i)->node = root;
	  collapse_nodes (graph, root, i);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Collapsing %s into %s\n",
		     get_varinfo (i)->name,
		     get_varinfo (root)->name);
	  stats.collapsed_vars++;
	}
    }

  free_topo_info (ti);
}
1654
1655
1656 /* Solve the constraint graph GRAPH using our worklist solver.
1657 This is based on the PW* family of solvers from the "Efficient Field
1658 Sensitive Pointer Analysis for C" paper.
1659 It works by iterating over all the graph nodes, processing the complex
1660 constraints and propagating the copy constraints, until everything stops
1661 changed. This corresponds to steps 6-8 in the solving list given above. */
1662
static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;

  /* Start with every node marked changed so each one is processed at
     least once.  */
  changed_count = size;
  changed = sbitmap_alloc (size);
  sbitmap_ones (changed);

  /* The already collapsed/unreachable nodes will never change, so we
     need to account for them in changed_count.  */
  for (i = 0; i < size; i++)
    if (get_varinfo (i)->node != i)
      changed_count--;

  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      if (edge_added)
	{
	  /* We already did cycle elimination once, when we did
	     variable substitution, so we don't need it again for the
	     first iteration.  */
	  if (stats.iterations > 1)
	    find_and_collapse_graph_cycles (graph, true);

	  edge_added = false;
	}

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{
	  i = VEC_pop (unsigned, ti->topo_order);
	  /* Only representatives remain in the topological order.  */
	  gcc_assert (get_varinfo (i)->node == i);

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      constraint_edge_t e;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = get_varinfo (i)->complex;
	      VEC(constraint_edge_t,heap) *succs;

	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Process the complex constraints */
	      solution = get_varinfo (i)->solution;
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
		do_complex_constraint (graph, c, solution);

	      /* Propagate solution to all successors.  */
	      succs = graph->succs[i];
	      for (j = 0; VEC_iterate (constraint_edge_t, succs, j, e); j++)
		{
		  bitmap tmp = get_varinfo (e->dest)->solution;
		  bool flag = false;
		  unsigned int k;
		  bitmap weights = e->weights;
		  bitmap_iterator bi;

		  gcc_assert (!bitmap_empty_p (weights));
		  /* An edge weight of K propagates Sol(i) into
		     Sol(dest) with each member's offset bumped by
		     K.  */
		  EXECUTE_IF_SET_IN_BITMAP (weights, 0, k, bi)
		    flag |= set_union_with_increment (tmp, solution, k);

		  if (flag)
		    {
		      get_varinfo (e->dest)->solution = tmp;
		      if (!TEST_BIT (changed, e->dest))
			{
			  SET_BIT (changed, e->dest);
			  changed_count++;
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  sbitmap_free (changed);
}
1756
1757
1758 /* CONSTRAINT AND VARIABLE GENERATION FUNCTIONS */
1759
/* Map from trees to variable ids.  */
static htab_t id_for_tree;

/* Hashtable entry type for ID_FOR_TREE: a (tree, variable id) pair.  */
typedef struct tree_id
{
  /* The tree used as the hash key.  */
  tree t;
  /* The points-to variable id assigned to T.  */
  unsigned int id;
} *tree_id_t;
1768
1769 /* Hash a tree id structure. */
1770
1771 static hashval_t
1772 tree_id_hash (const void *p)
1773 {
1774 const tree_id_t ta = (tree_id_t) p;
1775 return htab_hash_pointer (ta->t);
1776 }
1777
1778 /* Return true if the tree in P1 and the tree in P2 are the same. */
1779
1780 static int
1781 tree_id_eq (const void *p1, const void *p2)
1782 {
1783 const tree_id_t ta1 = (tree_id_t) p1;
1784 const tree_id_t ta2 = (tree_id_t) p2;
1785 return ta1->t == ta2->t;
1786 }
1787
1788 /* Insert ID as the variable id for tree T in the hashtable. */
1789
1790 static void
1791 insert_id_for_tree (tree t, int id)
1792 {
1793 void **slot;
1794 struct tree_id finder;
1795 tree_id_t new_pair;
1796
1797 finder.t = t;
1798 slot = htab_find_slot (id_for_tree, &finder, INSERT);
1799 gcc_assert (*slot == NULL);
1800 new_pair = xmalloc (sizeof (struct tree_id));
1801 new_pair->t = t;
1802 new_pair->id = id;
1803 *slot = (void *)new_pair;
1804 }
1805
1806 /* Find the variable id for tree T in ID_FOR_TREE. If T does not
1807 exist in the hash table, return false, otherwise, return true and
1808 set *ID to the id we found. */
1809
1810 static bool
1811 lookup_id_for_tree (tree t, unsigned int *id)
1812 {
1813 tree_id_t pair;
1814 struct tree_id finder;
1815
1816 finder.t = t;
1817 pair = htab_find (id_for_tree, &finder);
1818 if (pair == NULL)
1819 return false;
1820 *id = pair->id;
1821 return true;
1822 }
1823
/* Return a printable name for DECL: its own name when it has one,
   "<base>_<version>" for SSA names, "D.<uid>" for anonymous decls,
   or the literal "NULL" when no name can be constructed.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = get_name (decl);
  char *temp;
  int num_printed = 0;

  if (res != NULL)
    return res;

  /* Fallback result if neither asprintf below runs or succeeds.  */
  res = "NULL";
  if (TREE_CODE (decl) == SSA_NAME)
    {
      num_printed = asprintf (&temp, "%s_%u",
			      alias_get_name (SSA_NAME_VAR (decl)),
			      SSA_NAME_VERSION (decl));
    }
  else if (DECL_P (decl))
    {
      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
    }
  if (num_printed > 0)
    {
      /* Copy into GC-managed storage so the name outlives the
	 malloc'd asprintf buffer.  */
      res = ggc_strdup (temp);
      free (temp);
    }
  return res;
}
1854
1855 /* Find the variable id for tree T in the hashtable.
1856 If T doesn't exist in the hash table, create an entry for it. */
1857
1858 static unsigned int
1859 get_id_for_tree (tree t)
1860 {
1861 tree_id_t pair;
1862 struct tree_id finder;
1863
1864 finder.t = t;
1865 pair = htab_find (id_for_tree, &finder);
1866 if (pair == NULL)
1867 return create_variable_info_for (t, alias_get_name (t));
1868
1869 return pair->id;
1870 }
1871
/* Get a constraint expression from an SSA_VAR_P node T.  */

static struct constraint_expr
get_constraint_exp_from_ssa_var (tree t)
{
  struct constraint_expr cexpr;

  gcc_assert (SSA_VAR_P (t) || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  Only the default definition (the parameter's value on
     function entry) is redirected this way.  */
  if (TREE_CODE (t) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
      && default_def (SSA_NAME_VAR (t)) == t)
    return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));

  cexpr.type = SCALAR;

  cexpr.var = get_id_for_tree (t);
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  cexpr.offset = 0;
  return cexpr;
}
1902
/* Process a completed constraint T, and add it to the constraint
   list.  Degenerate forms are dropped or canonicalized, and
   double-dereference constraints are split, before being recorded.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

  /* ANYTHING == ANYTHING is pointless.  */
  if (lhs.var == anything_id && rhs.var == anything_id)
    return;

  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
    {
      /* Swap the sides and re-process in the canonical order.  */
      rhs = t->lhs;
      t->lhs = t->rhs;
      t->rhs = rhs;
      process_constraint (t);
    }
  /* This can happen in our IR with things like n->a = *p */
  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree pointedtotype = TREE_TYPE (pointertype);
      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);

      /* If this is an aggregate of known size, we should have passed
	 this off to do_structure_copy, and it should have broken it
	 up.  */
      gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
		  || get_varinfo (rhs.var)->is_unknown_size_var);

      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF)
    {
      varinfo_t vi;
      gcc_assert (rhs.offset == 0);

      /* Taking the address of a variable marks it, and every field in
	 its chain, address-taken.  */
      for (vi = get_varinfo (rhs.var); vi != NULL; vi = vi->next)
	vi->address_taken = true;

      VEC_safe_push (constraint_t, heap, constraints, t);
    }
  else
    {
      /* x = *y makes x the target of an indirect load.  */
      if (lhs.type != DEREF && rhs.type == DEREF)
	get_varinfo (lhs.var)->indirect_target = true;
      VEC_safe_push (constraint_t, heap, constraints, t);
    }
}
1963
1964
1965 /* Return the position, in bits, of FIELD_DECL from the beginning of its
1966 structure. */
1967
1968 static unsigned HOST_WIDE_INT
1969 bitpos_of_field (const tree fdecl)
1970 {
1971
1972 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
1973 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
1974 return -1;
1975
1976 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
1977 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
1978 }
1979
1980
1981 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
1982 overlaps with a field at [FIELDPOS, FIELDSIZE] */
1983
1984 static bool
1985 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
1986 const unsigned HOST_WIDE_INT fieldsize,
1987 const unsigned HOST_WIDE_INT accesspos,
1988 const unsigned HOST_WIDE_INT accesssize)
1989 {
1990 if (fieldpos == accesspos && fieldsize == accesssize)
1991 return true;
1992 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
1993 return true;
1994 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
1995 return true;
1996
1997 return false;
1998 }
1999
/* Given a COMPONENT_REF T, return the constraint_expr for it.  */

static struct constraint_expr
get_constraint_for_component_ref (tree t)
{
  struct constraint_expr result;
  HOST_WIDE_INT bitsize;
  HOST_WIDE_INT bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp;
  int volatilep;
  tree forzero;

  result.offset = 0;
  result.type = SCALAR;
  result.var = 0;

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      /* The base is a literal zero; treat the whole reference as the
	 INTEGER special variable.  */
      result.offset = 0;
      result.var = integer_id;
      result.type = SCALAR;
      return result;
    }

  t = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
			   &unsignedp, &volatilep, false);
  result = get_constraint_for (t);

  /* This can also happen due to weird offsetof type macros.  */
  if (TREE_CODE (t) != ADDR_EXPR && result.type == ADDRESSOF)
    result.type = SCALAR;

  /* If we know where this goes, then yay.  Otherwise, booo.  */

  if (offset == NULL && bitsize != -1)
    {
      result.offset = bitpos;
    }
  else
    {
      /* Variable offset or unknown size: punt to ANYTHING.  */
      result.var = anything_id;
      result.offset = 0;
    }

  if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (result.offset < get_varinfo (result.var)->fullsize)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  varinfo_t curr;
	  for (curr = get_varinfo (result.var); curr; curr = curr->next)
	    {
	      if (offset_overlaps_with_access (curr->offset, curr->size,
					       result.offset, bitsize))
		{
		  result.var = curr->id;
		  break;

		}
	    }
	  /* assert that we found *some* field there.  The user couldn't be
	     accessing *only* padding.  */

	  gcc_assert (curr);
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");

      result.offset = 0;
    }

  return result;
}
2090
2091
2092 /* Dereference the constraint expression CONS, and return the result.
2093 DEREF (ADDRESSOF) = SCALAR
2094 DEREF (SCALAR) = DEREF
2095 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2096 This is needed so that we can handle dereferencing DEREF constraints. */
2097
2098 static struct constraint_expr
2099 do_deref (struct constraint_expr cons)
2100 {
2101 if (cons.type == SCALAR)
2102 {
2103 cons.type = DEREF;
2104 return cons;
2105 }
2106 else if (cons.type == ADDRESSOF)
2107 {
2108 cons.type = SCALAR;
2109 return cons;
2110 }
2111 else if (cons.type == DEREF)
2112 {
2113 tree tmpvar = create_tmp_var_raw (ptr_type_node, "derefmp");
2114 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2115 process_constraint (new_constraint (tmplhs, cons));
2116 cons.var = tmplhs.var;
2117 return cons;
2118 }
2119 gcc_unreachable ();
2120 }
2121
2122
/* Given a tree T, return the constraint expression for it.  */

static struct constraint_expr
get_constraint_for (tree t)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.  */
  if (TREE_CODE (t) == INTEGER_CST
      && !POINTER_TYPE_P (TREE_TYPE (t)))
    {
      temp.var = integer_id;
      temp.type = SCALAR;
      temp.offset = 0;
      return temp;
    }
  else if (TREE_CODE (t) == INTEGER_CST
	   && integer_zerop (t))
    {
      /* Only pointer-typed zero constants reach here (non-pointer
	 INTEGER_CSTs were handled above): a null pointer points to
	 NOTHING.  */
      temp.var = nothing_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      return temp;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    {
	      /* &*P collapses back to P (SCALAR); otherwise taking
		 the address turns the operand's constraint into an
		 ADDRESSOF constraint.  */
	      temp = get_constraint_for (TREE_OPERAND (t, 0));
	      if (temp.type == DEREF)
		temp.type = SCALAR;
	      else
		temp.type = ADDRESSOF;
	      return temp;
	    }
	    break;
	  case CALL_EXPR:

	    /* XXX: In interprocedural mode, if we didn't have the
	       body, we would need to do *each pointer argument =
	       &ANYTHING added.  */
	    if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
	      {
		/* Allocation-like calls return the address of a
		   fresh artificial heap variable.  */
		varinfo_t vi;
		tree heapvar;

		heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
		DECL_EXTERNAL (heapvar) = 1;
		add_referenced_tmp_var (heapvar);
		temp.var = create_variable_info_for (heapvar,
						     alias_get_name (heapvar));

		vi = get_varinfo (temp.var);
		vi->is_artificial_var = 1;
		vi->is_heap_var = 1;
		temp.type = ADDRESSOF;
		temp.offset = 0;
		return temp;
	      }
	    /* FALLTHRU */
	  default:
	    {
	      /* Any other expression conservatively points to
		 anything.  */
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      return temp;
	    }
	  }
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case INDIRECT_REF:
	    {
	      /* *P: compute P's constraint, then dereference it.  */
	      temp = get_constraint_for (TREE_OPERAND (t, 0));
	      temp = do_deref (temp);
	      return temp;
	    }
	  case ARRAY_REF:
	  case COMPONENT_REF:
	    temp = get_constraint_for_component_ref (t);
	    return temp;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      return temp;
	    }
	  }
      }
    case tcc_unary:
      {
	switch (TREE_CODE (t))
	  {
	  case NOP_EXPR:
	  case CONVERT_EXPR:
	  case NON_LVALUE_EXPR:
	    {
	      tree op = TREE_OPERAND (t, 0);

	      /* Cast from non-pointer to pointers are bad news for us.
		 Anything else, we see through */
	      if (!(POINTER_TYPE_P (TREE_TYPE (t))
		    && ! POINTER_TYPE_P (TREE_TYPE (op))))
		return get_constraint_for (op);

	      /* FALLTHRU  */
	    }
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      return temp;
	    }
	  }
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case PHI_NODE:
	    return get_constraint_for (PHI_RESULT (t));
	  case SSA_NAME:
	    return get_constraint_exp_from_ssa_var (t);
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      return temp;
	    }
	  }
      }
    case tcc_declaration:
      return get_constraint_exp_from_ssa_var (t);
    default:
      {
	/* Unknown tree classes conservatively point to anything.  */
	temp.type = ADDRESSOF;
	temp.var = anything_id;
	temp.offset = 0;
	return temp;
      }
    }
}
2282
2283
2284 /* Handle the structure copy case where we have a simple structure copy
2285 between LHS and RHS that is of SIZE (in bits)
2286
2287 For each field of the lhs variable (lhsfield)
2288 For each field of the rhs variable at lhsfield.offset (rhsfield)
2289 add the constraint lhsfield = rhsfield
2290 */
2291
2292 static void
2293 do_simple_structure_copy (const struct constraint_expr lhs,
2294 const struct constraint_expr rhs,
2295 const unsigned HOST_WIDE_INT size)
2296 {
2297 varinfo_t p = get_varinfo (lhs.var);
2298 unsigned HOST_WIDE_INT pstart, last;
2299 pstart = p->offset;
2300 last = p->offset + size;
2301 for (; p && p->offset < last; p = p->next)
2302 {
2303 varinfo_t q;
2304 struct constraint_expr templhs = lhs;
2305 struct constraint_expr temprhs = rhs;
2306 unsigned HOST_WIDE_INT fieldoffset;
2307
2308 templhs.var = p->id;
2309 q = get_varinfo (temprhs.var);
2310 fieldoffset = p->offset - pstart;
2311 q = first_vi_for_offset (q, q->offset + fieldoffset);
2312 temprhs.var = q->id;
2313 process_constraint (new_constraint (templhs, temprhs));
2314 }
2315 }
2316
2317
2318 /* Handle the structure copy case where we have a structure copy between a
2319 aggregate on the LHS and a dereference of a pointer on the RHS
2320 that is of SIZE (in bits)
2321
2322 For each field of the lhs variable (lhsfield)
2323 rhs.offset = lhsfield->offset
2324 add the constraint lhsfield = rhs
2325 */
2326
2327 static void
2328 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2329 const struct constraint_expr rhs,
2330 const unsigned HOST_WIDE_INT size)
2331 {
2332 varinfo_t p = get_varinfo (lhs.var);
2333 unsigned HOST_WIDE_INT pstart,last;
2334 pstart = p->offset;
2335 last = p->offset + size;
2336
2337 for (; p && p->offset < last; p = p->next)
2338 {
2339 varinfo_t q;
2340 struct constraint_expr templhs = lhs;
2341 struct constraint_expr temprhs = rhs;
2342 unsigned HOST_WIDE_INT fieldoffset;
2343
2344
2345 if (templhs.type == SCALAR)
2346 templhs.var = p->id;
2347 else
2348 templhs.offset = p->offset;
2349
2350 q = get_varinfo (temprhs.var);
2351 fieldoffset = p->offset - pstart;
2352 temprhs.offset += fieldoffset;
2353 process_constraint (new_constraint (templhs, temprhs));
2354 }
2355 }
2356
2357 /* Handle the structure copy case where we have a structure copy
2358 between a aggregate on the RHS and a dereference of a pointer on
2359 the LHS that is of SIZE (in bits)
2360
2361 For each field of the rhs variable (rhsfield)
2362 lhs.offset = rhsfield->offset
2363 add the constraint lhs = rhsfield
2364 */
2365
2366 static void
2367 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2368 const struct constraint_expr rhs,
2369 const unsigned HOST_WIDE_INT size)
2370 {
2371 varinfo_t p = get_varinfo (rhs.var);
2372 unsigned HOST_WIDE_INT pstart,last;
2373 pstart = p->offset;
2374 last = p->offset + size;
2375
2376 for (; p && p->offset < last; p = p->next)
2377 {
2378 varinfo_t q;
2379 struct constraint_expr templhs = lhs;
2380 struct constraint_expr temprhs = rhs;
2381 unsigned HOST_WIDE_INT fieldoffset;
2382
2383
2384 if (temprhs.type == SCALAR)
2385 temprhs.var = p->id;
2386 else
2387 temprhs.offset = p->offset;
2388
2389 q = get_varinfo (templhs.var);
2390 fieldoffset = p->offset - pstart;
2391 templhs.offset += fieldoffset;
2392 process_constraint (new_constraint (templhs, temprhs));
2393 }
2394 }
2395
2396
2397 /* Handle aggregate copies by expanding into copies of the respective
2398 fields of the structures. */
2399
2400 static void
2401 do_structure_copy (tree lhsop, tree rhsop)
2402 {
2403 struct constraint_expr lhs, rhs, tmp;
2404 varinfo_t p;
2405 unsigned HOST_WIDE_INT lhssize;
2406 unsigned HOST_WIDE_INT rhssize;
2407
2408 lhs = get_constraint_for (lhsop);
2409 rhs = get_constraint_for (rhsop);
2410
2411 /* If we have special var = x, swap it around. */
2412 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2413 {
2414 tmp = lhs;
2415 lhs = rhs;
2416 rhs = tmp;
2417 }
2418
2419 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2420 possible it's something we could handle. However, most cases falling
2421 into this are dealing with transparent unions, which are slightly
2422 weird. */
2423 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2424 {
2425 rhs.type = ADDRESSOF;
2426 rhs.var = anything_id;
2427 }
2428
2429 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2430 that special var. */
2431 if (rhs.var <= integer_id)
2432 {
2433 for (p = get_varinfo (lhs.var); p; p = p->next)
2434 {
2435 struct constraint_expr templhs = lhs;
2436 struct constraint_expr temprhs = rhs;
2437 if (templhs.type == SCALAR )
2438 templhs.var = p->id;
2439 else
2440 templhs.offset += p->offset;
2441 process_constraint (new_constraint (templhs, temprhs));
2442 }
2443 }
2444 else
2445 {
2446 tree rhstype = TREE_TYPE (rhsop);
2447 tree lhstype = TREE_TYPE (lhsop);
2448 tree rhstypesize = TYPE_SIZE (rhstype);
2449 tree lhstypesize = TYPE_SIZE (lhstype);
2450
2451 /* If we have a variably sized types on the rhs or lhs, and a deref
2452 constraint, add the constraint, lhsconstraint = &ANYTHING.
2453 This is conservatively correct because either the lhs is an unknown
2454 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2455 constraint, and every variable it can point to must be unknown sized
2456 anyway, so we don't need to worry about fields at all. */
2457 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2458 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2459 {
2460 rhs.var = anything_id;
2461 rhs.type = ADDRESSOF;
2462 rhs.offset = 0;
2463 process_constraint (new_constraint (lhs, rhs));
2464 return;
2465 }
2466
2467 /* The size only really matters insofar as we don't set more or less of
2468 the variable. If we hit an unknown size var, the size should be the
2469 whole darn thing. */
2470 if (get_varinfo (rhs.var)->is_unknown_size_var)
2471 rhssize = ~0;
2472 else
2473 rhssize = TREE_INT_CST_LOW (rhstypesize);
2474
2475 if (get_varinfo (lhs.var)->is_unknown_size_var)
2476 lhssize = ~0;
2477 else
2478 lhssize = TREE_INT_CST_LOW (lhstypesize);
2479
2480
2481 if (rhs.type == SCALAR && lhs.type == SCALAR)
2482 do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2483 else if (lhs.type != DEREF && rhs.type == DEREF)
2484 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2485 else if (lhs.type == DEREF && rhs.type != DEREF)
2486 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2487 else
2488 {
2489 tree pointedtotype = lhstype;
2490 tree tmpvar;
2491
2492 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2493 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2494 do_structure_copy (tmpvar, rhsop);
2495 do_structure_copy (lhsop, tmpvar);
2496 }
2497 }
2498 }
2499
2500
2501 /* Return true if REF, a COMPONENT_REF, has an INDIRECT_REF somewhere
2502 in it. */
2503
2504 static inline bool
2505 ref_contains_indirect_ref (tree ref)
2506 {
2507 while (handled_component_p (ref))
2508 {
2509 if (TREE_CODE (ref) == INDIRECT_REF)
2510 return true;
2511 ref = TREE_OPERAND (ref, 0);
2512 }
2513 return false;
2514 }
2515
2516
/* Update related alias information kept in AI.  This is used when
   building name tags, alias sets and deciding grouping heuristics.
   STMT is the statement to process.  This function also updates
   ADDRESSABLE_VARS.  */

static void
update_alias_info (tree stmt, struct alias_info *ai)
{
  bitmap addr_taken;
  use_operand_p use_p;
  ssa_op_iter iter;
  bool stmt_escapes_p = is_escape_site (stmt, ai);
  tree op;

  /* Mark all the variables whose address are taken by the statement.  */
  addr_taken = addresses_taken (stmt);
  if (addr_taken)
    {
      bitmap_ior_into (addressable_vars, addr_taken);

      /* If STMT is an escape point, all the addresses taken by it are
	 call-clobbered.  */
      if (stmt_escapes_p)
	{
	  bitmap_iterator bi;
	  unsigned i;

	  EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
	    mark_call_clobbered (referenced_var (i));
	}
    }

  /* Process each operand use.  If an operand may be aliased, keep
     track of how many times it's being used.  For pointers, determine
     whether they are dereferenced by the statement, or whether their
     value escapes, etc.  */
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
    {
      /* NOTE: this OP intentionally shadows the outer OP declared
	 above; the outer one is only used by the def loops at the
	 bottom of the function.  */
      tree op, var;
      var_ann_t v_ann;
      struct ptr_info_def *pi;
      bool is_store, is_potential_deref;
      unsigned num_uses, num_derefs;

      op = USE_FROM_PTR (use_p);

      /* If STMT is a PHI node, OP may be an ADDR_EXPR.  If so, add it
	 to the set of addressable variables.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  gcc_assert (TREE_CODE (stmt) == PHI_NODE);

	  /* PHI nodes don't have annotations for pinning the set
	     of addresses taken, so we collect them here.

	     FIXME, should we allow PHI nodes to have annotations
	     so that they can be treated like regular statements?
	     Currently, they are treated as second-class
	     statements.  */
	  add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
	  continue;
	}

      /* Ignore constants.  */
      if (TREE_CODE (op) != SSA_NAME)
	continue;

      var = SSA_NAME_VAR (op);
      v_ann = var_ann (var);

      /* If the operand's variable may be aliased, keep track of how
	 many times we've referenced it.  This is used for alias
	 grouping in compute_flow_insensitive_aliasing.  */
      if (may_be_aliased (var))
	NUM_REFERENCES_INC (v_ann);

      /* We are only interested in pointers.  */
      if (!POINTER_TYPE_P (TREE_TYPE (op)))
	continue;

      pi = get_ptr_info (op);

      /* Add OP to AI->PROCESSED_PTRS, if it's not there already.  */
      if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
	{
	  SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
	  VARRAY_PUSH_TREE (ai->processed_ptrs, op);
	}

      /* If STMT is a PHI node, then it will not have pointer
	 dereferences and it will not be an escape point.  */
      if (TREE_CODE (stmt) == PHI_NODE)
	continue;

      /* Determine whether OP is a dereferenced pointer, and if STMT
	 is an escape point, whether OP escapes.  */
      count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);

      /* Handle a corner case involving address expressions of the
	 form '&PTR->FLD'.  The problem with these expressions is that
	 they do not represent a dereference of PTR.  However, if some
	 other transformation propagates them into an INDIRECT_REF
	 expression, we end up with '*(&PTR->FLD)' which is folded
	 into 'PTR->FLD'.

	 So, if the original code had no other dereferences of PTR,
	 the aliaser will not create memory tags for it, and when
	 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
	 memory operations will receive no V_MAY_DEF/VUSE operands.

	 One solution would be to have count_uses_and_derefs consider
	 &PTR->FLD a dereference of PTR.  But that is wrong, since it
	 is not really a dereference but an offset calculation.

	 What we do here is to recognize these special ADDR_EXPR
	 nodes.  Since these expressions are never GIMPLE values (they
	 are not GIMPLE invariants), they can only appear on the RHS
	 of an assignment and their base address is always an
	 INDIRECT_REF expression.  */
      is_potential_deref = false;
      if (TREE_CODE (stmt) == MODIFY_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
	  && !is_gimple_val (TREE_OPERAND (stmt, 1)))
	{
	  /* If the RHS if of the form &PTR->FLD and PTR == OP, then
	     this represents a potential dereference of PTR.  */
	  tree rhs = TREE_OPERAND (stmt, 1);
	  tree base = get_base_address (TREE_OPERAND (rhs, 0));
	  if (TREE_CODE (base) == INDIRECT_REF
	      && TREE_OPERAND (base, 0) == op)
	    is_potential_deref = true;
	}

      if (num_derefs > 0 || is_potential_deref)
	{
	  /* Mark OP as dereferenced.  In a subsequent pass,
	     dereferenced pointers that point to a set of
	     variables will be assigned a name tag to alias
	     all the variables OP points to.  */
	  pi->is_dereferenced = 1;

	  /* Keep track of how many time we've dereferenced each
	     pointer.  */
	  NUM_REFERENCES_INC (v_ann);

	  /* If this is a store operation, mark OP as being
	     dereferenced to store, otherwise mark it as being
	     dereferenced to load.  */
	  if (is_store)
	    bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
	  else
	    bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
	}

      if (stmt_escapes_p && num_derefs < num_uses)
	{
	  /* If STMT is an escape point and STMT contains at
	     least one direct use of OP, then the value of OP
	     escapes and so the pointed-to variables need to
	     be marked call-clobbered.  */
	  pi->value_escapes_p = 1;

	  /* If the statement makes a function call, assume
	     that pointer OP will be dereferenced in a store
	     operation inside the called function.  */
	  if (get_call_expr_in (stmt))
	    {
	      bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
	      pi->is_dereferenced = 1;
	    }
	}
    }

  /* PHI nodes have no definitions to record below.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  /* Update reference counter for definitions to any
     potentially aliased variable.  This is used in the alias
     grouping heuristics.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
    {
      tree var = SSA_NAME_VAR (op);
      var_ann_t ann = var_ann (var);
      bitmap_set_bit (ai->written_vars, DECL_UID (var));
      if (may_be_aliased (var))
	NUM_REFERENCES_INC (ann);

    }

  /* Mark variables in V_MAY_DEF operands as being written to.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    {
      tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
      bitmap_set_bit (ai->written_vars, DECL_UID (var));
    }
}
2713
2714
2715 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
2716 Expressions of the type PTR + CST can be handled in two ways:
2717
2718 1- If the constraint for PTR is ADDRESSOF for a non-structure
2719 variable, then we can use it directly because adding or
2720 subtracting a constant may not alter the original ADDRESSOF
2721 constraint (i.e., pointer arithmetic may not legally go outside
2722 an object's boundaries).
2723
2724 2- If the constraint for PTR is ADDRESSOF for a structure variable,
2725 then if CST is a compile-time constant that can be used as an
2726 offset, we can determine which sub-variable will be pointed-to
2727 by the expression.
2728
2729 Return true if the expression is handled. For any other kind of
2730 expression, return false so that each operand can be added as a
2731 separate constraint by the caller. */
2732
2733 static bool
2734 handle_ptr_arith (struct constraint_expr lhs, tree expr)
2735 {
2736 tree op0, op1;
2737 struct constraint_expr base, offset;
2738
2739 if (TREE_CODE (expr) != PLUS_EXPR)
2740 return false;
2741
2742 op0 = TREE_OPERAND (expr, 0);
2743 op1 = TREE_OPERAND (expr, 1);
2744
2745 base = get_constraint_for (op0);
2746
2747 offset.var = anyoffset_id;
2748 offset.type = ADDRESSOF;
2749 offset.offset = 0;
2750
2751 process_constraint (new_constraint (lhs, base));
2752 process_constraint (new_constraint (lhs, offset));
2753
2754 return true;
2755 }
2756
2757
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.  */

static void
find_func_aliases (tree t, struct alias_info *ai)
{
  struct constraint_expr lhs, rhs;

  /* Update various related attributes like escaped addresses, pointer
     dereferences for loads and stores.  This is used when creating
     name tags and alias sets.  */
  update_alias_info (t, ai);

  /* Now build constraints expressions.  */
  if (TREE_CODE (t) == PHI_NODE)
    {
      /* Only care about pointers and structures containing
	 pointers.  */
      if (POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (t)))
	  || AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))))
	{
	  int i;

	  /* A PHI is treated as a set of copies: result = arg_i for
	     every incoming argument.  */
	  lhs = get_constraint_for (PHI_RESULT (t));
	  for (i = 0; i < PHI_NUM_ARGS (t); i++)
	    {
	      rhs = get_constraint_for (PHI_ARG_DEF (t, i));
	      process_constraint (new_constraint (lhs, rhs));
	    }
	}
    }
  else if (TREE_CODE (t) == MODIFY_EXPR)
    {
      tree lhsop = TREE_OPERAND (t, 0);
      tree rhsop = TREE_OPERAND (t, 1);
      int i;

      /* Aggregate-to-aggregate assignment expands into per-field
	 copies.  */
      if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
	  && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
	{
	  do_structure_copy (lhsop, rhsop);
	}
      else
	{
	  /* Only care about operations with pointers, structures
	     containing pointers, dereferences, and call expressions.  */
	  if (POINTER_TYPE_P (TREE_TYPE (lhsop))
	      || AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
	      || ref_contains_indirect_ref (lhsop)
	      || TREE_CODE (rhsop) == CALL_EXPR)
	    {
	      lhs = get_constraint_for (lhsop);
	      switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
		{
		  /* RHS that consist of unary operations,
		     exceptional types, or bare decls/constants, get
		     handled directly by get_constraint_for.  */
		case tcc_reference:
		case tcc_declaration:
		case tcc_constant:
		case tcc_exceptional:
		case tcc_expression:
		case tcc_unary:
		  {
		    rhs = get_constraint_for (rhsop);
		    process_constraint (new_constraint (lhs, rhs));

		    /* When taking the address of an aggregate
		       type, from the LHS we can access any field
		       of the RHS.  */
		    if (rhs.type == ADDRESSOF
			&& !(get_varinfo (rhs.var)->is_special_var)
			&& AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhsop))))
		      {
			rhs.var = anyoffset_id;
			rhs.type = ADDRESSOF;
			rhs.offset = 0;
			process_constraint (new_constraint (lhs, rhs));
		      }
		  }
		  break;

		case tcc_binary:
		  {
		    /* For pointer arithmetic of the form
		       PTR + CST, we can simply use PTR's
		       constraint because pointer arithmetic is
		       not allowed to go out of bounds.  */
		    if (handle_ptr_arith (lhs, rhsop))
		      break;
		  }
		  /* FALLTHRU */

		  /* Otherwise, walk each operand.  Notice that we
		     can't use the operand interface because we need
		     to process expressions other than simple operands
		     (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
		default:
		  for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
		    {
		      tree op = TREE_OPERAND (rhsop, i);
		      rhs = get_constraint_for (op);
		      process_constraint (new_constraint (lhs, rhs));
		    }
		}
	    }
	}
    }

  /* After promoting variables and computing aliasing we will
     need to re-scan most statements.  FIXME: Try to minimize the
     number of statements re-scanned.  It's not really necessary to
     re-scan *all* statements.  */
  mark_stmt_modified (t);
}
2875
2876
2877 /* Find the first varinfo in the same variable as START that overlaps with
2878 OFFSET.
2879 Effectively, walk the chain of fields for the variable START to find the
2880 first field that overlaps with OFFSET.
2881 Abort if we can't find one. */
2882
2883 static varinfo_t
2884 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
2885 {
2886 varinfo_t curr = start;
2887 while (curr)
2888 {
2889 /* We may not find a variable in the field list with the actual
2890 offset when when we have glommed a structure to a variable.
2891 In that case, however, offset should still be within the size
2892 of the variable. */
2893 if (offset >= curr->offset && offset < (curr->offset + curr->size))
2894 return curr;
2895 curr = curr->next;
2896 }
2897
2898 gcc_unreachable ();
2899 }
2900
2901
2902 /* Insert the varinfo FIELD into the field list for BASE, ordered by
2903 offset. */
2904
2905 static void
2906 insert_into_field_list (varinfo_t base, varinfo_t field)
2907 {
2908 varinfo_t prev = base;
2909 varinfo_t curr = base->next;
2910
2911 if (curr == NULL)
2912 {
2913 prev->next = field;
2914 field->next = NULL;
2915 }
2916 else
2917 {
2918 while (curr)
2919 {
2920 if (field->offset <= curr->offset)
2921 break;
2922 prev = curr;
2923 curr = curr->next;
2924 }
2925 field->next = prev->next;
2926 prev->next = field;
2927 }
2928 }
2929
2930 /* qsort comparison function for two fieldoff's PA and PB */
2931
2932 static int
2933 fieldoff_compare (const void *pa, const void *pb)
2934 {
2935 const fieldoff_s *foa = (const fieldoff_s *)pa;
2936 const fieldoff_s *fob = (const fieldoff_s *)pb;
2937 HOST_WIDE_INT foasize, fobsize;
2938
2939 if (foa->offset != fob->offset)
2940 return foa->offset - fob->offset;
2941
2942 foasize = TREE_INT_CST_LOW (DECL_SIZE (foa->field));
2943 fobsize = TREE_INT_CST_LOW (DECL_SIZE (fob->field));
2944 return foasize - fobsize;
2945 }
2946
2947 /* Sort a fieldstack according to the field offset and sizes. */
2948 void sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
2949 {
2950 qsort (VEC_address (fieldoff_s, fieldstack),
2951 VEC_length (fieldoff_s, fieldstack),
2952 sizeof (fieldoff_s),
2953 fieldoff_compare);
2954 }
2955
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
   of TYPE onto fieldstack, recording their offsets along the way.
   OFFSET is used to keep track of the offset in this entire structure, rather
   than just the immediately containing structure.  Returns the number
   of fields pushed.
   HAS_UNION is set to true if we find a union type as a field of
   TYPE.  HAS_UNION may be NULL, in which case union detection is
   skipped.  */

int
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
			     HOST_WIDE_INT offset, bool *has_union)
{
  tree field;
  int count = 0;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	int pushed = 0;

	if (has_union
	    && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
		|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
	  *has_union = true;

	/* A field without subvars is pushed as-is; otherwise recurse
	   into it.  Note the recursion happens as a side effect of
	   evaluating the condition below, and PUSHED records how many
	   sub-fields it contributed.  */
	if (!var_can_have_subvars (field))
	  push = true;
	else if (!(pushed = push_fields_onto_fieldstack
		   (TREE_TYPE (field), fieldstack,
		    offset + bitpos_of_field (field), has_union))
		 && DECL_SIZE (field)
		 && !integer_zerop (DECL_SIZE (field)))
	  /* Empty structures may have actual size, like in C++. So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair;

	    /* Record the field and its absolute bit offset within the
	       outermost structure.  */
	    pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	    pair->field = field;
	    pair->offset = offset + bitpos_of_field (field);
	    count++;
	  }
	else
	  count += pushed;
      }

  return count;
}
3009
3010 static void
3011 make_constraint_to_anything (varinfo_t vi)
3012 {
3013 struct constraint_expr lhs, rhs;
3014
3015 lhs.var = vi->id;
3016 lhs.offset = 0;
3017 lhs.type = SCALAR;
3018
3019 rhs.var = anything_id;
3020 rhs.offset =0 ;
3021 rhs.type = ADDRESSOF;
3022 process_constraint (new_constraint (lhs, rhs));
3023 }
3024
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  Returns the index of the main variable in VARMAP.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree decltype = TREE_TYPE (decl);
  bool notokay = false;
  bool hasunion;
  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
  VEC (fieldoff_s,heap) *fieldstack = NULL;


  /* Collect the fields of DECL for field-sensitive analysis, unless
     the type is (or contains) a union, which we cannot decompose.  */
  hasunion = TREE_CODE (decltype) == UNION_TYPE
             || TREE_CODE (decltype) == QUAL_UNION_TYPE;
  if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
    {
      push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
      if (hasunion)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  notokay = true;
	}
    }


  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, index, name, index);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = hasunion;
  /* Variably sized types, arrays and unions get the unknown-size
     treatment: full size and size are both ~0.  */
  if (!TYPE_SIZE (decltype)
      || TREE_CODE (TYPE_SIZE (decltype)) != INTEGER_CST
      || TREE_CODE (decltype) == ARRAY_TYPE
      || TREE_CODE (decltype) == UNION_TYPE
      || TREE_CODE (decltype) == QUAL_UNION_TYPE)
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
      /* NOTE(review): when this path is taken after fields were
	 pushed above, FIELDSTACK appears not to be freed (the
	 field-sensitive block below is skipped for unknown-size
	 vars) — verify for a possible leak.  */
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (TYPE_SIZE (decltype));
      vi->size = vi->fullsize;
    }

  insert_id_for_tree (vi->decl, index);
  VEC_safe_push (varinfo_t, heap, varmap, vi);
  /* Globals may be pointed to by anything; constrain conservatively.  */
  if (is_global)
    make_constraint_to_anything (vi);

  stats.total_vars++;
  if (use_field_sensitive
      && !notokay
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl))
    {
      unsigned int newindex = VEC_length (varinfo_t, varmap);
      fieldoff_s *fo = NULL;
      unsigned int i;
      tree field;

      /* Reject the whole field decomposition if any field has a
	 non-constant or array size, or a negative offset.  */
      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!DECL_SIZE (fo->field)
	      || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
	      || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	sort_fieldstack (fieldstack);

      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      if (fo == NULL || notokay)
	{
	  /* Fall back to treating DECL as a single unknown-size
	     blob.  */
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return index;
	}

      /* The main varinfo takes on the first field; each remaining
	 field gets its own varinfo chained into VI's field list.  */
      field = fo->field;
      vi->size = TREE_INT_CST_LOW (DECL_SIZE (field));
      vi->offset = fo->offset;
      for (i = 1; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  varinfo_t newvi;
	  const char *newname;
	  char *tempname;

	  field = fo->field;
	  newindex = VEC_length (varinfo_t, varmap);
	  /* Name the field variable "BASE.FIELD".  */
	  asprintf (&tempname, "%s.%s", vi->name, alias_get_name (field));
	  newname = ggc_strdup (tempname);
	  free (tempname);
	  newvi = new_var_info (decl, newindex, newname, newindex);
	  newvi->offset = fo->offset;
	  newvi->size = TREE_INT_CST_LOW (DECL_SIZE (field));
	  newvi->fullsize = vi->fullsize;
	  insert_into_field_list (vi, newvi);
	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
	  if (is_global)
	    make_constraint_to_anything (newvi);

	  stats.total_vars++;
	}
      VEC_free (fieldoff_s, heap, fieldstack);
    }
  return index;
}
3153
3154 /* Print out the points-to solution for VAR to FILE. */
3155
3156 void
3157 dump_solution_for_var (FILE *file, unsigned int var)
3158 {
3159 varinfo_t vi = get_varinfo (var);
3160 unsigned int i;
3161 bitmap_iterator bi;
3162
3163 fprintf (file, "%s = { ", vi->name);
3164 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (vi->node)->solution, 0, i, bi)
3165 {
3166 fprintf (file, "%s ", get_varinfo (i)->name);
3167 }
3168 fprintf (file, "}\n");
3169 }
3170
/* Print the points-to solution for VAR to stdout.  Convenience
   wrapper around dump_solution_for_var, intended for use from the
   debugger.  */

void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
3178
3179
3180 /* Create varinfo structures for all of the variables in the
3181 function for intraprocedural mode. */
3182
3183 static void
3184 intra_create_variable_infos (void)
3185 {
3186 tree t;
3187
3188 /* For each incoming argument arg, ARG = &ANYTHING */
3189 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
3190 {
3191 struct constraint_expr lhs;
3192 struct constraint_expr rhs;
3193 varinfo_t p;
3194
3195 lhs.offset = 0;
3196 lhs.type = SCALAR;
3197 lhs.var = create_variable_info_for (t, alias_get_name (t));
3198
3199 rhs.var = anything_id;
3200 rhs.type = ADDRESSOF;
3201 rhs.offset = 0;
3202
3203 for (p = get_varinfo (lhs.var); p; p = p->next)
3204 {
3205 struct constraint_expr temp = lhs;
3206 temp.var = p->id;
3207 process_constraint (new_constraint (temp, rhs));
3208 }
3209 }
3210
3211 }
3212
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM.  Artificial variables other than heap variables are filtered
   out, and aggregate variables are expanded into their structure
   field tags (SFTs) where appropriate.  */

static void
set_uids_in_ptset (bitmap into, bitmap from)
{
  unsigned int i;
  bitmap_iterator bi;
  /* NOTE(review): the handling below relies on ANYOFFSET being seen
     before any real variable during the bitmap walk; bits are visited
     in increasing id order and ANYOFFSET has the lowest id of the
     non-filtered variables -- confirm this invariant holds if the
     special-variable ids are ever reordered.  */
  bool found_anyoffset = false;
  subvar_t sv;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If we find ANYOFFSET in the solution and the solution
	 includes SFTs for some structure, then all the SFTs in that
	 structure will need to be added to the alias set.  */
      if (vi->id == anyoffset_id)
	{
	  found_anyoffset = true;
	  continue;
	}

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
	{
	  /* Variables containing unions may need to be converted to
	     their SFT's, because SFT's can have unions and we cannot.  */
	  for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
	    bitmap_set_bit (into, DECL_UID (sv->var));
	}
      else if (TREE_CODE (vi->decl) == VAR_DECL
	       || TREE_CODE (vi->decl) == PARM_DECL)
	{
	  if (found_anyoffset
	      && var_can_have_subvars (vi->decl)
	      && get_subvars_for_var (vi->decl))
	    {
	      /* If ANYOFFSET is in the solution set and VI->DECL is
		 an aggregate variable with sub-variables, then any of
		 the SFTs inside VI->DECL may have been accessed.  Add
		 all the sub-vars for VI->DECL.  */
	      for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
		bitmap_set_bit (into, DECL_UID (sv->var));
	    }
	  else if (var_can_have_subvars (vi->decl)
		   && get_subvars_for_var (vi->decl))
	    {
	      /* If VI->DECL is an aggregate for which we created
		 SFTs, add the SFT corresponding to VI->OFFSET.  */
	      tree sft = get_subvar_at (vi->decl, vi->offset);
	      bitmap_set_bit (into, DECL_UID (sft));
	    }
	  else
	    {
	      /* Otherwise, just add VI->DECL to the alias set.  */
	      bitmap_set_bit (into, DECL_UID (vi->decl));
	    }
	}
    }
}
3279
3280
3281 static bool have_alias_info = false;
3282
/* Given a pointer variable P, fill in its points-to set (via P's
   SSA_NAME_PTR_INFO), or return false if we can't.  Returns true
   only when a usable points-to set was recorded.  */

bool
find_what_p_points_to (tree p)
{
  unsigned int id = 0;

  /* No answers are possible until compute_points_to_sets has run.  */
  if (!have_alias_info)
    return false;

  if (lookup_id_for_tree (p, &id))
    {
      varinfo_t vi = get_varinfo (id);

      if (vi->is_artificial_var)
	return false;

      /* See if this is a field or a structure.  */
      if (vi->size != vi->fullsize)
	{
	  /* Nothing currently asks about structure fields directly,
	     but when they do, we need code here to hand back the
	     points-to set.  */
	  if (!var_can_have_subvars (vi->decl)
	      || get_subvars_for_var (vi->decl) == NULL)
	    return false;
	}
      else
	{
	  struct ptr_info_def *pi = get_ptr_info (p);
	  unsigned int i;
	  bitmap_iterator bi;

	  /* This variable may have been collapsed, let's get the real
	     variable.  */
	  vi = get_varinfo (vi->node);

	  /* Translate artificial variables into SSA_NAME_PTR_INFO
	     attributes.  */
	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	    {
	      /* Note: this VI intentionally shadows the outer VI for
		 the duration of the loop body.  */
	      varinfo_t vi = get_varinfo (i);

	      if (vi->is_artificial_var)
		{
		  /* FIXME.  READONLY should be handled better so that
		     flow insensitive aliasing can disregard writable
		     aliases.  */
		  if (vi->id == nothing_id)
		    pi->pt_null = 1;
		  else if (vi->id == anything_id)
		    pi->pt_anything = 1;
		  else if (vi->id == readonly_id)
		    pi->pt_anything = 1;
		  else if (vi->id == integer_id)
		    pi->pt_anything = 1;
		  else if (vi->is_heap_var)
		    pi->pt_global_mem = 1;
		}
	    }

	  /* A pointer that may point anywhere gives the caller no
	     useful set; bail out.  */
	  if (pi->pt_anything)
	    return false;

	  if (!pi->pt_vars)
	    pi->pt_vars = BITMAP_GGC_ALLOC ();

	  set_uids_in_ptset (pi->pt_vars, vi->solution);

	  /* The solution may have contained only artificial
	     variables that were filtered out above.  */
	  if (bitmap_empty_p (pi->pt_vars))
	    pi->pt_vars = NULL;

	  return true;
	}
    }

  return false;
}
3362
3363
/* Initialize things necessary to perform PTA.  */

static void
init_alias_vars (void)
{
  /* All points-to solution bitmaps are allocated on this obstack; it
     is released in delete_points_to_sets.  */
  bitmap_obstack_initialize (&ptabitmap_obstack);
}
3371
3372
3373 /* Dump points-to information to OUTFILE. */
3374
3375 void
3376 dump_sa_points_to_info (FILE *outfile)
3377 {
3378 unsigned int i;
3379
3380 fprintf (outfile, "\nPoints-to sets\n\n");
3381
3382 if (dump_flags & TDF_STATS)
3383 {
3384 fprintf (outfile, "Stats:\n");
3385 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
3386 fprintf (outfile, "Statically unified vars: %d\n",
3387 stats.unified_vars_static);
3388 fprintf (outfile, "Collapsed vars: %d\n", stats.collapsed_vars);
3389 fprintf (outfile, "Dynamically unified vars: %d\n",
3390 stats.unified_vars_dynamic);
3391 fprintf (outfile, "Iterations: %d\n", stats.iterations);
3392 }
3393
3394 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
3395 dump_solution_for_var (outfile, i);
3396 }
3397
3398
/* Debug points-to information to stderr.  */

void
debug_sa_points_to_info (void)
{
  /* Convenience wrapper around dump_sa_points_to_info, intended for
     interactive use from the debugger.  */
  dump_sa_points_to_info (stderr);
}
3406
3407
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, INTEGER and ANYOFFSET.  These must be created
   first so that they occupy ids 0 through 4 in varmap; the rest of
   the analysis identifies them by those ids.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
  var_nothing = new_var_info (nothing_tree, 0, "NULL", 0);
  insert_id_for_tree (nothing_tree, 0);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  nothing_id = 0;
  VEC_safe_push (varinfo_t, heap, varmap, var_nothing);

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
  var_anything = new_var_info (anything_tree, 1, "ANYTHING", 1);
  insert_id_for_tree (anything_tree, 1);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;
  anything_id = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  VEC_safe_push (varinfo_t, heap, varmap, var_anything);
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  var_anything->address_taken = true;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
  var_readonly = new_var_info (readonly_tree, 2, "READONLY", 2);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;
  insert_id_for_tree (readonly_tree, 2);
  readonly_id = 2;
  VEC_safe_push (varinfo_t, heap, varmap, var_readonly);

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));

  /* Create the INTEGER variable, used to represent that a variable points
     to an INTEGER.  */
  integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
  var_integer = new_var_info (integer_tree, 3, "INTEGER", 3);
  insert_id_for_tree (integer_tree, 3);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;
  integer_id = 3;
  VEC_safe_push (varinfo_t, heap, varmap, var_integer);

  /* *INTEGER = ANYTHING, because we don't know where a dereference of a random
     integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ANYOFFSET variable, used to represent an arbitrary offset
     inside an object.  This is similar to ANYTHING, but less drastic.
     It means that the pointer can point anywhere inside an object,
     but not outside of it.  */
  anyoffset_tree = create_tmp_var_raw (void_type_node, "ANYOFFSET");
  anyoffset_id = 4;
  var_anyoffset = new_var_info (anyoffset_tree, anyoffset_id, "ANYOFFSET",
				anyoffset_id);
  insert_id_for_tree (anyoffset_tree, anyoffset_id);
  var_anyoffset->is_artificial_var = 1;
  var_anyoffset->size = ~0;
  var_anyoffset->offset = 0;
  var_anyoffset->next = NULL;
  var_anyoffset->fullsize = ~0;
  var_anyoffset->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_anyoffset);

  /* ANYOFFSET points to ANYOFFSET.  */
  lhs.type = SCALAR;
  lhs.var = anyoffset_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anyoffset_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
3536
3537 /* Return true if we actually need to solve the constraint graph in order to
3538 get our points-to sets. This is false when, for example, no addresses are
3539 taken other than special vars, or all points-to sets with members already
3540 contain the anything variable and there are no predecessors for other
3541 sets. */
3542
3543 static bool
3544 need_to_solve (void)
3545 {
3546 int i;
3547 varinfo_t v;
3548 bool found_address_taken = false;
3549 bool found_non_anything = false;
3550
3551 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
3552 {
3553 if (v->is_special_var)
3554 continue;
3555
3556 if (v->address_taken)
3557 found_address_taken = true;
3558
3559 if (v->solution
3560 && !bitmap_empty_p (v->solution)
3561 && !bitmap_bit_p (v->solution, anything_id))
3562 found_non_anything = true;
3563 else if (bitmap_empty_p (v->solution)
3564 && VEC_length (constraint_edge_t, graph->preds[v->id]) != 0)
3565 found_non_anything = true;
3566
3567 if (found_address_taken && found_non_anything)
3568 return true;
3569 }
3570
3571 return false;
3572 }
3573
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  AI is the
   alias info passed through to find_func_aliases.  */

void
compute_points_to_sets (struct alias_info *ai)
{
  basic_block bb;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraint_edge_pool = create_alloc_pool ("Constraint edges",
					    sizeof (struct constraint_edge), 30);

  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  id_for_tree = htab_create (10, tree_id_hash, tree_id_eq, free);
  memset (&stats, 0, sizeof (stats));

  /* The special variables (NULL, ANYTHING, ...) must exist before any
     real variable so they get the fixed ids 0-4.  */
  init_base_vars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
	if (is_gimple_reg (PHI_RESULT (phi)))
	  find_func_aliases (phi, ai);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	find_func_aliases (bsi_stmt (bsi), ai);
    }

  build_constraint_graph ();

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  /* Skip the (expensive) solve when the constraints are trivial.  */
  if (need_to_solve ())
    {
      if (dump_file)
	fprintf (dump_file, "\nCollapsing static cycles and doing variable "
		 "substitution:\n");

      find_and_collapse_graph_cycles (graph, false);
      perform_var_substitution (graph);

      if (dump_file)
	fprintf (dump_file, "\nSolving graph:\n");

      solve_graph (graph);
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* From now on find_what_p_points_to may hand out answers.  */
  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}
3646
3647
3648 /* Delete created points-to sets. */
3649
3650 void
3651 delete_points_to_sets (void)
3652 {
3653 varinfo_t v;
3654 int i;
3655
3656 htab_delete (id_for_tree);
3657 bitmap_obstack_release (&ptabitmap_obstack);
3658 VEC_free (constraint_t, heap, constraints);
3659
3660 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
3661 {
3662 VEC_free (constraint_edge_t, heap, graph->succs[i]);
3663 VEC_free (constraint_edge_t, heap, graph->preds[i]);
3664 VEC_free (constraint_t, heap, v->complex);
3665 }
3666 free (graph->succs);
3667 free (graph->preds);
3668 free (graph);
3669
3670 VEC_free (varinfo_t, heap, varmap);
3671 free_alloc_pool (variable_info_pool);
3672 free_alloc_pool (constraint_pool);
3673 free_alloc_pool (constraint_edge_pool);
3674
3675 have_alias_info = false;
3676 }