Collections.java (UnmodifiableMap.toArray): Imported changes from Classpath.
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "params.h"
52 #include "tree-ssa-structalias.h"
53 #include "cgraph.h"
54 #include "alias.h"
55 #include "pointer-set.h"
56
57 /* The idea behind this analyzer is to generate set constraints from the
58 program, then solve the resulting constraints in order to generate the
59 points-to sets.
60
61 Set constraints are a way of modeling program analysis problems that
62 involve sets. They consist of an inclusion constraint language,
63 describing the variables (each variable is a set) and operations that
64 are involved on the variables, and a set of rules that derive facts
65 from these operations. To solve a system of set constraints, you derive
66 all possible facts under the rules, which gives you the correct sets
67 as a consequence.
68
69 See "Efficient Field-sensitive pointer analysis for C" by "David
70 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
71 http://citeseer.ist.psu.edu/pearce04efficient.html
72
73 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
75 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76
77 There are three types of real constraint expressions, DEREF,
78 ADDRESSOF, and SCALAR. Each constraint expression consists
79 of a constraint type, a variable, and an offset.
80
81 SCALAR is a constraint expression type used to represent x, whether
82 it appears on the LHS or the RHS of a statement.
83 DEREF is a constraint expression type used to represent *x, whether
84 it appears on the LHS or the RHS of a statement.
85 ADDRESSOF is a constraint expression used to represent &x, whether
86 it appears on the LHS or the RHS of a statement.
87
88 Each pointer variable in the program is assigned an integer id, and
89 each field of a structure variable is assigned an integer id as well.
90
91 Structure variables are linked to their list of fields through a "next
92 field" in each variable that points to the next field in offset
93 order.
94 Each variable for a structure field has
95
96 1. "size", that tells the size in bits of that field.
2. "fullsize", that tells the size in bits of the entire structure.
98 3. "offset", that tells the offset in bits from the beginning of the
99 structure to this field.
100
101 Thus,
102 struct f
103 {
104 int a;
105 int b;
106 } foo;
107 int *bar;
108
109 looks like
110
111 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
112 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
113 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
114
115
116 In order to solve the system of set constraints, the following is
117 done:
118
119 1. Each constraint variable x has a solution set associated with it,
120 Sol(x).
121
122 2. Constraints are separated into direct, copy, and complex.
123 Direct constraints are ADDRESSOF constraints that require no extra
124 processing, such as P = &Q
125 Copy constraints are those of the form P = Q.
126 Complex constraints are all the constraints involving dereferences
127 and offsets (including offsetted copies).
128
129 3. All direct constraints of the form P = &Q are processed, such
130 that Q is added to Sol(P)
131
132 4. All complex constraints for a given constraint variable are stored in a
133 linked list attached to that variable's node.
134
135 5. A directed graph is built out of the copy constraints. Each
136 constraint variable is a node in the graph, and an edge from
137 Q to P is added for each copy constraint of the form P = Q
138
139 6. The graph is then walked, and solution sets are
140 propagated along the copy edges, such that an edge from Q to P
141 causes Sol(P) <- Sol(P) union Sol(Q).
142
143 7. As we visit each node, all complex constraints associated with
144 that node are processed by adding appropriate copy edges to the graph, or the
145 appropriate variables to the solution set.
146
147 8. The process of walking the graph is iterated until no solution
148 sets change.
149
150 Prior to walking the graph in steps 6 and 7, We perform static
151 cycle elimination on the constraint graph, as well
152 as off-line variable substitution.
153
154 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
155 on and turned into anything), but isn't. You can just see what offset
156 inside the pointed-to struct it's going to access.
157
158 TODO: Constant bounded arrays can be handled as if they were structs of the
159 same number of elements.
160
161 TODO: Modeling heap and incoming pointers becomes much better if we
162 add fields to them as we discover them, which we could do.
163
164 TODO: We could handle unions, but to be honest, it's probably not
165 worth the pain or slowdown. */
166
167 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
168 htab_t heapvar_for_stmt;
169
170 static bool use_field_sensitive = true;
171 static int in_ipa_mode = 0;
172
173 /* Used for predecessor bitmaps. */
174 static bitmap_obstack predbitmap_obstack;
175
176 /* Used for points-to sets. */
177 static bitmap_obstack pta_obstack;
178
179 /* Used for oldsolution members of variables. */
180 static bitmap_obstack oldpta_obstack;
181
182 /* Used for per-solver-iteration bitmaps. */
183 static bitmap_obstack iteration_obstack;
184
185 static unsigned int create_variable_info_for (tree, const char *);
186 typedef struct constraint_graph *constraint_graph_t;
187 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188
189 DEF_VEC_P(constraint_t);
190 DEF_VEC_ALLOC_P(constraint_t,heap);
191
192 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
193 if (a) \
194 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195
/* Statistics gathered while building and solving the constraint
   system; dumped when details are requested.  */

static struct constraint_stats
{
  /* Total number of constraint variables created.  */
  unsigned int total_vars;
  /* Number of variables found not to be pointers.  */
  unsigned int nonpointer_vars;
  /* Number of variables unified before solving (offline).  */
  unsigned int unified_vars_static;
  /* Number of variables unified during solving (online cycle
     elimination).  */
  unsigned int unified_vars_dynamic;
  /* Number of solver worklist iterations performed.  */
  unsigned int iterations;
  /* Number of explicit graph edges added.  */
  unsigned int num_edges;
  /* Number of implicit graph edges added.  */
  unsigned int num_implicit_edges;
  /* Number of finished points-to sets created.  */
  unsigned int points_to_sets_created;
} stats;
207
/* A constraint variable.  Each program variable, each structure
   field, and each artificial variable created by the analysis gets
   one of these; they are stored in VARMAP and indexed by ID.  */

struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* True if the variable is directly the target of a dereference.
     This is used to track which variables are *actually* dereferenced
     so we can prune their points-to lists.  */
  unsigned int directly_dereferenced:1;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them.  */
  unsigned int has_union:1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var:1;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable (the one at the start of the
     current solver iteration).  */
  bitmap oldsolution;

  /* Finished points-to set for this variable (IE what is returned
     from find_what_p_points_to).  */
  bitmap finished_solution;

  /* Variable ids represented by this node.  */
  bitmap variables;

  /* Variable id this was collapsed to due to type unsafety.  This
     should be unused completely after build_succ_graph, or something
     is broken.  */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;
272
273 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
274
275 /* Pool of variable info structures. */
276 static alloc_pool variable_info_pool;
277
278 DEF_VEC_P(varinfo_t);
279
280 DEF_VEC_ALLOC_P(varinfo_t, heap);
281
282 /* Table of variable info structures for constraint variables.
283 Indexed directly by variable info id. */
284 static VEC(varinfo_t,heap) *varmap;
285
/* Return the variable-info structure with id N from the varmap.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}
293
294 /* Return the varmap element N, following the collapsed_to link. */
295
296 static inline varinfo_t
297 get_varinfo_fc (unsigned int n)
298 {
299 varinfo_t v = VEC_index (varinfo_t, varmap, n);
300
301 if (v->collapsed_to)
302 return v->collapsed_to;
303 return v;
304 }
305
306 /* Variable that represents the unknown pointer. */
307 static varinfo_t var_anything;
308 static tree anything_tree;
309 static unsigned int anything_id;
310
311 /* Variable that represents the NULL pointer. */
312 static varinfo_t var_nothing;
313 static tree nothing_tree;
314 static unsigned int nothing_id;
315
316 /* Variable that represents read only memory. */
317 static varinfo_t var_readonly;
318 static tree readonly_tree;
319 static unsigned int readonly_id;
320
321 /* Variable that represents integers. This is used for when people do things
322 like &0->a.b. */
323 static varinfo_t var_integer;
324 static tree integer_tree;
325 static unsigned int integer_id;
326
327 /* Lookup a heap var for FROM, and return it if we find one. */
328
329 static tree
330 heapvar_lookup (tree from)
331 {
332 struct tree_map *h, in;
333 in.from = from;
334
335 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
336 if (h)
337 return h->to;
338 return NULL_TREE;
339 }
340
/* Insert a mapping FROM->TO in the heap var for statement
   hashtable.  */

static void
heapvar_insert (tree from, tree to)
{
  struct tree_map *h;
  void **loc;

  /* The entry is GC-allocated because heapvar_for_stmt is a GTY
     if_marked hash table.  */
  h = ggc_alloc (sizeof (struct tree_map));
  h->hash = htab_hash_pointer (from);
  h->from = from;
  h->to = to;
  /* INSERT guarantees a slot; overwrite any previous mapping.  */
  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
  *(struct tree_map **) loc = h;
}
357
/* Return a new variable info structure for decl T, with id ID and
   name NAME.  All flags start out clear and the solution sets
   empty.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = pool_alloc (variable_info_pool);

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->directly_dereferenced = false;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  /* Solutions live on the points-to obstacks so they survive
     per-iteration bitmap releases.  */
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->finished_solution = NULL;
  ret->next = NULL;
  ret->collapsed_to = NULL;
  return ret;
}
382
/* The three kinds of constraint expression: x, *x and &x.  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  unsigned HOST_WIDE_INT offset;
};
402
403 typedef struct constraint_expr ce_s;
404 DEF_VEC_O(ce_s);
405 DEF_VEC_ALLOC_O(ce_s, heap);
406 static void get_constraint_for (tree, VEC(ce_s, heap) **);
407 static void do_deref (VEC (ce_s, heap) **);
408
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  /* The left-hand side of the inclusion (the assigned-to side).  */
  struct constraint_expr lhs;
  /* The right-hand side (the source of the inclusion).  */
  struct constraint_expr rhs;
};
420
421 /* List of constraints that we use to build the constraint graph from. */
422
423 static VEC(constraint_t,heap) *constraints;
424 static alloc_pool constraint_pool;
425
426
427 DEF_VEC_I(int);
428 DEF_VEC_ALLOC_I(int, heap);
429
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  All arrays below are indexed by node
   id and sized to GRAPH->size.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a node.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Label for each node, used during variable substitution.  */
  unsigned int *label;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  VEC(constraint_t,heap) **complex;
};
473
474 static constraint_graph_t graph;
475
476 /* During variable substitution and the offline version of indirect
477 cycle finding, we create nodes to represent dereferences and
478 address taken constraints. These represent where these start and
479 end. */
480 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
481 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
482 #define FIRST_ADDR_NODE (LAST_REF_NODE + 1)
483
484 /* Return the representative node for NODE, if NODE has been unioned
485 with another NODE.
486 This function performs path compression along the way to finding
487 the representative. */
488
489 static unsigned int
490 find (unsigned int node)
491 {
492 gcc_assert (node < graph->size);
493 if (graph->rep[node] != node)
494 return graph->rep[node] = find (graph->rep[node]);
495 return node;
496 }
497
498 /* Union the TO and FROM nodes to the TO nodes.
499 Note that at some point in the future, we may want to do
500 union-by-rank, in which case we are going to have to return the
501 node we unified to. */
502
503 static bool
504 unite (unsigned int to, unsigned int from)
505 {
506 gcc_assert (to < graph->size && from < graph->size);
507 if (to != from && graph->rep[from] != to)
508 {
509 graph->rep[from] = to;
510 return true;
511 }
512 return false;
513 }
514
/* Create a new constraint consisting of LHS and RHS expressions.
   The constraint is allocated from CONSTRAINT_POOL.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}
526
527 /* Print out constraint C to FILE. */
528
529 void
530 dump_constraint (FILE *file, constraint_t c)
531 {
532 if (c->lhs.type == ADDRESSOF)
533 fprintf (file, "&");
534 else if (c->lhs.type == DEREF)
535 fprintf (file, "*");
536 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
537 if (c->lhs.offset != 0)
538 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
539 fprintf (file, " = ");
540 if (c->rhs.type == ADDRESSOF)
541 fprintf (file, "&");
542 else if (c->rhs.type == DEREF)
543 fprintf (file, "*");
544 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
545 if (c->rhs.offset != 0)
546 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
547 fprintf (file, "\n");
548 }
549
/* Print out constraint C to stderr.  Callable from the debugger.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}
557
558 /* Print out all constraints to FILE */
559
560 void
561 dump_constraints (FILE *file)
562 {
563 int i;
564 constraint_t c;
565 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
566 dump_constraint (file, c);
567 }
568
/* Print out all constraints to stderr.  Callable from the debugger.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
576
577 /* SOLVER FUNCTIONS
578
579 The solver is a simple worklist solver, that works on the following
580 algorithm:
581
582 sbitmap changed_nodes = all zeroes;
583 changed_count = 0;
584 For each node that is not already collapsed:
585 changed_count++;
586 set bit in changed nodes
587
588 while (changed_count > 0)
589 {
590 compute topological ordering for constraint graph
591
592 find and collapse cycles in the constraint graph (updating
593 changed if necessary)
594
595 for each node (n) in the graph in topological order:
596 changed_count--;
597
598 Process each complex constraint associated with the node,
599 updating changed if necessary.
600
601 For each outgoing edge from n, propagate the solution from n to
602 the destination of the edge, updating changed as necessary.
603
604 } */
605
606 /* Return true if two constraint expressions A and B are equal. */
607
608 static bool
609 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
610 {
611 return a.type == b.type && a.var == b.var && a.offset == b.offset;
612 }
613
614 /* Return true if constraint expression A is less than constraint expression
615 B. This is just arbitrary, but consistent, in order to give them an
616 ordering. */
617
618 static bool
619 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
620 {
621 if (a.type == b.type)
622 {
623 if (a.var == b.var)
624 return a.offset < b.offset;
625 else
626 return a.var < b.var;
627 }
628 else
629 return a.type < b.type;
630 }
631
632 /* Return true if constraint A is less than constraint B. This is just
633 arbitrary, but consistent, in order to give them an ordering. */
634
635 static bool
636 constraint_less (const constraint_t a, const constraint_t b)
637 {
638 if (constraint_expr_less (a->lhs, b->lhs))
639 return true;
640 else if (constraint_expr_less (b->lhs, a->lhs))
641 return false;
642 else
643 return constraint_expr_less (a->rhs, b->rhs);
644 }
645
646 /* Return true if two constraints A and B are equal. */
647
648 static bool
649 constraint_equal (struct constraint a, struct constraint b)
650 {
651 return constraint_expr_equal (a.lhs, b.lhs)
652 && constraint_expr_equal (a.rhs, b.rhs);
653 }
654
655
656 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
657
658 static constraint_t
659 constraint_vec_find (VEC(constraint_t,heap) *vec,
660 struct constraint lookfor)
661 {
662 unsigned int place;
663 constraint_t found;
664
665 if (vec == NULL)
666 return NULL;
667
668 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
669 if (place >= VEC_length (constraint_t, vec))
670 return NULL;
671 found = VEC_index (constraint_t, vec, place);
672 if (!constraint_equal (*found, lookfor))
673 return NULL;
674 return found;
675 }
676
677 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
678
679 static void
680 constraint_set_union (VEC(constraint_t,heap) **to,
681 VEC(constraint_t,heap) **from)
682 {
683 int i;
684 constraint_t c;
685
686 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
687 {
688 if (constraint_vec_find (*to, *c) == NULL)
689 {
690 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
691 constraint_less);
692 VEC_safe_insert (constraint_t, heap, *to, place, c);
693 }
694 }
695 }
696
697 /* Take a solution set SET, add OFFSET to each member of the set, and
698 overwrite SET with the result when done. */
699
700 static void
701 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
702 {
703 bitmap result = BITMAP_ALLOC (&iteration_obstack);
704 unsigned int i;
705 bitmap_iterator bi;
706
707 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
708 {
709 /* If this is a properly sized variable, only add offset if it's
710 less than end. Otherwise, it is globbed to a single
711 variable. */
712
713 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
714 {
715 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
716 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
717 if (!v)
718 continue;
719 bitmap_set_bit (result, v->id);
720 }
721 else if (get_varinfo (i)->is_artificial_var
722 || get_varinfo (i)->has_union
723 || get_varinfo (i)->is_unknown_size_var)
724 {
725 bitmap_set_bit (result, i);
726 }
727 }
728
729 bitmap_copy (set, result);
730 BITMAP_FREE (result);
731 }
732
733 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
734 process. */
735
736 static bool
737 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
738 {
739 if (inc == 0)
740 return bitmap_ior_into (to, from);
741 else
742 {
743 bitmap tmp;
744 bool res;
745
746 tmp = BITMAP_ALLOC (&iteration_obstack);
747 bitmap_copy (tmp, from);
748 solution_set_add (tmp, inc);
749 res = bitmap_ior_into (to, tmp);
750 BITMAP_FREE (tmp);
751 return res;
752 }
753 }
754
755 /* Insert constraint C into the list of complex constraints for graph
756 node VAR. */
757
758 static void
759 insert_into_complex (constraint_graph_t graph,
760 unsigned int var, constraint_t c)
761 {
762 VEC (constraint_t, heap) *complex = graph->complex[var];
763 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
764 constraint_less);
765
766 /* Only insert constraints that do not already exist. */
767 if (place >= VEC_length (constraint_t, complex)
768 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
769 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
770 }
771
772
773 /* Condense two variable nodes into a single variable node, by moving
774 all associated info from SRC to TO. */
775
776 static void
777 merge_node_constraints (constraint_graph_t graph, unsigned int to,
778 unsigned int from)
779 {
780 unsigned int i;
781 constraint_t c;
782
783 gcc_assert (find (from) == to);
784
785 /* Move all complex constraints from src node into to node */
786 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
787 {
788 /* In complex constraints for node src, we may have either
789 a = *src, and *src = a, or an offseted constraint which are
790 always added to the rhs node's constraints. */
791
792 if (c->rhs.type == DEREF)
793 c->rhs.var = to;
794 else if (c->lhs.type == DEREF)
795 c->lhs.var = to;
796 else
797 c->rhs.var = to;
798 }
799 constraint_set_union (&graph->complex[to], &graph->complex[from]);
800 VEC_free (constraint_t, heap, graph->complex[from]);
801 graph->complex[from] = NULL;
802 }
803
804
/* Remove edges involving NODE from GRAPH by freeing its successor
   bitmap.  Predecessor bitmaps are left alone; they are only used
   before solving.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}
813
/* Merge GRAPH nodes FROM and TO into node TO: combine their indirect
   cycle information and successor edges, then drop FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	{
	  graph->indirect_cycles[to] = graph->indirect_cycles[from];
	}
      else
	{
	  unsigned int tonode = find (graph->indirect_cycles[to]);
	  unsigned int fromnode = find (graph->indirect_cycles[from]);

	  /* Only do the (possibly recursive) unification when the
	     representatives were not already united.  */
	  if (unite (tonode, fromnode))
	    unify_nodes (graph, tonode, fromnode, true);
	}
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}
853
854
855 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
856 it doesn't exist in the graph already. */
857
858 static void
859 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
860 unsigned int from)
861 {
862 if (to == from)
863 return;
864
865 if (!graph->implicit_preds[to])
866 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
867
868 if (!bitmap_bit_p (graph->implicit_preds[to], from))
869 {
870 stats.num_implicit_edges++;
871 bitmap_set_bit (graph->implicit_preds[to], from);
872 }
873 }
874
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Predecessor bitmaps are allocated lazily.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  if (!bitmap_bit_p (graph->preds[to], from))
    bitmap_set_bit (graph->preds[to], from);
}
888
889 /* Add a graph edge to GRAPH, going from FROM to TO if
890 it doesn't exist in the graph already.
891 Return false if the edge already existed, true otherwise. */
892
893 static bool
894 add_graph_edge (constraint_graph_t graph, unsigned int to,
895 unsigned int from)
896 {
897 if (to == from)
898 {
899 return false;
900 }
901 else
902 {
903 bool r = false;
904
905 if (!graph->succs[from])
906 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
907 if (!bitmap_bit_p (graph->succs[from], to))
908 {
909 r = true;
910 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
911 stats.num_edges++;
912 bitmap_set_bit (graph->succs[from], to);
913 }
914 return r;
915 }
916 }
917
918
919 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
920
921 static bool
922 valid_graph_edge (constraint_graph_t graph, unsigned int src,
923 unsigned int dest)
924 {
925 return (graph->succs[dest]
926 && bitmap_bit_p (graph->succs[dest], src));
927 }
928
/* Build the constraint graph, adding only predecessor edges right now.
   This allocates the global GRAPH and scans the constraint list,
   recording predecessor and implicit edges and marking which nodes
   are "direct" (candidates for variable substitution).  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph = XNEW (struct constraint_graph);
  /* Three node ranges live in the graph: the variables themselves,
     their REF nodes and their ADDR nodes (see FIRST_REF_NODE and
     FIRST_ADDR_NODE above), hence the factor of 3.  */
  graph->size = (VEC_length (varinfo_t, varmap)) * 3;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, VEC_length (varinfo_t, varmap));
  graph->label = XCNEWVEC (unsigned int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *,
			     VEC_length (varinfo_t, varmap));
  graph->direct_nodes = sbitmap_alloc (graph->size);
  sbitmap_zero (graph->direct_nodes);

  /* All non-special variables start out as direct nodes.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	SET_BIT (graph->direct_nodes, j);
    }

  /* Every node is initially its own union-find representative and
     has no equivalence class yet.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->eq_rep[j] = -1;
    }

  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
    graph->indirect_cycles[j] = -1;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
      unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	  if (rhs.type == ADDRESSOF)
	    RESET_BIT (graph->direct_nodes, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    RESET_BIT (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  add_pred_graph_edge (graph, lhsvar, FIRST_ADDR_NODE + rhsvar);
	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* An address-taken variable can be written through a
	     pointer, so it cannot be a direct node.  */
	  RESET_BIT (graph->direct_nodes, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies cannot be tracked precisely, so the
	     involved variables lose their direct status.  */
	  if (rhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, lhs.var);
	  if (lhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, rhs.var);
	}
    }
}
1017
/* Build the constraint graph, adding successor edges.  Run after
   offline variable substitution, so each variable is first mapped
   through find () to its current representative.  */

static void
build_succ_graph (void)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been removed (NULLed) by earlier
	 processing.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (get_varinfo_fc (lhs.var)->id);
      rhsvar = find (get_varinfo_fc (rhs.var)->id);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y into x's REF node.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from y's REF node into x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_assert (find (get_varinfo_fc (rhs.var)->id)
		      == get_varinfo_fc (rhs.var)->id);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }
}
1065
1066
/* Changed variables on the last iteration.  */
static unsigned int changed_count;	/* Number of bits set in CHANGED.  */
static sbitmap changed;			/* Per-variable "solution changed,
					   reprocess me" flags.  */

DEF_VEC_I(unsigned);
DEF_VEC_ALLOC_I(unsigned,heap);
1073
1074
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  sbitmap visited;		/* Nodes already visited by the DFS.  */
  sbitmap roots;		/* Nodes known to be roots of finished SCCs.  */
  unsigned int *dfs;		/* DFS number (lowlink) per node.  */
  unsigned int *node_mapping;	/* Node -> representative map used by
				   the offline substitution pass.  */
  int current_index;		/* Next DFS number to hand out.  */
  VEC(unsigned,heap) *scc_stack;	/* Stack of candidate SCC members.  */
};
1086
1087
1088 /* Recursive routine to find strongly connected components in GRAPH.
1089 SI is the SCC info to store the information in, and N is the id of current
1090 graph node we are processing.
1091
1092 This is Tarjan's strongly connected component finding algorithm, as
1093 modified by Nuutila to keep only non-root nodes on the stack.
1094 The algorithm can be found in "On finding the strongly connected
1095 connected components in a directed graph" by Esko Nuutila and Eljas
1096 Soisalon-Soininen, in Information Processing Letters volume 49,
1097 number 1, pages 9-14. */
1098
1099 static void
1100 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1101 {
1102 unsigned int i;
1103 bitmap_iterator bi;
1104 unsigned int my_dfs;
1105
1106 SET_BIT (si->visited, n);
1107 si->dfs[n] = si->current_index ++;
1108 my_dfs = si->dfs[n];
1109
1110 /* Visit all the successors. */
1111 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1112 {
1113 unsigned int w;
1114
1115 if (i > LAST_REF_NODE)
1116 break;
1117
1118 w = find (i);
1119 if (TEST_BIT (si->roots, w))
1120 continue;
1121
1122 if (!TEST_BIT (si->visited, w))
1123 scc_visit (graph, si, w);
1124 {
1125 unsigned int t = find (w);
1126 unsigned int nnode = find (n);
1127 gcc_assert (nnode == n);
1128
1129 if (si->dfs[t] < si->dfs[nnode])
1130 si->dfs[n] = si->dfs[t];
1131 }
1132 }
1133
1134 /* See if any components have been identified. */
1135 if (si->dfs[n] == my_dfs)
1136 {
1137 if (VEC_length (unsigned, si->scc_stack) > 0
1138 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1139 {
1140 bitmap scc = BITMAP_ALLOC (NULL);
1141 bool have_ref_node = n >= FIRST_REF_NODE;
1142 unsigned int lowest_node;
1143 bitmap_iterator bi;
1144
1145 bitmap_set_bit (scc, n);
1146
1147 while (VEC_length (unsigned, si->scc_stack) != 0
1148 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1149 {
1150 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1151
1152 bitmap_set_bit (scc, w);
1153 if (w >= FIRST_REF_NODE)
1154 have_ref_node = true;
1155 }
1156
1157 lowest_node = bitmap_first_set_bit (scc);
1158 gcc_assert (lowest_node < FIRST_REF_NODE);
1159 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1160 {
1161 if (i < FIRST_REF_NODE)
1162 {
1163 /* Mark this node for collapsing. */
1164 if (unite (lowest_node, i))
1165 unify_nodes (graph, lowest_node, i, false);
1166 }
1167 else
1168 {
1169 unite (lowest_node, i);
1170 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1171 }
1172 }
1173 }
1174 SET_BIT (si->roots, n);
1175 }
1176 else
1177 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1178 }
1179
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be its own
   representative, and FROM's edges, constraints and solution are all
   merged into it.  FROM's solution bitmaps are released.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move FROM's graph edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Transfer FROM's "changed" status to TO, keeping changed_count
     consistent: if TO was already marked, the total count drops.  */
  if (update_changed && TEST_BIT (changed, from))
    {
      RESET_BIT (changed, from);
      if (!TEST_BIT (changed, to))
	SET_BIT (changed, to);
      else
	{
	  gcc_assert (changed_count > 0);
	  changed_count--;
	}
    }

  /* If the solution changes because of the merging, we need to mark
     the variable as changed.  */
  if (bitmap_ior_into (get_varinfo (to)->solution,
		       get_varinfo (from)->solution))
    {
      if (update_changed && !TEST_BIT (changed, to))
	{
	  SET_BIT (changed, to);
	  changed_count++;
	}
    }

  /* FROM is dead after unification; release its solution storage.  */
  BITMAP_FREE (get_varinfo (from)->solution);
  BITMAP_FREE (get_varinfo (from)->oldsolution);

  /* If the solver is already iterating, reset TO's old solution so
     the next difference computation starts from an empty set.  */
  if (stats.iterations > 0)
    {
      BITMAP_FREE (get_varinfo (to)->oldsolution);
      get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
    }

  /* Merging may have created a self-edge on TO; it carries no
     information, so drop it.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1241
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  Filled by topo_visit; consumed by popping.  */
  VEC(unsigned,heap) *topo_order;
};
1252
1253
1254 /* Initialize and return a topological info structure. */
1255
1256 static struct topo_info *
1257 init_topo_info (void)
1258 {
1259 size_t size = VEC_length (varinfo_t, varmap);
1260 struct topo_info *ti = XNEW (struct topo_info);
1261 ti->visited = sbitmap_alloc (size);
1262 sbitmap_zero (ti->visited);
1263 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1264 return ti;
1265 }
1266
1267
1268 /* Free the topological sort info pointed to by TI. */
1269
1270 static void
1271 free_topo_info (struct topo_info *ti)
1272 {
1273 sbitmap_free (ti->visited);
1274 VEC_free (unsigned, heap, ti->topo_order);
1275 free (ti);
1276 }
1277
1278 /* Visit the graph in topological order, and store the order in the
1279 topo_info structure. */
1280
1281 static void
1282 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1283 unsigned int n)
1284 {
1285 bitmap_iterator bi;
1286 unsigned int j;
1287
1288 SET_BIT (ti->visited, n);
1289
1290 if (graph->succs[n])
1291 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1292 {
1293 if (!TEST_BIT (ti->visited, j))
1294 topo_visit (graph, ti, j);
1295 }
1296
1297 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1298 }
1299
1300 /* Return true if variable N + OFFSET is a legal field of N. */
1301
1302 static bool
1303 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1304 {
1305 varinfo_t ninfo = get_varinfo (n);
1306
1307 /* For things we've globbed to single variables, any offset into the
1308 variable acts like the entire variable, so that it becomes offset
1309 0. */
1310 if (ninfo->is_special_var
1311 || ninfo->is_artificial_var
1312 || ninfo->is_unknown_size_var)
1313 {
1314 *offset = 0;
1315 return true;
1316 }
1317 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1318 }
1319
/* Process a constraint C that represents *x = &y, using DELTA as the
   current solution of x.  */

static void
do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
		  constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of Delta (Sol(x)), add y to Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT offset = c->lhs.offset;
      if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
	{
	  /* *x != NULL && *x != ANYTHING*/
	  varinfo_t v;
	  unsigned int t;
	  bitmap sol;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;

	  /* Find the field of j that the store actually hits; there
	     may be none if the offset is past the known fields.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  sol = get_varinfo (t)->solution;
	  /* Add y and queue the field for reprocessing if that
	     changed its solution.  */
	  if (!bitmap_bit_p (sol, rhs))
	    {
	      bitmap_set_bit (sol, rhs);
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      /* Deliberately disabled diagnostic (note the "0 &&").  */
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");

    }
}
1362
/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution of y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = find (c->lhs.var);
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y may point to ANYTHING, x may point to ANYTHING as well;
     no need to walk the individual members of delta.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag = !bitmap_bit_p (sol, anything_id);
      if (flag)
	bitmap_set_bit (sol, anything_id);
      goto done;
    }
  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
	  unsigned int t;

	  /* Find the field of j that the load reads from; skip j if
	     the offset lies past its known fields.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t) ->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Only union in the solution when the edge is new; an
	     existing edge already propagates it.  */
	  else if (add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
      /* Deliberately disabled diagnostic (note the "0 &&").  */
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");

    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
1423
/* Process a constraint C that represents *x = y, using DELTA as the
   current solution of x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = find (c->rhs.var);
  unsigned HOST_WIDE_INT roff = c->rhs.offset;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y points to ANYTHING, every field the store may hit ends up
     pointing to ANYTHING as well.  */
  if (bitmap_bit_p (sol, anything_id))
    {
      EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
	{
	  varinfo_t jvi = get_varinfo (j);
	  unsigned int t;
	  unsigned int loff = c->lhs.offset;
	  unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
	  varinfo_t v;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
	    {
	      bitmap_set_bit (get_varinfo (t)->solution, anything_id);
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      return;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
	  bitmap tmp;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  tmp = get_varinfo (t)->solution;

	  if (set_union_with_increment (tmp, sol, roff))
	    {
	      get_varinfo (t)->solution = tmp;
	      /* If we just grew y's own solution, refresh our cached
		 pointer to it before the next iteration.  */
	      if (t == rhs)
		sol = get_varinfo (rhs)->solution;
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      /* Deliberately disabled diagnostic (note the "0 &&").  */
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
    }
}
1497
1498 /* Handle a non-simple (simple meaning requires no iteration),
1499 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1500
1501 static void
1502 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1503 {
1504 if (c->lhs.type == DEREF)
1505 {
1506 if (c->rhs.type == ADDRESSOF)
1507 {
1508 /* *x = &y */
1509 do_da_constraint (graph, c, delta);
1510 }
1511 else
1512 {
1513 /* *x = y */
1514 do_ds_constraint (c, delta);
1515 }
1516 }
1517 else if (c->rhs.type == DEREF)
1518 {
1519 /* x = *y */
1520 if (!(get_varinfo (c->lhs.var)->is_special_var))
1521 do_sd_constraint (graph, c, delta);
1522 }
1523 else
1524 {
1525 bitmap tmp;
1526 bitmap solution;
1527 bool flag = false;
1528 unsigned int t;
1529
1530 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1531 t = find (c->rhs.var);
1532 solution = get_varinfo (t)->solution;
1533 t = find (c->lhs.var);
1534 tmp = get_varinfo (t)->solution;
1535
1536 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1537
1538 if (flag)
1539 {
1540 get_varinfo (t)->solution = tmp;
1541 if (!TEST_BIT (changed, t))
1542 {
1543 SET_BIT (changed, t);
1544 changed_count++;
1545 }
1546 }
1547 }
1548 }
1549
1550 /* Initialize and return a new SCC info structure. */
1551
1552 static struct scc_info *
1553 init_scc_info (size_t size)
1554 {
1555 struct scc_info *si = XNEW (struct scc_info);
1556 size_t i;
1557
1558 si->current_index = 0;
1559 si->visited = sbitmap_alloc (size);
1560 sbitmap_zero (si->visited);
1561 si->roots = sbitmap_alloc (size);
1562 sbitmap_zero (si->roots);
1563 si->node_mapping = XNEWVEC (unsigned int, size);
1564 si->dfs = XCNEWVEC (unsigned int, size);
1565
1566 for (i = 0; i < size; i++)
1567 si->node_mapping[i] = i;
1568
1569 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1570 return si;
1571 }
1572
1573 /* Free an SCC info structure pointed to by SI */
1574
1575 static void
1576 free_scc_info (struct scc_info *si)
1577 {
1578 sbitmap_free (si->visited);
1579 sbitmap_free (si->roots);
1580 free (si->node_mapping);
1581 free (si->dfs);
1582 VEC_free (unsigned, heap, si->scc_stack);
1583 free (si);
1584 }
1585
1586
1587 /* Find indirect cycles in GRAPH that occur, using strongly connected
1588 components, and note them in the indirect cycles map.
1589
1590 This technique comes from Ben Hardekopf and Calvin Lin,
1591 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1592 Lines of Code", submitted to PLDI 2007. */
1593
1594 static void
1595 find_indirect_cycles (constraint_graph_t graph)
1596 {
1597 unsigned int i;
1598 unsigned int size = graph->size;
1599 struct scc_info *si = init_scc_info (size);
1600
1601 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1602 if (!TEST_BIT (si->visited, i) && find (i) == i)
1603 scc_visit (graph, si, i);
1604
1605 free_scc_info (si);
1606 }
1607
1608 /* Compute a topological ordering for GRAPH, and store the result in the
1609 topo_info structure TI. */
1610
1611 static void
1612 compute_topo_order (constraint_graph_t graph,
1613 struct topo_info *ti)
1614 {
1615 unsigned int i;
1616 unsigned int size = VEC_length (varinfo_t, varmap);
1617
1618 for (i = 0; i != size; ++i)
1619 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1620 topo_visit (graph, ti, i);
1621 }
1622
1623 /* Perform offline variable substitution.
1624
1625 This is a linear time way of identifying variables that must have
1626 equivalent points-to sets, including those caused by static cycles,
1627 and single entry subgraphs, in the constraint graph.
1628
1629 The technique is described in "Off-line variable substitution for
1630 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1631 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.
1632
1633 There is an optimal way to do this involving hash based value
1634 numbering, once the technique is published i will implement it
1635 here.
1636
1637 The general method of finding equivalence classes is as follows:
1638 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1639 Add fake nodes (ADDRESS nodes) and edges for a = &b constraints.
1640 Initialize all non-REF/ADDRESS nodes to be direct nodes
1641 For each SCC in the predecessor graph:
1642 for each member (x) of the SCC
1643 if x is not a direct node:
1644 set rootnode(SCC) to be not a direct node
1645 collapse node x into rootnode(SCC).
1646 if rootnode(SCC) is not a direct node:
1647 label rootnode(SCC) with a new equivalence class
1648 else:
1649 if all labeled predecessors of rootnode(SCC) have the same
1650 label:
1651 label rootnode(SCC) with this label
1652 else:
1653 label rootnode(SCC) with a new equivalence class
1654
1655 All direct nodes with the same equivalence class can be replaced
1656 with a single representative node.
1657 All unlabeled nodes (label == 0) are not pointers and all edges
1658 involving them can be eliminated.
1659 We perform these optimizations during move_complex_constraints.
1660 */
1661
/* Next equivalence-class number to hand out during offline variable
   substitution; reset by perform_var_substitution.  Label 0 marks
   non-pointer nodes (see the comment above).  */
static int equivalence_class;
1663
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with equivalence classes.
   This is used during variable substitution to find cycles involving
   the regular or implicit predecessors, and label them as equivalent.
   The SCC finding algorithm used is the same as that for scc_visit,
   but operates on the predecessor (and implicit-predecessor) edges
   and uses si->node_mapping instead of the union-find structure.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->roots, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);
      /* The lowlink update below runs unconditionally (Nuutila's
	 variant of Tarjan's algorithm, as in scc_visit).  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors, with the same lowlink
     bookkeeping as above.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->roots, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* Collapse the SCC into N: members get mapped to N, and N loses
	 directness if any member was indirect.  */
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  si->node_mapping[w] = n;

	  if (!TEST_BIT (graph->direct_nodes, w))
	    RESET_BIT (graph->direct_nodes, n);
	}
      SET_BIT (si->roots, n);

      if (!TEST_BIT (graph->direct_nodes, n))
	{
	  /* Indirect nodes always get a fresh equivalence class.  */
	  graph->label[n] = equivalence_class++;
	}
      else
	{
	  /* Direct nodes inherit the label when all labeled
	     predecessors agree; otherwise they get a new class.
	     SIZE counts the distinct labels seen (capped at 2).  */
	  unsigned int size = 0;
	  unsigned int firstlabel = ~0;

	  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
	    {
	      unsigned int j = si->node_mapping[i];

	      /* Ignore self-edges and unlabeled (non-pointer) preds.  */
	      if (j == n || graph->label[j] == 0)
		continue;

	      if (firstlabel == (unsigned int)~0)
		{
		  firstlabel = graph->label[j];
		  size++;
		}
	      else if (graph->label[j] != firstlabel)
		size++;
	    }

	  if (size == 0)
	    graph->label[n] = 0;	/* No labeled preds: non-pointer.  */
	  else if (size == 1)
	    graph->label[n] = firstlabel;	/* All preds agree.  */
	  else
	    graph->label[n] = equivalence_class++;
	}
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1772
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the SCC
   info, which the caller must pass to free_var_substitution_info
   after move_complex_constraints has consumed the node mapping.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  equivalence_class = 0;

  /* We only need to visit the non-address nodes for labeling
     purposes, as the address nodes will never have any predecessors,
     because &x never appears on the LHS of a constraint.  */
  for (i = 0; i < LAST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = TEST_BIT (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence class for %s node id %d:%s is %d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* A direct node with label 0 is provably not a pointer; its
	 edges contribute nothing to the solution.  */
      if (graph->label[node] == 0 && TEST_BIT (graph->direct_nodes, node))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }
  return si;
}
1822
/* Free information that was only necessary for variable
   substitution: the SCC info SI, the label and equivalence-class
   arrays, the direct-node bitmap, and the iteration obstack.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->label);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  bitmap_obstack_release (&iteration_obstack);
}
1835
1836 /* Return an existing node that is equivalent to NODE, which has
1837 equivalence class LABEL, if one exists. Return NODE otherwise. */
1838
1839 static unsigned int
1840 find_equivalent_node (constraint_graph_t graph,
1841 unsigned int node, unsigned int label)
1842 {
1843 /* If the address version of this variable is unused, we can
1844 substitute it for anything else with the same label.
1845 Otherwise, we know the pointers are equivalent, but not the
1846 locations. */
1847
1848 if (graph->label[FIRST_ADDR_NODE + node] == 0)
1849 {
1850 gcc_assert (label < graph->size);
1851
1852 if (graph->eq_rep[label] != -1)
1853 {
1854 /* Unify the two variables since we know they are equivalent. */
1855 if (unite (graph->eq_rep[label], node))
1856 unify_nodes (graph, graph->eq_rep[label], node, false);
1857 return graph->eq_rep[label];
1858 }
1859 else
1860 {
1861 graph->eq_rep[label] = node;
1862 }
1863 }
1864 return node;
1865 }
1866
/* Move complex constraints to the appropriate nodes, and collapse
   variables we've discovered are equivalent during variable
   substitution.  SI is the SCC_INFO that is the result of
   perform_variable_substitution.  Constraints for non-pointer
   variables are dropped (replaced with NULL in the vector).  */

static void
move_complex_constraints (constraint_graph_t graph,
			  struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  /* No unifications may have happened yet.  */
  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
      unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->label[lhsnode];
      rhslabel = graph->label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  Indirect label-0 nodes are not non-pointers;
	 give them a fresh class instead.  */
      if (lhslabel == 0)
	{
	  if (!TEST_BIT (graph->direct_nodes, lhsnode))
	    lhslabel = graph->label[lhsnode] = equivalence_class++;
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{

		  fprintf (dump_file, "%s is a non-pointer variable,"
			   "ignoring constraint:",
			   get_varinfo (lhs.var)->name);
		  dump_constraint (dump_file, c);
		}
	      VEC_replace (constraint_t, constraints, i, NULL);
	      continue;
	    }
	}

      /* Same test on the RHS.  */
      if (rhslabel == 0)
	{
	  if (!TEST_BIT (graph->direct_nodes, rhsnode))
	    rhslabel = graph->label[rhsnode] = equivalence_class++;
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{

		  fprintf (dump_file, "%s is a non-pointer variable,"
			   "ignoring constraint:",
			   get_varinfo (rhs.var)->name);
		  dump_constraint (dump_file, c);
		}
	      VEC_replace (constraint_t, constraints, i, NULL);
	      continue;
	    }
	}

      /* Collapse onto equivalence-class representatives and rewrite
	 the constraint to use them.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;

      /* File the constraint on the node whose solution drives it.  */
      if (lhs.type == DEREF)
	{
	  /* *x = y or *x = &y: driven by Sol(x).  */
	  if (rhs.type == ADDRESSOF || rhsvar > anything_id)
	    insert_into_complex (graph, lhsvar, c);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: driven by Sol(y).  */
	  if (!(get_varinfo (lhsvar)->is_special_var))
	    insert_into_complex (graph, rhsvar, c);
	}
      else if (rhs.type != ADDRESSOF && lhsvar > anything_id
	       && (lhs.offset != 0 || rhs.offset != 0))
	{
	  /* Offsetted scalar copy: also driven by Sol(y).  */
	  insert_into_complex (graph, rhsvar, c);
	}

    }
}
1960
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      VEC(unsigned,heap) *queue = NULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      /* First pass: record (via unite) which representatives need
	 merging, deferring the actual unification.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		VEC_safe_push (unsigned, heap, queue, i);
	    }
	}

      /* Second pass: perform the deferred unifications.  */
      for (queuepos = 0;
	   VEC_iterate (unsigned, queue, queuepos, i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      VEC_free (unsigned, heap, queue);
      return true;
    }
  return false;
}
2000
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  bitmap pts;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial non-collapsed nodes as changed.  A node needs
     processing only if it has a non-empty solution AND something to
     propagate it to (successors or complex constraints).  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0))
	{
	  SET_BIT (changed, i);
	  changed_count++;
	}
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate to a fixed point: each round visits the nodes in
     topological order and propagates only the solution delta.  */
  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = graph->complex[i];
	      bool solution_empty;

	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Compute the changed set of solution bits.  Only the
		 delta since the last visit needs propagating.  */
	      bitmap_and_compl (pts, get_varinfo (i)->solution,
				get_varinfo (i)->oldsolution);

	      if (bitmap_empty_p (pts))
		continue;

	      bitmap_ior_into (get_varinfo (i)->oldsolution, pts);

	      solution = get_varinfo (i)->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
		{
		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      /* The complex constraints may have grown our solution;
		 recheck before propagating.  */
	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* Only the delta (PTS) is unioned in, not the
			 full solution.  */
		      flag = set_union_with_increment (tmp, pts, 0);

		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  if (!TEST_BIT (changed, to))
			    {
			      SET_BIT (changed, to);
			      changed_count++;
			    }
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  sbitmap_free (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2139
2140 /* Map from trees to variable infos. */
2141 static struct pointer_map_t *vi_for_tree;
2142
2143
2144 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2145
2146 static void
2147 insert_vi_for_tree (tree t, varinfo_t vi)
2148 {
2149 void **slot = pointer_map_insert (vi_for_tree, t);
2150 gcc_assert (vi);
2151 gcc_assert (*slot == NULL);
2152 *slot = vi;
2153 }
2154
2155 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2156 exist in the map, return NULL, otherwise, return the varinfo we found. */
2157
2158 static varinfo_t
2159 lookup_vi_for_tree (tree t)
2160 {
2161 void **slot = pointer_map_contains (vi_for_tree, t);
2162 if (slot == NULL)
2163 return NULL;
2164
2165 return (varinfo_t) *slot;
2166 }
2167
/* Return a printable name for DECL.

   Falls back to "NULL" when DECL has no name; synthesizes
   "<var>_<version>" for SSA names and "D.<uid>" for other decls, but
   only when dumping is enabled (the synthesized names are for dump
   output only).  Synthesized strings are GC-allocated.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = get_name (decl);
  char *temp;
  int num_printed = 0;

  if (res != NULL)
    return res;

  res = "NULL";
  /* Don't pay the asprintf cost unless we're going to dump.  */
  if (!dump_file)
    return res;

  if (TREE_CODE (decl) == SSA_NAME)
    {
      num_printed = asprintf (&temp, "%s_%u",
			      alias_get_name (SSA_NAME_VAR (decl)),
			      SSA_NAME_VERSION (decl));
    }
  else if (DECL_P (decl))
    {
      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
    }
  /* TEMP is only valid when asprintf succeeded (returned > 0);
     otherwise we keep the "NULL" fallback.  */
  if (num_printed > 0)
    {
      res = ggc_strdup (temp);
      free (temp);
    }
  return res;
}
2201
2202 /* Find the variable id for tree T in the map.
2203 If T doesn't exist in the map, create an entry for it and return it. */
2204
2205 static varinfo_t
2206 get_vi_for_tree (tree t)
2207 {
2208 void **slot = pointer_map_contains (vi_for_tree, t);
2209 if (slot == NULL)
2210 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2211
2212 return (varinfo_t) *slot;
2213 }
2214
/* Get a constraint expression from an SSA_VAR_P node T.  The result
   is a SCALAR reference to T's variable info, except that read-only
   "anything" variables are turned into &READONLY.  */

static struct constraint_expr
get_constraint_exp_from_ssa_var (tree t)
{
  struct constraint_expr cexpr;

  gcc_assert (SSA_VAR_P (t) || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (t))
    return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));

  cexpr.type = SCALAR;

  /* This creates the variable info on first use of T.  */
  cexpr.var = get_vi_for_tree (t)->id;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  cexpr.offset = 0;
  return cexpr;
}
2245
/* Process a completed constraint T, and add it to the constraint
   list.  Degenerate forms are normalized here rather than stored:
   ANYTHING = ANYTHING is dropped, &ANYTHING on the LHS is swapped to
   the RHS, and *lhs = *rhs is split in two through a temporary.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

  /* Record variables that appear under a direct dereference.  */
  if (lhs.type == DEREF)
    get_varinfo (lhs.var)->directly_dereferenced = true;
  if (rhs.type == DEREF)
    get_varinfo (rhs.var)->directly_dereferenced = true;

  /* Without field sensitivity every offset collapses to zero.  */
  if (!use_field_sensitive)
    {
      t->rhs.offset = 0;
      t->lhs.offset = 0;
    }

  /* ANYTHING == ANYTHING is pointless.  */
  if (lhs.var == anything_id && rhs.var == anything_id)
    return;

  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
    {
      /* Swap LHS and RHS in place, then reprocess the swapped form.  */
      rhs = t->lhs;
      t->lhs = t->rhs;
      t->rhs = rhs;
      process_constraint (t);
    }
  /* This can happen in our IR with things like n->a = *p */
  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree pointedtotype = TREE_TYPE (pointertype);
      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);

      /* If this is an aggregate of known size, we should have passed
	 this off to do_structure_copy, and it should have broken it
	 up.  */
      gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
		  || get_varinfo (rhs.var)->is_unknown_size_var);

      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* An ADDRESSOF on the RHS must not carry an offset.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      VEC_safe_push (constraint_t, heap, constraints, t);
    }
}
2306
2307 /* Return true if T is a variable of a type that could contain
2308 pointers. */
2309
2310 static bool
2311 could_have_pointers (tree t)
2312 {
2313 tree type = TREE_TYPE (t);
2314
2315 if (POINTER_TYPE_P (type) || AGGREGATE_TYPE_P (type)
2316 || TREE_CODE (type) == COMPLEX_TYPE)
2317 return true;
2318 return false;
2319 }
2320
/* Return the position, in bits, of FIELD_DECL from the beginning of its
   structure.  Returns -1 (which wraps to all-ones, since the return
   type is unsigned) when the field's position is not a compile-time
   constant.  */

static unsigned HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{

  /* Variably-positioned fields have no fixed bit position.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
      || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
    return -1;

  /* NOTE(review): the byte offset is scaled by a literal 8 —
     presumably this should be BITS_PER_UNIT; confirm for targets
     where BITS_PER_UNIT != 8.  */
  return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
	 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
}
2335
2336
2337 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2338 overlaps with a field at [FIELDPOS, FIELDSIZE] */
2339
2340 static bool
2341 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2342 const unsigned HOST_WIDE_INT fieldsize,
2343 const unsigned HOST_WIDE_INT accesspos,
2344 const unsigned HOST_WIDE_INT accesssize)
2345 {
2346 if (fieldpos == accesspos && fieldsize == accesssize)
2347 return true;
2348 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2349 return true;
2350 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2351 return true;
2352
2353 return false;
2354 }
2355
/* Given a COMPONENT_REF T, push the constraint_expr for it onto
   *RESULTS.  Exactly one expression is appended (asserted below),
   except for the STRING_CST early-out.  */

static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;
  struct constraint_expr *result;
  unsigned int beforelength = VEC_length (ce_s, *results);

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
    forzero = TREE_OPERAND (forzero, 0);

  /* A reference rooted at literal zero is treated like plain integer.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  /* Strip the reference down to its base object and bit extent.  */
  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* String constants are readonly, so there is nothing to really do
     here.  */
  if (TREE_CODE (t) == STRING_CST)
    return;

  get_constraint_for (t, results);
  result = VEC_last (ce_s, *results);
  result->offset = bitpos;

  gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));

  /* This can also happen due to weird offsetof type macros.  */
  if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
    result->type = SCALAR;

  if (result->type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint. When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (result->offset < get_varinfo (result->var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  varinfo_t curr;
	  for (curr = get_varinfo (result->var); curr; curr = curr->next)
	    {
	      if (offset_overlaps_with_access (curr->offset, curr->size,
					       result->offset, bitmaxsize))
		{
		  result->var = curr->id;
		  break;
		}
	    }
	  /* assert that we found *some* field there. The user couldn't be
	     accessing *only* padding.  */
	  /* Still the user could access one past the end of an array
	     embedded in a struct resulting in accessing *only* padding.  */
	  gcc_assert (curr || ref_contains_array_ref (orig_t));
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");

      /* The offset has been folded into the chosen field variable;
	 reset it so the constraint refers to the field directly.  */
      result->offset = 0;
    }
}
2446
2447
2448 /* Dereference the constraint expression CONS, and return the result.
2449 DEREF (ADDRESSOF) = SCALAR
2450 DEREF (SCALAR) = DEREF
2451 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2452 This is needed so that we can handle dereferencing DEREF constraints. */
2453
2454 static void
2455 do_deref (VEC (ce_s, heap) **constraints)
2456 {
2457 struct constraint_expr *c;
2458 unsigned int i = 0;
2459
2460 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2461 {
2462 if (c->type == SCALAR)
2463 c->type = DEREF;
2464 else if (c->type == ADDRESSOF)
2465 c->type = SCALAR;
2466 else if (c->type == DEREF)
2467 {
2468 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2469 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2470 process_constraint (new_constraint (tmplhs, *c));
2471 c->var = tmplhs.var;
2472 }
2473 else
2474 gcc_unreachable ();
2475 }
2476 }
2477
/* Given a tree T, push the constraint expression(s) for it onto
   *RESULTS.  Most cases append exactly one expression; ADDR_EXPR of
   an array or aggregate may append one per field.  */

static void
get_constraint_for (tree t, VEC (ce_s, heap) **results)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.  */
  if (TREE_CODE (t) == INTEGER_CST
      && !POINTER_TYPE_P (TREE_TYPE (t)))
    {
      temp.var = integer_id;
      temp.type = SCALAR;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }
  /* NOTE(review): this arm is only reached for pointer-typed
     INTEGER_CSTs, because the first arm already consumed the
     non-pointer ones.  */
  else if (TREE_CODE (t) == INTEGER_CST
	   && integer_zerop (t))
    {
      temp.var = nothing_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    {
	      struct constraint_expr *c;
	      unsigned int i;
	      tree exp = TREE_OPERAND (t, 0);
	      tree pttype = TREE_TYPE (TREE_TYPE (t));

	      get_constraint_for (exp, results);
	      /* Make sure we capture constraints to all elements
		 of an array.  */
	      if ((handled_component_p (exp)
		   && ref_contains_array_ref (exp))
		  || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
		{
		  struct constraint_expr *origrhs;
		  varinfo_t origvar;
		  struct constraint_expr tmp;

		  if (VEC_length (ce_s, *results) == 0)
		    return;

		  /* Replace the single result with one entry per
		     field of its variable.  */
		  gcc_assert (VEC_length (ce_s, *results) == 1);
		  origrhs = VEC_last (ce_s, *results);
		  tmp = *origrhs;
		  VEC_pop (ce_s, *results);
		  origvar = get_varinfo (origrhs->var);
		  for (; origvar; origvar = origvar->next)
		    {
		      tmp.var = origvar->id;
		      VEC_safe_push (ce_s, heap, *results, &tmp);
		    }
		}
	      else if (VEC_length (ce_s, *results) == 1
		       && (AGGREGATE_TYPE_P (pttype)
			   || TREE_CODE (pttype) == COMPLEX_TYPE))
		{
		  /* Same field expansion for the address of an
		     aggregate or complex object.  */
		  struct constraint_expr *origrhs;
		  varinfo_t origvar;
		  struct constraint_expr tmp;

		  gcc_assert (VEC_length (ce_s, *results) == 1);
		  origrhs = VEC_last (ce_s, *results);
		  tmp = *origrhs;
		  VEC_pop (ce_s, *results);
		  origvar = get_varinfo (origrhs->var);
		  for (; origvar; origvar = origvar->next)
		    {
		      tmp.var = origvar->id;
		      VEC_safe_push (ce_s, heap, *results, &tmp);
		    }
		}

	      /* Taking an address turns DEREF into SCALAR and
		 everything else into ADDRESSOF.  */
	      for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
		{
		  if (c->type == DEREF)
		    c->type = SCALAR;
		  else
		    c->type = ADDRESSOF;
		}
	      return;
	    }
	    break;
	  case CALL_EXPR:
	    /* XXX: In interprocedural mode, if we didn't have the
	       body, we would need to do *each pointer argument =
	       &ANYTHING added.  */
	    if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
	      {
		/* Allocation calls return the address of a (possibly
		   shared) heap variable associated with the call.  */
		varinfo_t vi;
		tree heapvar = heapvar_lookup (t);

		if (heapvar == NULL)
		  {
		    heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
		    DECL_EXTERNAL (heapvar) = 1;
		    get_var_ann (heapvar)->is_heapvar = 1;
		    if (gimple_referenced_vars (cfun))
		      add_referenced_var (heapvar);
		    heapvar_insert (t, heapvar);
		  }

		temp.var = create_variable_info_for (heapvar,
						     alias_get_name (heapvar));

		vi = get_varinfo (temp.var);
		vi->is_artificial_var = 1;
		vi->is_heap_var = 1;
		temp.type = ADDRESSOF;
		temp.offset = 0;
		VEC_safe_push (ce_s, heap, *results, &temp);
		return;
	      }
	    else
	      {
		/* Any other call result may point anywhere.  */
		temp.var = anything_id;
		temp.type = SCALAR;
		temp.offset = 0;
		VEC_safe_push (ce_s, heap, *results, &temp);
		return;
	      }
	    break;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
      /* NOTE(review): no break before the next case — every path in
	 the inner switch returns, so the textual fall-through is
	 unreachable; confirm before adding new inner cases.  */
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case INDIRECT_REF:
	    {
	      get_constraint_for (TREE_OPERAND (t, 0), results);
	      do_deref (results);
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results);
	    return;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_unary:
      {
	switch (TREE_CODE (t))
	  {
	  case NOP_EXPR:
	  case CONVERT_EXPR:
	  case NON_LVALUE_EXPR:
	    {
	      tree op = TREE_OPERAND (t, 0);

	      /* Cast from non-pointer to pointers are bad news for us.
		 Anything else, we see through */
	      if (!(POINTER_TYPE_P (TREE_TYPE (t))
		    && ! POINTER_TYPE_P (TREE_TYPE (op))))
		{
		  get_constraint_for (op, results);
		  return;
		}

	      /* FALLTHRU  */
	    }
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case PHI_NODE:
	    {
	      get_constraint_for (PHI_RESULT (t), results);
	      return;
	    }
	    break;
	  case SSA_NAME:
	    {
	      struct constraint_expr temp;
	      temp = get_constraint_exp_from_ssa_var (t);
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	    break;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_declaration:
      {
	struct constraint_expr temp;
	temp = get_constraint_exp_from_ssa_var (t);
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
    default:
      {
	/* Anything we do not understand conservatively points to
	   anything.  */
	temp.type = ADDRESSOF;
	temp.var = anything_id;
	temp.offset = 0;
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
    }
}
2730
2731
2732 /* Handle the structure copy case where we have a simple structure copy
2733 between LHS and RHS that is of SIZE (in bits)
2734
2735 For each field of the lhs variable (lhsfield)
2736 For each field of the rhs variable at lhsfield.offset (rhsfield)
2737 add the constraint lhsfield = rhsfield
2738
2739 If we fail due to some kind of type unsafety or other thing we
2740 can't handle, return false. We expect the caller to collapse the
2741 variable in that case. */
2742
2743 static bool
2744 do_simple_structure_copy (const struct constraint_expr lhs,
2745 const struct constraint_expr rhs,
2746 const unsigned HOST_WIDE_INT size)
2747 {
2748 varinfo_t p = get_varinfo (lhs.var);
2749 unsigned HOST_WIDE_INT pstart, last;
2750 pstart = p->offset;
2751 last = p->offset + size;
2752 for (; p && p->offset < last; p = p->next)
2753 {
2754 varinfo_t q;
2755 struct constraint_expr templhs = lhs;
2756 struct constraint_expr temprhs = rhs;
2757 unsigned HOST_WIDE_INT fieldoffset;
2758
2759 templhs.var = p->id;
2760 q = get_varinfo (temprhs.var);
2761 fieldoffset = p->offset - pstart;
2762 q = first_vi_for_offset (q, q->offset + fieldoffset);
2763 if (!q)
2764 return false;
2765 temprhs.var = q->id;
2766 process_constraint (new_constraint (templhs, temprhs));
2767 }
2768 return true;
2769 }
2770
2771
2772 /* Handle the structure copy case where we have a structure copy between a
2773 aggregate on the LHS and a dereference of a pointer on the RHS
2774 that is of SIZE (in bits)
2775
2776 For each field of the lhs variable (lhsfield)
2777 rhs.offset = lhsfield->offset
2778 add the constraint lhsfield = rhs
2779 */
2780
2781 static void
2782 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2783 const struct constraint_expr rhs,
2784 const unsigned HOST_WIDE_INT size)
2785 {
2786 varinfo_t p = get_varinfo (lhs.var);
2787 unsigned HOST_WIDE_INT pstart,last;
2788 pstart = p->offset;
2789 last = p->offset + size;
2790
2791 for (; p && p->offset < last; p = p->next)
2792 {
2793 varinfo_t q;
2794 struct constraint_expr templhs = lhs;
2795 struct constraint_expr temprhs = rhs;
2796 unsigned HOST_WIDE_INT fieldoffset;
2797
2798
2799 if (templhs.type == SCALAR)
2800 templhs.var = p->id;
2801 else
2802 templhs.offset = p->offset;
2803
2804 q = get_varinfo (temprhs.var);
2805 fieldoffset = p->offset - pstart;
2806 temprhs.offset += fieldoffset;
2807 process_constraint (new_constraint (templhs, temprhs));
2808 }
2809 }
2810
2811 /* Handle the structure copy case where we have a structure copy
2812 between a aggregate on the RHS and a dereference of a pointer on
2813 the LHS that is of SIZE (in bits)
2814
2815 For each field of the rhs variable (rhsfield)
2816 lhs.offset = rhsfield->offset
2817 add the constraint lhs = rhsfield
2818 */
2819
2820 static void
2821 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2822 const struct constraint_expr rhs,
2823 const unsigned HOST_WIDE_INT size)
2824 {
2825 varinfo_t p = get_varinfo (rhs.var);
2826 unsigned HOST_WIDE_INT pstart,last;
2827 pstart = p->offset;
2828 last = p->offset + size;
2829
2830 for (; p && p->offset < last; p = p->next)
2831 {
2832 varinfo_t q;
2833 struct constraint_expr templhs = lhs;
2834 struct constraint_expr temprhs = rhs;
2835 unsigned HOST_WIDE_INT fieldoffset;
2836
2837
2838 if (temprhs.type == SCALAR)
2839 temprhs.var = p->id;
2840 else
2841 temprhs.offset = p->offset;
2842
2843 q = get_varinfo (templhs.var);
2844 fieldoffset = p->offset - pstart;
2845 templhs.offset += fieldoffset;
2846 process_constraint (new_constraint (templhs, temprhs));
2847 }
2848 }
2849
2850 /* Sometimes, frontends like to give us bad type information. This
2851 function will collapse all the fields from VAR to the end of VAR,
2852 into VAR, so that we treat those fields as a single variable.
2853 We return the variable they were collapsed into. */
2854
2855 static unsigned int
2856 collapse_rest_of_var (unsigned int var)
2857 {
2858 varinfo_t currvar = get_varinfo (var);
2859 varinfo_t field;
2860
2861 for (field = currvar->next; field; field = field->next)
2862 {
2863 if (dump_file)
2864 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2865 field->name, currvar->name);
2866
2867 gcc_assert (!field->collapsed_to);
2868 field->collapsed_to = currvar;
2869 }
2870
2871 currvar->next = NULL;
2872 currvar->size = currvar->fullsize - currvar->offset;
2873
2874 return currvar->id;
2875 }
2876
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the operand trees of
   the copy statement.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr lhs, rhs, tmp;
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
  varinfo_t p;
  unsigned HOST_WIDE_INT lhssize;
  unsigned HOST_WIDE_INT rhssize;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for (rhsop, &rhsc);
  gcc_assert (VEC_length (ce_s, lhsc) == 1);
  gcc_assert (VEC_length (ce_s, rhsc) == 1);
  lhs = *(VEC_last (ce_s, lhsc));
  rhs = *(VEC_last (ce_s, rhsc));

  VEC_free (ce_s, heap, lhsc);
  VEC_free (ce_s, heap, rhsc);

  /* If we have special var = x, swap it around.  */
  if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
    {
      tmp = lhs;
      lhs = rhs;
      rhs = tmp;
    }

  /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
     possible it's something we could handle.  However, most cases falling
     into this are dealing with transparent unions, which are slightly
     weird. */
  if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
    {
      /* NOTE(review): this assignment is redundant — rhs.type is
	 already ADDRESSOF when this branch is taken.  */
      rhs.type = ADDRESSOF;
      rhs.var = anything_id;
    }

  /* If the RHS is a special var, or an addressof, set all the LHS fields to
     that special var.  */
  if (rhs.var <= integer_id)
    {
      for (p = get_varinfo (lhs.var); p; p = p->next)
	{
	  struct constraint_expr templhs = lhs;
	  struct constraint_expr temprhs = rhs;

	  if (templhs.type == SCALAR )
	    templhs.var = p->id;
	  else
	    templhs.offset += p->offset;
	  process_constraint (new_constraint (templhs, temprhs));
	}
    }
  else
    {
      tree rhstype = TREE_TYPE (rhsop);
      tree lhstype = TREE_TYPE (lhsop);
      tree rhstypesize;
      tree lhstypesize;

      /* NOTE(review): DECL_SIZE/TYPE_SIZE can presumably be NULL for
	 incomplete types; the TREE_CODE checks below would then
	 dereference NULL — verify callers never pass such operands.  */
      lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
      rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);

      /* If we have a variably sized types on the rhs or lhs, and a deref
	 constraint, add the constraint, lhsconstraint = &ANYTHING.
	 This is conservatively correct because either the lhs is an unknown
	 sized var (if the constraint is SCALAR), or the lhs is a DEREF
	 constraint, and every variable it can point to must be unknown sized
	 anyway, so we don't need to worry about fields at all.  */
      if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
	  || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
	{
	  rhs.var = anything_id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  return;
	}

      /* The size only really matters insofar as we don't set more or less of
	 the variable.  If we hit an unknown size var, the size should be the
	 whole darn thing.  */
      if (get_varinfo (rhs.var)->is_unknown_size_var)
	rhssize = ~0;
      else
	rhssize = TREE_INT_CST_LOW (rhstypesize);

      if (get_varinfo (lhs.var)->is_unknown_size_var)
	lhssize = ~0;
      else
	lhssize = TREE_INT_CST_LOW (lhstypesize);


      if (rhs.type == SCALAR && lhs.type == SCALAR)
	{
	  /* Field-by-field copy; on layout mismatch collapse both
	     sides and fall back to a single whole-variable copy.  */
	  if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
	    {
	      lhs.var = collapse_rest_of_var (lhs.var);
	      rhs.var = collapse_rest_of_var (rhs.var);
	      lhs.offset = 0;
	      rhs.offset = 0;
	      lhs.type = SCALAR;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	}
      else if (lhs.type != DEREF && rhs.type == DEREF)
	do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else if (lhs.type == DEREF && rhs.type != DEREF)
	do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else
	{
	  /* *lhs = *rhs: split through a temporary, recursing for
	     each half.  */
	  tree pointedtotype = lhstype;
	  tree tmpvar;

	  gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
	  tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
	  do_structure_copy (tmpvar, rhsop);
	  do_structure_copy (lhsop, tmpvar);
	}
    }
}
3002
/* Update related alias information kept in AI.  This is used when
   building name tags, alias sets and deciding grouping heuristics.
   STMT is the statement to process.  This function also updates
   ADDRESSABLE_VARS.  */

static void
update_alias_info (tree stmt, struct alias_info *ai)
{
  bitmap addr_taken;
  use_operand_p use_p;
  ssa_op_iter iter;
  enum escape_type stmt_escape_type = is_escape_site (stmt);

  /* Keep per-function call statistics up to date.  */
  if (stmt_escape_type == ESCAPE_TO_CALL
      || stmt_escape_type == ESCAPE_TO_PURE_CONST)
    {
      ai->num_calls_found++;
      if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
	ai->num_pure_const_calls_found++;
    }

  /* Mark all the variables whose address are taken by the statement.  */
  addr_taken = addresses_taken (stmt);
  if (addr_taken)
    {
      bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);

      /* If STMT is an escape point, all the addresses taken by it are
	 call-clobbered.  */
      if (stmt_escape_type != NO_ESCAPE)
	{
	  bitmap_iterator bi;
	  unsigned i;

	  EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
	    {
	      tree rvar = referenced_var (i);
	      if (!unmodifiable_var_p (rvar))
		mark_call_clobbered (rvar, stmt_escape_type);
	    }
	}
    }

  /* Process each operand use.  If an operand may be aliased, keep
     track of how many times it's being used.  For pointers, determine
     whether they are dereferenced by the statement, or whether their
     value escapes, etc.  */
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
    {
      tree op, var;
      var_ann_t v_ann;
      struct ptr_info_def *pi;
      bool is_store, is_potential_deref;
      unsigned num_uses, num_derefs;

      op = USE_FROM_PTR (use_p);

      /* If STMT is a PHI node, OP may be an ADDR_EXPR.  If so, add it
	 to the set of addressable variables.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  bitmap addressable_vars = gimple_addressable_vars (cfun);

	  gcc_assert (TREE_CODE (stmt) == PHI_NODE);
	  gcc_assert (addressable_vars);

	  /* PHI nodes don't have annotations for pinning the set
	     of addresses taken, so we collect them here.

	     FIXME, should we allow PHI nodes to have annotations
	     so that they can be treated like regular statements?
	     Currently, they are treated as second-class
	     statements.  */
	  add_to_addressable_set (TREE_OPERAND (op, 0),
				  &addressable_vars);
	  continue;
	}

      /* Ignore constants.  */
      if (TREE_CODE (op) != SSA_NAME)
	continue;

      var = SSA_NAME_VAR (op);
      /* NOTE(review): V_ANN is set but never read in this function —
	 presumably a leftover; confirm before removing.  */
      v_ann = var_ann (var);

      /* The base variable of an SSA name must be a GIMPLE register, and thus
	 it cannot be aliased.  */
      gcc_assert (!may_be_aliased (var));

      /* We are only interested in pointers.  */
      if (!POINTER_TYPE_P (TREE_TYPE (op)))
	continue;

      pi = get_ptr_info (op);

      /* Add OP to AI->PROCESSED_PTRS, if it's not there already.  */
      if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
	{
	  SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
	  VEC_safe_push (tree, heap, ai->processed_ptrs, op);
	}

      /* If STMT is a PHI node, then it will not have pointer
	 dereferences and it will not be an escape point.  */
      if (TREE_CODE (stmt) == PHI_NODE)
	continue;

      /* Determine whether OP is a dereferenced pointer, and if STMT
	 is an escape point, whether OP escapes.  */
      count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);

      /* Handle a corner case involving address expressions of the
	 form '&PTR->FLD'.  The problem with these expressions is that
	 they do not represent a dereference of PTR.  However, if some
	 other transformation propagates them into an INDIRECT_REF
	 expression, we end up with '*(&PTR->FLD)' which is folded
	 into 'PTR->FLD'.

	 So, if the original code had no other dereferences of PTR,
	 the aliaser will not create memory tags for it, and when
	 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
	 memory operations will receive no VDEF/VUSE operands.

	 One solution would be to have count_uses_and_derefs consider
	 &PTR->FLD a dereference of PTR.  But that is wrong, since it
	 is not really a dereference but an offset calculation.

	 What we do here is to recognize these special ADDR_EXPR
	 nodes.  Since these expressions are never GIMPLE values (they
	 are not GIMPLE invariants), they can only appear on the RHS
	 of an assignment and their base address is always an
	 INDIRECT_REF expression.  */
      is_potential_deref = false;
      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	  && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
	  && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
	{
	  /* If the RHS if of the form &PTR->FLD and PTR == OP, then
	     this represents a potential dereference of PTR.  */
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	  tree base = get_base_address (TREE_OPERAND (rhs, 0));
	  if (TREE_CODE (base) == INDIRECT_REF
	      && TREE_OPERAND (base, 0) == op)
	    is_potential_deref = true;
	}

      if (num_derefs > 0 || is_potential_deref)
	{
	  /* Mark OP as dereferenced.  In a subsequent pass,
	     dereferenced pointers that point to a set of
	     variables will be assigned a name tag to alias
	     all the variables OP points to.  */
	  pi->is_dereferenced = 1;

	  /* If this is a store operation, mark OP as being
	     dereferenced to store, otherwise mark it as being
	     dereferenced to load.  */
	  if (is_store)
	    pointer_set_insert (ai->dereferenced_ptrs_store, var);
	  else
	    pointer_set_insert (ai->dereferenced_ptrs_load, var);
	}

      if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
	{
	  /* If STMT is an escape point and STMT contains at
	     least one direct use of OP, then the value of OP
	     escapes and so the pointed-to variables need to
	     be marked call-clobbered.  */
	  pi->value_escapes_p = 1;
	  pi->escape_mask |= stmt_escape_type;

	  /* If the statement makes a function call, assume
	     that pointer OP will be dereferenced in a store
	     operation inside the called function.  */
	  if (get_call_expr_in (stmt)
	      || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
	    {
	      pointer_set_insert (ai->dereferenced_ptrs_store, var);
	      pi->is_dereferenced = 1;
	    }
	}
    }

  /* The remaining bookkeeping only applies to real statements.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  /* Mark stored variables in STMT as being written to and update the
     reference counter for potentially aliased symbols in STMT.  */
  if (stmt_references_memory_p (stmt) && STORED_SYMS (stmt))
    {
      unsigned i;
      bitmap_iterator bi;
      EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
	pointer_set_insert (ai->written_vars, referenced_var (i));
    }
}
3200
3201
3202 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3203 Expressions of the type PTR + CST can be handled in two ways:
3204
3205 1- If the constraint for PTR is ADDRESSOF for a non-structure
3206 variable, then we can use it directly because adding or
3207 subtracting a constant may not alter the original ADDRESSOF
3208 constraint (i.e., pointer arithmetic may not legally go outside
3209 an object's boundaries).
3210
3211 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3212 then if CST is a compile-time constant that can be used as an
3213 offset, we can determine which sub-variable will be pointed-to
3214 by the expression.
3215
3216 Return true if the expression is handled. For any other kind of
3217 expression, return false so that each operand can be added as a
3218 separate constraint by the caller. */
3219
3220 static bool
3221 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3222 {
3223 tree op0, op1;
3224 struct constraint_expr *c, *c2;
3225 unsigned int i = 0;
3226 unsigned int j = 0;
3227 VEC (ce_s, heap) *temp = NULL;
3228 unsigned int rhsoffset = 0;
3229
3230 if (TREE_CODE (expr) != PLUS_EXPR
3231 && TREE_CODE (expr) != MINUS_EXPR)
3232 return false;
3233
3234 op0 = TREE_OPERAND (expr, 0);
3235 op1 = TREE_OPERAND (expr, 1);
3236
3237 get_constraint_for (op0, &temp);
3238 if (POINTER_TYPE_P (TREE_TYPE (op0))
3239 && TREE_CODE (op1) == INTEGER_CST
3240 && TREE_CODE (expr) == PLUS_EXPR)
3241 {
3242 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3243 }
3244 else
3245 return false;
3246
3247
3248 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3249 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3250 {
3251 if (c2->type == ADDRESSOF && rhsoffset != 0)
3252 {
3253 varinfo_t temp = get_varinfo (c2->var);
3254
3255 /* An access one after the end of an array is valid,
3256 so simply punt on accesses we cannot resolve. */
3257 temp = first_vi_for_offset (temp, rhsoffset);
3258 if (temp == NULL)
3259 continue;
3260 c2->var = temp->id;
3261 c2->offset = 0;
3262 }
3263 else
3264 c2->offset = rhsoffset;
3265 process_constraint (new_constraint (*c, *c2));
3266 }
3267
3268 VEC_free (ce_s, heap, temp);
3269
3270 return true;
3271 }
3272
3273
/* Walk statement ORIGT setting up aliasing constraints according to
   the references found in it.  This function is the main part of the
   constraint builder.  It handles PHI nodes, calls (in IPA mode), and
   plain assignments, and marks the statement for re-scanning.  */

static void
find_func_aliases (tree origt)
{
  tree t = origt;
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  struct constraint_expr *c;

  /* Look through RETURN_EXPR to the returned value/assignment.  */
  if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
    t = TREE_OPERAND (t, 0);

  /* Now build constraints expressions.  */
  if (TREE_CODE (t) == PHI_NODE)
    {
      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));

      /* Only care about pointers and structures containing
	 pointers.  */
      if (could_have_pointers (PHI_RESULT (t)))
	{
	  int i;
	  unsigned int j;

	  /* For a phi node, assign all the arguments to
	     the result.  */
	  get_constraint_for (PHI_RESULT (t), &lhsc);
	  for (i = 0; i < PHI_NUM_ARGS (t); i++)
	    {
	      tree rhstype;
	      tree strippedrhs = PHI_ARG_DEF (t, i);

	      STRIP_NOPS (strippedrhs);
	      rhstype = TREE_TYPE (strippedrhs);
	      get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);

	      /* Emit LHS = RHS for every pair, draining RHSC so it is
		 empty for the next PHI argument.  */
	      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
		{
		  struct constraint_expr *c2;
		  while (VEC_length (ce_s, rhsc) > 0)
		    {
		      c2 = VEC_last (ce_s, rhsc);
		      process_constraint (new_constraint (*c, *c2));
		      VEC_pop (ce_s, rhsc);
		    }
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases, either a
     modify_expr when we are returning a value, or just a plain
     call_expr when we are not.  Malloc-like and alloca-like calls are
     excluded here.  */
  else if (in_ipa_mode
	   && ((TREE_CODE (t) == GIMPLE_MODIFY_STMT
		&& TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
		&& !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
		     & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
	       || (TREE_CODE (t) == CALL_EXPR
		   && !(call_expr_flags (t)
			& (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
    {
      tree lhsop;
      tree rhsop;
      tree arglist;
      varinfo_t fi;
      int i = 1;
      tree decl;
      if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	{
	  lhsop = GIMPLE_STMT_OPERAND (t, 0);
	  rhsop = GIMPLE_STMT_OPERAND (t, 1);
	}
      else
	{
	  lhsop = NULL;
	  rhsop = t;
	}
      decl = get_callee_fndecl (rhsop);

      /* If we can directly resolve the function being called, do so.
	 Otherwise, it must be some sort of indirect expression that
	 we should still be able to handle.  */
      if (decl)
	{
	  fi = get_vi_for_tree (decl);
	}
      else
	{
	  decl = TREE_OPERAND (rhsop, 0);
	  fi = get_vi_for_tree (decl);
	}

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  I counts the parameter slot,
	 starting at 1 (slot 0 is the function variable itself).  */
      arglist = TREE_OPERAND (rhsop, 1);

      for (;arglist; arglist = TREE_CHAIN (arglist))
	{
	  tree arg = TREE_VALUE (arglist);
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;

	  get_constraint_for (arg, &rhsc);
	  /* For an indirect call we must dereference the function
	     variable; for a known FUNCTION_DECL we can address the
	     parameter sub-variable directly.  */
	  if (TREE_CODE (decl) != FUNCTION_DECL)
	    {
	      lhs.type = DEREF;
	      lhs.var = fi->id;
	      lhs.offset = i;
	    }
	  else
	    {
	      lhs.type = SCALAR;
	      lhs.var = first_vi_for_offset (fi, i)->id;
	      lhs.offset = 0;
	    }
	  while (VEC_length (ce_s, rhsc) != 0)
	    {
	      rhsp = VEC_last (ce_s, rhsc);
	      process_constraint (new_constraint (lhs, *rhsp));
	      VEC_pop (ce_s, rhsc);
	    }
	  i++;
	}
      /* If we are returning a value, assign it to the result.  At
	 this point I indexes the return-value slot, one past the last
	 parameter.  */
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  unsigned int j = 0;

	  get_constraint_for (lhsop, &lhsc);
	  if (TREE_CODE (decl) != FUNCTION_DECL)
	    {
	      rhs.type = DEREF;
	      rhs.var = fi->id;
	      rhs.offset = i;
	    }
	  else
	    {
	      rhs.type = SCALAR;
	      rhs.var = first_vi_for_offset (fi, i)->id;
	      rhs.offset = 0;
	    }
	  for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
	    process_constraint (new_constraint (*lhsp, rhs));
	}
    }
  /* Otherwise, just a regular assignment statement.  */
  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
      tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
      int i;

      /* NOTE(review): the second COMPLEX_TYPE test below re-checks
	 LHSOP instead of RHSOP; this looks like a typo.  In GIMPLE the
	 two sides of an assignment normally have compatible types, so
	 it is probably harmless — confirm before changing.  */
      if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
	   || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
	  && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
	      || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
	{
	  do_structure_copy (lhsop, rhsop);
	}
      else
	{
	  /* Only care about operations with pointers, structures
	     containing pointers, dereferences, and call expressions.  */
	  if (could_have_pointers (lhsop)
	      || TREE_CODE (rhsop) == CALL_EXPR)
	    {
	      get_constraint_for (lhsop, &lhsc);
	      switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
		{
		  /* RHS that consist of unary operations,
		     exceptional types, or bare decls/constants, get
		     handled directly by get_constraint_for.  */
		  case tcc_reference:
		  case tcc_declaration:
		  case tcc_constant:
		  case tcc_exceptional:
		  case tcc_expression:
		  case tcc_unary:
		    {
		      unsigned int j;

		      get_constraint_for (rhsop, &rhsc);
		      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
			{
			  struct constraint_expr *c2;
			  unsigned int k;

			  for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
			    process_constraint (new_constraint (*c, *c2));
			}

		    }
		    break;

		  case tcc_binary:
		    {
		      /* For pointer arithmetic of the form
			 PTR + CST, we can simply use PTR's
			 constraint because pointer arithmetic is
			 not allowed to go out of bounds.  */
		      if (handle_ptr_arith (lhsc, rhsop))
			break;
		    }
		    /* FALLTHRU */

		  /* Otherwise, walk each operand.  Notice that we
		     can't use the operand interface because we need
		     to process expressions other than simple operands
		     (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
		  default:
		    for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
		      {
			tree op = TREE_OPERAND (rhsop, i);
			unsigned int j;

			gcc_assert (VEC_length (ce_s, rhsc) == 0);
			get_constraint_for (op, &rhsc);
			for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
			  {
			    struct constraint_expr *c2;
			    while (VEC_length (ce_s, rhsc) > 0)
			      {
				c2 = VEC_last (ce_s, rhsc);
				process_constraint (new_constraint (*c, *c2));
				VEC_pop (ce_s, rhsc);
			      }
			  }
		      }
		  }
	      }
	  }
      }

  /* After promoting variables and computing aliasing we will
     need to re-scan most statements.  FIXME: Try to minimize the
     number of statements re-scanned.  It's not really necessary to
     re-scan *all* statements.  */
  mark_stmt_modified (origt);
  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
}
3522
3523
3524 /* Find the first varinfo in the same variable as START that overlaps with
3525 OFFSET.
3526 Effectively, walk the chain of fields for the variable START to find the
3527 first field that overlaps with OFFSET.
3528 Return NULL if we can't find one. */
3529
3530 static varinfo_t
3531 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3532 {
3533 varinfo_t curr = start;
3534 while (curr)
3535 {
3536 /* We may not find a variable in the field list with the actual
3537 offset when when we have glommed a structure to a variable.
3538 In that case, however, offset should still be within the size
3539 of the variable. */
3540 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3541 return curr;
3542 curr = curr->next;
3543 }
3544 return NULL;
3545 }
3546
3547
3548 /* Insert the varinfo FIELD into the field list for BASE, at the front
3549 of the list. */
3550
3551 static void
3552 insert_into_field_list (varinfo_t base, varinfo_t field)
3553 {
3554 varinfo_t prev = base;
3555 varinfo_t curr = base->next;
3556
3557 field->next = curr;
3558 prev->next = field;
3559 }
3560
3561 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3562 offset. */
3563
3564 static void
3565 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3566 {
3567 varinfo_t prev = base;
3568 varinfo_t curr = base->next;
3569
3570 if (curr == NULL)
3571 {
3572 prev->next = field;
3573 field->next = NULL;
3574 }
3575 else
3576 {
3577 while (curr)
3578 {
3579 if (field->offset <= curr->offset)
3580 break;
3581 prev = curr;
3582 curr = curr->next;
3583 }
3584 field->next = prev->next;
3585 prev->next = field;
3586 }
3587 }
3588
3589 /* qsort comparison function for two fieldoff's PA and PB */
3590
3591 static int
3592 fieldoff_compare (const void *pa, const void *pb)
3593 {
3594 const fieldoff_s *foa = (const fieldoff_s *)pa;
3595 const fieldoff_s *fob = (const fieldoff_s *)pb;
3596 HOST_WIDE_INT foasize, fobsize;
3597
3598 if (foa->offset != fob->offset)
3599 return foa->offset - fob->offset;
3600
3601 foasize = TREE_INT_CST_LOW (foa->size);
3602 fobsize = TREE_INT_CST_LOW (fob->size);
3603 return foasize - fobsize;
3604 }
3605
3606 /* Sort a fieldstack according to the field offset and sizes. */
3607 void
3608 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3609 {
3610 qsort (VEC_address (fieldoff_s, fieldstack),
3611 VEC_length (fieldoff_s, fieldstack),
3612 sizeof (fieldoff_s),
3613 fieldoff_compare);
3614 }
3615
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
   of TYPE onto fieldstack, recording their offsets along the way.
   OFFSET is used to keep track of the offset in this entire structure, rather
   than just the immediately containing structure.  Returns the number
   of fields pushed.
   HAS_UNION is set to true if we find a union type as a field of
   TYPE.  */

int
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
			     HOST_WIDE_INT offset, bool *has_union)
{
  tree field;
  int count = 0;

  /* A complex value decomposes into exactly two fields: the real part
     at OFFSET and the imaginary part immediately after it.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      fieldoff_s *real_part, *img_part;
      real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      real_part->type = TREE_TYPE (type);
      real_part->size = TYPE_SIZE (TREE_TYPE (type));
      real_part->offset = offset;
      real_part->decl = NULL_TREE;

      img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      img_part->type = TREE_TYPE (type);
      img_part->size = TYPE_SIZE (TREE_TYPE (type));
      img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
      img_part->decl = NULL_TREE;

      return 2;
    }

  /* Arrays are expanded element-by-element, each element recursively,
     up to SALIAS_MAX_ARRAY_ELEMENTS elements.  */
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree sz = TYPE_SIZE (type);
      tree elsz = TYPE_SIZE (TREE_TYPE (type));
      HOST_WIDE_INT nr;
      int i;

      /* Punt (push nothing) when the total or element size is missing,
	 not a host integer, or zero.  */
      if (! sz
	  || ! host_integerp (sz, 1)
	  || TREE_INT_CST_LOW (sz) == 0
	  || ! elsz
	  || ! host_integerp (elsz, 1)
	  || TREE_INT_CST_LOW (elsz) == 0)
	return 0;

      nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
      if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
	return 0;

      for (i = 0; i < nr; ++i)
	{
	  bool push = false;
	  int pushed = 0;

	  if (has_union
	      && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
		  || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
	    *has_union = true;

	  if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
	    push = true;
	  else if (!(pushed = push_fields_onto_fieldstack
		     (TREE_TYPE (type), fieldstack,
		      offset + i * TREE_INT_CST_LOW (elsz), has_union)))
	    /* Empty structures may have actual size, like in C++.  So
	       see if we didn't push any subfields and the size is
	       nonzero, push the field onto the stack */
	    push = true;

	  if (push)
	    {
	      fieldoff_s *pair;

	      pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	      pair->type = TREE_TYPE (type);
	      pair->size = elsz;
	      pair->decl = NULL_TREE;
	      pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
	      count++;
	    }
	  else
	    count += pushed;
	}

      return count;
    }

  /* Otherwise TYPE is a record/union: walk its FIELD_DECLs, recursing
     into those that can have sub-variables.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	int pushed = 0;

	if (has_union
	    && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
		|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
	  *has_union = true;

	if (!var_can_have_subvars (field))
	  push = true;
	else if (!(pushed = push_fields_onto_fieldstack
		   (TREE_TYPE (field), fieldstack,
		    offset + bitpos_of_field (field), has_union))
		 && DECL_SIZE (field)
		 && !integer_zerop (DECL_SIZE (field)))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair;

	    pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	    pair->type = TREE_TYPE (field);
	    pair->size = DECL_SIZE (field);
	    pair->decl = field;
	    pair->offset = offset + bitpos_of_field (field);
	    count++;
	  }
	else
	  count += pushed;
      }

  return count;
}
3746
3747 /* Create a constraint from ANYTHING variable to VI. */
3748 static void
3749 make_constraint_from_anything (varinfo_t vi)
3750 {
3751 struct constraint_expr lhs, rhs;
3752
3753 lhs.var = vi->id;
3754 lhs.offset = 0;
3755 lhs.type = SCALAR;
3756
3757 rhs.var = anything_id;
3758 rhs.offset = 0;
3759 rhs.type = ADDRESSOF;
3760 process_constraint (new_constraint (lhs, rhs));
3761 }
3762
3763 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3764 if it is a varargs function. */
3765
3766 static unsigned int
3767 count_num_arguments (tree decl, bool *is_varargs)
3768 {
3769 unsigned int i = 0;
3770 tree t;
3771
3772 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3773 t;
3774 t = TREE_CHAIN (t))
3775 {
3776 if (TREE_VALUE (t) == void_type_node)
3777 break;
3778 i++;
3779 }
3780
3781 if (!t)
3782 *is_varargs = true;
3783 return i;
3784 }
3785
/* Create the varinfo node for the function DECL, named NAME, together
   with one sub-variable per formal argument and one for the return
   value, and return the index of the variable we've created for the
   function.  */

static unsigned int
create_function_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;

  /* Create the variable info.  */

  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = 0;
  vi->size = 1;
  /* Slot 0 is the function itself; slots 1..N hold its arguments.  */
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);

  stats.total_vars++;

  /* If it's varargs, we don't know how many arguments it has, so we
     can't do much.
  */
  if (is_varargs)
    {
      vi->fullsize = ~0;
      vi->size = ~0;
      vi->is_unknown_size_var = true;
      return index;
    }


  arg = DECL_ARGUMENTS (decl);

  /* Set up variables for each argument.  ARG may run out before
     FULLSIZE does (e.g. a prototype without a body); in that case the
     sub-variable is attached to DECL itself.  */
  for (i = 1; i < vi->fullsize; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      newindex = VEC_length (varinfo_t, varmap);
      /* Names like "foo.arg0"; GC-duplicate because varinfo names must
	 outlive the temporary asprintf buffer.  */
      asprintf (&tempname, "%s.arg%d", name, i-1);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newindex, newname);
      argvi->decl = argdecl;
      VEC_safe_push (varinfo_t, heap, varmap, argvi);
      argvi->offset = i;
      argvi->size = 1;
      argvi->fullsize = vi->fullsize;
      argvi->has_union = false;
      insert_into_field_list_sorted (vi, argvi);
      stats.total_vars ++;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = TREE_CHAIN (arg);
	}
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree resultdecl = decl;

      vi->fullsize ++;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newindex, newname);
      resultvi->decl = resultdecl;
      VEC_safe_push (varinfo_t, heap, varmap, resultvi);
      resultvi->offset = i;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->has_union = false;
      insert_into_field_list_sorted (vi, resultvi);
      stats.total_vars ++;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }
  return index;
}
3892
3893
3894 /* Return true if FIELDSTACK contains fields that overlap.
3895 FIELDSTACK is assumed to be sorted by offset. */
3896
3897 static bool
3898 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3899 {
3900 fieldoff_s *fo = NULL;
3901 unsigned int i;
3902 HOST_WIDE_INT lastoffset = -1;
3903
3904 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3905 {
3906 if (fo->offset == lastoffset)
3907 return true;
3908 lastoffset = fo->offset;
3909 }
3910 return false;
3911 }
3912
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  Returns the index of the varinfo created for DECL.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree decltype = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
  bool notokay = false;
  bool hasunion;
  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
  VEC (fieldoff_s,heap) *fieldstack = NULL;

  /* Functions get their own creation path in IPA mode.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
    return create_function_info_for (decl, name);

  hasunion = TREE_CODE (decltype) == UNION_TYPE
	     || TREE_CODE (decltype) == QUAL_UNION_TYPE;
  if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
    {
      /* Collect the fields; a union anywhere inside disables
	 field-sensitivity for this variable.  */
      push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
      if (hasunion)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  notokay = true;
	}
    }


  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = hasunion;
  if (!declsize
      || TREE_CODE (declsize) != INTEGER_CST
      || TREE_CODE (decltype) == UNION_TYPE
      || TREE_CODE (decltype) == QUAL_UNION_TYPE)
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
    }

  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);
  /* Globals may be written to from outside the unit (unless we see
     the whole program), so they can point to anything.  */
  if (is_global && (!flag_whole_program || !in_ipa_mode))
    make_constraint_from_anything (vi);

  stats.total_vars++;
  if (use_field_sensitive
      && !notokay
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl)
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      unsigned int newindex = VEC_length (varinfo_t, varmap);
      fieldoff_s *fo = NULL;
      unsigned int i;

      /* A field with no size, a variable size, or a negative offset
	 defeats field-sensitivity.  */
      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (! fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}


      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      /* Fall back to a single unknown-size variable when we cannot do
	 field-sensitive analysis.  */
      if (fo == NULL || notokay)
	{
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return index;
	}

      /* VI itself represents the first field; the remaining fields get
	 their own varinfos.  Walk the sorted stack from last to first:
	 insert_into_field_list prepends, so the chain ends up in
	 ascending offset order.  */
      vi->size = TREE_INT_CST_LOW (fo->size);
      vi->offset = fo->offset;
      for (i = VEC_length (fieldoff_s, fieldstack) - 1;
	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
	   i--)
	{
	  varinfo_t newvi;
	  const char *newname = "NULL";
	  char *tempname;

	  newindex = VEC_length (varinfo_t, varmap);
	  /* Pretty names are only needed when dumping.  */
	  if (dump_file)
	    {
	      if (fo->decl)
		asprintf (&tempname, "%s.%s",
			  vi->name, alias_get_name (fo->decl));
	      else
		asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
			  vi->name, fo->offset);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	  newvi = new_var_info (decl, newindex, newname);
	  newvi->offset = fo->offset;
	  newvi->size = TREE_INT_CST_LOW (fo->size);
	  newvi->fullsize = vi->fullsize;
	  insert_into_field_list (vi, newvi);
	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
	  if (is_global && (!flag_whole_program || !in_ipa_mode))
	    make_constraint_from_anything (newvi);

	  stats.total_vars++;
	}
      VEC_free (fieldoff_s, heap, fieldstack);
    }
  return index;
}
4058
4059 /* Print out the points-to solution for VAR to FILE. */
4060
4061 void
4062 dump_solution_for_var (FILE *file, unsigned int var)
4063 {
4064 varinfo_t vi = get_varinfo (var);
4065 unsigned int i;
4066 bitmap_iterator bi;
4067
4068 if (find (var) != var)
4069 {
4070 varinfo_t vipt = get_varinfo (find (var));
4071 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4072 }
4073 else
4074 {
4075 fprintf (file, "%s = { ", vi->name);
4076 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4077 {
4078 fprintf (file, "%s ", get_varinfo (i)->name);
4079 }
4080 fprintf (file, "}\n");
4081 }
4082 }
4083
/* Print the points-to solution for VAR to stdout.  Convenience
   wrapper around dump_solution_for_var, intended for use from a
   debugger.  */

void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
4091
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  Every pointer-bearing incoming
   argument is constrained to point either to ANYTHING or, under
   -fargument-noalias-anything, to a private heap variable.  */

static void
intra_create_variable_infos (void)
{
  tree t;
  struct constraint_expr lhs, rhs;

  /* For each incoming pointer argument arg, ARG = ANYTHING or a
     dummy variable if flag_argument_noalias > 2.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      varinfo_t p;

      if (!could_have_pointers (t))
	continue;

      /* With flag_argument_noalias greater than two means that the incoming
	 argument cannot alias anything except for itself so create a HEAP
	 variable.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && flag_argument_noalias > 2)
	{
	  varinfo_t vi;
	  tree heapvar = heapvar_lookup (t);

	  lhs.offset = 0;
	  lhs.type = SCALAR;
	  lhs.var = get_vi_for_tree (t)->id;

	  /* Create (and cache) the per-argument heap variable the
	     first time this argument is seen.  */
	  if (heapvar == NULL_TREE)
	    {
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      get_var_ann (heapvar)->is_heapvar = 1;
	      DECL_EXTERNAL (heapvar) = 1;
	      if (gimple_referenced_vars (cfun))
		add_referenced_var (heapvar);
	      heapvar_insert (t, heapvar);
	    }
	  vi = get_vi_for_tree (heapvar);
	  vi->is_artificial_var = 1;
	  vi->is_heap_var = 1;
	  rhs.var = vi->id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  /* Constrain the argument and each of its field
	     sub-variables to point to the heap variable.  */
	  for (p = get_varinfo (lhs.var); p; p = p->next)
	    {
	      struct constraint_expr temp = lhs;
	      temp.var = p->id;
	      process_constraint (new_constraint (temp, rhs));
	    }
	}
      else
	{
	  varinfo_t arg_vi = get_vi_for_tree (t);

	  /* Otherwise the argument (and its fields) may point
	     anywhere.  */
	  for (p = arg_vi; p; p = p->next)
	    make_constraint_from_anything (p);
	}
    }
}
4155
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, which came from variable PTR.
   For variables that are actually dereferenced, we also use type
   based alias analysis to prune the points-to sets.  */

static void
set_uids_in_ptset (tree ptr, bitmap into, bitmap from)
{
  unsigned int i;
  bitmap_iterator bi;
  subvar_t sv;
  HOST_WIDE_INT ptr_alias_set = get_alias_set (TREE_TYPE (ptr));

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);
      unsigned HOST_WIDE_INT var_alias_set;

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
	{
	  /* Variables containing unions may need to be converted to
	     their SFT's, because SFT's can have unions and we cannot.  */
	  for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
	    bitmap_set_bit (into, DECL_UID (sv->var));
	}
      else if (TREE_CODE (vi->decl) == VAR_DECL
	       || TREE_CODE (vi->decl) == PARM_DECL)
	{
	  if (var_can_have_subvars (vi->decl)
	      && get_subvars_for_var (vi->decl))
	    {
	      /* If VI->DECL is an aggregate for which we created
		 SFTs, add the SFT corresponding to VI->OFFSET.  */
	      tree sft = get_subvar_at (vi->decl, vi->offset);
	      if (sft)
		{
		  /* Only apply TBAA pruning to variables that the
		     pointer is known to be dereferenced through.  */
		  var_alias_set = get_alias_set (sft);
		  if (!vi->directly_dereferenced
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (sft));
		}
	    }
	  else
	    {
	      /* Otherwise, just add VI->DECL to the alias set.
		 Don't type prune artificial vars.  */
	      if (vi->is_artificial_var)
		bitmap_set_bit (into, DECL_UID (vi->decl));
	      else
		{
		  var_alias_set = get_alias_set (vi->decl);
		  if (!vi->directly_dereferenced
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (vi->decl));
		}
	    }
	}
    }
}
4220
4221
4222 static bool have_alias_info = false;
4223
4224 /* The list of SMT's that are in use by our pointer variables. This
4225 is the set of SMT's for all pointers that can point to anything. */
4226 static bitmap used_smts;
4227
4228 /* Due to the ordering of points-to set calculation and SMT
4229 calculation being a bit co-dependent, we can't just calculate SMT
4230 used info whenever we want, we have to calculate it around the time
4231 that find_what_p_points_to is called. */
4232
/* Mark which SMT's are in use by points-to anything variables.
   Populates the file-scope USED_SMTS bitmap with the DECL_UIDs of the
   symbol memory tags of every relevant pointer whose solution
   includes ANYTHING.  */

void
set_used_smts (void)
{
  int i;
  varinfo_t vi;
  used_smts = BITMAP_ALLOC (&pta_obstack);

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
    {
      tree var = vi->decl;
      tree smt;
      var_ann_t va;
      struct ptr_info_def *pi = NULL;

      /* For parm decls, the pointer info may be under the default
	 def.  */
      if (TREE_CODE (vi->decl) == PARM_DECL
	  && gimple_default_def (cfun, var))
	pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
      else if (TREE_CODE (var) == SSA_NAME)
	pi = SSA_NAME_PTR_INFO (var);

      /* Skip the special variables and those without their own
	 solution set.  Also skip non-dereferenced pointers,
	 unaliased VAR_DECLs, and non-pointers.  */
      if (vi->is_special_var || find (vi->id) != vi->id
	  || !SSA_VAR_P (var)
	  || (pi && !pi->is_dereferenced)
	  || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
	  || !POINTER_TYPE_P (TREE_TYPE (var)))
	continue;

      /* The annotation (and hence the SMT) lives on the underlying
	 symbol, not on the SSA name.  */
      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);

      va = var_ann (var);
      if (!va)
	continue;

      smt = va->symbol_mem_tag;
      if (smt && bitmap_bit_p (vi->solution, anything_id))
	bitmap_set_bit (used_smts, DECL_UID (smt));
    }
}
4278
/* Merge the necessary SMT's into the solution set for VI, which is
   P's varinfo.  This involves merging all SMT's that are a subset of
   the SMT necessary for P: the tag itself, every used SMT whose alias
   set is a subset of the tag's, and the tag's own alias set.  */

static void
merge_smts_into (tree p, varinfo_t vi)
{
  unsigned int i;
  bitmap_iterator bi;
  tree smt;
  bitmap aliases;
  tree var = p;

  /* The symbol memory tag hangs off the underlying symbol.  */
  if (TREE_CODE (p) == SSA_NAME)
    var = SSA_NAME_VAR (p);

  smt = var_ann (var)->symbol_mem_tag;
  if (smt)
    {
      HOST_WIDE_INT smtset = get_alias_set (TREE_TYPE (smt));

      /* Need to set the SMT subsets first before this
	 will work properly.  */
      bitmap_set_bit (vi->finished_solution, DECL_UID (smt));
      EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
	{
	  tree newsmt = referenced_var (i);
	  tree newsmttype = TREE_TYPE (newsmt);

	  /* Pull in every used SMT whose alias set is contained in
	     this tag's alias set.  */
	  if (alias_set_subset_of (get_alias_set (newsmttype),
				   smtset))
	    bitmap_set_bit (vi->finished_solution, i);
	}

      aliases = MTAG_ALIASES (smt);
      if (aliases)
	bitmap_ior_into (vi->finished_solution, aliases);
    }
}
4318
4319 /* Given a pointer variable P, fill in its points-to set, or return
4320 false if we can't.
4321 Rather than return false for variables that point-to anything, we
4322 instead find the corresponding SMT, and merge in its aliases. In
4323 addition to these aliases, we also set the bits for the SMT's
4324 themselves and their subsets, as SMT's are still in use by
4325 non-SSA_NAME's, and pruning may eliminate every one of their
4326 aliases. In such a case, if we did not include the right set of
4327 SMT's in the points-to set of the variable, we'd end up with
4328 statements that do not conflict but should. */
4329
4330 bool
4331 find_what_p_points_to (tree p)
4332 {
4333 tree lookup_p = p;
4334 varinfo_t vi;
4335
/* Points-to information is only available after the solver has run;
   have_alias_info is set at the end of compute_points_to_sets.  */
4336 if (!have_alias_info)
4337 return false;
4338
4339 /* For parameters, get at the points-to set for the actual parm
4340 decl. */
4341 if (TREE_CODE (p) == SSA_NAME
4342 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4343 && SSA_NAME_IS_DEFAULT_DEF (p))
4344 lookup_p = SSA_NAME_VAR (p);
4345
4346 vi = lookup_vi_for_tree (lookup_p);
4347 if (vi)
4348 {
/* The special constraint variables (NULL, ANYTHING, READONLY,
   INTEGER) are artificial and have no usable points-to set.  */
4349 if (vi->is_artificial_var)
4350 return false;
4351
4352 /* See if this is a field or a structure. */
4353 if (vi->size != vi->fullsize)
4354 {
4355 /* Nothing currently asks about structure fields directly,
4356 but when they do, we need code here to hand back the
4357 points-to set. */
4358 if (!var_can_have_subvars (vi->decl)
4359 || get_subvars_for_var (vi->decl) == NULL)
4360 return false;
4361 }
4362 else
4363 {
4364 struct ptr_info_def *pi = get_ptr_info (p);
4365 unsigned int i;
4366 bitmap_iterator bi;
4367 bool was_pt_anything = false;
4368
/* Only dereferenced pointers get a points-to set handed back.  */
4369 if (!pi->is_dereferenced)
4370 return false;
4371
4372 /* This variable may have been collapsed, let's get the real
4373 variable. */
4374 vi = get_varinfo (find (vi->id));
4375
4376 /* Translate artificial variables into SSA_NAME_PTR_INFO
4377 attributes. */
4378 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4379 {
4380 varinfo_t vi = get_varinfo (i);
4381
4382 if (vi->is_artificial_var)
4383 {
4384 /* FIXME. READONLY should be handled better so that
4385 flow insensitive aliasing can disregard writable
4386 aliases. */
4387 if (vi->id == nothing_id)
4388 pi->pt_null = 1;
4389 else if (vi->id == anything_id)
4390 was_pt_anything = 1;
4391 else if (vi->id == readonly_id)
4392 was_pt_anything = 1;
4393 else if (vi->id == integer_id)
4394 was_pt_anything = 1;
4395 else if (vi->is_heap_var)
4396 pi->pt_global_mem = 1;
4397 }
4398 }
4399
4400 /* Share the final set of variables between the SSA_NAME
4401 pointer infos for collapsed nodes that are collapsed to
4402 non-special variables. This is because special vars have
4403 no real types associated with them, so while we know the
4404 pointers are equivalent to them, we need to generate the
4405 solution separately since it will include SMT's from the
4406 original non-collapsed variable. */
4407 if (!vi->is_special_var && vi->finished_solution)
4408 {
4409 pi->pt_vars = vi->finished_solution;
4410 }
4411 else
4412 {
/* Build (and cache on VI) the finished solution.  The bitmap is
   GC-allocated since SSA_NAME_PTR_INFO points into it.  */
4413 vi->finished_solution = BITMAP_GGC_ALLOC ();
4414 stats.points_to_sets_created++;
4415
4416 /* Instead of using pt_anything, we instead merge in the SMT
4417 aliases for the underlying SMT. */
4418 if (was_pt_anything)
4419 {
4420 merge_smts_into (p, vi);
4421 pi->pt_global_mem = 1;
4422 }
4423
4424 set_uids_in_ptset (vi->decl, vi->finished_solution, vi->solution);
4425 pi->pt_vars = vi->finished_solution;
4426 }
4427
/* Canonicalize: an empty points-to set is represented as a NULL
   pt_vars.  */
4428 if (bitmap_empty_p (pi->pt_vars))
4429 pi->pt_vars = NULL;
4430
4431 return true;
4432 }
4433 }
4434
4435 return false;
4436 }
4437
4438
4439
4440 /* Dump points-to information to OUTFILE. */
4441
4442 void
4443 dump_sa_points_to_info (FILE *outfile)
4444 {
4445 unsigned int i;
4446
4447 fprintf (outfile, "\nPoints-to sets\n\n");
4448
4449 if (dump_flags & TDF_STATS)
4450 {
4451 fprintf (outfile, "Stats:\n");
4452 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4453 fprintf (outfile, "Non-pointer vars: %d\n",
4454 stats.nonpointer_vars);
4455 fprintf (outfile, "Statically unified vars: %d\n",
4456 stats.unified_vars_static);
4457 fprintf (outfile, "Dynamically unified vars: %d\n",
4458 stats.unified_vars_dynamic);
4459 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4460 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4461 fprintf (outfile, "Number of implicit edges: %d\n",
4462 stats.num_implicit_edges);
4463 }
4464
4465 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4466 dump_solution_for_var (outfile, i);
4467 }
4468
4469
/* Dump points-to information to stderr; convenience entry point for
   use from the debugger.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
4477
4478
4479 /* Initialize the always-existing constraint variables for NULL
4480 ANYTHING, READONLY, and INTEGER */
4481
4482 static void
4483 init_base_vars (void)
4484 {
4485 struct constraint_expr lhs, rhs;
4486
4487 /* Create the NULL variable, used to represent that a variable points
4488 to NULL. */
4489 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4490 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4491 insert_vi_for_tree (nothing_tree, var_nothing);
4492 var_nothing->is_artificial_var = 1;
4493 var_nothing->offset = 0;
4494 var_nothing->size = ~0;
4495 var_nothing->fullsize = ~0;
4496 var_nothing->is_special_var = 1;
4497 nothing_id = 0;
4498 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4499
4500 /* Create the ANYTHING variable, used to represent that a variable
4501 points to some unknown piece of memory. */
4502 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4503 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
4504 insert_vi_for_tree (anything_tree, var_anything);
4505 var_anything->is_artificial_var = 1;
4506 var_anything->size = ~0;
4507 var_anything->offset = 0;
4508 var_anything->next = NULL;
4509 var_anything->fullsize = ~0;
4510 var_anything->is_special_var = 1;
4511 anything_id = 1;
4512
4513 /* Anything points to anything. This makes deref constraints just
4514 work in the presence of linked list and other p = *p type loops,
4515 by saying that *ANYTHING = ANYTHING. */
4516 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4517 lhs.type = SCALAR;
4518 lhs.var = anything_id;
4519 lhs.offset = 0;
4520 rhs.type = ADDRESSOF;
4521 rhs.var = anything_id;
4522 rhs.offset = 0;
4523
4524 /* This specifically does not use process_constraint because
4525 process_constraint ignores all anything = anything constraints, since all
4526 but this one are redundant. */
4527 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4528
4529 /* Create the READONLY variable, used to represent that a variable
4530 points to readonly memory. */
4531 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4532 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
4533 var_readonly->is_artificial_var = 1;
4534 var_readonly->offset = 0;
4535 var_readonly->size = ~0;
4536 var_readonly->fullsize = ~0;
4537 var_readonly->next = NULL;
4538 var_readonly->is_special_var = 1;
4539 insert_vi_for_tree (readonly_tree, var_readonly);
4540 readonly_id = 2;
4541 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4542
4543 /* readonly memory points to anything, in order to make deref
4544 easier. In reality, it points to anything the particular
4545 readonly variable can point to, but we don't track this
4546 separately. */
4547 lhs.type = SCALAR;
4548 lhs.var = readonly_id;
4549 lhs.offset = 0;
4550 rhs.type = ADDRESSOF;
4551 rhs.var = anything_id;
4552 rhs.offset = 0;
4553
4554 process_constraint (new_constraint (lhs, rhs));
4555
4556 /* Create the INTEGER variable, used to represent that a variable points
4557 to an INTEGER. */
4558 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4559 var_integer = new_var_info (integer_tree, 3, "INTEGER");
4560 insert_vi_for_tree (integer_tree, var_integer);
4561 var_integer->is_artificial_var = 1;
4562 var_integer->size = ~0;
4563 var_integer->fullsize = ~0;
4564 var_integer->offset = 0;
4565 var_integer->next = NULL;
4566 var_integer->is_special_var = 1;
4567 integer_id = 3;
4568 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4569
4570 /* INTEGER = ANYTHING, because we don't know where a dereference of
4571 a random integer will point to. */
4572 lhs.type = SCALAR;
4573 lhs.var = integer_id;
4574 lhs.offset = 0;
4575 rhs.type = ADDRESSOF;
4576 rhs.var = anything_id;
4577 rhs.offset = 0;
4578 process_constraint (new_constraint (lhs, rhs));
4579 }
4580
4581 /* Initialize things necessary to perform PTA */
4582
4583 static void
4584 init_alias_vars (void)
4585 {
4586 bitmap_obstack_initialize (&pta_obstack);
4587 bitmap_obstack_initialize (&oldpta_obstack);
4588 bitmap_obstack_initialize (&predbitmap_obstack);
4589
4590 constraint_pool = create_alloc_pool ("Constraint pool",
4591 sizeof (struct constraint), 30);
4592 variable_info_pool = create_alloc_pool ("Variable info pool",
4593 sizeof (struct variable_info), 30);
4594 constraints = VEC_alloc (constraint_t, heap, 8);
4595 varmap = VEC_alloc (varinfo_t, heap, 8);
4596 vi_for_tree = pointer_map_create ();
4597
4598 memset (&stats, 0, sizeof (stats));
4599
4600 init_base_vars ();
4601 }
4602
4603 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
4604 predecessor edges. */
4605
4606 static void
4607 remove_preds_and_fake_succs (constraint_graph_t graph)
4608 {
4609 unsigned int i;
4610
4611 /* Clear the implicit ref and address nodes from the successor
4612 lists. */
4613 for (i = 0; i < FIRST_REF_NODE; i++)
4614 {
4615 if (graph->succs[i])
4616 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
4617 FIRST_REF_NODE * 2);
4618 }
4619
4620 /* Free the successor list for the non-ref nodes. */
4621 for (i = FIRST_REF_NODE; i < graph->size; i++)
4622 {
4623 if (graph->succs[i])
4624 BITMAP_FREE (graph->succs[i]);
4625 }
4626
4627 /* Now reallocate the size of the successor list as, and blow away
4628 the predecessor bitmaps. */
4629 graph->size = VEC_length (varinfo_t, varmap);
4630 graph->succs = xrealloc (graph->succs, graph->size * sizeof (bitmap));
4631
4632 free (graph->implicit_preds);
4633 graph->implicit_preds = NULL;
4634 free (graph->preds);
4635 graph->preds = NULL;
4636 bitmap_obstack_release (&predbitmap_obstack);
4637 }
4638
4639 /* Create points-to sets for the current function. See the comments
4640 at the start of the file for an algorithmic overview. */
4641
4642 void
4643 compute_points_to_sets (struct alias_info *ai)
4644 {
4645 struct scc_info *si;
4646 basic_block bb;
4647
4648 timevar_push (TV_TREE_PTA);
4649
/* Set up constraint variables, allocation pools, and the
   per-statement heap variable map.  */
4650 init_alias_vars ();
4651 init_alias_heapvars ();
4652
/* Create variable infos for the function's incoming context
   (presumably parameters and the like -- see
   intra_create_variable_infos).  */
4653 intra_create_variable_infos ();
4654
4655 /* Now walk all statements and derive aliases. */
4656 FOR_EACH_BB (bb)
4657 {
4658 block_stmt_iterator bsi;
4659 tree phi;
4660
4661 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4662 {
4663 if (is_gimple_reg (PHI_RESULT (phi)))
4664 {
4665 find_func_aliases (phi);
4666
4667 /* Update various related attributes like escaped
4668 addresses, pointer dereferences for loads and stores.
4669 This is used when creating name tags and alias
4670 sets. */
4671 update_alias_info (phi, ai);
4672 }
4673 }
4674
4675 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4676 {
4677 tree stmt = bsi_stmt (bsi);
4678
4679 find_func_aliases (stmt);
4680
4681 /* Update various related attributes like escaped
4682 addresses, pointer dereferences for loads and stores.
4683 This is used when creating name tags and alias
4684 sets. */
4685 update_alias_info (stmt, ai);
4686 }
4687 }
4688
4689
4690 if (dump_file)
4691 {
4692 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4693 dump_constraints (dump_file);
4694 }
4695
4696 if (dump_file)
4697 fprintf (dump_file,
4698 "\nCollapsing static cycles and doing variable "
4699 "substitution:\n");
/* Build the predecessor graph and run offline variable substitution
   on it to shrink the problem before solving.  */
4700 build_pred_graph ();
4701 si = perform_var_substitution (graph);
4702 move_complex_constraints (graph, si);
4703 free_var_substitution_info (si);
4704
/* Build the successor graph the solver actually iterates over, and
   detect indirect cycles up front.  */
4705 build_succ_graph ();
4706 find_indirect_cycles (graph);
4707
4708 /* Implicit nodes and predecessors are no longer necessary at this
4709 point. */
4710 remove_preds_and_fake_succs (graph);
4711
4712 if (dump_file)
4713 fprintf (dump_file, "\nSolving graph:\n");
4714
4715 solve_graph (graph);
4716
4717 if (dump_file)
4718 dump_sa_points_to_info (dump_file);
4719
/* Mark the points-to information as available for queries such as
   find_what_p_points_to.  */
4720 have_alias_info = true;
4721
4722 timevar_pop (TV_TREE_PTA);
4723 }
4724
4725
4726 /* Delete created points-to sets. */
4727
4728 void
4729 delete_points_to_sets (void)
4730 {
4731 varinfo_t v;
4732 int i;
4733
4734 if (dump_file && (dump_flags & TDF_STATS))
4735 fprintf (dump_file, "Points to sets created:%d\n",
4736 stats.points_to_sets_created);
4737
4738 pointer_map_destroy (vi_for_tree);
4739 bitmap_obstack_release (&pta_obstack);
4740 VEC_free (constraint_t, heap, constraints);
4741
4742 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4743 VEC_free (constraint_t, heap, graph->complex[i]);
4744
4745 free (graph->rep);
4746 free (graph->succs);
4747 free (graph->indirect_cycles);
4748 free (graph);
4749
4750 VEC_free (varinfo_t, heap, varmap);
4751 free_alloc_pool (variable_info_pool);
4752 free_alloc_pool (constraint_pool);
4753 have_alias_info = false;
4754 }
4755
4756 /* Return true if we should execute IPA PTA. */
4757 static bool
4758 gate_ipa_pta (void)
4759 {
4760 return (flag_unit_at_a_time != 0
4761 && flag_ipa_pta
4762 /* Don't bother doing anything if the program has errors. */
4763 && !(errorcount || sorrycount));
4764 }
4765
4766 /* Execute the driver for IPA PTA. */
4767 static unsigned int
4768 ipa_pta_execute (void)
4769 {
4770 struct cgraph_node *node;
4771 struct scc_info *si;
4772
4773 in_ipa_mode = 1;
4774 init_alias_heapvars ();
4775 init_alias_vars ();
4776
/* First pass over the call graph: create a function info for each
   node.  Functions visible outside this translation unit can be
   called with arbitrary arguments, so every part of their info is
   constrained from ANYTHING.  */
4777 for (node = cgraph_nodes; node; node = node->next)
4778 {
4779 if (!node->analyzed || cgraph_is_master_clone (node))
4780 {
4781 unsigned int varid;
4782
4783 varid = create_function_info_for (node->decl,
4784 cgraph_node_name (node));
4785 if (node->local.externally_visible)
4786 {
4787 varinfo_t fi = get_varinfo (varid);
4788 for (; fi; fi = fi->next)
4789 make_constraint_from_anything (fi);
4790 }
4791 }
4792 }
/* Second pass: generate constraints from the bodies of all analyzed
   master clones, temporarily switching cfun/current_function_decl to
   each function being scanned.  */
4793 for (node = cgraph_nodes; node; node = node->next)
4794 {
4795 if (node->analyzed && cgraph_is_master_clone (node))
4796 {
4797 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
4798 basic_block bb;
4799 tree old_func_decl = current_function_decl;
4800 if (dump_file)
4801 fprintf (dump_file,
4802 "Generating constraints for %s\n",
4803 cgraph_node_name (node));
4804 push_cfun (cfun);
4805 current_function_decl = node->decl;
4806
4807 FOR_EACH_BB_FN (bb, cfun)
4808 {
4809 block_stmt_iterator bsi;
4810 tree phi;
4811
4812 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4813 {
4814 if (is_gimple_reg (PHI_RESULT (phi)))
4815 {
4816 find_func_aliases (phi);
4817 }
4818 }
4819
4820 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4821 {
4822 tree stmt = bsi_stmt (bsi);
4823 find_func_aliases (stmt);
4824 }
4825 }
4826 current_function_decl = old_func_decl;
4827 pop_cfun ();
4828 }
4829 else
4830 {
4831 /* TODO: Make point to anything; not implemented yet. */
4832 }
4833 }
4834
4835
4836
4837 if (dump_file)
4838 {
4839 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4840 dump_constraints (dump_file);
4841 }
4842
4843 if (dump_file)
4844 fprintf (dump_file,
4845 "\nCollapsing static cycles and doing variable "
4846 "substitution:\n");
4847
/* Same solving pipeline as compute_points_to_sets: pred graph and
   offline substitution, then the successor graph, then solving.  */
4848 build_pred_graph ();
4849 si = perform_var_substitution (graph);
4850 move_complex_constraints (graph, si);
4851 free_var_substitution_info (si);
4852
4853 build_succ_graph ();
4854 find_indirect_cycles (graph);
4855
4856 /* Implicit nodes and predecessors are no longer necessary at this
4857 point. */
4858 remove_preds_and_fake_succs (graph);
4859
4860 if (dump_file)
4861 fprintf (dump_file, "\nSolving graph:\n");
4862
4863 solve_graph (graph);
4864
4865 if (dump_file)
4866 dump_sa_points_to_info (dump_file);
4867
/* The IPA solution is only dumped, not preserved; tear everything
   down again.  */
4868 in_ipa_mode = 0;
4869 delete_alias_heapvars ();
4870 delete_points_to_sets ();
4871 return 0;
4872 }
4873
/* Pass descriptor for the interprocedural points-to analysis ("pta")
   pass, wiring up gate_ipa_pta and ipa_pta_execute above.  */
4874 struct tree_opt_pass pass_ipa_pta =
4875 {
4876 "pta", /* name */
4877 gate_ipa_pta, /* gate */
4878 ipa_pta_execute, /* execute */
4879 NULL, /* sub */
4880 NULL, /* next */
4881 0, /* static_pass_number */
4882 TV_IPA_PTA, /* tv_id */
4883 0, /* properties_required */
4884 0, /* properties_provided */
4885 0, /* properties_destroyed */
4886 0, /* todo_flags_start */
4887 0, /* todo_flags_finish */
4888 0 /* letter */
4889 };
4890
4891 /* Initialize the heapvar for statement mapping. */
4892 void
4893 init_alias_heapvars (void)
4894 {
4895 if (!heapvar_for_stmt)
4896 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
4897 NULL);
4898 }
4899
4900 void
4901 delete_alias_heapvars (void)
4902 {
4903 htab_delete (heapvar_for_stmt);
4904 heapvar_for_stmt = NULL;
4905 }
4906
4907
4908 #include "gt-tree-ssa-structalias.h"