target.h (globalize_decl_name): New.
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "params.h"
52 #include "tree-ssa-structalias.h"
53 #include "cgraph.h"
54 #include "alias.h"
55 #include "pointer-set.h"
56
57 /* The idea behind this analyzer is to generate set constraints from the
58 program, then solve the resulting constraints in order to generate the
59 points-to sets.
60
61 Set constraints are a way of modeling program analysis problems that
62 involve sets. They consist of an inclusion constraint language,
63 describing the variables (each variable is a set) and operations that
64 are involved on the variables, and a set of rules that derive facts
65 from these operations. To solve a system of set constraints, you derive
66 all possible facts under the rules, which gives you the correct sets
67 as a consequence.
68
69 See "Efficient Field-sensitive pointer analysis for C" by "David
70 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
71 http://citeseer.ist.psu.edu/pearce04efficient.html
72
73 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
74 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
75 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76
77 There are three types of real constraint expressions, DEREF,
78 ADDRESSOF, and SCALAR. Each constraint expression consists
79 of a constraint type, a variable, and an offset.
80
81 SCALAR is a constraint expression type used to represent x, whether
82 it appears on the LHS or the RHS of a statement.
83 DEREF is a constraint expression type used to represent *x, whether
84 it appears on the LHS or the RHS of a statement.
85 ADDRESSOF is a constraint expression used to represent &x, whether
86 it appears on the LHS or the RHS of a statement.
87
88 Each pointer variable in the program is assigned an integer id, and
89 each field of a structure variable is assigned an integer id as well.
90
91 Structure variables are linked to their list of fields through a "next
92 field" in each variable that points to the next field in offset
93 order.
94 Each variable for a structure field has
95
96 1. "size", that tells the size in bits of that field.
97 2. "fullsize", that tells the size in bits of the entire structure.
98 3. "offset", that tells the offset in bits from the beginning of the
99 structure to this field.
100
101 Thus,
102 struct f
103 {
104 int a;
105 int b;
106 } foo;
107 int *bar;
108
109 looks like
110
111 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
112 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
113 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
114
115
116 In order to solve the system of set constraints, the following is
117 done:
118
119 1. Each constraint variable x has a solution set associated with it,
120 Sol(x).
121
122 2. Constraints are separated into direct, copy, and complex.
123 Direct constraints are ADDRESSOF constraints that require no extra
124 processing, such as P = &Q
125 Copy constraints are those of the form P = Q.
126 Complex constraints are all the constraints involving dereferences
127 and offsets (including offsetted copies).
128
129 3. All direct constraints of the form P = &Q are processed, such
130 that Q is added to Sol(P)
131
132 4. All complex constraints for a given constraint variable are stored in a
133 linked list attached to that variable's node.
134
135 5. A directed graph is built out of the copy constraints. Each
136 constraint variable is a node in the graph, and an edge from
137 Q to P is added for each copy constraint of the form P = Q
138
139 6. The graph is then walked, and solution sets are
140 propagated along the copy edges, such that an edge from Q to P
141 causes Sol(P) <- Sol(P) union Sol(Q).
142
143 7. As we visit each node, all complex constraints associated with
144 that node are processed by adding appropriate copy edges to the graph, or the
145 appropriate variables to the solution set.
146
147 8. The process of walking the graph is iterated until no solution
148 sets change.
149
150 Prior to walking the graph in steps 6 and 7, We perform static
151 cycle elimination on the constraint graph, as well
152 as off-line variable substitution.
153
154 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
155 on and turned into anything), but isn't. You can just see what offset
156 inside the pointed-to struct it's going to access.
157
158 TODO: Constant bounded arrays can be handled as if they were structs of the
159 same number of elements.
160
161 TODO: Modeling heap and incoming pointers becomes much better if we
162 add fields to them as we discover them, which we could do.
163
164 TODO: We could handle unions, but to be honest, it's probably not
165 worth the pain or slowdown. */
166
/* Map from trees (statements and decls) to the artificial heap
   variables created for them.  GC-managed: entries persist only while
   their tree_map is marked (see "tree_map_marked_p").  */
static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t heapvar_for_stmt;

/* Whether to model individual structure fields (field-sensitive
   analysis) rather than treating each aggregate as one blob.  */
static bool use_field_sensitive = true;

/* Nonzero when running in interprocedural (IPA) mode.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

/* Forward declarations; definitions appear later in this file.  */
static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);

/* Iterate over the set bits of bitmap A (starting at B, with bit
   variable C and iterator D), but only if A is non-NULL.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195
/* Statistics collected over one run of the constraint solver, for
   dumping in the points-to debug output.  */
static struct constraint_stats
{
  unsigned int total_vars;		/* Constraint variables created.  */
  unsigned int nonpointer_vars;		/* Variables with no pointers.  */
  unsigned int unified_vars_static;	/* Nodes unified before solving.  */
  unsigned int unified_vars_dynamic;	/* Nodes unified while solving.  */
  unsigned int iterations;		/* Solver worklist iterations.  */
  unsigned int num_edges;		/* Explicit graph edges added.  */
  unsigned int num_implicit_edges;	/* Implicit pred edges added.  */
  unsigned int points_to_sets_created;	/* Finished sets handed out.  */
} stats;
207
/* Per-constraint-variable record.  Each program variable (and each
   field of a structure, in field-sensitive mode) gets one of these,
   indexed by ID in the global VARMAP.  */
struct variable_info
{
  /* ID of this variable */
  unsigned int id;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with. */
  tree decl;

  /* Offset of this variable, in bits, from the base variable */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits. */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits. */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure. */
  struct variable_info *next;

  /* True if the variable is directly the target of a dereference.
     This is used to track which variables are *actually* dereferenced
     so we can prune their points-to sets. */
  unsigned int directly_dereferenced:1;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up. */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed. */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable. */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them. */
  unsigned int has_union:1;

  /* True if this is a heap variable. */
  unsigned int is_heap_var:1;

  /* Points-to set for this variable. */
  bitmap solution;

  /* Old points-to set for this variable. */
  bitmap oldsolution;

  /* Finished points-to set for this variable (IE what is returned
     from find_what_p_points_to). */
  bitmap finished_solution;

  /* Variable ids represented by this node. */
  bitmap variables;

  /* Variable id this was collapsed to due to type unsafety.  This
     should be unused completely after build_succ_graph, or something
     is broken. */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;
272
/* Return the first field of START whose offset range covers OFFSET;
   defined later in this file.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);

/* Pool of variable info structures. */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);

DEF_VEC_ALLOC_P(varinfo_t, heap);

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id. */
static VEC(varinfo_t,heap) *varmap;
285
286 /* Return the varmap element N */
287
288 static inline varinfo_t
289 get_varinfo (unsigned int n)
290 {
291 return VEC_index(varinfo_t, varmap, n);
292 }
293
294 /* Return the varmap element N, following the collapsed_to link. */
295
296 static inline varinfo_t
297 get_varinfo_fc (unsigned int n)
298 {
299 varinfo_t v = VEC_index(varinfo_t, varmap, n);
300
301 if (v->collapsed_to)
302 return v->collapsed_to;
303 return v;
304 }
305
/* Variable that represents the unknown pointer ("anything").  Its
   solution set is never pruned.  */
static varinfo_t var_anything;
static tree anything_tree;
static unsigned int anything_id;

/* Variable that represents the NULL pointer. */
static varinfo_t var_nothing;
static tree nothing_tree;
static unsigned int nothing_id;

/* Variable that represents read only memory. */
static varinfo_t var_readonly;
static tree readonly_tree;
static unsigned int readonly_id;

/* Variable that represents integers.  This is used for when people do things
   like &0->a.b, i.e. treat an integer constant as a pointer.  */
static varinfo_t var_integer;
static tree integer_tree;
static unsigned int integer_id;
326
327 /* Lookup a heap var for FROM, and return it if we find one. */
328
329 static tree
330 heapvar_lookup (tree from)
331 {
332 struct tree_map *h, in;
333 in.from = from;
334
335 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
336 if (h)
337 return h->to;
338 return NULL_TREE;
339 }
340
341 /* Insert a mapping FROM->TO in the heap var for statement
342 hashtable. */
343
344 static void
345 heapvar_insert (tree from, tree to)
346 {
347 struct tree_map *h;
348 void **loc;
349
350 h = ggc_alloc (sizeof (struct tree_map));
351 h->hash = htab_hash_pointer (from);
352 h->from = from;
353 h->to = to;
354 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
355 *(struct tree_map **) loc = h;
356 }
357
/* Return a new variable info structure with id ID, named NAME, and
   associated with tree T (which may be NULL for artificial
   variables).  NAME is not copied; the caller must pass storage that
   outlives the variable.  All flags start cleared and the solution
   sets start empty.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = pool_alloc (variable_info_pool);

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->directly_dereferenced = false;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->finished_solution = NULL;
  ret->next = NULL;
  ret->collapsed_to = NULL;
  return ret;
}
382
/* The three kinds of constraint expression: x, *x, and &x.  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint. */

struct constraint_expr
{
  /* Constraint type. */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint. */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member. */
  unsigned HOST_WIDE_INT offset;
};

typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
/* Forward declarations; definitions appear later in this file.  */
static void get_constraint_for (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);
408
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from. */

static VEC(constraint_t,heap) *constraints;

/* Pool from which individual constraints are allocated.  */
static alloc_pool constraint_pool;


DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int, heap);
429
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution). */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles. */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a node.  This is used for
     variable substitution. */
  int *eq_rep;

  /* Label for each node, used during variable substitution. */
  unsigned int *label;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution. */
  sbitmap direct_nodes;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0. */
  VEC(constraint_t,heap) **complex;
};

static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Node ids [0, FIRST_REF_NODE) are real variables,
   [FIRST_REF_NODE, LAST_REF_NODE] are *var nodes, and ids from
   FIRST_ADDR_NODE on are &var nodes.  */
#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
#define FIRST_ADDR_NODE (LAST_REF_NODE + 1)
483
484 /* Return the representative node for NODE, if NODE has been unioned
485 with another NODE.
486 This function performs path compression along the way to finding
487 the representative. */
488
489 static unsigned int
490 find (unsigned int node)
491 {
492 gcc_assert (node < graph->size);
493 if (graph->rep[node] != node)
494 return graph->rep[node] = find (graph->rep[node]);
495 return node;
496 }
497
498 /* Union the TO and FROM nodes to the TO nodes.
499 Note that at some point in the future, we may want to do
500 union-by-rank, in which case we are going to have to return the
501 node we unified to. */
502
503 static bool
504 unite (unsigned int to, unsigned int from)
505 {
506 gcc_assert (to < graph->size && from < graph->size);
507 if (to != from && graph->rep[from] != to)
508 {
509 graph->rep[from] = to;
510 return true;
511 }
512 return false;
513 }
514
515 /* Create a new constraint consisting of LHS and RHS expressions. */
516
517 static constraint_t
518 new_constraint (const struct constraint_expr lhs,
519 const struct constraint_expr rhs)
520 {
521 constraint_t ret = pool_alloc (constraint_pool);
522 ret->lhs = lhs;
523 ret->rhs = rhs;
524 return ret;
525 }
526
527 /* Print out constraint C to FILE. */
528
529 void
530 dump_constraint (FILE *file, constraint_t c)
531 {
532 if (c->lhs.type == ADDRESSOF)
533 fprintf (file, "&");
534 else if (c->lhs.type == DEREF)
535 fprintf (file, "*");
536 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
537 if (c->lhs.offset != 0)
538 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
539 fprintf (file, " = ");
540 if (c->rhs.type == ADDRESSOF)
541 fprintf (file, "&");
542 else if (c->rhs.type == DEREF)
543 fprintf (file, "*");
544 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
545 if (c->rhs.offset != 0)
546 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
547 fprintf (file, "\n");
548 }
549
/* Print out constraint C to stderr.  Intended for use from the
   debugger.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}
557
/* Print every constraint in the global CONSTRAINTS vector to FILE,
   one per line.  */

void
dump_constraints (FILE *file)
{
  int i;
  constraint_t c;
  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    dump_constraint (file, c);
}
568
/* Print out all constraints to stderr.  Intended for use from the
   debugger.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
576
577 /* SOLVER FUNCTIONS
578
579 The solver is a simple worklist solver, that works on the following
580 algorithm:
581
582 sbitmap changed_nodes = all zeroes;
583 changed_count = 0;
584 For each node that is not already collapsed:
585 changed_count++;
586 set bit in changed nodes
587
588 while (changed_count > 0)
589 {
590 compute topological ordering for constraint graph
591
592 find and collapse cycles in the constraint graph (updating
593 changed if necessary)
594
595 for each node (n) in the graph in topological order:
596 changed_count--;
597
598 Process each complex constraint associated with the node,
599 updating changed if necessary.
600
601 For each outgoing edge from n, propagate the solution from n to
602 the destination of the edge, updating changed as necessary.
603
604 } */
605
606 /* Return true if two constraint expressions A and B are equal. */
607
608 static bool
609 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
610 {
611 return a.type == b.type && a.var == b.var && a.offset == b.offset;
612 }
613
614 /* Return true if constraint expression A is less than constraint expression
615 B. This is just arbitrary, but consistent, in order to give them an
616 ordering. */
617
618 static bool
619 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
620 {
621 if (a.type == b.type)
622 {
623 if (a.var == b.var)
624 return a.offset < b.offset;
625 else
626 return a.var < b.var;
627 }
628 else
629 return a.type < b.type;
630 }
631
632 /* Return true if constraint A is less than constraint B. This is just
633 arbitrary, but consistent, in order to give them an ordering. */
634
635 static bool
636 constraint_less (const constraint_t a, const constraint_t b)
637 {
638 if (constraint_expr_less (a->lhs, b->lhs))
639 return true;
640 else if (constraint_expr_less (b->lhs, a->lhs))
641 return false;
642 else
643 return constraint_expr_less (a->rhs, b->rhs);
644 }
645
646 /* Return true if two constraints A and B are equal. */
647
648 static bool
649 constraint_equal (struct constraint a, struct constraint b)
650 {
651 return constraint_expr_equal (a.lhs, b.lhs)
652 && constraint_expr_equal (a.rhs, b.rhs);
653 }
654
655
656 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
657
658 static constraint_t
659 constraint_vec_find (VEC(constraint_t,heap) *vec,
660 struct constraint lookfor)
661 {
662 unsigned int place;
663 constraint_t found;
664
665 if (vec == NULL)
666 return NULL;
667
668 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
669 if (place >= VEC_length (constraint_t, vec))
670 return NULL;
671 found = VEC_index (constraint_t, vec, place);
672 if (!constraint_equal (*found, lookfor))
673 return NULL;
674 return found;
675 }
676
677 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
678
679 static void
680 constraint_set_union (VEC(constraint_t,heap) **to,
681 VEC(constraint_t,heap) **from)
682 {
683 int i;
684 constraint_t c;
685
686 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
687 {
688 if (constraint_vec_find (*to, *c) == NULL)
689 {
690 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
691 constraint_less);
692 VEC_safe_insert (constraint_t, heap, *to, place, c);
693 }
694 }
695 }
696
/* Take a solution set SET, add OFFSET to each member of the set, and
   overwrite SET with the result when done.  Members that would land
   past the end of their base variable are dropped, unless the
   variable is artificial, contains a union, or has unknown size, in
   which case the member is kept unchanged (it is globbed to a single
   variable).  */

static void
solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
{
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
      /* If this is a properly sized variable, only add offset if it's
	 less than end.  Otherwise, it is globbed to a single
	 variable.  */

      if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
	{
	  /* Map the offsetted position back to the field variable
	     that contains it; skip the member if there is none.  */
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
	  varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
	  if (!v)
	    continue;
	  bitmap_set_bit (result, v->id);
	}
      else if (get_varinfo (i)->is_artificial_var
	       || get_varinfo (i)->has_union
	       || get_varinfo (i)->is_unknown_size_var)
	{
	  bitmap_set_bit (result, i);
	}
    }

  /* Replace SET's contents with the offsetted copy.  */
  bitmap_copy (set, result);
  BITMAP_FREE (result);
}
732
733 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
734 process. */
735
736 static bool
737 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
738 {
739 if (inc == 0)
740 return bitmap_ior_into (to, from);
741 else
742 {
743 bitmap tmp;
744 bool res;
745
746 tmp = BITMAP_ALLOC (&iteration_obstack);
747 bitmap_copy (tmp, from);
748 solution_set_add (tmp, inc);
749 res = bitmap_ior_into (to, tmp);
750 BITMAP_FREE (tmp);
751 return res;
752 }
753 }
754
/* Insert constraint C into the sorted list of complex constraints for
   graph node VAR, unless an equal constraint is already present.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  VEC (constraint_t, heap) *complex = graph->complex[var];
  unsigned int place = VEC_lower_bound (constraint_t, complex, c,
					constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= VEC_length (constraint_t, complex)
      || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
    VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
}
771
772
773 /* Condense two variable nodes into a single variable node, by moving
774 all associated info from SRC to TO. */
775
776 static void
777 merge_node_constraints (constraint_graph_t graph, unsigned int to,
778 unsigned int from)
779 {
780 unsigned int i;
781 constraint_t c;
782
783 gcc_assert (find (from) == to);
784
785 /* Move all complex constraints from src node into to node */
786 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
787 {
788 /* In complex constraints for node src, we may have either
789 a = *src, and *src = a, or an offseted constraint which are
790 always added to the rhs node's constraints. */
791
792 if (c->rhs.type == DEREF)
793 c->rhs.var = to;
794 else if (c->lhs.type == DEREF)
795 c->lhs.var = to;
796 else
797 c->rhs.var = to;
798 }
799 constraint_set_union (&graph->complex[to], &graph->complex[from]);
800 VEC_free (constraint_t, heap, graph->complex[from]);
801 graph->complex[from] = NULL;
802 }
803
804
805 /* Remove edges involving NODE from GRAPH. */
806
807 static void
808 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
809 {
810 if (graph->succs[node])
811 BITMAP_FREE (graph->succs[node]);
812 }
813
/* Merge GRAPH nodes FROM and TO into node TO: combine their indirect
   cycle information and successor edges, then drop FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	{
	  graph->indirect_cycles[to] = graph->indirect_cycles[from];
	}
      else
	{
	  unsigned int tonode = find (graph->indirect_cycles[to]);
	  unsigned int fromnode = find (graph->indirect_cycles[from]);

	  if (unite (tonode, fromnode))
	    unify_nodes (graph, tonode, fromnode, true);
	}
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}
853
854
855 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
856 it doesn't exist in the graph already. */
857
858 static void
859 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
860 unsigned int from)
861 {
862 if (to == from)
863 return;
864
865 if (!graph->implicit_preds[to])
866 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
867
868 if (!bitmap_bit_p (graph->implicit_preds[to], from))
869 {
870 stats.num_implicit_edges++;
871 bitmap_set_bit (graph->implicit_preds[to], from);
872 }
873 }
874
/* Add a predecessor graph edge to GRAPH, going from TO to FROM, if
   it doesn't exist in the graph already.  (Unlike add_graph_edge,
   this does not report whether the edge was new.)  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  if (!bitmap_bit_p (graph->preds[to], from))
    bitmap_set_bit (graph->preds[to], from);
}
888
889 /* Add a graph edge to GRAPH, going from FROM to TO if
890 it doesn't exist in the graph already.
891 Return false if the edge already existed, true otherwise. */
892
893 static bool
894 add_graph_edge (constraint_graph_t graph, unsigned int to,
895 unsigned int from)
896 {
897 if (to == from)
898 {
899 return false;
900 }
901 else
902 {
903 bool r = false;
904
905 if (!graph->succs[from])
906 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
907 if (!bitmap_bit_p (graph->succs[from], to))
908 {
909 r = true;
910 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
911 stats.num_edges++;
912 bitmap_set_bit (graph->succs[from], to);
913 }
914 return r;
915 }
916 }
917
918
919 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
920
921 static bool
922 valid_graph_edge (constraint_graph_t graph, unsigned int src,
923 unsigned int dest)
924 {
925 return (graph->succs[dest]
926 && bitmap_bit_p (graph->succs[dest], src));
927 }
928
/* Build the constraint graph, adding only predecessor edges right
   now.  Allocates the global GRAPH (sized 3x the variable count, to
   leave room for the REF and ADDR shadow nodes), initializes its
   arrays, and walks every constraint classifying it as *x = y,
   x = *y, x = &y, x = y, or an offsetted form.  Nodes that can be
   written through a pointer or have their address taken are cleared
   from DIRECT_NODES.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph = XNEW (struct constraint_graph);
  graph->size = (VEC_length (varinfo_t, varmap)) * 3;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, VEC_length (varinfo_t, varmap));
  graph->label = XCNEWVEC (unsigned int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *,
			     VEC_length (varinfo_t, varmap));
  graph->direct_nodes = sbitmap_alloc (graph->size);
  sbitmap_zero (graph->direct_nodes);

  /* All non-special variables start out assumed direct.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	SET_BIT (graph->direct_nodes, j);
    }

  /* Every node starts as its own representative, in no equivalence
     class.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->eq_rep[j] = -1;
    }

  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
    graph->indirect_cycles[j] = -1;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
      unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	  if (rhs.type == ADDRESSOF)
	    RESET_BIT (graph->direct_nodes, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    RESET_BIT (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  add_pred_graph_edge (graph, lhsvar, FIRST_ADDR_NODE + rhsvar);
	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  RESET_BIT (graph->direct_nodes, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* NOTE(review): this branch uses the raw lhs.var/rhs.var
	     ids rather than the collapsed lhsvar/rhsvar used above —
	     presumably intentional, but worth confirming against the
	     variable-substitution code.  */
	  if (rhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, lhs.var);
	  if (lhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, rhs.var);
	}
    }
}
1017
/* Build the constraint graph, adding successor edges.  Run after
   unification: variable ids are mapped through find () so edges are
   attached to representative nodes, and x = &y constraints seed the
   initial solution sets directly instead of adding edges.  */

static void
build_succ_graph (void)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been deleted (NULLed out) earlier.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (get_varinfo_fc (lhs.var)->id);
      rhsvar = find (get_varinfo_fc (rhs.var)->id);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node for x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node for y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_assert (find (get_varinfo_fc (rhs.var)->id)
		      == get_varinfo_fc (rhs.var)->id);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }
}
1065
1066
1067 /* Changed variables on the last iteration. */
1068 static unsigned int changed_count;
1069 static sbitmap changed;
1070
1071 DEF_VEC_I(unsigned);
1072 DEF_VEC_ALLOC_I(unsigned,heap);
1073
1074
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already visited by the DFS.  */
  sbitmap visited;
  /* Nodes identified as roots of already-finished SCCs.  */
  sbitmap roots;
  /* Per-node DFS discovery number (doubles as the lowlink value
     during the walk).  */
  unsigned int *dfs;
  /* Mapping from a node to its SCC representative.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of visited non-root nodes not yet assigned to an SCC.  */
  VEC(unsigned,heap) *scc_stack;
};
1086
1087
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Successors above LAST_REF_NODE are address nodes; stop.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      /* Nodes in an already-finished SCC need no further work.  */
      if (TEST_BIT (si->roots, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	scc_visit (graph, si, w);
      /* This block runs unconditionally: fold W's lowest reachable
	 DFS number (lowlink) into N's.  */
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert(nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N roots an SCC; pop all members whose DFS number is at least
	 ours off the stack and collapse them.  */
      if (VEC_length (unsigned, si->scc_stack) > 0
	  && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  /* NOTE(review): this bitmap is never explicitly freed here;
	     presumably reclaimed with the default bitmap obstack —
	     verify.  */
	  bitmap scc = BITMAP_ALLOC (NULL);
	  /* NOTE(review): have_ref_node is computed but never read in
	     this function.  */
	  bool have_ref_node = n >= FIRST_REF_NODE;
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (VEC_length (unsigned, si->scc_stack) != 0
		 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	    {
	      unsigned int w = VEC_pop (unsigned, si->scc_stack);

	      bitmap_set_bit (scc, w);
	      if (w >= FIRST_REF_NODE)
		have_ref_node = true;
	    }

	  /* Collapse the SCC onto its lowest-numbered member, which is
	     asserted to be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  /* Mark this node for collapsing.  */
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes cannot be unified into regular nodes;
		     record the indirect cycle so it can be collapsed
		     online during solving.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      SET_BIT (si->roots, n);
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1179
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   representative of its own union-find set (asserted below).  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move FROM's edges and constraints onto TO before touching the
     solution sets.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* FROM is dead after this; transfer its pending "changed" status
     to TO, keeping changed_count consistent.  */
  if (update_changed && TEST_BIT (changed, from))
    {
      RESET_BIT (changed, from);
      if (!TEST_BIT (changed, to))
	SET_BIT (changed, to);
      else
	{
	  /* TO was already pending, so the total pending count
	     drops by one.  */
	  gcc_assert (changed_count > 0);
	  changed_count--;
	}
    }

  /* If the solution changes because of the merging, we need to mark
     the variable as changed.  */
  if (bitmap_ior_into (get_varinfo (to)->solution,
		       get_varinfo (from)->solution))
    {
      if (update_changed && !TEST_BIT (changed, to))
	{
	  SET_BIT (changed, to);
	  changed_count++;
	}
    }

  BITMAP_FREE (get_varinfo (from)->solution);
  BITMAP_FREE (get_varinfo (from)->oldsolution);

  /* Once iterative solving has begun, give TO a fresh (empty)
     oldsolution so its next delta covers all merged bits.  */
  if (stats.iterations > 0)
    {
      BITMAP_FREE (get_varinfo (to)->oldsolution);
      get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
    }

  /* Drop any self-edge the merge may have created.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1241
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  Nodes are appended only after all their successors,
     so popping from the end yields a forward topological order.  */
  VEC(unsigned,heap) *topo_order;
};
1252
1253
1254 /* Initialize and return a topological info structure. */
1255
1256 static struct topo_info *
1257 init_topo_info (void)
1258 {
1259 size_t size = VEC_length (varinfo_t, varmap);
1260 struct topo_info *ti = XNEW (struct topo_info);
1261 ti->visited = sbitmap_alloc (size);
1262 sbitmap_zero (ti->visited);
1263 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1264 return ti;
1265 }
1266
1267
1268 /* Free the topological sort info pointed to by TI. */
1269
1270 static void
1271 free_topo_info (struct topo_info *ti)
1272 {
1273 sbitmap_free (ti->visited);
1274 VEC_free (unsigned, heap, ti->topo_order);
1275 free (ti);
1276 }
1277
1278 /* Visit the graph in topological order, and store the order in the
1279 topo_info structure. */
1280
1281 static void
1282 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1283 unsigned int n)
1284 {
1285 bitmap_iterator bi;
1286 unsigned int j;
1287
1288 SET_BIT (ti->visited, n);
1289
1290 if (graph->succs[n])
1291 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1292 {
1293 if (!TEST_BIT (ti->visited, j))
1294 topo_visit (graph, ti, j);
1295 }
1296
1297 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1298 }
1299
1300 /* Return true if variable N + OFFSET is a legal field of N. */
1301
1302 static bool
1303 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1304 {
1305 varinfo_t ninfo = get_varinfo (n);
1306
1307 /* For things we've globbed to single variables, any offset into the
1308 variable acts like the entire variable, so that it becomes offset
1309 0. */
1310 if (ninfo->is_special_var
1311 || ninfo->is_artificial_var
1312 || ninfo->is_unknown_size_var)
1313 {
1314 *offset = 0;
1315 return true;
1316 }
1317 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1318 }
1319
/* Process a constraint C that represents *x = &y, where DELTA holds
   the changed bits of Sol(x).  For every pointed-to variable j in
   DELTA, y is added to the solution of the field of j at the store's
   offset.  */

static void
do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
		  constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of Delta (Sol(x)), add x to Sol(j)  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT offset = c->lhs.offset;
      if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
	{
	  /* *x != NULL && *x != ANYTHING*/
	  varinfo_t v;
	  unsigned int t;
	  bitmap sol;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;

	  /* Find the field of J at the store offset; skip J if there
	     is no such field.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  sol = get_varinfo (t)->solution;
	  if (!bitmap_bit_p (sol, rhs))
	    {
	      bitmap_set_bit (sol, rhs);
	      /* T's solution grew; queue it for reprocessing.  */
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	/* Deliberately disabled debug output (the "0 &&" keeps it
	   compiled but unreachable).  */
	fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");

    }
}
1362
/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution (the changed bits of Sol(y)).  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = find (c->lhs.var);
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y points to ANYTHING, so does x; no edges are needed.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag = !bitmap_bit_p (sol, anything_id);
      if (flag)
	bitmap_set_bit (sol, anything_id);
      goto done;
    }
  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
	  unsigned int t;

	  /* Find the field of J at the load's offset; skip J if there
	     is no such field.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t) ->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  else if (add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	/* Deliberately disabled debug output.  */
	fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");

    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
1423
/* Process a constraint C that represents *x = y, where DELTA holds
   the changed bits of Sol(x).  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = find (c->rhs.var);
  unsigned HOST_WIDE_INT roff = c->rhs.offset;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y points to ANYTHING, every store target in DELTA must also
     point to ANYTHING; no unions are needed.  */
  if (bitmap_bit_p (sol, anything_id))
    {
      EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
	{
	  varinfo_t jvi = get_varinfo (j);
	  unsigned int t;
	  unsigned int loff = c->lhs.offset;
	  unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
	  varinfo_t v;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
	    {
	      bitmap_set_bit (get_varinfo (t)->solution, anything_id);
	      /* T's solution grew; queue it for reprocessing.  */
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      return;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo(j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
	  bitmap tmp;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  tmp = get_varinfo (t)->solution;

	  if (set_union_with_increment (tmp, sol, roff))
	    {
	      get_varinfo (t)->solution = tmp;
	      /* If we just grew y's own solution (store into self),
		 re-read it so later iterations see the new set.  */
	      if (t == rhs)
		sol = get_varinfo (rhs)->solution;
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	/* Deliberately disabled debug output.  */
	fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
    }
}
1497
1498 /* Handle a non-simple (simple meaning requires no iteration),
1499 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1500
1501 static void
1502 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1503 {
1504 if (c->lhs.type == DEREF)
1505 {
1506 if (c->rhs.type == ADDRESSOF)
1507 {
1508 /* *x = &y */
1509 do_da_constraint (graph, c, delta);
1510 }
1511 else
1512 {
1513 /* *x = y */
1514 do_ds_constraint (c, delta);
1515 }
1516 }
1517 else if (c->rhs.type == DEREF)
1518 {
1519 /* x = *y */
1520 if (!(get_varinfo (c->lhs.var)->is_special_var))
1521 do_sd_constraint (graph, c, delta);
1522 }
1523 else
1524 {
1525 bitmap tmp;
1526 bitmap solution;
1527 bool flag = false;
1528 unsigned int t;
1529
1530 gcc_assert(c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1531 t = find (c->rhs.var);
1532 solution = get_varinfo (t)->solution;
1533 t = find (c->lhs.var);
1534 tmp = get_varinfo (t)->solution;
1535
1536 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1537
1538 if (flag)
1539 {
1540 get_varinfo (t)->solution = tmp;
1541 if (!TEST_BIT (changed, c->lhs.var))
1542 {
1543 SET_BIT (changed, c->lhs.var);
1544 changed_count++;
1545 }
1546 }
1547 }
1548 }
1549
1550 /* Initialize and return a new SCC info structure. */
1551
1552 static struct scc_info *
1553 init_scc_info (size_t size)
1554 {
1555 struct scc_info *si = XNEW (struct scc_info);
1556 size_t i;
1557
1558 si->current_index = 0;
1559 si->visited = sbitmap_alloc (size);
1560 sbitmap_zero (si->visited);
1561 si->roots = sbitmap_alloc (size);
1562 sbitmap_zero (si->roots);
1563 si->node_mapping = XNEWVEC (unsigned int, size);
1564 si->dfs = XCNEWVEC (unsigned int, size);
1565
1566 for (i = 0; i < size; i++)
1567 si->node_mapping[i] = i;
1568
1569 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1570 return si;
1571 }
1572
1573 /* Free an SCC info structure pointed to by SI */
1574
1575 static void
1576 free_scc_info (struct scc_info *si)
1577 {
1578 sbitmap_free (si->visited);
1579 sbitmap_free (si->roots);
1580 free (si->node_mapping);
1581 free (si->dfs);
1582 VEC_free (unsigned, heap, si->scc_stack);
1583 free (si);
1584 }
1585
1586
1587 /* Find indirect cycles in GRAPH that occur, using strongly connected
1588 components, and note them in the indirect cycles map.
1589
1590 This technique comes from Ben Hardekopf and Calvin Lin,
1591 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1592 Lines of Code", submitted to PLDI 2007. */
1593
1594 static void
1595 find_indirect_cycles (constraint_graph_t graph)
1596 {
1597 unsigned int i;
1598 unsigned int size = graph->size;
1599 struct scc_info *si = init_scc_info (size);
1600
1601 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1602 if (!TEST_BIT (si->visited, i) && find (i) == i)
1603 scc_visit (graph, si, i);
1604
1605 free_scc_info (si);
1606 }
1607
1608 /* Compute a topological ordering for GRAPH, and store the result in the
1609 topo_info structure TI. */
1610
1611 static void
1612 compute_topo_order (constraint_graph_t graph,
1613 struct topo_info *ti)
1614 {
1615 unsigned int i;
1616 unsigned int size = VEC_length (varinfo_t, varmap);
1617
1618 for (i = 0; i != size; ++i)
1619 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1620 topo_visit (graph, ti, i);
1621 }
1622
1623 /* Perform offline variable substitution.
1624
1625 This is a linear time way of identifying variables that must have
1626 equivalent points-to sets, including those caused by static cycles,
1627 and single entry subgraphs, in the constraint graph.
1628
1629 The technique is described in "Off-line variable substitution for
1630 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1631 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.
1632
1633 There is an optimal way to do this involving hash based value
1634 numbering; once the technique is published I will implement it
1635 here.
1636
1637 The general method of finding equivalence classes is as follows:
1638 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1639 Add fake nodes (ADDRESS nodes) and edges for a = &b constraints.
1640 Initialize all non-REF/ADDRESS nodes to be direct nodes
1641 For each SCC in the predecessor graph:
1642 for each member (x) of the SCC
1643 if x is not a direct node:
1644 set rootnode(SCC) to be not a direct node
1645 collapse node x into rootnode(SCC).
1646 if rootnode(SCC) is not a direct node:
1647 label rootnode(SCC) with a new equivalence class
1648 else:
1649 if all labeled predecessors of rootnode(SCC) have the same
1650 label:
1651 label rootnode(SCC) with this label
1652 else:
1653 label rootnode(SCC) with a new equivalence class
1654
1655 All direct nodes with the same equivalence class can be replaced
1656 with a single representative node.
1657 All unlabeled nodes (label == 0) are not pointers and all edges
1658 involving them can be eliminated.
1659 We perform these optimizations during move_complex_constraints.
1660 */
1661
1662 static int equivalence_class;
1663
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with equivalence classes.
   This is used during variable substitution to find cycles involving
   the regular or implicit predecessors, and label them as equivalent.
   The SCC finding algorithm used is the same as that for scc_visit.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the predecessors (the edges of the predecessor
     graph).  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      /* Nodes in an already-finished SCC need no further work.  */
      if (TEST_BIT (si->roots, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);
      /* This block runs unconditionally: fold W's lowest reachable
	 DFS number (lowlink) into N's.  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert(nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->roots, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);
      /* Same unconditional lowlink update as above.  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N roots an SCC; collapse all stacked members onto N.  */
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  si->node_mapping[w] = n;

	  /* One indirect member makes the whole SCC indirect.  */
	  if (!TEST_BIT (graph->direct_nodes, w))
	    RESET_BIT (graph->direct_nodes, n);
	}
      SET_BIT (si->roots, n);

      if (!TEST_BIT (graph->direct_nodes, n))
	{
	  /* Indirect nodes always get a fresh equivalence class.  */
	  graph->label[n] = equivalence_class++;
	}
      else
	{
	  /* Direct node: determine whether the labeled predecessors
	     carry zero, exactly one, or more than one distinct label
	     (SIZE only distinguishes those three cases).  */
	  unsigned int size = 0;
	  unsigned int firstlabel = ~0;

	  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
	    {
	      unsigned int j = si->node_mapping[i];

	      /* Skip self and unlabeled (non-pointer) predecessors.  */
	      if (j == n || graph->label[j] == 0)
		continue;

	      if (firstlabel == (unsigned int)~0)
		{
		  firstlabel = graph->label[j];
		  size++;
		}
	      else if (graph->label[j] != firstlabel)
		size++;
	    }

	  if (size == 0)
	    /* No labeled predecessors: N is not a pointer.  */
	    graph->label[n] = 0;
	  else if (size == 1)
	    /* All labeled predecessors agree; inherit their label.  */
	    graph->label[n] = firstlabel;
	  else
	    graph->label[n] = equivalence_class++;
	}
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1772
1773 /* Perform offline variable substitution, discovering equivalence
1774 classes, and eliminating non-pointer variables. */
1775
1776 static struct scc_info *
1777 perform_var_substitution (constraint_graph_t graph)
1778 {
1779 unsigned int i;
1780 unsigned int size = graph->size;
1781 struct scc_info *si = init_scc_info (size);
1782
1783 bitmap_obstack_initialize (&iteration_obstack);
1784 equivalence_class = 0;
1785
1786 /* We only need to visit the non-address nodes for labeling
1787 purposes, as the address nodes will never have any predecessors,
1788 because &x never appears on the LHS of a constraint. */
1789 for (i = 0; i < LAST_REF_NODE; i++)
1790 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1791 label_visit (graph, si, si->node_mapping[i]);
1792
1793 if (dump_file && (dump_flags & TDF_DETAILS))
1794 for (i = 0; i < FIRST_REF_NODE; i++)
1795 {
1796 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1797 fprintf (dump_file,
1798 "Equivalence class for %s node id %d:%s is %d\n",
1799 direct_node ? "Direct node" : "Indirect node", i,
1800 get_varinfo(i)->name,
1801 graph->label[si->node_mapping[i]]);
1802 }
1803
1804 /* Quickly eliminate our non-pointer variables. */
1805
1806 for (i = 0; i < FIRST_REF_NODE; i++)
1807 {
1808 unsigned int node = si->node_mapping[i];
1809
1810 if (graph->label[node] == 0 && TEST_BIT (graph->direct_nodes, node))
1811 {
1812 if (dump_file && (dump_flags & TDF_DETAILS))
1813 fprintf (dump_file,
1814 "%s is a non-pointer variable, eliminating edges.\n",
1815 get_varinfo (node)->name);
1816 stats.nonpointer_vars++;
1817 clear_edges_for_node (graph, node);
1818 }
1819 }
1820 return si;
1821 }
1822
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  /* The labels, equivalence representatives and direct-node bitmap
     are only meaningful during substitution.  */
  free (graph->label);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  /* Release every bitmap allocated during the substitution pass.  */
  bitmap_obstack_release (&iteration_obstack);
}
1835
1836 /* Return an existing node that is equivalent to NODE, which has
1837 equivalence class LABEL, if one exists. Return NODE otherwise. */
1838
1839 static unsigned int
1840 find_equivalent_node (constraint_graph_t graph,
1841 unsigned int node, unsigned int label)
1842 {
1843 /* If the address version of this variable is unused, we can
1844 substitute it for anything else with the same label.
1845 Otherwise, we know the pointers are equivalent, but not the
1846 locations. */
1847
1848 if (graph->label[FIRST_ADDR_NODE + node] == 0)
1849 {
1850 gcc_assert (label < graph->size);
1851
1852 if (graph->eq_rep[label] != -1)
1853 {
1854 /* Unify the two variables since we know they are equivalent. */
1855 if (unite (graph->eq_rep[label], node))
1856 unify_nodes (graph, graph->eq_rep[label], node, false);
1857 return graph->eq_rep[label];
1858 }
1859 else
1860 {
1861 graph->eq_rep[label] = node;
1862 }
1863 }
1864 return node;
1865 }
1866
/* Move complex constraints to the appropriate nodes, and collapse
   variables we've discovered are equivalent during variable
   substitution.  SI is the SCC_INFO that is the result of
   perform_var_substitution.  */

static void
move_complex_constraints (constraint_graph_t graph,
			  struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  /* At this point no variable may have been unified yet.  */
  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
      unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->label[lhsnode];
      rhslabel = graph->label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  A label of 0 on an indirect node only means
	 it was never labeled, so give it a class instead.  */
      if (lhslabel == 0)
	{
	  if (!TEST_BIT (graph->direct_nodes, lhsnode))
	    lhslabel = graph->label[lhsnode] = equivalence_class++;
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{

		  fprintf (dump_file, "%s is a non-pointer variable,"
			   "ignoring constraint:",
			   get_varinfo (lhs.var)->name);
		  dump_constraint (dump_file, c);
		}
	      /* Drop the constraint; the solver skips NULL slots.  */
	      VEC_replace (constraint_t, constraints, i, NULL);
	      continue;
	    }
	}

      /* Same treatment for the RHS variable.  */
      if (rhslabel == 0)
	{
	  if (!TEST_BIT (graph->direct_nodes, rhsnode))
	    rhslabel = graph->label[rhsnode] = equivalence_class++;
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{

		  fprintf (dump_file, "%s is a non-pointer variable,"
			   "ignoring constraint:",
			   get_varinfo (rhs.var)->name);
		  dump_constraint (dump_file, c);
		}
	      VEC_replace (constraint_t, constraints, i, NULL);
	      continue;
	    }
	}

      /* Collapse equivalently-labeled variables and rewrite the
	 constraint to use the representatives.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;

      /* File the constraint in the complex list of the node whose
	 solution delta drives it during solving.  */
      if (lhs.type == DEREF)
	{
	  if (rhs.type == ADDRESSOF || rhsvar > anything_id)
	    insert_into_complex (graph, lhsvar, c);
	}
      else if (rhs.type == DEREF)
	{
	  if (!(get_varinfo (lhsvar)->is_special_var))
	    insert_into_complex (graph, rhsvar, c);
	}
      else if (rhs.type != ADDRESSOF && lhsvar > anything_id
	       && (lhs.offset != 0 || rhs.offset != 0))
	{
	  insert_into_complex (graph, rhsvar, c);
	}

    }
}
1960
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      VEC(unsigned,heap) *queue = NULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      /* Phase 1: unite representatives and queue the merges.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		VEC_safe_push (unsigned, heap, queue, i);
	    }
	}

      /* Phase 2: perform the queued unifications.  */
      for (queuepos = 0;
	   VEC_iterate (unsigned, queue, queuepos, i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      VEC_free (unsigned, heap, queue);
      return true;
    }
  return false;
}
2000
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above. */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  bitmap pts;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only representatives with a non-empty solution and something
	 to propagate to (edges or complex constraints) need work.  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0))
	{
	  SET_BIT (changed, i);
	  changed_count++;
	}
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* Collapse any indirect cycle detected through this node;
	     this may unify other nodes into I's set.  */
	  eliminate_indirect_cycles (i);

	  gcc_assert (find (i) == i);

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = graph->complex[i];
	      bool solution_empty;
	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Compute the changed set of solution bits.  */
	      bitmap_and_compl (pts, get_varinfo (i)->solution,
				get_varinfo (i)->oldsolution);

	      /* Nothing new since last visit: done with this node.  */
	      if (bitmap_empty_p (pts))
		continue;

	      bitmap_ior_into (get_varinfo (i)->oldsolution, pts);

	      solution = get_varinfo (i)->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
		{
		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* Only the delta (PTS) needs to be pushed.  */
		      flag = set_union_with_increment (tmp, pts, 0);

		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  if (!TEST_BIT (changed, to))
			    {
			      SET_BIT (changed, to);
			      changed_count++;
			    }
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  sbitmap_free (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2137
/* Map from trees to variable infos.  */
static htab_t vi_for_tree;

/* Hash table entry pairing a tree with its variable info.  */
typedef struct tree_vi
{
  /* The tree this entry describes.  */
  tree t;
  /* The points-to variable info recorded for T.  */
  varinfo_t vi;
} *tree_vi_t;
2146
2147 /* Hash a tree id structure. */
2148
2149 static hashval_t
2150 tree_vi_hash (const void *p)
2151 {
2152 const tree_vi_t ta = (tree_vi_t) p;
2153 return htab_hash_pointer (ta->t);
2154 }
2155
2156 /* Return true if the tree in P1 and the tree in P2 are the same. */
2157
2158 static int
2159 tree_vi_eq (const void *p1, const void *p2)
2160 {
2161 const tree_vi_t ta1 = (tree_vi_t) p1;
2162 const tree_vi_t ta2 = (tree_vi_t) p2;
2163 return ta1->t == ta2->t;
2164 }
2165
2166 /* Insert ID as the variable id for tree T in the hashtable. */
2167
2168 static void
2169 insert_vi_for_tree (tree t, varinfo_t vi)
2170 {
2171 void **slot;
2172 struct tree_vi finder;
2173 tree_vi_t new_pair;
2174
2175 finder.t = t;
2176 slot = htab_find_slot (vi_for_tree, &finder, INSERT);
2177 gcc_assert (*slot == NULL);
2178 new_pair = XNEW (struct tree_vi);
2179 new_pair->t = t;
2180 new_pair->vi = vi;
2181 *slot = (void *)new_pair;
2182 }
2183
2184 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2185 exist in the hash table, return false, otherwise, return true and
2186 set *VI to the varinfo we found. */
2187
2188 static bool
2189 lookup_vi_for_tree (tree t, varinfo_t *vi)
2190 {
2191 tree_vi_t pair;
2192 struct tree_vi finder;
2193
2194 finder.t = t;
2195 pair = htab_find (vi_for_tree, &finder);
2196 if (pair == NULL)
2197 return false;
2198 *vi = pair->vi;
2199 return true;
2200 }
2201
2202 /* Return a printable name for DECL */
2203
2204 static const char *
2205 alias_get_name (tree decl)
2206 {
2207 const char *res = get_name (decl);
2208 char *temp;
2209 int num_printed = 0;
2210
2211 if (res != NULL)
2212 return res;
2213
2214 res = "NULL";
2215 if (!dump_file)
2216 return res;
2217
2218 if (TREE_CODE (decl) == SSA_NAME)
2219 {
2220 num_printed = asprintf (&temp, "%s_%u",
2221 alias_get_name (SSA_NAME_VAR (decl)),
2222 SSA_NAME_VERSION (decl));
2223 }
2224 else if (DECL_P (decl))
2225 {
2226 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2227 }
2228 if (num_printed > 0)
2229 {
2230 res = ggc_strdup (temp);
2231 free (temp);
2232 }
2233 return res;
2234 }
2235
2236 /* Find the variable id for tree T in the hashtable.
2237 If T doesn't exist in the hash table, create an entry for it. */
2238
2239 static varinfo_t
2240 get_vi_for_tree (tree t)
2241 {
2242 tree_vi_t pair;
2243 struct tree_vi finder;
2244
2245 finder.t = t;
2246 pair = htab_find (vi_for_tree, &finder);
2247 if (pair == NULL)
2248 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2249
2250 return pair->vi;
2251 }
2252
2253 /* Get a constraint expression from an SSA_VAR_P node. */
2254
2255 static struct constraint_expr
2256 get_constraint_exp_from_ssa_var (tree t)
2257 {
2258 struct constraint_expr cexpr;
2259
2260 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2261
2262 /* For parameters, get at the points-to set for the actual parm
2263 decl. */
2264 if (TREE_CODE (t) == SSA_NAME
2265 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2266 && SSA_NAME_IS_DEFAULT_DEF (t))
2267 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2268
2269 cexpr.type = SCALAR;
2270
2271 cexpr.var = get_vi_for_tree (t)->id;
2272 /* If we determine the result is "anything", and we know this is readonly,
2273 say it points to readonly memory instead. */
2274 if (cexpr.var == anything_id && TREE_READONLY (t))
2275 {
2276 cexpr.type = ADDRESSOF;
2277 cexpr.var = readonly_id;
2278 }
2279
2280 cexpr.offset = 0;
2281 return cexpr;
2282 }
2283
/* Process a completed constraint T, and add it to the constraint
   list.  Normalizes special forms (swapping &ANYTHING to the RHS,
   splitting double dereferences through a temporary) before pushing
   onto CONSTRAINTS.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  /* Both sides must name variables already registered in VARMAP.  */
  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

  /* Record which variables are dereferenced directly; the solver
     uses this flag later.  */
  if (lhs.type == DEREF)
    get_varinfo (lhs.var)->directly_dereferenced = true;
  if (rhs.type == DEREF)
    get_varinfo (rhs.var)->directly_dereferenced = true;

  /* Without field sensitivity all offsets collapse to 0.  */
  if (!use_field_sensitive)
    {
      t->rhs.offset = 0;
      t->lhs.offset = 0;
    }

  /* ANYTHING == ANYTHING is pointless.  */
  if (lhs.var == anything_id && rhs.var == anything_id)
    return;

  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
    {
      /* Swap the two sides in place and reprocess.  */
      rhs = t->lhs;
      t->lhs = t->rhs;
      t->rhs = rhs;
      process_constraint (t);
    }
  /* This can happen in our IR with things like n->a = *p */
  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree pointedtotype = TREE_TYPE (pointertype);
      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);

      /* If this is an aggregate of known size, we should have passed
	 this off to do_structure_copy, and it should have broken it
	 up.  */
      gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
		  || get_varinfo (rhs.var)->is_unknown_size_var);

      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Taking an address never involves a nonzero offset here.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      VEC_safe_push (constraint_t, heap, constraints, t);
    }
}
2344
2345 /* Return true if T is a variable of a type that could contain
2346 pointers. */
2347
2348 static bool
2349 could_have_pointers (tree t)
2350 {
2351 tree type = TREE_TYPE (t);
2352
2353 if (POINTER_TYPE_P (type) || AGGREGATE_TYPE_P (type)
2354 || TREE_CODE (type) == COMPLEX_TYPE)
2355 return true;
2356 return false;
2357 }
2358
2359 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2360 structure. */
2361
2362 static unsigned HOST_WIDE_INT
2363 bitpos_of_field (const tree fdecl)
2364 {
2365
2366 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2367 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2368 return -1;
2369
2370 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2371 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2372 }
2373
2374
2375 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2376 overlaps with a field at [FIELDPOS, FIELDSIZE] */
2377
2378 static bool
2379 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2380 const unsigned HOST_WIDE_INT fieldsize,
2381 const unsigned HOST_WIDE_INT accesspos,
2382 const unsigned HOST_WIDE_INT accesssize)
2383 {
2384 if (fieldpos == accesspos && fieldsize == accesssize)
2385 return true;
2386 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2387 return true;
2388 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2389 return true;
2390
2391 return false;
2392 }
2393
/* Given a COMPONENT_REF T, push the constraint_expr(s) for it onto
   *RESULTS.  Resolves the access to the specific sub-field variable
   that the byte range of the reference touches.  */

static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;
  struct constraint_expr *result;
  unsigned int beforelength = VEC_length (ce_s, *results);

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
    forzero = TREE_OPERAND (forzero, 0);

  /* A reference rooted at the literal 0 is treated as the generic
     integer variable.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  /* Replace T with its base object; BITPOS/BITSIZE/BITMAXSIZE
     describe the accessed bit range within it.  */
  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* String constants are readonly, so there is nothing to really do
     here.  */
  if (TREE_CODE (t) == STRING_CST)
    return;

  get_constraint_for (t, results);
  result = VEC_last (ce_s, *results);
  result->offset = bitpos;

  /* The base must have contributed exactly one constraint.  */
  gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));

  /* This can also happen due to weird offsetof type macros.  */
  if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
    result->type = SCALAR;

  if (result->type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint. When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (result->offset < get_varinfo (result->var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  varinfo_t curr;
	  for (curr = get_varinfo (result->var); curr; curr = curr->next)
	    {
	      if (offset_overlaps_with_access (curr->offset, curr->size,
					       result->offset, bitmaxsize))
		{
		  result->var = curr->id;
		  break;
		}
	    }
	  /* assert that we found *some* field there. The user couldn't be
	     accessing *only* padding.  */
	  /* Still the user could access one past the end of an array
	     embedded in a struct resulting in accessing *only* padding.  */
	  gcc_assert (curr || ref_contains_array_ref (orig_t));
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");

      /* The offset has been folded into the choice of field variable;
	 reset it so the constraint is field-relative.  */
      result->offset = 0;
    }
}
2484
2485
2486 /* Dereference the constraint expression CONS, and return the result.
2487 DEREF (ADDRESSOF) = SCALAR
2488 DEREF (SCALAR) = DEREF
2489 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2490 This is needed so that we can handle dereferencing DEREF constraints. */
2491
2492 static void
2493 do_deref (VEC (ce_s, heap) **constraints)
2494 {
2495 struct constraint_expr *c;
2496 unsigned int i = 0;
2497
2498 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2499 {
2500 if (c->type == SCALAR)
2501 c->type = DEREF;
2502 else if (c->type == ADDRESSOF)
2503 c->type = SCALAR;
2504 else if (c->type == DEREF)
2505 {
2506 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2507 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2508 process_constraint (new_constraint (tmplhs, *c));
2509 c->var = tmplhs.var;
2510 }
2511 else
2512 gcc_unreachable ();
2513 }
2514 }
2515
/* Given a tree T, push the constraint expression(s) for it onto
   *RESULTS.  This is the main expression dispatcher for constraint
   generation; unhandled forms conservatively become &ANYTHING.  */

static void
get_constraint_for (tree t, VEC (ce_s, heap) **results)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.  */
  if (TREE_CODE (t) == INTEGER_CST
      && !POINTER_TYPE_P (TREE_TYPE (t)))
    {
      temp.var = integer_id;
      temp.type = SCALAR;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }
  /* NOTE: because of the branch above, only pointer-typed zero
     constants reach this case.  */
  else if (TREE_CODE (t) == INTEGER_CST
	   && integer_zerop (t))
    {
      temp.var = nothing_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    {
	      struct constraint_expr *c;
	      unsigned int i;
	      tree exp = TREE_OPERAND (t, 0);
	      /* The type pointed to by the address expression.  */
	      tree pttype = TREE_TYPE (TREE_TYPE (t));

	      get_constraint_for (exp, results);
	      /* Make sure we capture constraints to all elements
		 of an array.  */
	      if ((handled_component_p (exp)
		   && ref_contains_array_ref (exp))
		  || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
		{
		  struct constraint_expr *origrhs;
		  varinfo_t origvar;
		  struct constraint_expr tmp;

		  if (VEC_length (ce_s, *results) == 0)
		    return;

		  /* Replace the single result with one constraint per
		     sub-field of the variable.  */
		  gcc_assert (VEC_length (ce_s, *results) == 1);
		  origrhs = VEC_last (ce_s, *results);
		  tmp = *origrhs;
		  VEC_pop (ce_s, *results);
		  origvar = get_varinfo (origrhs->var);
		  for (; origvar; origvar = origvar->next)
		    {
		      tmp.var = origvar->id;
		      VEC_safe_push (ce_s, heap, *results, &tmp);
		    }
		}
	      /* Likewise expand to all fields when taking the address
		 of an aggregate or complex object.  */
	      else if (VEC_length (ce_s, *results) == 1
		       && (AGGREGATE_TYPE_P (pttype)
			   || TREE_CODE (pttype) == COMPLEX_TYPE))
		{
		  struct constraint_expr *origrhs;
		  varinfo_t origvar;
		  struct constraint_expr tmp;

		  gcc_assert (VEC_length (ce_s, *results) == 1);
		  origrhs = VEC_last (ce_s, *results);
		  tmp = *origrhs;
		  VEC_pop (ce_s, *results);
		  origvar = get_varinfo (origrhs->var);
		  for (; origvar; origvar = origvar->next)
		    {
		      tmp.var = origvar->id;
		      VEC_safe_push (ce_s, heap, *results, &tmp);
		    }
		}

	      /* Taking an address undoes one level of dereference:
		 &(*x) is x, &x is ADDRESSOF.  */
	      for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
		{
		  if (c->type == DEREF)
		    c->type = SCALAR;
		  else
		    c->type = ADDRESSOF;
		}
	      return;
	    }
	    break;
	  case CALL_EXPR:
	    /* XXX: In interprocedural mode, if we didn't have the
	       body, we would need to do *each pointer argument =
	       &ANYTHING added.  */
	    if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
	      {
		varinfo_t vi;
		/* Each allocation site gets one heap variable, reused
		   across repeat visits of the same call.  */
		tree heapvar = heapvar_lookup (t);

		if (heapvar == NULL)
		  {
		    heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
		    DECL_EXTERNAL (heapvar) = 1;
		    get_var_ann (heapvar)->is_heapvar = 1;
		    if (gimple_referenced_vars (cfun))
		      add_referenced_var (heapvar);
		    heapvar_insert (t, heapvar);
		  }

		temp.var = create_variable_info_for (heapvar,
						     alias_get_name (heapvar));

		vi = get_varinfo (temp.var);
		vi->is_artificial_var = 1;
		vi->is_heap_var = 1;
		temp.type = ADDRESSOF;
		temp.offset = 0;
		VEC_safe_push (ce_s, heap, *results, &temp);
		return;
	      }
	    else
	      {
		/* A non-allocation call's return value could be
		   anything.  */
		temp.var = anything_id;
		temp.type = SCALAR;
		temp.offset = 0;
		VEC_safe_push (ce_s, heap, *results, &temp);
		return;
	      }
	    break;
	  default:
	    {
	      /* Unknown expression: conservatively &ANYTHING.  */
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case INDIRECT_REF:
	    {
	      get_constraint_for (TREE_OPERAND (t, 0), results);
	      do_deref (results);
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results);
	    return;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_unary:
      {
	switch (TREE_CODE (t))
	  {
	  case NOP_EXPR:
	  case CONVERT_EXPR:
	  case NON_LVALUE_EXPR:
	    {
	      tree op = TREE_OPERAND (t, 0);

	      /* Cast from non-pointer to pointers are bad news for us.
		 Anything else, we see through */
	      if (!(POINTER_TYPE_P (TREE_TYPE (t))
		    && ! POINTER_TYPE_P (TREE_TYPE (op))))
		{
		  get_constraint_for (op, results);
		  return;
		}

	      /* FALLTHRU  */
	    }
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case PHI_NODE:
	    {
	      /* A PHI stands for its result variable.  */
	      get_constraint_for (PHI_RESULT (t), results);
	      return;
	    }
	    break;
	  case SSA_NAME:
	    {
	      struct constraint_expr temp;
	      temp = get_constraint_exp_from_ssa_var (t);
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	    break;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_declaration:
      {
	struct constraint_expr temp;
	temp = get_constraint_exp_from_ssa_var (t);
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
    default:
      {
	/* Anything we don't understand: conservatively &ANYTHING.  */
	temp.type = ADDRESSOF;
	temp.var = anything_id;
	temp.offset = 0;
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
    }
}
2768
2769
2770 /* Handle the structure copy case where we have a simple structure copy
2771 between LHS and RHS that is of SIZE (in bits)
2772
2773 For each field of the lhs variable (lhsfield)
2774 For each field of the rhs variable at lhsfield.offset (rhsfield)
2775 add the constraint lhsfield = rhsfield
2776
2777 If we fail due to some kind of type unsafety or other thing we
2778 can't handle, return false. We expect the caller to collapse the
2779 variable in that case. */
2780
2781 static bool
2782 do_simple_structure_copy (const struct constraint_expr lhs,
2783 const struct constraint_expr rhs,
2784 const unsigned HOST_WIDE_INT size)
2785 {
2786 varinfo_t p = get_varinfo (lhs.var);
2787 unsigned HOST_WIDE_INT pstart, last;
2788 pstart = p->offset;
2789 last = p->offset + size;
2790 for (; p && p->offset < last; p = p->next)
2791 {
2792 varinfo_t q;
2793 struct constraint_expr templhs = lhs;
2794 struct constraint_expr temprhs = rhs;
2795 unsigned HOST_WIDE_INT fieldoffset;
2796
2797 templhs.var = p->id;
2798 q = get_varinfo (temprhs.var);
2799 fieldoffset = p->offset - pstart;
2800 q = first_vi_for_offset (q, q->offset + fieldoffset);
2801 if (!q)
2802 return false;
2803 temprhs.var = q->id;
2804 process_constraint (new_constraint (templhs, temprhs));
2805 }
2806 return true;
2807 }
2808
2809
2810 /* Handle the structure copy case where we have a structure copy between a
2811 aggregate on the LHS and a dereference of a pointer on the RHS
2812 that is of SIZE (in bits)
2813
2814 For each field of the lhs variable (lhsfield)
2815 rhs.offset = lhsfield->offset
2816 add the constraint lhsfield = rhs
2817 */
2818
2819 static void
2820 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2821 const struct constraint_expr rhs,
2822 const unsigned HOST_WIDE_INT size)
2823 {
2824 varinfo_t p = get_varinfo (lhs.var);
2825 unsigned HOST_WIDE_INT pstart,last;
2826 pstart = p->offset;
2827 last = p->offset + size;
2828
2829 for (; p && p->offset < last; p = p->next)
2830 {
2831 varinfo_t q;
2832 struct constraint_expr templhs = lhs;
2833 struct constraint_expr temprhs = rhs;
2834 unsigned HOST_WIDE_INT fieldoffset;
2835
2836
2837 if (templhs.type == SCALAR)
2838 templhs.var = p->id;
2839 else
2840 templhs.offset = p->offset;
2841
2842 q = get_varinfo (temprhs.var);
2843 fieldoffset = p->offset - pstart;
2844 temprhs.offset += fieldoffset;
2845 process_constraint (new_constraint (templhs, temprhs));
2846 }
2847 }
2848
2849 /* Handle the structure copy case where we have a structure copy
2850 between a aggregate on the RHS and a dereference of a pointer on
2851 the LHS that is of SIZE (in bits)
2852
2853 For each field of the rhs variable (rhsfield)
2854 lhs.offset = rhsfield->offset
2855 add the constraint lhs = rhsfield
2856 */
2857
2858 static void
2859 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2860 const struct constraint_expr rhs,
2861 const unsigned HOST_WIDE_INT size)
2862 {
2863 varinfo_t p = get_varinfo (rhs.var);
2864 unsigned HOST_WIDE_INT pstart,last;
2865 pstart = p->offset;
2866 last = p->offset + size;
2867
2868 for (; p && p->offset < last; p = p->next)
2869 {
2870 varinfo_t q;
2871 struct constraint_expr templhs = lhs;
2872 struct constraint_expr temprhs = rhs;
2873 unsigned HOST_WIDE_INT fieldoffset;
2874
2875
2876 if (temprhs.type == SCALAR)
2877 temprhs.var = p->id;
2878 else
2879 temprhs.offset = p->offset;
2880
2881 q = get_varinfo (templhs.var);
2882 fieldoffset = p->offset - pstart;
2883 templhs.offset += fieldoffset;
2884 process_constraint (new_constraint (templhs, temprhs));
2885 }
2886 }
2887
2888 /* Sometimes, frontends like to give us bad type information. This
2889 function will collapse all the fields from VAR to the end of VAR,
2890 into VAR, so that we treat those fields as a single variable.
2891 We return the variable they were collapsed into. */
2892
2893 static unsigned int
2894 collapse_rest_of_var (unsigned int var)
2895 {
2896 varinfo_t currvar = get_varinfo (var);
2897 varinfo_t field;
2898
2899 for (field = currvar->next; field; field = field->next)
2900 {
2901 if (dump_file)
2902 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2903 field->name, currvar->name);
2904
2905 gcc_assert (!field->collapsed_to);
2906 field->collapsed_to = currvar;
2907 }
2908
2909 currvar->next = NULL;
2910 currvar->size = currvar->fullsize - currvar->offset;
2911
2912 return currvar->id;
2913 }
2914
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the copy.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr lhs, rhs, tmp;
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
  varinfo_t p;
  unsigned HOST_WIDE_INT lhssize;
  unsigned HOST_WIDE_INT rhssize;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for (rhsop, &rhsc);
  /* Structure copies produce exactly one constraint per side.  */
  gcc_assert (VEC_length (ce_s, lhsc) == 1);
  gcc_assert (VEC_length (ce_s, rhsc) == 1);
  lhs = *(VEC_last (ce_s, lhsc));
  rhs = *(VEC_last (ce_s, rhsc));

  VEC_free (ce_s, heap, lhsc);
  VEC_free (ce_s, heap, rhsc);

  /* If we have special var = x, swap it around.  */
  if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
    {
      tmp = lhs;
      lhs = rhs;
      rhs = tmp;
    }

  /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
     possible it's something we could handle.  However, most cases falling
     into this are dealing with transparent unions, which are slightly
     weird.  */
  if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
    {
      /* NOTE(review): the first assignment is a no-op — rhs.type is
	 already ADDRESSOF per the condition above.  */
      rhs.type = ADDRESSOF;
      rhs.var = anything_id;
    }

  /* If the RHS is a special var, or an addressof, set all the LHS fields to
     that special var.  */
  if (rhs.var <= integer_id)
    {
      for (p = get_varinfo (lhs.var); p; p = p->next)
	{
	  struct constraint_expr templhs = lhs;
	  struct constraint_expr temprhs = rhs;

	  if (templhs.type == SCALAR )
	    templhs.var = p->id;
	  else
	    templhs.offset += p->offset;
	  process_constraint (new_constraint (templhs, temprhs));
	}
    }
  else
    {
      tree rhstype = TREE_TYPE (rhsop);
      tree lhstype = TREE_TYPE (lhsop);
      tree rhstypesize;
      tree lhstypesize;

      lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
      rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);

      /* If we have a variably sized types on the rhs or lhs, and a deref
	 constraint, add the constraint, lhsconstraint = &ANYTHING.
	 This is conservatively correct because either the lhs is an unknown
	 sized var (if the constraint is SCALAR), or the lhs is a DEREF
	 constraint, and every variable it can point to must be unknown sized
	 anyway, so we don't need to worry about fields at all.  */
      if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
	  || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
	{
	  rhs.var = anything_id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  return;
	}

      /* The size only really matters insofar as we don't set more or less of
	 the variable.  If we hit an unknown size var, the size should be the
	 whole darn thing.  */
      if (get_varinfo (rhs.var)->is_unknown_size_var)
	rhssize = ~0;
      else
	rhssize = TREE_INT_CST_LOW (rhstypesize);

      if (get_varinfo (lhs.var)->is_unknown_size_var)
	lhssize = ~0;
      else
	lhssize = TREE_INT_CST_LOW (lhstypesize);


      if (rhs.type == SCALAR && lhs.type == SCALAR)
	{
	  /* Field-by-field copy; if the field layouts are too
	     incompatible, collapse both sides and emit one blanket
	     constraint.  */
	  if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
	    {
	      lhs.var = collapse_rest_of_var (lhs.var);
	      rhs.var = collapse_rest_of_var (rhs.var);
	      lhs.offset = 0;
	      rhs.offset = 0;
	      lhs.type = SCALAR;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	}
      else if (lhs.type != DEREF && rhs.type == DEREF)
	do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else if (lhs.type == DEREF && rhs.type != DEREF)
	do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else
	{
	  tree pointedtotype = lhstype;
	  tree tmpvar;

	  /* *lhs = *rhs: split through a temporary, copying in two
	     steps.  */
	  gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
	  tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
	  do_structure_copy (tmpvar, rhsop);
	  do_structure_copy (lhsop, tmpvar);
	}
    }
}
3040
/* Update related alias information kept in AI.  This is used when
   building name tags, alias sets and deciding grouping heuristics.
   STMT is the statement to process.  This function also updates
   ADDRESSABLE_VARS.  */

static void
update_alias_info (tree stmt, struct alias_info *ai)
{
  bitmap addr_taken;
  use_operand_p use_p;
  ssa_op_iter iter;
  enum escape_type stmt_escape_type = is_escape_site (stmt);

  /* Count call sites; pure/const calls are tallied separately.  */
  if (stmt_escape_type == ESCAPE_TO_CALL
      || stmt_escape_type == ESCAPE_TO_PURE_CONST)
    {
      ai->num_calls_found++;
      if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
	ai->num_pure_const_calls_found++;
    }

  /* Mark all the variables whose address are taken by the statement.  */
  addr_taken = addresses_taken (stmt);
  if (addr_taken)
    {
      bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);

      /* If STMT is an escape point, all the addresses taken by it are
	 call-clobbered.  */
      if (stmt_escape_type != NO_ESCAPE)
	{
	  bitmap_iterator bi;
	  unsigned i;

	  EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
	    {
	      tree rvar = referenced_var (i);
	      if (!unmodifiable_var_p (rvar))
		mark_call_clobbered (rvar, stmt_escape_type);
	    }
	}
    }

  /* Process each operand use.  If an operand may be aliased, keep
     track of how many times it's being used.  For pointers, determine
     whether they are dereferenced by the statement, or whether their
     value escapes, etc.  */
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
    {
      tree op, var;
      var_ann_t v_ann;
      struct ptr_info_def *pi;
      bool is_store, is_potential_deref;
      unsigned num_uses, num_derefs;

      op = USE_FROM_PTR (use_p);

      /* If STMT is a PHI node, OP may be an ADDR_EXPR.  If so, add it
	 to the set of addressable variables.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  bitmap addressable_vars = gimple_addressable_vars (cfun);

	  gcc_assert (TREE_CODE (stmt) == PHI_NODE);
	  gcc_assert (addressable_vars);

	  /* PHI nodes don't have annotations for pinning the set
	     of addresses taken, so we collect them here.

	     FIXME, should we allow PHI nodes to have annotations
	     so that they can be treated like regular statements?
	     Currently, they are treated as second-class
	     statements.  */
	  add_to_addressable_set (TREE_OPERAND (op, 0),
				  &addressable_vars);
	  continue;
	}

      /* Ignore constants.  */
      if (TREE_CODE (op) != SSA_NAME)
	continue;

      var = SSA_NAME_VAR (op);
      /* NOTE(review): v_ann is set here but never used below —
	 candidate for removal.  */
      v_ann = var_ann (var);

      /* The base variable of an SSA name must be a GIMPLE register, and thus
	 it cannot be aliased.  */
      gcc_assert (!may_be_aliased (var));

      /* We are only interested in pointers.  */
      if (!POINTER_TYPE_P (TREE_TYPE (op)))
	continue;

      pi = get_ptr_info (op);

      /* Add OP to AI->PROCESSED_PTRS, if it's not there already.  */
      if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
	{
	  SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
	  VEC_safe_push (tree, heap, ai->processed_ptrs, op);
	}

      /* If STMT is a PHI node, then it will not have pointer
	 dereferences and it will not be an escape point.  */
      if (TREE_CODE (stmt) == PHI_NODE)
	continue;

      /* Determine whether OP is a dereferenced pointer, and if STMT
	 is an escape point, whether OP escapes.  */
      count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);

      /* Handle a corner case involving address expressions of the
	 form '&PTR->FLD'.  The problem with these expressions is that
	 they do not represent a dereference of PTR.  However, if some
	 other transformation propagates them into an INDIRECT_REF
	 expression, we end up with '*(&PTR->FLD)' which is folded
	 into 'PTR->FLD'.

	 So, if the original code had no other dereferences of PTR,
	 the aliaser will not create memory tags for it, and when
	 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
	 memory operations will receive no VDEF/VUSE operands.

	 One solution would be to have count_uses_and_derefs consider
	 &PTR->FLD a dereference of PTR.  But that is wrong, since it
	 is not really a dereference but an offset calculation.

	 What we do here is to recognize these special ADDR_EXPR
	 nodes.  Since these expressions are never GIMPLE values (they
	 are not GIMPLE invariants), they can only appear on the RHS
	 of an assignment and their base address is always an
	 INDIRECT_REF expression.  */
      is_potential_deref = false;
      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	  && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
	  && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
	{
	  /* If the RHS if of the form &PTR->FLD and PTR == OP, then
	     this represents a potential dereference of PTR.  */
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	  tree base = get_base_address (TREE_OPERAND (rhs, 0));
	  if (TREE_CODE (base) == INDIRECT_REF
	      && TREE_OPERAND (base, 0) == op)
	    is_potential_deref = true;
	}

      if (num_derefs > 0 || is_potential_deref)
	{
	  /* Mark OP as dereferenced.  In a subsequent pass,
	     dereferenced pointers that point to a set of
	     variables will be assigned a name tag to alias
	     all the variables OP points to.  */
	  pi->is_dereferenced = 1;

	  /* If this is a store operation, mark OP as being
	     dereferenced to store, otherwise mark it as being
	     dereferenced to load.  */
	  if (is_store)
	    pointer_set_insert (ai->dereferenced_ptrs_store, var);
	  else
	    pointer_set_insert (ai->dereferenced_ptrs_load, var);
	}

      if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
	{
	  /* If STMT is an escape point and STMT contains at
	     least one direct use of OP, then the value of OP
	     escapes and so the pointed-to variables need to
	     be marked call-clobbered.  */
	  pi->value_escapes_p = 1;
	  pi->escape_mask |= stmt_escape_type;

	  /* If the statement makes a function call, assume
	     that pointer OP will be dereferenced in a store
	     operation inside the called function.  */
	  if (get_call_expr_in (stmt)
	      || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
	    {
	      pointer_set_insert (ai->dereferenced_ptrs_store, var);
	      pi->is_dereferenced = 1;
	    }
	}
    }

  /* PHI nodes never store to memory, so we are done with them.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  /* Mark stored variables in STMT as being written to and update the
     reference counter for potentially aliased symbols in STMT.  */
  if (stmt_references_memory_p (stmt) && STORED_SYMS (stmt))
    {
      unsigned i;
      bitmap_iterator bi;
      EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
	pointer_set_insert (ai->written_vars, referenced_var (i));
    }
}
3238
3239
3240 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3241 Expressions of the type PTR + CST can be handled in two ways:
3242
3243 1- If the constraint for PTR is ADDRESSOF for a non-structure
3244 variable, then we can use it directly because adding or
3245 subtracting a constant may not alter the original ADDRESSOF
3246 constraint (i.e., pointer arithmetic may not legally go outside
3247 an object's boundaries).
3248
3249 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3250 then if CST is a compile-time constant that can be used as an
3251 offset, we can determine which sub-variable will be pointed-to
3252 by the expression.
3253
3254 Return true if the expression is handled. For any other kind of
3255 expression, return false so that each operand can be added as a
3256 separate constraint by the caller. */
3257
3258 static bool
3259 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3260 {
3261 tree op0, op1;
3262 struct constraint_expr *c, *c2;
3263 unsigned int i = 0;
3264 unsigned int j = 0;
3265 VEC (ce_s, heap) *temp = NULL;
3266 unsigned int rhsoffset = 0;
3267
3268 if (TREE_CODE (expr) != PLUS_EXPR
3269 && TREE_CODE (expr) != MINUS_EXPR)
3270 return false;
3271
3272 op0 = TREE_OPERAND (expr, 0);
3273 op1 = TREE_OPERAND (expr, 1);
3274
3275 get_constraint_for (op0, &temp);
3276 if (POINTER_TYPE_P (TREE_TYPE (op0))
3277 && TREE_CODE (op1) == INTEGER_CST
3278 && TREE_CODE (expr) == PLUS_EXPR)
3279 {
3280 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3281 }
3282 else
3283 return false;
3284
3285
3286 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3287 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3288 {
3289 if (c2->type == ADDRESSOF && rhsoffset != 0)
3290 {
3291 varinfo_t temp = get_varinfo (c2->var);
3292
3293 /* An access one after the end of an array is valid,
3294 so simply punt on accesses we cannot resolve. */
3295 temp = first_vi_for_offset (temp, rhsoffset);
3296 if (temp == NULL)
3297 continue;
3298 c2->var = temp->id;
3299 c2->offset = 0;
3300 }
3301 else
3302 c2->offset = rhsoffset;
3303 process_constraint (new_constraint (*c, *c2));
3304 }
3305
3306 VEC_free (ce_s, heap, temp);
3307
3308 return true;
3309 }
3310
3311
/* Walk statement ORIGT setting up aliasing constraints according to
   the references found in it.  This function is the main part of the
   constraint builder.  */
3316
static void
find_func_aliases (tree origt)
{
  tree t = origt;
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  struct constraint_expr *c;

  /* Constrain the returned value rather than the RETURN_EXPR
     wrapper itself.  */
  if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
    t = TREE_OPERAND (t, 0);

  /* Now build constraints expressions.  */
  if (TREE_CODE (t) == PHI_NODE)
    {
      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));

      /* Only care about pointers and structures containing
	 pointers.  */
      if (could_have_pointers (PHI_RESULT (t)))
	{
	  int i;
	  unsigned int j;

	  /* For a phi node, assign all the arguments to
	     the result.  */
	  get_constraint_for (PHI_RESULT (t), &lhsc);
	  for (i = 0; i < PHI_NUM_ARGS (t); i++)
	    {
	      tree rhstype;
	      tree strippedrhs = PHI_ARG_DEF (t, i);

	      STRIP_NOPS (strippedrhs);
	      rhstype = TREE_TYPE (strippedrhs);
	      get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);

	      /* Emit LHS = RHS for every pair, draining RHSC so it is
		 empty for the next argument.  */
	      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
		{
		  struct constraint_expr *c2;
		  while (VEC_length (ce_s, rhsc) > 0)
		    {
		      c2 = VEC_last (ce_s, rhsc);
		      process_constraint (new_constraint (*c, *c2));
		      VEC_pop (ce_s, rhsc);
		    }
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases, either a
     modify_expr when we are returning a value, or just a plain
     call_expr when we are not.  */
  else if (in_ipa_mode
	   && ((TREE_CODE (t) == GIMPLE_MODIFY_STMT
		&& TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
		&& !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
		     & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
	       || (TREE_CODE (t) == CALL_EXPR
		   && !(call_expr_flags (t)
			& (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
    {
      tree lhsop;
      tree rhsop;
      tree arglist;
      varinfo_t fi;
      int i = 1;
      tree decl;
      if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	{
	  lhsop = GIMPLE_STMT_OPERAND (t, 0);
	  rhsop = GIMPLE_STMT_OPERAND (t, 1);
	}
      else
	{
	  lhsop = NULL;
	  rhsop = t;
	}
      decl = get_callee_fndecl (rhsop);

      /* If we can directly resolve the function being called, do so.
	 Otherwise, it must be some sort of indirect expression that
	 we should still be able to handle.  */
      if (decl)
	{
	  fi = get_vi_for_tree (decl);
	}
      else
	{
	  decl = TREE_OPERAND (rhsop, 0);
	  fi = get_vi_for_tree (decl);
	}

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      arglist = TREE_OPERAND (rhsop, 1);

      for (;arglist; arglist = TREE_CHAIN (arglist))
	{
	  tree arg = TREE_VALUE (arglist);
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;

	  get_constraint_for (arg, &rhsc);
	  /* For an indirect callee, parameter I is modeled as a
	     dereference of FI at offset I; for a known FUNCTION_DECL
	     the parameter's own varinfo can be named directly.  */
	  if (TREE_CODE (decl) != FUNCTION_DECL)
	    {
	      lhs.type = DEREF;
	      lhs.var = fi->id;
	      lhs.offset = i;
	    }
	  else
	    {
	      lhs.type = SCALAR;
	      lhs.var = first_vi_for_offset (fi, i)->id;
	      lhs.offset = 0;
	    }
	  while (VEC_length (ce_s, rhsc) != 0)
	    {
	      rhsp = VEC_last (ce_s, rhsc);
	      process_constraint (new_constraint (lhs, *rhsp));
	      VEC_pop (ce_s, rhsc);
	    }
	  i++;
	}
      /* If we are returning a value, assign it to the result.  */
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  unsigned int j = 0;

	  get_constraint_for (lhsop, &lhsc);
	  if (TREE_CODE (decl) != FUNCTION_DECL)
	    {
	      rhs.type = DEREF;
	      rhs.var = fi->id;
	      rhs.offset = i;
	    }
	  else
	    {
	      rhs.type = SCALAR;
	      rhs.var = first_vi_for_offset (fi, i)->id;
	      rhs.offset = 0;
	    }
	  for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
	    process_constraint (new_constraint (*lhsp, rhs));
	}
    }
  /* Otherwise, just a regular assignment statement.  */
  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
      tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
      int i;

      if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
	   || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
	  && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
	      || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
	{
	  do_structure_copy (lhsop, rhsop);
	}
      else
	{
	  /* Only care about operations with pointers, structures
	     containing pointers, dereferences, and call expressions.  */
	  if (could_have_pointers (lhsop)
	      || TREE_CODE (rhsop) == CALL_EXPR)
	    {
	      get_constraint_for (lhsop, &lhsc);
	      switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
		{
		  /* RHS that consist of unary operations,
		     exceptional types, or bare decls/constants, get
		     handled directly by get_constraint_for.  */
		  case tcc_reference:
		  case tcc_declaration:
		  case tcc_constant:
		  case tcc_exceptional:
		  case tcc_expression:
		  case tcc_unary:
		      {
			unsigned int j;

			get_constraint_for (rhsop, &rhsc);
			for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
			  {
			    struct constraint_expr *c2;
			    unsigned int k;

			    for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
			      process_constraint (new_constraint (*c, *c2));
			  }

		      }
		    break;

		  case tcc_binary:
		      {
			/* For pointer arithmetic of the form
			   PTR + CST, we can simply use PTR's
			   constraint because pointer arithmetic is
			   not allowed to go out of bounds.  */
			if (handle_ptr_arith (lhsc, rhsop))
			  break;
		      }
		    /* FALLTHRU */

		  /* Otherwise, walk each operand.  Notice that we
		     can't use the operand interface because we need
		     to process expressions other than simple operands
		     (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
		  default:
		    for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
		      {
			tree op = TREE_OPERAND (rhsop, i);
			unsigned int j;

			gcc_assert (VEC_length (ce_s, rhsc) == 0);
			get_constraint_for (op, &rhsc);
			for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
			  {
			    struct constraint_expr *c2;
			    while (VEC_length (ce_s, rhsc) > 0)
			      {
				c2 = VEC_last (ce_s, rhsc);
				process_constraint (new_constraint (*c, *c2));
				VEC_pop (ce_s, rhsc);
			      }
			  }
		      }
		}
	    }
	}
    }

  /* After promoting variables and computing aliasing we will
     need to re-scan most statements.  FIXME: Try to minimize the
     number of statements re-scanned.  It's not really necessary to
     re-scan *all* statements.  */
  mark_stmt_modified (origt);
  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
}
3560
3561
3562 /* Find the first varinfo in the same variable as START that overlaps with
3563 OFFSET.
3564 Effectively, walk the chain of fields for the variable START to find the
3565 first field that overlaps with OFFSET.
3566 Return NULL if we can't find one. */
3567
3568 static varinfo_t
3569 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3570 {
3571 varinfo_t curr = start;
3572 while (curr)
3573 {
3574 /* We may not find a variable in the field list with the actual
3575 offset when when we have glommed a structure to a variable.
3576 In that case, however, offset should still be within the size
3577 of the variable. */
3578 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3579 return curr;
3580 curr = curr->next;
3581 }
3582 return NULL;
3583 }
3584
3585
3586 /* Insert the varinfo FIELD into the field list for BASE, at the front
3587 of the list. */
3588
3589 static void
3590 insert_into_field_list (varinfo_t base, varinfo_t field)
3591 {
3592 varinfo_t prev = base;
3593 varinfo_t curr = base->next;
3594
3595 field->next = curr;
3596 prev->next = field;
3597 }
3598
3599 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3600 offset. */
3601
3602 static void
3603 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3604 {
3605 varinfo_t prev = base;
3606 varinfo_t curr = base->next;
3607
3608 if (curr == NULL)
3609 {
3610 prev->next = field;
3611 field->next = NULL;
3612 }
3613 else
3614 {
3615 while (curr)
3616 {
3617 if (field->offset <= curr->offset)
3618 break;
3619 prev = curr;
3620 curr = curr->next;
3621 }
3622 field->next = prev->next;
3623 prev->next = field;
3624 }
3625 }
3626
3627 /* qsort comparison function for two fieldoff's PA and PB */
3628
3629 static int
3630 fieldoff_compare (const void *pa, const void *pb)
3631 {
3632 const fieldoff_s *foa = (const fieldoff_s *)pa;
3633 const fieldoff_s *fob = (const fieldoff_s *)pb;
3634 HOST_WIDE_INT foasize, fobsize;
3635
3636 if (foa->offset != fob->offset)
3637 return foa->offset - fob->offset;
3638
3639 foasize = TREE_INT_CST_LOW (foa->size);
3640 fobsize = TREE_INT_CST_LOW (fob->size);
3641 return foasize - fobsize;
3642 }
3643
3644 /* Sort a fieldstack according to the field offset and sizes. */
3645 void
3646 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3647 {
3648 qsort (VEC_address (fieldoff_s, fieldstack),
3649 VEC_length (fieldoff_s, fieldstack),
3650 sizeof (fieldoff_s),
3651 fieldoff_compare);
3652 }
3653
3654 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3655 of TYPE onto fieldstack, recording their offsets along the way.
3656 OFFSET is used to keep track of the offset in this entire structure, rather
3657 than just the immediately containing structure. Returns the number
3658 of fields pushed.
3659 HAS_UNION is set to true if we find a union type as a field of
3660 TYPE. */
3661
int
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
			     HOST_WIDE_INT offset, bool *has_union)
{
  tree field;
  int count = 0;

  /* A complex value is modeled as two fields, the real and imaginary
     parts, each with the size of the component type.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      fieldoff_s *real_part, *img_part;
      real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      real_part->type = TREE_TYPE (type);
      real_part->size = TYPE_SIZE (TREE_TYPE (type));
      real_part->offset = offset;
      real_part->decl = NULL_TREE;

      img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      img_part->type = TREE_TYPE (type);
      img_part->size = TYPE_SIZE (TREE_TYPE (type));
      img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
      img_part->decl = NULL_TREE;

      return 2;
    }

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree sz = TYPE_SIZE (type);
      tree elsz = TYPE_SIZE (TREE_TYPE (type));
      HOST_WIDE_INT nr;
      int i;

      /* Punt (push nothing) if the array or element size is missing,
	 non-constant or zero.  */
      if (! sz
	  || ! host_integerp (sz, 1)
	  || TREE_INT_CST_LOW (sz) == 0
	  || ! elsz
	  || ! host_integerp (elsz, 1)
	  || TREE_INT_CST_LOW (elsz) == 0)
	return 0;

      /* Likewise for arrays too large to model element-wise.  */
      nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
      if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
	return 0;

      for (i = 0; i < nr; ++i)
	{
	  bool push = false;
	  int pushed = 0;

	  if (has_union
	      && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
		  || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
	    *has_union = true;

	  if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
	    push = true;
	  else if (!(pushed = push_fields_onto_fieldstack
		     (TREE_TYPE (type), fieldstack,
		      offset + i * TREE_INT_CST_LOW (elsz), has_union)))
	    /* Empty structures may have actual size, like in C++.  So
	       see if we didn't push any subfields and the size is
	       nonzero, push the field onto the stack */
	    push = true;

	  if (push)
	    {
	      fieldoff_s *pair;

	      pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	      pair->type = TREE_TYPE (type);
	      pair->size = elsz;
	      pair->decl = NULL_TREE;
	      pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
	      count++;
	    }
	  else
	    count += pushed;
	}

      return count;
    }

  /* Otherwise TYPE is a record: recurse into each FIELD_DECL,
     accumulating the running bit offset.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	int pushed = 0;

	if (has_union
	    && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
		|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
	  *has_union = true;

	if (!var_can_have_subvars (field))
	  push = true;
	else if (!(pushed = push_fields_onto_fieldstack
		   (TREE_TYPE (field), fieldstack,
		    offset + bitpos_of_field (field), has_union))
		 && DECL_SIZE (field)
		 && !integer_zerop (DECL_SIZE (field)))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair;

	    pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	    pair->type = TREE_TYPE (field);
	    pair->size = DECL_SIZE (field);
	    pair->decl = field;
	    pair->offset = offset + bitpos_of_field (field);
	    count++;
	  }
	else
	  count += pushed;
      }

  return count;
}
3784
3785 /* Create a constraint from ANYTHING variable to VI. */
3786 static void
3787 make_constraint_from_anything (varinfo_t vi)
3788 {
3789 struct constraint_expr lhs, rhs;
3790
3791 lhs.var = vi->id;
3792 lhs.offset = 0;
3793 lhs.type = SCALAR;
3794
3795 rhs.var = anything_id;
3796 rhs.offset = 0;
3797 rhs.type = ADDRESSOF;
3798 process_constraint (new_constraint (lhs, rhs));
3799 }
3800
3801 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3802 if it is a varargs function. */
3803
3804 static unsigned int
3805 count_num_arguments (tree decl, bool *is_varargs)
3806 {
3807 unsigned int i = 0;
3808 tree t;
3809
3810 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3811 t;
3812 t = TREE_CHAIN (t))
3813 {
3814 if (TREE_VALUE (t) == void_type_node)
3815 break;
3816 i++;
3817 }
3818
3819 if (!t)
3820 *is_varargs = true;
3821 return i;
3822 }
3823
/* Create the variable info structures for function DECL, using NAME
   as a naming base, and return the index of the varinfo created for
   the function itself.  */
3826
static unsigned int
create_function_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;

  /* Create the variable info.  */

  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = 0;
  vi->size = 1;
  /* The function is modeled as a block of "fields": one slot for the
     function itself plus one per argument (and later, the result).  */
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);

  stats.total_vars++;

  /* If it's varargs, we don't know how many arguments it has, so we
     can't do much.
  */
  if (is_varargs)
    {
      vi->fullsize = ~0;
      vi->size = ~0;
      vi->is_unknown_size_var = true;
      return index;
    }


  arg = DECL_ARGUMENTS (decl);

  /* Set up variables for each argument.  */
  for (i = 1; i < vi->fullsize; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      /* Fall back to the function decl itself when there is no
	 DECL_ARGUMENTS entry for this position.  */
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.arg%d", name, i-1);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newindex, newname);
      argvi->decl = argdecl;
      VEC_safe_push (varinfo_t, heap, varmap, argvi);
      argvi->offset = i;
      argvi->size = 1;
      argvi->fullsize = vi->fullsize;
      argvi->has_union = false;
      insert_into_field_list_sorted (vi, argvi);
      stats.total_vars ++;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = TREE_CHAIN (arg);
	}
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree resultdecl = decl;

      /* The result occupies one more slot past the arguments.  */
      vi->fullsize ++;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newindex, newname);
      resultvi->decl = resultdecl;
      VEC_safe_push (varinfo_t, heap, varmap, resultvi);
      resultvi->offset = i;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->has_union = false;
      insert_into_field_list_sorted (vi, resultvi);
      stats.total_vars ++;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }
  return index;
}
3930
3931
3932 /* Return true if FIELDSTACK contains fields that overlap.
3933 FIELDSTACK is assumed to be sorted by offset. */
3934
3935 static bool
3936 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3937 {
3938 fieldoff_s *fo = NULL;
3939 unsigned int i;
3940 HOST_WIDE_INT lastoffset = -1;
3941
3942 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3943 {
3944 if (fo->offset == lastoffset)
3945 return true;
3946 lastoffset = fo->offset;
3947 }
3948 return false;
3949 }
3950
3951 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3952 This will also create any varinfo structures necessary for fields
3953 of DECL. */
3954
static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree decltype = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
  bool notokay = false;
  bool hasunion;
  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
  VEC (fieldoff_s,heap) *fieldstack = NULL;

  /* Functions get their own creation path in IPA mode.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
    return create_function_info_for (decl, name);

  hasunion = TREE_CODE (decltype) == UNION_TYPE
	     || TREE_CODE (decltype) == QUAL_UNION_TYPE;
  /* Collect the fields of DECL for field-sensitive analysis, unless a
     union is involved (unions are not handled field-sensitively).  */
  if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
    {
      push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
      if (hasunion)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  notokay = true;
	}
    }


  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = hasunion;
  if (!declsize
      || TREE_CODE (declsize) != INTEGER_CST
      || TREE_CODE (decltype) == UNION_TYPE
      || TREE_CODE (decltype) == QUAL_UNION_TYPE)
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
    }

  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);
  /* Globals may point anywhere unless whole-program IPA analysis can
     see every access.  */
  if (is_global && (!flag_whole_program || !in_ipa_mode))
    make_constraint_from_anything (vi);

  stats.total_vars++;
  if (use_field_sensitive
      && !notokay
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl)
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      unsigned int newindex = VEC_length (varinfo_t, varmap);
      fieldoff_s *fo = NULL;
      unsigned int i;

      /* Reject the fieldstack if any field has a missing,
	 non-constant size or a negative offset.  */
      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (! fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}


      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      if (fo == NULL || notokay)
	{
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return index;
	}

      /* The head varinfo represents the first field; create a varinfo
	 for each remaining field, linking them into VI's field list.
	 Iterating from the back while inserting at the front keeps the
	 list sorted by offset.  */
      vi->size = TREE_INT_CST_LOW (fo->size);
      vi->offset = fo->offset;
      for (i = VEC_length (fieldoff_s, fieldstack) - 1;
	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
	   i--)
	{
	  varinfo_t newvi;
	  const char *newname = "NULL";
	  char *tempname;

	  newindex = VEC_length (varinfo_t, varmap);
	  /* Only build the pretty field names when they will actually
	     be dumped.  */
	  if (dump_file)
	    {
	      if (fo->decl)
		asprintf (&tempname, "%s.%s",
			  vi->name, alias_get_name (fo->decl));
	      else
		asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
			  vi->name, fo->offset);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	  newvi = new_var_info (decl, newindex, newname);
	  newvi->offset = fo->offset;
	  newvi->size = TREE_INT_CST_LOW (fo->size);
	  newvi->fullsize = vi->fullsize;
	  insert_into_field_list (vi, newvi);
	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
	  if (is_global && (!flag_whole_program || !in_ipa_mode))
	    make_constraint_from_anything (newvi);

	  stats.total_vars++;
	}
      VEC_free (fieldoff_s, heap, fieldstack);
    }
  return index;
}
4096
4097 /* Print out the points-to solution for VAR to FILE. */
4098
4099 void
4100 dump_solution_for_var (FILE *file, unsigned int var)
4101 {
4102 varinfo_t vi = get_varinfo (var);
4103 unsigned int i;
4104 bitmap_iterator bi;
4105
4106 if (find (var) != var)
4107 {
4108 varinfo_t vipt = get_varinfo (find (var));
4109 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4110 }
4111 else
4112 {
4113 fprintf (file, "%s = { ", vi->name);
4114 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4115 {
4116 fprintf (file, "%s ", get_varinfo (i)->name);
4117 }
4118 fprintf (file, "}\n");
4119 }
4120 }
4121
4122 /* Print the points-to solution for VAR to stdout. */
4123
void
debug_solution_for_var (unsigned int var)
{
  /* Convenience wrapper for calling from the debugger.  */
  dump_solution_for_var (stdout, var);
}
4129
4130 /* Create varinfo structures for all of the variables in the
4131 function for intraprocedural mode. */
4132
static void
intra_create_variable_infos (void)
{
  tree t;
  struct constraint_expr lhs, rhs;

  /* For each incoming pointer argument arg, ARG = ANYTHING or a
     dummy variable if flag_argument_noalias > 2.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      varinfo_t p;

      if (!could_have_pointers (t))
	continue;

      /* With flag_argument_noalias greater than two means that the incoming
         argument cannot alias anything except for itself so create a HEAP
         variable.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && flag_argument_noalias > 2)
	{
	  varinfo_t vi;
	  tree heapvar = heapvar_lookup (t);

	  lhs.offset = 0;
	  lhs.type = SCALAR;
	  lhs.var  = get_vi_for_tree (t)->id;

	  /* Create and cache the dummy heap variable on first use.  */
	  if (heapvar == NULL_TREE)
	    {
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      get_var_ann (heapvar)->is_heapvar = 1;
	      DECL_EXTERNAL (heapvar) = 1;
	      if (gimple_referenced_vars (cfun))
		add_referenced_var (heapvar);
	      heapvar_insert (t, heapvar);
	    }
	  vi = get_vi_for_tree (heapvar);
	  vi->is_artificial_var = 1;
	  vi->is_heap_var = 1;
	  rhs.var = vi->id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  /* Emit FIELD = &HEAPVAR for the argument and each of its
	     fields.  */
	  for (p = get_varinfo (lhs.var); p; p = p->next)
	    {
	      struct constraint_expr temp = lhs;
	      temp.var = p->id;
	      process_constraint (new_constraint (temp, rhs));
	    }
	}
      else
	{
	  varinfo_t arg_vi = get_vi_for_tree (t);

	  for (p = arg_vi; p; p = p->next)
	    make_constraint_from_anything (p);
	}
    }
}
4193
4194 /* Set bits in INTO corresponding to the variable uids in solution set
4195 FROM, which came from variable PTR.
4196 For variables that are actually dereferenced, we also use type
4197 based alias analysis to prune the points-to sets. */
4198
static void
set_uids_in_ptset (tree ptr, bitmap into, bitmap from)
{
  unsigned int i;
  bitmap_iterator bi;
  subvar_t sv;
  HOST_WIDE_INT ptr_alias_set = get_alias_set (TREE_TYPE (ptr));

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);
      unsigned HOST_WIDE_INT var_alias_set;

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
	{
	  /* Variables containing unions may need to be converted to
	     their SFT's, because SFT's can have unions and we cannot.  */
	  for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
	    bitmap_set_bit (into, DECL_UID (sv->var));
	}
      else if (TREE_CODE (vi->decl) == VAR_DECL
	       || TREE_CODE (vi->decl) == PARM_DECL)
	{
	  if (var_can_have_subvars (vi->decl)
	      && get_subvars_for_var (vi->decl))
	    {
	      /* If VI->DECL is an aggregate for which we created
		 SFTs, add the SFT corresponding to VI->OFFSET.  */
	      tree sft = get_subvar_at (vi->decl, vi->offset);
	      if (sft)
		{
		  var_alias_set = get_alias_set (sft);
		  /* For directly dereferenced pointers, TBAA lets us
		     prune members whose alias sets cannot conflict.  */
		  if (!vi->directly_dereferenced
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (sft));
		}
	    }
	  else
	    {
	      /* Otherwise, just add VI->DECL to the alias set.
		 Don't type prune artificial vars.  */
	      if (vi->is_artificial_var)
		bitmap_set_bit (into, DECL_UID (vi->decl));
	      else
		{
		  var_alias_set = get_alias_set (vi->decl);
		  if (!vi->directly_dereferenced
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (vi->decl));
		}
	    }
	}
    }
}
4258
4259
/* Whether points-to information is currently available;
   find_what_p_points_to bails out when this is false.  */
static bool have_alias_info = false;

/* The list of SMT's that are in use by our pointer variables.  This
   is the set of SMT's for all pointers that can point to anything.   */
static bitmap used_smts;
4265
4266 /* Due to the ordering of points-to set calculation and SMT
4267 calculation being a bit co-dependent, we can't just calculate SMT
4268 used info whenever we want, we have to calculate it around the time
4269 that find_what_p_points_to is called. */
4270
4271 /* Mark which SMT's are in use by points-to anything variables. */
4272
void
set_used_smts (void)
{
  int i;
  varinfo_t vi;
  used_smts = BITMAP_ALLOC (&pta_obstack);

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
    {
      tree var = vi->decl;
      tree smt;
      var_ann_t va;
      struct ptr_info_def *pi = NULL;

      /* For parm decls, the pointer info may be under the default
	 def.  */
      if (TREE_CODE (vi->decl) == PARM_DECL
	  && gimple_default_def (cfun, var))
	pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
      else if (TREE_CODE (var) == SSA_NAME)
	pi = SSA_NAME_PTR_INFO (var);

      /* Skip the special variables and those without their own
	 solution set.  */
      if (vi->is_special_var || find (vi->id) != vi->id
	  || !SSA_VAR_P (var)
	  || (pi && !pi->is_dereferenced)
	  || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
	  || !POINTER_TYPE_P (TREE_TYPE (var)))
	continue;

      /* The SMT hangs off the underlying symbol, not the SSA name.  */
      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);

      va = var_ann (var);
      if (!va)
	continue;

      /* Record the SMT only for pointers whose solution includes
	 ANYTHING.  */
      smt = va->symbol_mem_tag;
      if (smt && bitmap_bit_p (vi->solution, anything_id))
	bitmap_set_bit (used_smts, DECL_UID (smt));
    }
}
4316
4317 /* Merge the necessary SMT's into the solution set for VI, which is
4318 P's varinfo. This involves merging all SMT's that are a subset of
4319 the SMT necessary for P. */
4320
static void
merge_smts_into (tree p, varinfo_t vi)
{
  unsigned int i;
  bitmap_iterator bi;
  tree smt;
  VEC(tree, gc) *aliases;
  tree var = p;

  /* The SMT lives on the underlying symbol of an SSA name.  */
  if (TREE_CODE (p) == SSA_NAME)
    var = SSA_NAME_VAR (p);

  smt = var_ann (var)->symbol_mem_tag;
  if (smt)
    {
      HOST_WIDE_INT smtset = get_alias_set (TREE_TYPE (smt));

      /* Need to set the SMT subsets first before this
	 will work properly.  */
      bitmap_set_bit (vi->finished_solution, DECL_UID (smt));
      /* Also merge every used SMT whose alias set is a subset of
	 this SMT's alias set.  */
      EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
	{
	  tree newsmt = referenced_var (i);
	  tree newsmttype = TREE_TYPE (newsmt);

	  if (alias_set_subset_of (get_alias_set (newsmttype),
				   smtset))
	    bitmap_set_bit (vi->finished_solution, i);
	}

      /* Finally pull in the SMT's own may-alias list.  */
      aliases = var_ann (smt)->may_aliases;
      if (aliases)
	{
	  size_t k;
	  tree al;
	  for (k = 0; VEC_iterate (tree, aliases, k, al); k++)
	    bitmap_set_bit (vi->finished_solution,
			    DECL_UID (al));
	}
    }
}
4362
/* Given a pointer variable P, fill in its points-to set, or return
   false if we can't.
   Rather than return false for variables that point to anything, we
   instead find the corresponding SMT, and merge in its aliases.  In
   addition to these aliases, we also set the bits for the SMT's
   themselves and their subsets, as SMT's are still in use by
   non-SSA_NAME's, and pruning may eliminate every one of their
   aliases.  In such a case, if we did not include the right set of
   SMT's in the points-to set of the variable, we'd end up with
   statements that do not conflict but should.  */
4373
bool
find_what_p_points_to (tree p)
{
  tree lookup_p = p;
  varinfo_t vi;

  /* Points-to answers are only valid between compute_points_to_sets
     and delete_points_to_sets.  */
  if (!have_alias_info)
    return false;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (p))
    lookup_p = SSA_NAME_VAR (p);

  if (lookup_vi_for_tree (lookup_p, &vi))
    {
      /* The special variables (ANYTHING, READONLY, ...) carry no
	 per-pointer answer.  */
      if (vi->is_artificial_var)
	return false;

      /* See if this is a field or a structure.  */
      if (vi->size != vi->fullsize)
	{
	  /* Nothing currently asks about structure fields directly,
	     but when they do, we need code here to hand back the
	     points-to set.  */
	  if (!var_can_have_subvars (vi->decl)
	      || get_subvars_for_var (vi->decl) == NULL)
	    return false;
	}
      else
	{
	  struct ptr_info_def *pi = get_ptr_info (p);
	  unsigned int i;
	  bitmap_iterator bi;
	  bool was_pt_anything = false;

	  /* Don't bother materializing a set for pointers that are
	     never dereferenced.  */
	  if (!pi->is_dereferenced)
	    return false;

	  /* This variable may have been collapsed, let's get the real
	     variable.  */
	  vi = get_varinfo (find (vi->id));

	  /* Translate artificial variables into SSA_NAME_PTR_INFO
	     attributes.  */
	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	    {
	      varinfo_t vi = get_varinfo (i);

	      if (vi->is_artificial_var)
		{
		  /* FIXME.  READONLY should be handled better so that
		     flow insensitive aliasing can disregard writable
		     aliases.  */
		  if (vi->id == nothing_id)
		    pi->pt_null = 1;
		  else if (vi->id == anything_id)
		    was_pt_anything = 1;
		  else if (vi->id == readonly_id)
		    was_pt_anything = 1;
		  else if (vi->id == integer_id)
		    was_pt_anything = 1;
		  else if (vi->is_heap_var)
		    pi->pt_global_mem = 1;
		}
	    }

	  /* Share the final set of variables between the SSA_NAME
	     pointer infos for collapsed nodes that are collapsed to
	     non-special variables.  This is because special vars have
	     no real types associated with them, so while we know the
	     pointers are equivalent to them, we need to generate the
	     solution separately since it will include SMT's from the
	     original non-collapsed variable.  */
	  if (!vi->is_special_var && vi->finished_solution)
	    {
	      pi->pt_vars = vi->finished_solution;
	    }
	  else
	    {
	      vi->finished_solution = BITMAP_GGC_ALLOC ();
	      stats.points_to_sets_created++;

	      /* Instead of using pt_anything, we instead merge in the SMT
		 aliases for the underlying SMT.  */
	      if (was_pt_anything)
		{
		  merge_smts_into (p, vi);
		  pi->pt_global_mem = 1;
		}

	      set_uids_in_ptset (vi->decl, vi->finished_solution, vi->solution);
	      pi->pt_vars = vi->finished_solution;
	    }

	  /* Canonicalize an empty points-to set to a NULL pt_vars.  */
	  if (bitmap_empty_p (pi->pt_vars))
	    pi->pt_vars = NULL;

	  return true;
	}
    }

  return false;
}
4481
4482
4483
4484 /* Dump points-to information to OUTFILE. */
4485
4486 void
4487 dump_sa_points_to_info (FILE *outfile)
4488 {
4489 unsigned int i;
4490
4491 fprintf (outfile, "\nPoints-to sets\n\n");
4492
4493 if (dump_flags & TDF_STATS)
4494 {
4495 fprintf (outfile, "Stats:\n");
4496 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4497 fprintf (outfile, "Non-pointer vars: %d\n",
4498 stats.nonpointer_vars);
4499 fprintf (outfile, "Statically unified vars: %d\n",
4500 stats.unified_vars_static);
4501 fprintf (outfile, "Dynamically unified vars: %d\n",
4502 stats.unified_vars_dynamic);
4503 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4504 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4505 fprintf (outfile, "Number of implicit edges: %d\n",
4506 stats.num_implicit_edges);
4507 }
4508
4509 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4510 dump_solution_for_var (outfile, i);
4511 }
4512
4513
4514 /* Debug points-to information to stderr. */
4515
void
debug_sa_points_to_info (void)
{
  /* Convenience entry point for use from the debugger.  */
  dump_sa_points_to_info (stderr);
}
4521
4522
4523 /* Initialize the always-existing constraint variables for NULL
4524 ANYTHING, READONLY, and INTEGER */
4525
4526 static void
4527 init_base_vars (void)
4528 {
4529 struct constraint_expr lhs, rhs;
4530
4531 /* Create the NULL variable, used to represent that a variable points
4532 to NULL. */
4533 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4534 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4535 insert_vi_for_tree (nothing_tree, var_nothing);
4536 var_nothing->is_artificial_var = 1;
4537 var_nothing->offset = 0;
4538 var_nothing->size = ~0;
4539 var_nothing->fullsize = ~0;
4540 var_nothing->is_special_var = 1;
4541 nothing_id = 0;
4542 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4543
4544 /* Create the ANYTHING variable, used to represent that a variable
4545 points to some unknown piece of memory. */
4546 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4547 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
4548 insert_vi_for_tree (anything_tree, var_anything);
4549 var_anything->is_artificial_var = 1;
4550 var_anything->size = ~0;
4551 var_anything->offset = 0;
4552 var_anything->next = NULL;
4553 var_anything->fullsize = ~0;
4554 var_anything->is_special_var = 1;
4555 anything_id = 1;
4556
4557 /* Anything points to anything. This makes deref constraints just
4558 work in the presence of linked list and other p = *p type loops,
4559 by saying that *ANYTHING = ANYTHING. */
4560 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4561 lhs.type = SCALAR;
4562 lhs.var = anything_id;
4563 lhs.offset = 0;
4564 rhs.type = ADDRESSOF;
4565 rhs.var = anything_id;
4566 rhs.offset = 0;
4567
4568 /* This specifically does not use process_constraint because
4569 process_constraint ignores all anything = anything constraints, since all
4570 but this one are redundant. */
4571 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4572
4573 /* Create the READONLY variable, used to represent that a variable
4574 points to readonly memory. */
4575 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4576 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
4577 var_readonly->is_artificial_var = 1;
4578 var_readonly->offset = 0;
4579 var_readonly->size = ~0;
4580 var_readonly->fullsize = ~0;
4581 var_readonly->next = NULL;
4582 var_readonly->is_special_var = 1;
4583 insert_vi_for_tree (readonly_tree, var_readonly);
4584 readonly_id = 2;
4585 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4586
4587 /* readonly memory points to anything, in order to make deref
4588 easier. In reality, it points to anything the particular
4589 readonly variable can point to, but we don't track this
4590 separately. */
4591 lhs.type = SCALAR;
4592 lhs.var = readonly_id;
4593 lhs.offset = 0;
4594 rhs.type = ADDRESSOF;
4595 rhs.var = anything_id;
4596 rhs.offset = 0;
4597
4598 process_constraint (new_constraint (lhs, rhs));
4599
4600 /* Create the INTEGER variable, used to represent that a variable points
4601 to an INTEGER. */
4602 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4603 var_integer = new_var_info (integer_tree, 3, "INTEGER");
4604 insert_vi_for_tree (integer_tree, var_integer);
4605 var_integer->is_artificial_var = 1;
4606 var_integer->size = ~0;
4607 var_integer->fullsize = ~0;
4608 var_integer->offset = 0;
4609 var_integer->next = NULL;
4610 var_integer->is_special_var = 1;
4611 integer_id = 3;
4612 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4613
4614 /* INTEGER = ANYTHING, because we don't know where a dereference of
4615 a random integer will point to. */
4616 lhs.type = SCALAR;
4617 lhs.var = integer_id;
4618 lhs.offset = 0;
4619 rhs.type = ADDRESSOF;
4620 rhs.var = anything_id;
4621 rhs.offset = 0;
4622 process_constraint (new_constraint (lhs, rhs));
4623 }
4624
4625 /* Initialize things necessary to perform PTA */
4626
4627 static void
4628 init_alias_vars (void)
4629 {
4630 bitmap_obstack_initialize (&pta_obstack);
4631 bitmap_obstack_initialize (&oldpta_obstack);
4632 bitmap_obstack_initialize (&predbitmap_obstack);
4633
4634 constraint_pool = create_alloc_pool ("Constraint pool",
4635 sizeof (struct constraint), 30);
4636 variable_info_pool = create_alloc_pool ("Variable info pool",
4637 sizeof (struct variable_info), 30);
4638 constraints = VEC_alloc (constraint_t, heap, 8);
4639 varmap = VEC_alloc (varinfo_t, heap, 8);
4640 vi_for_tree = htab_create (10, tree_vi_hash, tree_vi_eq, free);
4641
4642 memset (&stats, 0, sizeof (stats));
4643
4644 init_base_vars ();
4645 }
4646
4647 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
4648 predecessor edges. */
4649
4650 static void
4651 remove_preds_and_fake_succs (constraint_graph_t graph)
4652 {
4653 unsigned int i;
4654
4655 /* Clear the implicit ref and address nodes from the successor
4656 lists. */
4657 for (i = 0; i < FIRST_REF_NODE; i++)
4658 {
4659 if (graph->succs[i])
4660 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
4661 FIRST_REF_NODE * 2);
4662 }
4663
4664 /* Free the successor list for the non-ref nodes. */
4665 for (i = FIRST_REF_NODE; i < graph->size; i++)
4666 {
4667 if (graph->succs[i])
4668 BITMAP_FREE (graph->succs[i]);
4669 }
4670
4671 /* Now reallocate the size of the successor list as, and blow away
4672 the predecessor bitmaps. */
4673 graph->size = VEC_length (varinfo_t, varmap);
4674 graph->succs = xrealloc (graph->succs, graph->size * sizeof (bitmap));
4675
4676 free (graph->implicit_preds);
4677 graph->implicit_preds = NULL;
4678 free (graph->preds);
4679 graph->preds = NULL;
4680 bitmap_obstack_release (&predbitmap_obstack);
4681 }
4682
4683 /* Create points-to sets for the current function. See the comments
4684 at the start of the file for an algorithmic overview. */
4685
void
compute_points_to_sets (struct alias_info *ai)
{
  struct scc_info *si;
  basic_block bb;

  timevar_push (TV_TREE_PTA);

  /* Set up the solver data structures, the special variables, and
     the heapvar map.  */
  init_alias_vars ();
  init_alias_heapvars ();

  /* Create variable infos (and constraints) for the incoming
     parameters.  */
  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
	{
	  if (is_gimple_reg (PHI_RESULT (phi)))
	    {
	      find_func_aliases (phi);
	      /* Update various related attributes like escaped
		 addresses, pointer dereferences for loads and stores.
		 This is used when creating name tags and alias
		 sets.  */
	      update_alias_info (phi, ai);
	    }
	}

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);

	  find_func_aliases (stmt);

	  /* Update various related attributes like escaped
	     addresses, pointer dereferences for loads and stores.
	     This is used when creating name tags and alias
	     sets.  */
	  update_alias_info (stmt, ai);
	}
    }


  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution:\n");
  /* Build the constraint graph, unify statically-equivalent
     variables, and migrate the complex constraints to the chosen
     representatives.  */
  build_pred_graph ();
  si = perform_var_substitution (graph);
  move_complex_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  /* Pre-compute cycles through ref nodes so the solver can collapse
     them during propagation.  */
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph:\n");

  /* Propagate solutions until a fixed point is reached.  */
  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* From here on find_what_p_points_to may hand out answers.  */
  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}
4767
4768
4769 /* Delete created points-to sets. */
4770
4771 void
4772 delete_points_to_sets (void)
4773 {
4774 varinfo_t v;
4775 int i;
4776
4777 if (dump_file && (dump_flags & TDF_STATS))
4778 fprintf (dump_file, "Points to sets created:%d\n",
4779 stats.points_to_sets_created);
4780
4781 htab_delete (vi_for_tree);
4782 bitmap_obstack_release (&pta_obstack);
4783 VEC_free (constraint_t, heap, constraints);
4784
4785 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4786 VEC_free (constraint_t, heap, graph->complex[i]);
4787
4788 free (graph->rep);
4789 free (graph->succs);
4790 free (graph->indirect_cycles);
4791 free (graph);
4792
4793 VEC_free (varinfo_t, heap, varmap);
4794 free_alloc_pool (variable_info_pool);
4795 free_alloc_pool (constraint_pool);
4796 have_alias_info = false;
4797 }
4798
4799 /* Return true if we should execute IPA PTA. */
4800 static bool
4801 gate_ipa_pta (void)
4802 {
4803 return (flag_unit_at_a_time != 0
4804 && flag_ipa_pta
4805 /* Don't bother doing anything if the program has errors. */
4806 && !(errorcount || sorrycount));
4807 }
4808
4809 /* Execute the driver for IPA PTA. */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct scc_info *si;

  in_ipa_mode = 1;
  init_alias_heapvars ();
  init_alias_vars ();

  /* First pass: create a variable info (one per function, plus one
     per argument/result field) for the call-graph nodes.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* NOTE(review): this also creates infos for un-analyzed nodes
	 (the condition is "!analyzed || master clone") -- confirm
	 that is intentional.  */
      if (!node->analyzed || cgraph_is_master_clone (node))
	{
	  unsigned int varid;

	  varid = create_function_info_for (node->decl,
					    cgraph_node_name (node));
	  /* Externally visible functions may be called with, and
	     return, anything at all.  */
	  if (node->local.externally_visible)
	    {
	      varinfo_t fi = get_varinfo (varid);
	      for (; fi; fi = fi->next)
		make_constraint_from_anything (fi);
	    }
	}
    }
  /* Second pass: generate constraints from the bodies of all
     analyzed master clones.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && cgraph_is_master_clone (node))
	{
	  struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
	  basic_block bb;
	  tree old_func_decl = current_function_decl;
	  if (dump_file)
	    fprintf (dump_file,
		     "Generating constraints for %s\n",
		     cgraph_node_name (node));
	  /* Temporarily switch context to the function being
	     processed.  */
	  push_cfun (cfun);
	  current_function_decl = node->decl;

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      block_stmt_iterator bsi;
	      tree phi;

	      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
		{
		  if (is_gimple_reg (PHI_RESULT (phi)))
		    {
		      find_func_aliases (phi);
		    }
		}

	      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		{
		  tree stmt = bsi_stmt (bsi);
		  find_func_aliases (stmt);
		}
	    }
	  current_function_decl = old_func_decl;
	  pop_cfun ();
	}
      else
	{
	  /* Make point to anything.  */
	}
    }



  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution:\n");

  /* Solve exactly as in compute_points_to_sets: substitute, build
     the successor graph, and iterate to a fixed point.  */
  build_pred_graph ();
  si = perform_var_substitution (graph);
  move_complex_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph:\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* All done; tear everything down again.  */
  in_ipa_mode = 0;
  delete_alias_heapvars ();
  delete_points_to_sets ();
  return 0;
}
4916
/* Pass descriptor for the interprocedural points-to analysis
   pass.  */

struct tree_opt_pass pass_ipa_pta =
{
  "pta",		                /* name */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,		        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
4933
4934 /* Initialize the heapvar for statement mapping. */
4935 void
4936 init_alias_heapvars (void)
4937 {
4938 if (!heapvar_for_stmt)
4939 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
4940 NULL);
4941 }
4942
4943 void
4944 delete_alias_heapvars (void)
4945 {
4946 htab_delete (heapvar_for_stmt);
4947 heapvar_for_stmt = NULL;
4948 }
4949
4950
4951 #include "gt-tree-ssa-structalias.h"