gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53
54 /* The idea behind this analyzer is to generate set constraints from the
55 program, then solve the resulting constraints in order to generate the
56 points-to sets.
57
58 Set constraints are a way of modeling program analysis problems that
59 involve sets. They consist of an inclusion constraint language,
60 describing the variables (each variable is a set) and the operations
61 performed on those variables, and a set of rules that derive facts
62 from these operations. To solve a system of set constraints, you derive
63 all possible facts under the rules, which gives you the correct sets
64 as a consequence.
65
66 See "Efficient Field-sensitive pointer analysis for C" by "David
67 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
68 http://citeseer.ist.psu.edu/pearce04efficient.html
69
70 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
71 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
72 http://citeseer.ist.psu.edu/heintze01ultrafast.html
73
74 There are three types of constraint expressions, DEREF, ADDRESSOF, and
75 SCALAR. Each constraint expression consists of a constraint type,
76 a variable, and an offset.
77
78 SCALAR is a constraint expression type used to represent x, whether
79 it appears on the LHS or the RHS of a statement.
80 DEREF is a constraint expression type used to represent *x, whether
81 it appears on the LHS or the RHS of a statement.
82 ADDRESSOF is a constraint expression used to represent &x, whether
83 it appears on the LHS or the RHS of a statement.
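
   As a small, purely illustrative example (the variable names here are
   hypothetical), the statement sequence

     p = &a;
     q = p;
     r = *q;
     *p = r;

   is modeled by the constraints

     p = &a     (ADDRESSOF of a on the RHS)
     q = p      (SCALAR on both sides)
     r = *q     (DEREF on the RHS)
     *p = r     (DEREF on the LHS)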
84
85 Each pointer variable in the program is assigned an integer id, and
86 each field of a structure variable is assigned an integer id as well.
87
88 Structure variables are linked to their list of fields through a "next
89 field" in each variable that points to the next field in offset
90 order.
91 Each variable for a structure field has
92
93 1. "size", that tells the size in bits of that field.
94 2. "fullsize, that tells the size in bits of the entire structure.
95 3. "offset", that tells the offset in bits from the beginning of the
96 structure to this field.
97
98 Thus,
99 struct f
100 {
101 int a;
102 int b;
103 } foo;
104 int *bar;
105
106 looks like
107
108 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
109 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
110 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
111
112
113 In order to solve the system of set constraints, the following is
114 done:
115
116 1. Each constraint variable x has a solution set associated with it,
117 Sol(x).
118
119 2. Constraints are separated into direct, copy, and complex.
120 Direct constraints are ADDRESSOF constraints that require no extra
121 processing, such as P = &Q
122 Copy constraints are those of the form P = Q.
123 Complex constraints are all the constraints involving dereferences.
124
125 3. All direct constraints of the form P = &Q are processed, such
126 that Q is added to Sol(P)
127
128 4. All complex constraints for a given constraint variable are stored in a
129 linked list attached to that variable's node.
130
131 5. A directed graph is built out of the copy constraints. Each
132 constraint variable is a node in the graph, and an edge from
133 Q to P is added for each copy constraint of the form P = Q
134
135 6. The graph is then walked, and solution sets are
136 propagated along the copy edges, such that an edge from Q to P
137 causes Sol(P) <- Sol(P) union Sol(Q).
138
139 7. As we visit each node, all complex constraints associated with
140 that node are processed by adding appropriate copy edges to the graph, or the
141 appropriate variables to the solution set.
142
143 8. The process of walking the graph is iterated until no solution
144 sets change.
145
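   As a small, hypothetical illustration of these steps, take the
   constraints p = &a, q = p and r = *q.  Step 3 puts a into Sol(p).
   Step 5 adds a copy edge from p to q, so step 6 propagates a into
   Sol(q).  When step 7 processes the complex constraint r = *q, it
   adds a copy edge from a (a member of Sol(q)) to r, so Sol(a) flows
   into Sol(r) on later iterations.  Step 8 repeats this until no
   solution set changes.
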
146 Prior to walking the graph in steps 6 and 7, we perform static
147 cycle elimination on the constraint graph, as well
148 as off-line variable substitution.
149
150 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
151 on and turned into anything), but isn't. You can just see what offset
152 inside the pointed-to struct it's going to access.
153
154 TODO: Constant bounded arrays can be handled as if they were structs of the
155 same number of elements.
156
157 TODO: Modeling heap and incoming pointers becomes much better if we
158 add fields to them as we discover them, which we could do.
159
160 TODO: We could handle unions, but to be honest, it's probably not
161 worth the pain or slowdown. */
162
163 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
164 htab_t heapvar_for_stmt;
165 static bool use_field_sensitive = true;
166 static int in_ipa_mode = 0;
167 static bitmap_obstack predbitmap_obstack;
168 static bitmap_obstack ptabitmap_obstack;
169 static bitmap_obstack iteration_obstack;
170
171 static unsigned int create_variable_info_for (tree, const char *);
172 static void build_constraint_graph (void);
173
174 DEF_VEC_P(constraint_t);
175 DEF_VEC_ALLOC_P(constraint_t,heap);
176
177 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
178 if (a) \
179 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
180
181 static struct constraint_stats
182 {
183 unsigned int total_vars;
184 unsigned int collapsed_vars;
185 unsigned int unified_vars_static;
186 unsigned int unified_vars_dynamic;
187 unsigned int iterations;
188 unsigned int num_edges;
189 } stats;
190
191 struct variable_info
192 {
193 /* ID of this variable */
194 unsigned int id;
195
196 /* Name of this variable */
197 const char *name;
198
199 /* Tree that this variable is associated with. */
200 tree decl;
201
202 /* Offset of this variable, in bits, from the base variable */
203 unsigned HOST_WIDE_INT offset;
204
205 /* Size of the variable, in bits. */
206 unsigned HOST_WIDE_INT size;
207
208 /* Full size of the base variable, in bits. */
209 unsigned HOST_WIDE_INT fullsize;
210
211 /* A link to the variable for the next field in this structure. */
212 struct variable_info *next;
213
214 /* Node in the graph that represents the constraints and points-to
215 solution for the variable. */
216 unsigned int node;
217
218 /* True if the address of this variable is taken. Needed for
219 variable substitution. */
220 unsigned int address_taken:1;
221
222 /* True if this variable is the target of a dereference. Needed for
223 variable substitution. */
224 unsigned int indirect_target:1;
225
226 /* True if this is a variable created by the constraint analysis, such as
227 heap variables and constraints we had to break up. */
228 unsigned int is_artificial_var:1;
229
230 /* True if this is a special variable whose solution set should not be
231 changed. */
232 unsigned int is_special_var:1;
233
234 /* True for variables whose size is not known or variable. */
235 unsigned int is_unknown_size_var:1;
236
237 /* True for variables that have unions somewhere in them. */
238 unsigned int has_union:1;
239
240 /* True if this is a heap variable. */
241 unsigned int is_heap_var:1;
242
243 /* Points-to set for this variable. */
244 bitmap solution;
245
246 /* Variable ids represented by this node. */
247 bitmap variables;
248
249 /* Vector of complex constraints for this node. Complex
250 constraints are those involving dereferences. */
251 VEC(constraint_t,heap) *complex;
252
253 /* Variable id this was collapsed to due to type unsafety.
254 This should be unused completely after build_constraint_graph, or
255 something is broken. */
256 struct variable_info *collapsed_to;
257 };
258 typedef struct variable_info *varinfo_t;
259
260 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
261
262 /* Pool of variable info structures. */
263 static alloc_pool variable_info_pool;
264
265 DEF_VEC_P(varinfo_t);
266
267 DEF_VEC_ALLOC_P(varinfo_t, heap);
268
269 /* Table of variable info structures for constraint variables. Indexed directly
270 by variable info id. */
271 static VEC(varinfo_t,heap) *varmap;
272
273 /* Return the varmap element N */
274
275 static inline varinfo_t
276 get_varinfo (unsigned int n)
277 {
278 return VEC_index(varinfo_t, varmap, n);
279 }
280
281 /* Return the varmap element N, following the collapsed_to link. */
282
283 static inline varinfo_t
284 get_varinfo_fc (unsigned int n)
285 {
286 varinfo_t v = VEC_index(varinfo_t, varmap, n);
287
288 if (v->collapsed_to)
289 return v->collapsed_to;
290 return v;
291 }
292
293 /* Variable that represents the unknown pointer. */
294 static varinfo_t var_anything;
295 static tree anything_tree;
296 static unsigned int anything_id;
297
298 /* Variable that represents the NULL pointer. */
299 static varinfo_t var_nothing;
300 static tree nothing_tree;
301 static unsigned int nothing_id;
302
303 /* Variable that represents read only memory. */
304 static varinfo_t var_readonly;
305 static tree readonly_tree;
306 static unsigned int readonly_id;
307
308 /* Variable that represents integers. This is used for when people do things
309 like &0->a.b. */
310 static varinfo_t var_integer;
311 static tree integer_tree;
312 static unsigned int integer_id;
313
314
315 /* Lookup a heap var for FROM, and return it if we find one. */
316
317 static tree
318 heapvar_lookup (tree from)
319 {
320 struct tree_map *h, in;
321 in.from = from;
322
323 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
324 if (h)
325 return h->to;
326 return NULL_TREE;
327 }
328
329 /* Insert a mapping FROM->TO in the heap var for statement
330 hashtable. */
331
332 static void
333 heapvar_insert (tree from, tree to)
334 {
335 struct tree_map *h;
336 void **loc;
337
338 h = ggc_alloc (sizeof (struct tree_map));
339 h->hash = htab_hash_pointer (from);
340 h->from = from;
341 h->to = to;
342 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
343 *(struct tree_map **) loc = h;
344 }
345
346 /* Return a new variable info structure for a variable named NAME,
347 using constraint graph node NODE. */
348
349 static varinfo_t
350 new_var_info (tree t, unsigned int id, const char *name, unsigned int node)
351 {
352 varinfo_t ret = pool_alloc (variable_info_pool);
353
354 ret->id = id;
355 ret->name = name;
356 ret->decl = t;
357 ret->node = node;
358 ret->address_taken = false;
359 ret->indirect_target = false;
360 ret->is_artificial_var = false;
361 ret->is_heap_var = false;
362 ret->is_special_var = false;
363 ret->is_unknown_size_var = false;
364 ret->has_union = false;
365 ret->solution = BITMAP_ALLOC (&ptabitmap_obstack);
366 ret->variables = BITMAP_ALLOC (&ptabitmap_obstack);
367 ret->complex = NULL;
368 ret->next = NULL;
369 ret->collapsed_to = NULL;
370 return ret;
371 }
372
373 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
374
375 /* An expression that appears in a constraint. */
376
377 struct constraint_expr
378 {
379 /* Constraint type. */
380 constraint_expr_type type;
381
382 /* Variable we are referring to in the constraint. */
383 unsigned int var;
384
385 /* Offset, in bits, of this constraint from the beginning of
386 variables it ends up referring to.
387
388 IOW, in a deref constraint, we would deref, get the result set,
389 then add OFFSET to each member. */
390 unsigned HOST_WIDE_INT offset;
391 };
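
/* A purely illustrative reading of the offset field: if Sol(y)
   contains foo.a (offset 0) and we process a DEREF of y with offset
   32, the dereference first yields {foo.a}; adding the offset then
   selects the field at offset 0 + 32, i.e. foo.b in the example
   layout given at the top of this file.  */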
392
393 typedef struct constraint_expr ce_s;
394 DEF_VEC_O(ce_s);
395 DEF_VEC_ALLOC_O(ce_s, heap);
396 static void get_constraint_for (tree, VEC(ce_s, heap) **, bool *);
397 static void do_deref (VEC (ce_s, heap) **);
398
399 /* Our set constraints are made up of two constraint expressions, one
400 LHS, and one RHS.
401
402 As described in the introduction, our set constraints each represent an
403 operation between set valued variables.
404 */
405 struct constraint
406 {
407 struct constraint_expr lhs;
408 struct constraint_expr rhs;
409 };
410
411 /* List of constraints that we use to build the constraint graph from. */
412
413 static VEC(constraint_t,heap) *constraints;
414 static alloc_pool constraint_pool;
415
416 /* An edge in the weighted constraint graph. The edges are weighted,
417 with a bit set in weights meaning there is an edge with that
418 weight.
419 We don't keep the src in the edge, because we always know what it
420 is. */
421
422 struct constraint_edge
423 {
424 unsigned int dest;
425 bitmap weights;
426 };
427
428 typedef struct constraint_edge *constraint_edge_t;
429 static alloc_pool constraint_edge_pool;
430
431 /* Return a new constraint edge from SRC to DEST. */
432
433 static constraint_edge_t
434 new_constraint_edge (unsigned int dest)
435 {
436 constraint_edge_t ret = pool_alloc (constraint_edge_pool);
437 ret->dest = dest;
438 ret->weights = NULL;
439 return ret;
440 }
441
442 DEF_VEC_P(constraint_edge_t);
443 DEF_VEC_ALLOC_P(constraint_edge_t,heap);
444
445
446 /* The constraint graph is represented internally in two different
447 ways. The overwhelming majority of edges in the constraint graph
448 are zero weight edges, and thus, using a vector of constraint_edge_t
449 is a waste of time and memory, since they have no weights. We
450 simply use a bitmap to store the preds and succs for each node.
451 The weighted edges are stored as a set of adjacency vectors, one
452 per variable. succs[x] is the vector of successors for variable x,
453 and preds[x] is the vector of predecessors for variable x. IOW,
454 all edges are "forward" edges, which is not like our CFG. So
455 remember that preds[x]->src == x, and succs[x]->src == x. */
456
457 struct constraint_graph
458 {
459 bitmap *zero_weight_succs;
460 bitmap *zero_weight_preds;
461 VEC(constraint_edge_t,heap) **succs;
462 VEC(constraint_edge_t,heap) **preds;
463 };
464
465 typedef struct constraint_graph *constraint_graph_t;
466
467 static constraint_graph_t graph;
468
469 /* Create a new constraint consisting of LHS and RHS expressions. */
470
471 static constraint_t
472 new_constraint (const struct constraint_expr lhs,
473 const struct constraint_expr rhs)
474 {
475 constraint_t ret = pool_alloc (constraint_pool);
476 ret->lhs = lhs;
477 ret->rhs = rhs;
478 return ret;
479 }
480
481 /* Print out constraint C to FILE. */
482
483 void
484 dump_constraint (FILE *file, constraint_t c)
485 {
486 if (c->lhs.type == ADDRESSOF)
487 fprintf (file, "&");
488 else if (c->lhs.type == DEREF)
489 fprintf (file, "*");
490 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
491 if (c->lhs.offset != 0)
492 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
493 fprintf (file, " = ");
494 if (c->rhs.type == ADDRESSOF)
495 fprintf (file, "&");
496 else if (c->rhs.type == DEREF)
497 fprintf (file, "*");
498 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
499 if (c->rhs.offset != 0)
500 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
501 fprintf (file, "\n");
502 }
503
504 /* Print out constraint C to stderr. */
505
506 void
507 debug_constraint (constraint_t c)
508 {
509 dump_constraint (stderr, c);
510 }
511
512 /* Print out all constraints to FILE */
513
514 void
515 dump_constraints (FILE *file)
516 {
517 int i;
518 constraint_t c;
519 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
520 dump_constraint (file, c);
521 }
522
523 /* Print out all constraints to stderr. */
524
525 void
526 debug_constraints (void)
527 {
528 dump_constraints (stderr);
529 }
530
531 /* SOLVER FUNCTIONS
532
533 The solver is a simple worklist solver, that works on the following
534 algorithm:
535
536 sbitmap changed_nodes = all ones;
537 changed_count = number of nodes;
538 For each node that was already collapsed:
539 changed_count--;
540
541 while (changed_count > 0)
542 {
543 compute topological ordering for constraint graph
544
545 find and collapse cycles in the constraint graph (updating
546 changed if necessary)
547
548 for each node (n) in the graph in topological order:
549 changed_count--;
550
551 Process each complex constraint associated with the node,
552 updating changed if necessary.
553
554 For each outgoing edge from n, propagate the solution from n to
555 the destination of the edge, updating changed as necessary.
556
557 } */
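
/* In the code below, compute_topo_order builds the topological
   ordering, find_and_collapse_graph_cycles detects and collapses the
   cycles, do_complex_constraint dispatches the complex constraints,
   and solve_graph drives the iteration.  */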
558
559 /* Return true if two constraint expressions A and B are equal. */
560
561 static bool
562 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
563 {
564 return a.type == b.type && a.var == b.var && a.offset == b.offset;
565 }
566
567 /* Return true if constraint expression A is less than constraint expression
568 B. This is just arbitrary, but consistent, in order to give them an
569 ordering. */
570
571 static bool
572 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
573 {
574 if (a.type == b.type)
575 {
576 if (a.var == b.var)
577 return a.offset < b.offset;
578 else
579 return a.var < b.var;
580 }
581 else
582 return a.type < b.type;
583 }
584
585 /* Return true if constraint A is less than constraint B. This is just
586 arbitrary, but consistent, in order to give them an ordering. */
587
588 static bool
589 constraint_less (const constraint_t a, const constraint_t b)
590 {
591 if (constraint_expr_less (a->lhs, b->lhs))
592 return true;
593 else if (constraint_expr_less (b->lhs, a->lhs))
594 return false;
595 else
596 return constraint_expr_less (a->rhs, b->rhs);
597 }
598
599 /* Return true if two constraints A and B are equal. */
600
601 static bool
602 constraint_equal (struct constraint a, struct constraint b)
603 {
604 return constraint_expr_equal (a.lhs, b.lhs)
605 && constraint_expr_equal (a.rhs, b.rhs);
606 }
607
608
609 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
610
611 static constraint_t
612 constraint_vec_find (VEC(constraint_t,heap) *vec,
613 struct constraint lookfor)
614 {
615 unsigned int place;
616 constraint_t found;
617
618 if (vec == NULL)
619 return NULL;
620
621 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
622 if (place >= VEC_length (constraint_t, vec))
623 return NULL;
624 found = VEC_index (constraint_t, vec, place);
625 if (!constraint_equal (*found, lookfor))
626 return NULL;
627 return found;
628 }
629
630 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
631
632 static void
633 constraint_set_union (VEC(constraint_t,heap) **to,
634 VEC(constraint_t,heap) **from)
635 {
636 int i;
637 constraint_t c;
638
639 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
640 {
641 if (constraint_vec_find (*to, *c) == NULL)
642 {
643 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
644 constraint_less);
645 VEC_safe_insert (constraint_t, heap, *to, place, c);
646 }
647 }
648 }
649
650 /* Take a solution set SET, add OFFSET to each member of the set, and
651 overwrite SET with the result when done. */
652
653 static void
654 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
655 {
656 bitmap result = BITMAP_ALLOC (&iteration_obstack);
657 unsigned int i;
658 bitmap_iterator bi;
659
660 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
661 {
662 /* If this is a properly sized variable, only add offset if it's
663 less than end. Otherwise, it is globbed to a single
664 variable. */
665
666 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
667 {
668 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
669 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
670 if (!v)
671 continue;
672 bitmap_set_bit (result, v->id);
673 }
674 else if (get_varinfo (i)->is_artificial_var
675 || get_varinfo (i)->has_union
676 || get_varinfo (i)->is_unknown_size_var)
677 {
678 bitmap_set_bit (result, i);
679 }
680 }
681
682 bitmap_copy (set, result);
683 BITMAP_FREE (result);
684 }
685
686 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
687 process. */
688
689 static bool
690 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
691 {
692 if (inc == 0)
693 return bitmap_ior_into (to, from);
694 else
695 {
696 bitmap tmp;
697 bool res;
698
699 tmp = BITMAP_ALLOC (&iteration_obstack);
700 bitmap_copy (tmp, from);
701 solution_set_add (tmp, inc);
702 res = bitmap_ior_into (to, tmp);
703 BITMAP_FREE (tmp);
704 return res;
705 }
706 }
707
708 /* Insert constraint C into the list of complex constraints for VAR. */
709
710 static void
711 insert_into_complex (unsigned int var, constraint_t c)
712 {
713 varinfo_t vi = get_varinfo (var);
714 unsigned int place = VEC_lower_bound (constraint_t, vi->complex, c,
715 constraint_less);
716 VEC_safe_insert (constraint_t, heap, vi->complex, place, c);
717 }
718
719
720 /* Compare two constraint edges A and B, return true if they are equal. */
721
722 static bool
723 constraint_edge_equal (struct constraint_edge a, struct constraint_edge b)
724 {
725 return a.dest == b.dest;
726 }
727
728 /* Compare two constraint edges, return true if A is less than B */
729
730 static bool
731 constraint_edge_less (const constraint_edge_t a, const constraint_edge_t b)
732 {
733 if (a->dest < b->dest)
734 return true;
735 return false;
736 }
737
738 /* Find the constraint edge that matches LOOKFOR, in VEC.
739 Return the edge, if found, NULL otherwise. */
740
741 static constraint_edge_t
742 constraint_edge_vec_find (VEC(constraint_edge_t,heap) *vec,
743 struct constraint_edge lookfor)
744 {
745 unsigned int place;
746 constraint_edge_t edge = NULL;
747
748 place = VEC_lower_bound (constraint_edge_t, vec, &lookfor,
749 constraint_edge_less);
750 if (place >= VEC_length (constraint_edge_t, vec))
751 return NULL;
752 edge = VEC_index (constraint_edge_t, vec, place);
753 if (!constraint_edge_equal (*edge, lookfor))
754 return NULL;
755 return edge;
756 }
757
758 /* Condense two variable nodes into a single variable node, by moving
759 all associated info from SRC to TO. */
760
761 static void
762 condense_varmap_nodes (unsigned int to, unsigned int src)
763 {
764 varinfo_t tovi = get_varinfo (to);
765 varinfo_t srcvi = get_varinfo (src);
766 unsigned int i;
767 constraint_t c;
768 bitmap_iterator bi;
769
770 /* the src node, and all its variables, are now the to node. */
771 srcvi->node = to;
772 EXECUTE_IF_SET_IN_BITMAP (srcvi->variables, 0, i, bi)
773 get_varinfo (i)->node = to;
774
775 /* Merge the src node variables and the to node variables. */
776 bitmap_set_bit (tovi->variables, src);
777 bitmap_ior_into (tovi->variables, srcvi->variables);
778 bitmap_clear (srcvi->variables);
779
780 /* Move all complex constraints from src node into to node */
781 for (i = 0; VEC_iterate (constraint_t, srcvi->complex, i, c); i++)
782 {
783 /* In complex constraints for node src, we may have either
784 a = *src, or *src = a. */
785
786 if (c->rhs.type == DEREF)
787 c->rhs.var = to;
788 else
789 c->lhs.var = to;
790 }
791 constraint_set_union (&tovi->complex, &srcvi->complex);
792 VEC_free (constraint_t, heap, srcvi->complex);
793 srcvi->complex = NULL;
794 }
795
796 /* Erase an edge from SRC to SRC from GRAPH. This routine only
797 handles self-edges (e.g. an edge from a to a). */
798
799 static void
800 erase_graph_self_edge (constraint_graph_t graph, unsigned int src)
801 {
802 VEC(constraint_edge_t,heap) *predvec = graph->preds[src];
803 VEC(constraint_edge_t,heap) *succvec = graph->succs[src];
804 struct constraint_edge edge;
805 unsigned int place;
806
807 edge.dest = src;
808
809 /* Remove from the successors. */
810 place = VEC_lower_bound (constraint_edge_t, succvec, &edge,
811 constraint_edge_less);
812
813 /* Make sure we found the edge. */
814 #ifdef ENABLE_CHECKING
815 {
816 constraint_edge_t tmp = VEC_index (constraint_edge_t, succvec, place);
817 gcc_assert (constraint_edge_equal (*tmp, edge));
818 }
819 #endif
820 VEC_ordered_remove (constraint_edge_t, succvec, place);
821
822 /* Remove from the predecessors. */
823 place = VEC_lower_bound (constraint_edge_t, predvec, &edge,
824 constraint_edge_less);
825
826 /* Make sure we found the edge. */
827 #ifdef ENABLE_CHECKING
828 {
829 constraint_edge_t tmp = VEC_index (constraint_edge_t, predvec, place);
830 gcc_assert (constraint_edge_equal (*tmp, edge));
831 }
832 #endif
833 VEC_ordered_remove (constraint_edge_t, predvec, place);
834 }
835
836 /* Remove edges involving NODE from GRAPH. */
837
838 static void
839 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
840 {
841 VEC(constraint_edge_t,heap) *succvec = graph->succs[node];
842 VEC(constraint_edge_t,heap) *predvec = graph->preds[node];
843 bitmap_iterator bi;
844 unsigned int j;
845 constraint_edge_t c = NULL;
846 int i;
847
848 /* Walk the successors, erase the associated preds. */
849
850 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[node], 0, j, bi)
851 if (j != node)
852 bitmap_clear_bit (graph->zero_weight_preds[j], node);
853
854 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
855 if (c->dest != node)
856 {
857 unsigned int place;
858 struct constraint_edge lookfor;
859 constraint_edge_t result;
860
861 lookfor.dest = node;
862 place = VEC_lower_bound (constraint_edge_t, graph->preds[c->dest],
863 &lookfor, constraint_edge_less);
864 result = VEC_ordered_remove (constraint_edge_t,
865 graph->preds[c->dest], place);
866 pool_free (constraint_edge_pool, result);
867 }
868
869 /* Walk the preds, erase the associated succs. */
870
871 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_preds[node], 0, j, bi)
872 if (j != node)
873 bitmap_clear_bit (graph->zero_weight_succs[j], node);
874
875 for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
876 if (c->dest != node)
877 {
878 unsigned int place;
879 struct constraint_edge lookfor;
880 constraint_edge_t result;
881
882 lookfor.dest = node;
883 place = VEC_lower_bound (constraint_edge_t, graph->succs[c->dest],
884 &lookfor, constraint_edge_less);
885 result = VEC_ordered_remove (constraint_edge_t,
886 graph->succs[c->dest], place);
887 pool_free (constraint_edge_pool, result);
888
889 }
890
891 if (graph->zero_weight_preds[node])
892 {
893 BITMAP_FREE (graph->zero_weight_preds[node]);
894 graph->zero_weight_preds[node] = NULL;
895 }
896
897 if (graph->zero_weight_succs[node])
898 {
899 BITMAP_FREE (graph->zero_weight_succs[node]);
900 graph->zero_weight_succs[node] = NULL;
901 }
902
903 VEC_free (constraint_edge_t, heap, graph->preds[node]);
904 VEC_free (constraint_edge_t, heap, graph->succs[node]);
905 graph->preds[node] = NULL;
906 graph->succs[node] = NULL;
907 }
908
909 static bool edge_added = false;
910
911 /* Add edge (src, dest) to the graph. */
912
913 static bool
914 add_graph_edge (constraint_graph_t graph, unsigned int src, unsigned int dest)
915 {
916 unsigned int place;
917 VEC(constraint_edge_t,heap) *vec;
918 struct constraint_edge newe;
919 newe.dest = dest;
920
921 vec = graph->preds[src];
922 place = VEC_lower_bound (constraint_edge_t, vec, &newe,
923 constraint_edge_less);
924 if (place == VEC_length (constraint_edge_t, vec)
925 || VEC_index (constraint_edge_t, vec, place)->dest != dest)
926 {
927 constraint_edge_t edge = new_constraint_edge (dest);
928
929 VEC_safe_insert (constraint_edge_t, heap, graph->preds[src],
930 place, edge);
931 edge = new_constraint_edge (src);
932
933 place = VEC_lower_bound (constraint_edge_t, graph->succs[dest],
934 edge, constraint_edge_less);
935 VEC_safe_insert (constraint_edge_t, heap, graph->succs[dest],
936 place, edge);
937 edge_added = true;
938 stats.num_edges++;
939 return true;
940 }
941 else
942 return false;
943 }
944
945
946 /* Return the bitmap representing the weights of edge (SRC, DEST). */
947
948 static bitmap *
949 get_graph_weights (constraint_graph_t graph, unsigned int src,
950 unsigned int dest)
951 {
952 constraint_edge_t edge;
953 VEC(constraint_edge_t,heap) *vec;
954 struct constraint_edge lookfor;
955
956 lookfor.dest = dest;
957
958 vec = graph->preds[src];
959 edge = constraint_edge_vec_find (vec, lookfor);
960 gcc_assert (edge != NULL);
961 return &edge->weights;
962 }
963
964 /* Allocate graph weight bitmap for the edges associated with SRC and
965 DEST in GRAPH. Both the pred and the succ edges share a single
966 bitmap, so we need to set both edges to that bitmap. */
967
968 static bitmap
969 allocate_graph_weights (constraint_graph_t graph, unsigned int src,
970 unsigned int dest)
971 {
972 bitmap result;
973 constraint_edge_t edge;
974 VEC(constraint_edge_t,heap) *vec;
975 struct constraint_edge lookfor;
976
977 result = BITMAP_ALLOC (&ptabitmap_obstack);
978
979 /* Set the pred weight. */
980 lookfor.dest = dest;
981 vec = graph->preds[src];
982 edge = constraint_edge_vec_find (vec, lookfor);
983 gcc_assert (edge != NULL);
984 edge->weights = result;
985
986 /* Set the succ weight. */
987 lookfor.dest = src;
988 vec = graph->succs[dest];
989 edge = constraint_edge_vec_find (vec, lookfor);
990 gcc_assert (edge != NULL);
991 edge->weights = result;
992
993 return result;
994 }
995
996
997 /* Merge GRAPH nodes FROM and TO into node TO. */
998
999 static void
1000 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1001 unsigned int from)
1002 {
1003 VEC(constraint_edge_t,heap) *succvec = graph->succs[from];
1004 VEC(constraint_edge_t,heap) *predvec = graph->preds[from];
1005 int i;
1006 constraint_edge_t c;
1007 unsigned int j;
1008 bitmap_iterator bi;
1009
1010 /* Merge all the zero weighted predecessor edges. */
1011 if (graph->zero_weight_preds[from])
1012 {
1013 if (!graph->zero_weight_preds[to])
1014 graph->zero_weight_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1015
1016 EXECUTE_IF_SET_IN_BITMAP (graph->zero_weight_preds[from], 0, j, bi)
1017 {
1018 if (j != to)
1019 {
1020 bitmap_clear_bit (graph->zero_weight_succs[j], from);
1021 bitmap_set_bit (graph->zero_weight_succs[j], to);
1022 }
1023 }
1024 bitmap_ior_into (graph->zero_weight_preds[to],
1025 graph->zero_weight_preds[from]);
1026 }
1027
1028 /* Merge all the zero weighted successor edges. */
1029 if (graph->zero_weight_succs[from])
1030 {
1031 if (!graph->zero_weight_succs[to])
1032 graph->zero_weight_succs[to] = BITMAP_ALLOC (&ptabitmap_obstack);
1033 EXECUTE_IF_SET_IN_BITMAP (graph->zero_weight_succs[from], 0, j, bi)
1034 {
1035 bitmap_clear_bit (graph->zero_weight_preds[j], from);
1036 bitmap_set_bit (graph->zero_weight_preds[j], to);
1037 }
1038 bitmap_ior_into (graph->zero_weight_succs[to],
1039 graph->zero_weight_succs[from]);
1040 }
1041
1042 /* Merge all the non-zero weighted predecessor edges. */
1043 for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
1044 {
1045 unsigned int d = c->dest;
1046 bitmap temp;
1047 bitmap *weights;
1048
1049 if (c->dest == from)
1050 d = to;
1051
1052 add_graph_edge (graph, to, d);
1053
1054 temp = *(get_graph_weights (graph, from, c->dest));
1055 if (temp)
1056 {
1057 weights = get_graph_weights (graph, to, d);
1058 if (!*weights)
1059 *weights = allocate_graph_weights (graph, to, d);
1060
1061 bitmap_ior_into (*weights, temp);
1062 }
1063
1064 }
1065
1066 /* Merge all the non-zero weighted successor edges. */
1067 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
1068 {
1069 unsigned int d = c->dest;
1070 bitmap temp;
1071 bitmap *weights;
1072
1073 if (c->dest == from)
1074 d = to;
1075
1076 add_graph_edge (graph, d, to);
1077
1078 temp = *(get_graph_weights (graph, c->dest, from));
1079 if (temp)
1080 {
1081 weights = get_graph_weights (graph, d, to);
1082 if (!*weights)
1083 *weights = allocate_graph_weights (graph, d, to);
1084 bitmap_ior_into (*weights, temp);
1085 }
1086 }
1087 clear_edges_for_node (graph, from);
1088 }
1089
1090 /* Add a graph edge to GRAPH, going from TO to FROM, with WEIGHT, if
1091 it doesn't exist in the graph already.
1092 Return false if the edge already existed, true otherwise. */
1093
1094 static bool
1095 int_add_graph_edge (constraint_graph_t graph, unsigned int to,
1096 unsigned int from, unsigned HOST_WIDE_INT weight)
1097 {
1098 if (to == from && weight == 0)
1099 {
1100 return false;
1101 }
1102 else
1103 {
1104 bool r = false;
1105
1106 if (weight == 0)
1107 {
1108 if (!graph->zero_weight_preds[to])
1109 graph->zero_weight_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1110 if (!graph->zero_weight_succs[from])
1111 graph->zero_weight_succs[from] = BITMAP_ALLOC (&ptabitmap_obstack);
1112 if (!bitmap_bit_p (graph->zero_weight_succs[from], to))
1113 {
1114 edge_added = true;
1115 r = true;
1116 stats.num_edges++;
1117 bitmap_set_bit (graph->zero_weight_preds[to], from);
1118 bitmap_set_bit (graph->zero_weight_succs[from], to);
1119 }
1120 }
1121 else
1122 {
1123 bitmap *weights;
1124
1125 r = add_graph_edge (graph, to, from);
1126 weights = get_graph_weights (graph, to, from);
1127
1128 if (!*weights)
1129 {
1130 r = true;
1131 *weights = allocate_graph_weights (graph, to, from);
1132 bitmap_set_bit (*weights, weight);
1133 }
1134 else
1135 {
1136 r |= !bitmap_bit_p (*weights, weight);
1137 bitmap_set_bit (*weights, weight);
1138 }
1139 }
1140
1141 return r;
1142 }
1143 }
1144
1145
1146 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1147
1148 static bool
1149 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1150 unsigned int dest)
1151 {
1152 struct constraint_edge lookfor;
1153 lookfor.dest = src;
1154
1155 return (graph->zero_weight_succs[dest]
1156 && bitmap_bit_p (graph->zero_weight_succs[dest], src))
1157 || constraint_edge_vec_find (graph->succs[dest], lookfor) != NULL;
1158 }
1159
1160 /* Return true if {DEST, SRC} is an existing weighted graph edge (IE has
1161 a weight other than 0) in GRAPH. */
1162 static bool
1163 valid_weighted_graph_edge (constraint_graph_t graph, unsigned int src,
1164 unsigned int dest)
1165 {
1166 struct constraint_edge lookfor;
1167 lookfor.dest = src;
1168
1169 return graph->preds[src]
1170 && constraint_edge_vec_find (graph->succs[dest], lookfor) != NULL;
1171 }
1172
1173
1174 /* Build the constraint graph. */
1175
1176 static void
1177 build_constraint_graph (void)
1178 {
1179 int i = 0;
1180 constraint_t c;
1181
1182 graph = xmalloc (sizeof (struct constraint_graph));
1183 graph->succs = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1184 sizeof (*graph->succs));
1185 graph->preds = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1186 sizeof (*graph->preds));
1187 graph->zero_weight_succs = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1188 sizeof (*graph->zero_weight_succs));
1189 graph->zero_weight_preds = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1190 sizeof (*graph->zero_weight_preds));
1191
1192 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1193 {
1194 struct constraint_expr lhs = c->lhs;
1195 struct constraint_expr rhs = c->rhs;
1196 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1197 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1198
1199 if (lhs.type == DEREF)
1200 {
1201 /* *x = y or *x = &y (complex) */
1202 if (rhs.type == ADDRESSOF || rhsvar > anything_id)
1203 insert_into_complex (lhsvar, c);
1204 }
1205 else if (rhs.type == DEREF)
1206 {
1207 /* x = *y (complex), where x is not a special var */
1208 if (!(get_varinfo (lhsvar)->is_special_var))
1209 insert_into_complex (rhsvar, c);
1210 }
1211 else if (rhs.type == ADDRESSOF)
1212 {
1213 /* x = &y */
1214 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1215 }
1216 else if (lhsvar > anything_id)
1217 {
1218 /* Ignore 0 weighted self edges, as they can't possibly contribute
1219 anything */
1220 if (lhsvar != rhsvar || rhs.offset != 0 || lhs.offset != 0)
1221 {
1222 /* x = y (simple) */
1223 int_add_graph_edge (graph, lhs.var, rhs.var, rhs.offset);
1224 }
1225
1226 }
1227 }
1228 }
1229
1230
1231 /* Changed variables on the last iteration. */
1232 static unsigned int changed_count;
1233 static sbitmap changed;
1234
1235 DEF_VEC_I(unsigned);
1236 DEF_VEC_ALLOC_I(unsigned,heap);
1237
1238
1239 /* Strongly Connected Component visitation info. */
1240
1241 struct scc_info
1242 {
1243 sbitmap visited;
1244 sbitmap in_component;
1245 int current_index;
1246 unsigned int *visited_index;
1247 VEC(unsigned,heap) *scc_stack;
1248 VEC(unsigned,heap) *unification_queue;
1249 };
1250
1251
1252 /* Recursive routine to find strongly connected components in GRAPH.
1253 SI is the SCC info to store the information in, and N is the id of the current
1254 graph node we are processing.
1255
1256 This is Tarjan's strongly connected component finding algorithm, as
1257 modified by Nuutila to keep only non-root nodes on the stack.
1258 The algorithm can be found in "On finding the strongly connected
1259 components in a directed graph" by Esko Nuutila and Eljas
1260 Soisalon-Soininen, in Information Processing Letters volume 49,
1261 number 1, pages 9-14. */
1262
1263 static void
1264 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1265 {
1266 unsigned int i;
1267 bitmap_iterator bi;
1268
1269 gcc_assert (get_varinfo (n)->node == n);
1270 SET_BIT (si->visited, n);
1271 RESET_BIT (si->in_component, n);
1272 si->visited_index[n] = si->current_index ++;
1273
1274 /* Visit all the successors. */
1275 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[n], 0, i, bi)
1276 {
1277 unsigned int w = i;
1278 if (!TEST_BIT (si->visited, w))
1279 scc_visit (graph, si, w);
1280 if (!TEST_BIT (si->in_component, w))
1281 {
1282 unsigned int t = get_varinfo (w)->node;
1283 unsigned int nnode = get_varinfo (n)->node;
1284 if (si->visited_index[t] < si->visited_index[nnode])
1285 get_varinfo (n)->node = t;
1286 }
1287 }
1288
1289 /* See if any components have been identified. */
1290 if (get_varinfo (n)->node == n)
1291 {
1292 unsigned int t = si->visited_index[n];
1293 SET_BIT (si->in_component, n);
1294 while (VEC_length (unsigned, si->scc_stack) != 0
1295 && t < si->visited_index[VEC_last (unsigned, si->scc_stack)])
1296 {
1297 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1298 get_varinfo (w)->node = n;
1299 SET_BIT (si->in_component, w);
1300 /* Mark this node for collapsing. */
1301 VEC_safe_push (unsigned, heap, si->unification_queue, w);
1302 }
1303 }
1304 else
1305 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1306 }
1307
1308
1309 /* Collapse two variables into one variable. */
1310
1311 static void
1312 collapse_nodes (constraint_graph_t graph, unsigned int to, unsigned int from)
1313 {
1314 bitmap tosol, fromsol;
1315
1316 condense_varmap_nodes (to, from);
1317 tosol = get_varinfo (to)->solution;
1318 fromsol = get_varinfo (from)->solution;
1319 bitmap_ior_into (tosol, fromsol);
1320 merge_graph_nodes (graph, to, from);
1321
1322 if (valid_graph_edge (graph, to, to))
1323 {
1324 if (graph->zero_weight_preds[to])
1325 {
1326 bitmap_clear_bit (graph->zero_weight_preds[to], to);
1327 bitmap_clear_bit (graph->zero_weight_succs[to], to);
1328 }
1329 if (valid_weighted_graph_edge (graph, to, to))
1330 {
1331 bitmap weights = *(get_graph_weights (graph, to, to));
1332 if (!weights || bitmap_empty_p (weights))
1333 erase_graph_self_edge (graph, to);
1334 }
1335 }
1336 BITMAP_FREE (fromsol);
1337 get_varinfo (to)->address_taken |= get_varinfo (from)->address_taken;
1338 get_varinfo (to)->indirect_target |= get_varinfo (from)->indirect_target;
1339 }
1340
1341
1342 /* Unify nodes in GRAPH that we have found to be part of a cycle.
1343 SI is the Strongly Connected Components information structure that tells us
1344 what components to unify.
1345 UPDATE_CHANGED should be set to true if the changed sbitmap and changed
1346 count should be updated to reflect the unification. */
1347
1348 static void
1349 process_unification_queue (constraint_graph_t graph, struct scc_info *si,
1350 bool update_changed)
1351 {
1352 size_t i = 0;
1353 bitmap tmp = BITMAP_ALLOC (update_changed ? &iteration_obstack : NULL);
1354 bitmap_clear (tmp);
1355
1356 /* We proceed as follows:
1357
1358 For each component in the queue (components are delineated by
1359 when current_queue_element->node != next_queue_element->node):
1360
1361 rep = representative node for component
1362
1363 For each node (tounify) to be unified in the component,
1364 merge the solution for tounify into tmp bitmap
1365
1366 clear solution for tounify
1367
1368 merge edges from tounify into rep
1369
1370 merge complex constraints from tounify into rep
1371
1372 update changed count to note that tounify will never change
1373 again
1374
1375 Merge tmp into solution for rep, marking rep changed if this
1376 changed rep's solution.
1377
1378 Delete any 0 weighted self-edges we now have for rep. */
1379 while (i != VEC_length (unsigned, si->unification_queue))
1380 {
1381 unsigned int tounify = VEC_index (unsigned, si->unification_queue, i);
1382 unsigned int n = get_varinfo (tounify)->node;
1383
1384 if (dump_file && (dump_flags & TDF_DETAILS))
1385 fprintf (dump_file, "Unifying %s to %s\n",
1386 get_varinfo (tounify)->name,
1387 get_varinfo (n)->name);
1388 if (update_changed)
1389 stats.unified_vars_dynamic++;
1390 else
1391 stats.unified_vars_static++;
1392 bitmap_ior_into (tmp, get_varinfo (tounify)->solution);
1393 merge_graph_nodes (graph, n, tounify);
1394 condense_varmap_nodes (n, tounify);
1395
1396 if (update_changed && TEST_BIT (changed, tounify))
1397 {
1398 RESET_BIT (changed, tounify);
1399 if (!TEST_BIT (changed, n))
1400 SET_BIT (changed, n);
1401 else
1402 {
1403 gcc_assert (changed_count > 0);
1404 changed_count--;
1405 }
1406 }
1407
1408 bitmap_clear (get_varinfo (tounify)->solution);
1409 ++i;
1410
1411 /* If we've either finished processing the entire queue, or
1412 finished processing all nodes for component n, update the solution for
1413 n. */
1414 if (i == VEC_length (unsigned, si->unification_queue)
1415 || get_varinfo (VEC_index (unsigned, si->unification_queue, i))->node != n)
1416 {
1417 /* If the solution changes because of the merging, we need to mark
1418 the variable as changed. */
1419 if (bitmap_ior_into (get_varinfo (n)->solution, tmp))
1420 {
1421 if (update_changed && !TEST_BIT (changed, n))
1422 {
1423 SET_BIT (changed, n);
1424 changed_count++;
1425 }
1426 }
1427 bitmap_clear (tmp);
1428
1429 if (valid_graph_edge (graph, n, n))
1430 {
1431 if (graph->zero_weight_succs[n])
1432 {
1433 if (graph->zero_weight_preds[n])
1434 bitmap_clear_bit (graph->zero_weight_preds[n], n);
1435 bitmap_clear_bit (graph->zero_weight_succs[n], n);
1436 }
1437 if (valid_weighted_graph_edge (graph, n, n))
1438 {
1439 bitmap weights = *(get_graph_weights (graph, n, n));
1440 if (!weights || bitmap_empty_p (weights))
1441 erase_graph_self_edge (graph, n);
1442 }
1443 }
1444 }
1445 }
1446 BITMAP_FREE (tmp);
1447 }
1448
1449
1450 /* Information needed to compute the topological ordering of a graph. */
1451
1452 struct topo_info
1453 {
1454 /* sbitmap of visited nodes. */
1455 sbitmap visited;
1456 /* Array that stores the topological order of the graph, *in
1457 reverse*. */
1458 VEC(unsigned,heap) *topo_order;
1459 };
1460
1461
1462 /* Initialize and return a topological info structure. */
1463
1464 static struct topo_info *
1465 init_topo_info (void)
1466 {
1467 size_t size = VEC_length (varinfo_t, varmap);
1468 struct topo_info *ti = xmalloc (sizeof (struct topo_info));
1469 ti->visited = sbitmap_alloc (size);
1470 sbitmap_zero (ti->visited);
1471 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1472 return ti;
1473 }
1474
1475
1476 /* Free the topological sort info pointed to by TI. */
1477
1478 static void
1479 free_topo_info (struct topo_info *ti)
1480 {
1481 sbitmap_free (ti->visited);
1482 VEC_free (unsigned, heap, ti->topo_order);
1483 free (ti);
1484 }
1485
1486 /* Visit the graph in topological order, and store the order in the
1487 topo_info structure. */
1488
1489 static void
1490 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1491 unsigned int n)
1492 {
1493 VEC(constraint_edge_t,heap) *succs = graph->succs[n];
1494 bitmap temp;
1495 bitmap_iterator bi;
1496 constraint_edge_t c;
1497 int i;
1498 unsigned int j;
1499
1500 SET_BIT (ti->visited, n);
1501 if (VEC_length (constraint_edge_t, succs) != 0)
1502 {
1503 temp = BITMAP_ALLOC (&iteration_obstack);
1504 if (graph->zero_weight_succs[n])
1505 bitmap_ior_into (temp, graph->zero_weight_succs[n]);
1506 for (i = 0; VEC_iterate (constraint_edge_t, succs, i, c); i++)
1507 bitmap_set_bit (temp, c->dest);
1508 }
1509 else
1510 temp = graph->zero_weight_succs[n];
1511
1512 if (temp)
1513 EXECUTE_IF_SET_IN_BITMAP (temp, 0, j, bi)
1514 {
1515 if (!TEST_BIT (ti->visited, j))
1516 topo_visit (graph, ti, j);
1517 }
1518 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1519 }
1520
1521 /* Return true if variable N + OFFSET is a legal field of N. */
1522
1523 static bool
1524 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1525 {
1526 varinfo_t ninfo = get_varinfo (n);
1527
1528 /* For things we've globbed to single variables, any offset into the
1529 variable acts like the entire variable, so that it becomes offset
1530 0. */
1531 if (ninfo->is_special_var
1532 || ninfo->is_artificial_var
1533 || ninfo->is_unknown_size_var)
1534 {
1535 *offset = 0;
1536 return true;
1537 }
1538 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1539 }
1540
1541 #define DONT_PROPAGATE_WITH_ANYTHING 0
1542
1543 /* Process a constraint C that represents *x = &y. */
1544
1545 static void
1546 do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
1547 constraint_t c, bitmap delta)
1548 {
1549 unsigned int rhs = c->rhs.var;
1550 unsigned int j;
1551 bitmap_iterator bi;
1552
1553 /* For each member j of Delta (Sol(x)), add y to Sol(j). */
1554 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1555 {
1556 unsigned HOST_WIDE_INT offset = c->lhs.offset;
1557 if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
1558 {
1559 /* *x != NULL && *x != ANYTHING */
1560 varinfo_t v;
1561 unsigned int t;
1562 bitmap sol;
1563 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;
1564
1565 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1566 if (!v)
1567 continue;
1568 t = v->node;
1569 sol = get_varinfo (t)->solution;
1570 if (!bitmap_bit_p (sol, rhs))
1571 {
1572 bitmap_set_bit (sol, rhs);
1573 if (!TEST_BIT (changed, t))
1574 {
1575 SET_BIT (changed, t);
1576 changed_count++;
1577 }
1578 }
1579 }
1580 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1581 fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");
1582
1583 }
1584 }
1585
1586 /* Process a constraint C that represents x = *y, using DELTA as the
1587 starting solution. */
1588
1589 static void
1590 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1591 bitmap delta)
1592 {
1593 unsigned int lhs = get_varinfo (c->lhs.var)->node;
1594 bool flag = false;
1595 bitmap sol = get_varinfo (lhs)->solution;
1596 unsigned int j;
1597 bitmap_iterator bi;
1598
1599 #if DONT_PROPAGATE_WITH_ANYTHING
1600 if (bitmap_bit_p (delta, anything_id))
1601 {
1602 flag = !bitmap_bit_p (sol, anything_id);
1603 if (flag)
1604 bitmap_set_bit (sol, anything_id);
1605 goto done;
1606 }
1607 #endif
1608 /* For each variable j in delta (Sol(y)), add
1609 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1610 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1611 {
1612 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1613 if (type_safe (j, &roffset))
1614 {
1615 varinfo_t v;
1616 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1617 unsigned int t;
1618
1619 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1620 if (!v)
1621 continue;
1622 t = v->node;
1623
1624 /* Adding edges from the special vars is pointless.
1625 They don't have sets that can change. */
1626 if (get_varinfo (t)->is_special_var)
1627 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1628 else if (int_add_graph_edge (graph, lhs, t, 0))
1629 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1630 }
1631 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1632 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1633
1634 }
1635 #if DONT_PROPAGATE_WITH_ANYTHING
1636 done:
1637 #endif
1638 /* If the LHS solution changed, mark the var as changed. */
1639 if (flag)
1640 {
1641 get_varinfo (lhs)->solution = sol;
1642 if (!TEST_BIT (changed, lhs))
1643 {
1644 SET_BIT (changed, lhs);
1645 changed_count++;
1646 }
1647 }
1648 }
1649
1650 /* Process a constraint C that represents *x = y. */
1651
1652 static void
1653 do_ds_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1654 {
1655 unsigned int rhs = get_varinfo (c->rhs.var)->node;
1656 unsigned HOST_WIDE_INT roff = c->rhs.offset;
1657 bitmap sol = get_varinfo (rhs)->solution;
1658 unsigned int j;
1659 bitmap_iterator bi;
1660
1661 #if DONT_PROPAGATE_WITH_ANYTHING
1662 if (bitmap_bit_p (sol, anything_id))
1663 {
1664 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1665 {
1666 varinfo_t jvi = get_varinfo (j);
1667 unsigned int t;
1668 unsigned int loff = c->lhs.offset;
1669 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1670 varinfo_t v;
1671
1672 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1673 if (!v)
1674 continue;
1675 t = v->node;
1676
1677 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1678 {
1679 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1680 if (!TEST_BIT (changed, t))
1681 {
1682 SET_BIT (changed, t);
1683 changed_count++;
1684 }
1685 }
1686 }
1687 return;
1688 }
1689 #endif
1690
1691 /* For each member j of delta (Sol(x)), add an edge from y to j and
1692 union Sol(y) into Sol(j) */
1693 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1694 {
1695 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1696 if (type_safe (j, &loff) && !(get_varinfo(j)->is_special_var))
1697 {
1698 varinfo_t v;
1699 unsigned int t;
1700 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1701
1702 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1703 if (!v)
1704 continue;
1705 t = v->node;
1706 if (int_add_graph_edge (graph, t, rhs, roff))
1707 {
1708 bitmap tmp = get_varinfo (t)->solution;
1709 if (set_union_with_increment (tmp, sol, roff))
1710 {
1711 get_varinfo (t)->solution = tmp;
1712 if (t == rhs)
1713 sol = get_varinfo (rhs)->solution;
1714 if (!TEST_BIT (changed, t))
1715 {
1716 SET_BIT (changed, t);
1717 changed_count++;
1718 }
1719 }
1720 }
1721 }
1722 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1723 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1724 }
1725 }
1726
1727 /* Handle a non-simple (simple meaning requires no iteration), non-copy
1728 constraint (IE *x = &y, x = *y, and *x = y). */
1729
1730 static void
1731 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1732 {
1733 if (c->lhs.type == DEREF)
1734 {
1735 if (c->rhs.type == ADDRESSOF)
1736 {
1737 /* *x = &y */
1738 do_da_constraint (graph, c, delta);
1739 }
1740 else
1741 {
1742 /* *x = y */
1743 do_ds_constraint (graph, c, delta);
1744 }
1745 }
1746 else
1747 {
1748 /* x = *y */
1749 if (!(get_varinfo (c->lhs.var)->is_special_var))
1750 do_sd_constraint (graph, c, delta);
1751 }
1752 }
1753
1754 /* Initialize and return a new SCC info structure. */
1755
1756 static struct scc_info *
1757 init_scc_info (void)
1758 {
1759 struct scc_info *si = xmalloc (sizeof (struct scc_info));
1760 size_t size = VEC_length (varinfo_t, varmap);
1761
1762 si->current_index = 0;
1763 si->visited = sbitmap_alloc (size);
1764 sbitmap_zero (si->visited);
1765 si->in_component = sbitmap_alloc (size);
1766 sbitmap_ones (si->in_component);
1767 si->visited_index = xcalloc (sizeof (unsigned int), size + 1);
1768 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1769 si->unification_queue = VEC_alloc (unsigned, heap, 1);
1770 return si;
1771 }
1772
1773 /* Free an SCC info structure pointed to by SI */
1774
1775 static void
1776 free_scc_info (struct scc_info *si)
1777 {
1778 sbitmap_free (si->visited);
1779 sbitmap_free (si->in_component);
1780 free (si->visited_index);
1781 VEC_free (unsigned, heap, si->scc_stack);
1782 VEC_free (unsigned, heap, si->unification_queue);
1783 free (si);
1784 }
1785
1786
1787 /* Find cycles in GRAPH using strongly connected components, and
1788 collapse each cycle into a single representative node. If UPDATE_CHANGED
1789 is true, then update the changed sbitmap to note those nodes whose
1790 solutions have changed as a result of collapsing. */
1791
1792 static void
1793 find_and_collapse_graph_cycles (constraint_graph_t graph, bool update_changed)
1794 {
1795 unsigned int i;
1796 unsigned int size = VEC_length (varinfo_t, varmap);
1797 struct scc_info *si = init_scc_info ();
1798
1799 for (i = 0; i != size; ++i)
1800 if (!TEST_BIT (si->visited, i) && get_varinfo (i)->node == i)
1801 scc_visit (graph, si, i);
1802
1803 process_unification_queue (graph, si, update_changed);
1804 free_scc_info (si);
1805 }
1806
1807 /* Compute a topological ordering for GRAPH, and store the result in the
1808 topo_info structure TI. */
1809
1810 static void
1811 compute_topo_order (constraint_graph_t graph,
1812 struct topo_info *ti)
1813 {
1814 unsigned int i;
1815 unsigned int size = VEC_length (varinfo_t, varmap);
1816
1817 for (i = 0; i != size; ++i)
1818 if (!TEST_BIT (ti->visited, i) && get_varinfo (i)->node == i)
1819 topo_visit (graph, ti, i);
1820 }
1821
1822 /* Return true if a bit other than bit 0 is set in bitmap B. */
1823
1824 static bool
1825 bitmap_other_than_zero_bit_set (bitmap b)
1826 {
1827 unsigned int i;
1828 bitmap_iterator bi;
1829
1830 if (bitmap_empty_p (b))
1831 return false;
1832 EXECUTE_IF_SET_IN_BITMAP (b, 1, i, bi)
1833 return true;
1834 return false;
1835 }
1836
1837 /* Perform offline variable substitution.
1838
1839 This is a linear time way of identifying variables that must have
1840 equivalent points-to sets, including those caused by static cycles,
1841 and single entry subgraphs, in the constraint graph.
1842
1843 The technique is described in "Off-line variable substitution for
1844 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1845 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56. */
1846
1847 static void
1848 perform_var_substitution (constraint_graph_t graph)
1849 {
1850 struct topo_info *ti = init_topo_info ();
1851
1852 bitmap_obstack_initialize (&iteration_obstack);
1853 /* Compute the topological ordering of the graph, then visit each
1854 node in topological order. */
1855 compute_topo_order (graph, ti);
1856
1857 while (VEC_length (unsigned, ti->topo_order) != 0)
1858 {
1859 unsigned int i = VEC_pop (unsigned, ti->topo_order);
1860 unsigned int pred;
1861 varinfo_t vi = get_varinfo (i);
1862 bool okay_to_elim = false;
1863 unsigned int root = VEC_length (varinfo_t, varmap);
1864 VEC(constraint_edge_t,heap) *predvec = graph->preds[i];
1865 constraint_edge_t ce = NULL;
1866 bitmap tmp;
1867 unsigned int k;
1868 bitmap_iterator bi;
1869
1870 /* We can't eliminate things whose address is taken, or which are
1871 the target of a dereference. */
1872 if (vi->address_taken || vi->indirect_target)
1873 continue;
1874
1875 /* See if all predecessors of I are ripe for elimination */
1876 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_preds[i], 0, k, bi)
1877 {
1878 unsigned int w;
1879 w = get_varinfo (k)->node;
1880
1881 /* We can't eliminate the node if one of the predecessors is
1882 part of a different strongly connected component. */
1883 if (!okay_to_elim)
1884 {
1885 root = w;
1886 okay_to_elim = true;
1887 }
1888 else if (w != root)
1889 {
1890 okay_to_elim = false;
1891 break;
1892 }
1893
1894 /* Theorem 4 in Rountev and Chandra: If i is a direct node,
1895 then Solution(i) is a subset of Solution (w), where w is a
1896 predecessor in the graph.
1897 Corollary: If all predecessors of i have the same
1898 points-to set, then i has that same points-to set as
1899 those predecessors. */
1900 tmp = BITMAP_ALLOC (NULL);
1901 bitmap_and_compl (tmp, get_varinfo (i)->solution,
1902 get_varinfo (w)->solution);
1903 if (!bitmap_empty_p (tmp))
1904 {
1905 okay_to_elim = false;
1906 BITMAP_FREE (tmp);
1907 break;
1908 }
1909 BITMAP_FREE (tmp);
1910 }
1911
1912 if (okay_to_elim)
1913 for (pred = 0;
1914 VEC_iterate (constraint_edge_t, predvec, pred, ce);
1915 pred++)
1916 {
1917 bitmap weight;
1918 unsigned int w;
1919 weight = *(get_graph_weights (graph, i, ce->dest));
1920
1921 /* We can't eliminate variables that have nonzero weighted
1922 edges between them. */
1923 if (weight && bitmap_other_than_zero_bit_set (weight))
1924 {
1925 okay_to_elim = false;
1926 break;
1927 }
1928 w = get_varinfo (ce->dest)->node;
1929
1930 /* We can't eliminate the node if one of the predecessors is
1931 part of a different strongly connected component. */
1932 if (!okay_to_elim)
1933 {
1934 root = w;
1935 okay_to_elim = true;
1936 }
1937 else if (w != root)
1938 {
1939 okay_to_elim = false;
1940 break;
1941 }
1942
1943 /* Theorem 4 in Rountev and Chandra: If i is a direct node,
1944 then Solution(i) is a subset of Solution (w), where w is a
1945 predecessor in the graph.
1946 Corollary: If all predecessors of i have the same
1947 points-to set, then i has that same points-to set as
1948 those predecessors. */
1949 tmp = BITMAP_ALLOC (NULL);
1950 bitmap_and_compl (tmp, get_varinfo (i)->solution,
1951 get_varinfo (w)->solution);
1952 if (!bitmap_empty_p (tmp))
1953 {
1954 okay_to_elim = false;
1955 BITMAP_FREE (tmp);
1956 break;
1957 }
1958 BITMAP_FREE (tmp);
1959 }
1960
1961 /* See if the root is different from the original node.
1962 If so, we've found an equivalence. */
1963 if (root != get_varinfo (i)->node && okay_to_elim)
1964 {
1965 /* Found an equivalence */
1966 get_varinfo (i)->node = root;
1967 collapse_nodes (graph, root, i);
1968 if (dump_file && (dump_flags & TDF_DETAILS))
1969 fprintf (dump_file, "Collapsing %s into %s\n",
1970 get_varinfo (i)->name,
1971 get_varinfo (root)->name);
1972 stats.collapsed_vars++;
1973 }
1974 }
1975
1976 bitmap_obstack_release (&iteration_obstack);
1977 free_topo_info (ti);
1978 }
1979
1980 /* Solve the constraint graph GRAPH using our worklist solver.
1981 This is based on the PW* family of solvers from the "Efficient Field
1982 Sensitive Pointer Analysis for C" paper.
1983 It works by iterating over all the graph nodes, processing the complex
1984 constraints and propagating the copy constraints, until everything stops
1985 changing.  This corresponds to steps 6-8 in the solving list given above. */
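
/* A rough sketch of one pass of the loop below: nodes are visited in
   topological order; for a node i marked changed we first run its complex
   (DEREF / field-offset) constraints against Sol(i), then propagate along
   every copy edge i -> j by computing Sol(j) |= Sol(i), offset by the edge
   weight for weighted edges.  Whenever such a union actually grows Sol(j),
   j is marked changed so it will be processed again.  The outer loop stops
   once no node remains marked.  */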
1986
1987 static void
1988 solve_graph (constraint_graph_t graph)
1989 {
1990 unsigned int size = VEC_length (varinfo_t, varmap);
1991 unsigned int i;
1992
1993 changed_count = size;
1994 changed = sbitmap_alloc (size);
1995 sbitmap_ones (changed);
1996
1997 /* The already collapsed/unreachable nodes will never change, so we
1998 need to account for them in changed_count. */
1999 for (i = 0; i < size; i++)
2000 if (get_varinfo (i)->node != i)
2001 changed_count--;
2002
2003 while (changed_count > 0)
2004 {
2005 unsigned int i;
2006 struct topo_info *ti = init_topo_info ();
2007 stats.iterations++;
2008
2009 bitmap_obstack_initialize (&iteration_obstack);
2010
2011 if (edge_added)
2012 {
2013 /* We already did cycle elimination once, when we did
2014 variable substitution, so we don't need it again for the
2015 first iteration. */
2016 if (stats.iterations > 1)
2017 find_and_collapse_graph_cycles (graph, true);
2018
2019 edge_added = false;
2020 }
2021
2022 compute_topo_order (graph, ti);
2023
2024 while (VEC_length (unsigned, ti->topo_order) != 0)
2025 {
2026 i = VEC_pop (unsigned, ti->topo_order);
2027 gcc_assert (get_varinfo (i)->node == i);
2028
2029 /* If the node has changed, we need to process the
2030 complex constraints and outgoing edges again. */
2031 if (TEST_BIT (changed, i))
2032 {
2033 unsigned int j;
2034 constraint_t c;
2035 constraint_edge_t e = NULL;
2036 bitmap solution;
2037 bitmap_iterator bi;
2038 VEC(constraint_t,heap) *complex = get_varinfo (i)->complex;
2039 VEC(constraint_edge_t,heap) *succs;
2040
2041 RESET_BIT (changed, i);
2042 changed_count--;
2043
2044 /* Process the complex constraints */
2045 solution = get_varinfo (i)->solution;
2046 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2047 do_complex_constraint (graph, c, solution);
2048
2049 /* Propagate solution to all successors. */
2050 succs = graph->succs[i];
2051
2052 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[i], 0, j, bi)
2053 {
2054 bitmap tmp = get_varinfo (j)->solution;
2055 bool flag = false;
2056
2057 flag = set_union_with_increment (tmp, solution, 0);
2058
2059 if (flag)
2060 {
2061 get_varinfo (j)->solution = tmp;
2062 if (!TEST_BIT (changed, j))
2063 {
2064 SET_BIT (changed, j);
2065 changed_count++;
2066 }
2067 }
2068 }
2069 for (j = 0; VEC_iterate (constraint_edge_t, succs, j, e); j++)
2070 {
2071 bitmap tmp = get_varinfo (e->dest)->solution;
2072 bool flag = false;
2073 unsigned int k;
2074 bitmap weights = e->weights;
2075 bitmap_iterator bi;
2076
2077 gcc_assert (weights && !bitmap_empty_p (weights));
2078 EXECUTE_IF_SET_IN_BITMAP (weights, 0, k, bi)
2079 flag |= set_union_with_increment (tmp, solution, k);
2080
2081 if (flag)
2082 {
2083 get_varinfo (e->dest)->solution = tmp;
2084 if (!TEST_BIT (changed, e->dest))
2085 {
2086 SET_BIT (changed, e->dest);
2087 changed_count++;
2088 }
2089 }
2090 }
2091 }
2092 }
2093 free_topo_info (ti);
2094 bitmap_obstack_release (&iteration_obstack);
2095 }
2096
2097 sbitmap_free (changed);
2098 }
2099
2100
2101 /* CONSTRAINT AND VARIABLE GENERATION FUNCTIONS */
2102
2103 /* Map from trees to variable ids. */
2104 static htab_t id_for_tree;
2105
2106 typedef struct tree_id
2107 {
2108 tree t;
2109 unsigned int id;
2110 } *tree_id_t;
2111
2112 /* Hash a tree id structure. */
2113
2114 static hashval_t
2115 tree_id_hash (const void *p)
2116 {
2117 const tree_id_t ta = (tree_id_t) p;
2118 return htab_hash_pointer (ta->t);
2119 }
2120
2121 /* Return true if the tree in P1 and the tree in P2 are the same. */
2122
2123 static int
2124 tree_id_eq (const void *p1, const void *p2)
2125 {
2126 const tree_id_t ta1 = (tree_id_t) p1;
2127 const tree_id_t ta2 = (tree_id_t) p2;
2128 return ta1->t == ta2->t;
2129 }
2130
2131 /* Insert ID as the variable id for tree T in the hashtable. */
2132
2133 static void
2134 insert_id_for_tree (tree t, int id)
2135 {
2136 void **slot;
2137 struct tree_id finder;
2138 tree_id_t new_pair;
2139
2140 finder.t = t;
2141 slot = htab_find_slot (id_for_tree, &finder, INSERT);
2142 gcc_assert (*slot == NULL);
2143 new_pair = xmalloc (sizeof (struct tree_id));
2144 new_pair->t = t;
2145 new_pair->id = id;
2146 *slot = (void *)new_pair;
2147 }
2148
2149 /* Find the variable id for tree T in ID_FOR_TREE. If T does not
2150 exist in the hash table, return false; otherwise, return true and
2151 set *ID to the id we found. */
2152
2153 static bool
2154 lookup_id_for_tree (tree t, unsigned int *id)
2155 {
2156 tree_id_t pair;
2157 struct tree_id finder;
2158
2159 finder.t = t;
2160 pair = htab_find (id_for_tree, &finder);
2161 if (pair == NULL)
2162 return false;
2163 *id = pair->id;
2164 return true;
2165 }
2166
2167 /* Return a printable name for DECL */
2168
2169 static const char *
2170 alias_get_name (tree decl)
2171 {
2172 const char *res = get_name (decl);
2173 char *temp;
2174 int num_printed = 0;
2175
2176 if (res != NULL)
2177 return res;
2178
2179 res = "NULL";
2180 if (TREE_CODE (decl) == SSA_NAME)
2181 {
2182 num_printed = asprintf (&temp, "%s_%u",
2183 alias_get_name (SSA_NAME_VAR (decl)),
2184 SSA_NAME_VERSION (decl));
2185 }
2186 else if (DECL_P (decl))
2187 {
2188 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2189 }
2190 if (num_printed > 0)
2191 {
2192 res = ggc_strdup (temp);
2193 free (temp);
2194 }
2195 return res;
2196 }
2197
2198 /* Find the variable id for tree T in the hashtable.
2199 If T doesn't exist in the hash table, create an entry for it. */
2200
2201 static unsigned int
2202 get_id_for_tree (tree t)
2203 {
2204 tree_id_t pair;
2205 struct tree_id finder;
2206
2207 finder.t = t;
2208 pair = htab_find (id_for_tree, &finder);
2209 if (pair == NULL)
2210 return create_variable_info_for (t, alias_get_name (t));
2211
2212 return pair->id;
2213 }
2214
2215 /* Get a constraint expression from an SSA_VAR_P node. */
2216
2217 static struct constraint_expr
2218 get_constraint_exp_from_ssa_var (tree t)
2219 {
2220 struct constraint_expr cexpr;
2221
2222 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2223
2224 /* For parameters, get at the points-to set for the actual parm
2225 decl. */
2226 if (TREE_CODE (t) == SSA_NAME
2227 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2228 && default_def (SSA_NAME_VAR (t)) == t)
2229 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2230
2231 cexpr.type = SCALAR;
2232
2233 cexpr.var = get_id_for_tree (t);
2234 /* If we determine the result is "anything", and we know this is readonly,
2235 say it points to readonly memory instead. */
2236 if (cexpr.var == anything_id && TREE_READONLY (t))
2237 {
2238 cexpr.type = ADDRESSOF;
2239 cexpr.var = readonly_id;
2240 }
2241
2242 cexpr.offset = 0;
2243 return cexpr;
2244 }
2245
2246 /* Process a completed constraint T, and add it to the constraint
2247 list. */
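
/* For example, a copy between two dereferences such as *x = *y cannot be
   represented by a single constraint, so it is split below into
   tmp = *y followed by *x = tmp, using a freshly created temporary.
   (x, y and tmp are purely illustrative names.)  */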
2248
2249 static void
2250 process_constraint (constraint_t t)
2251 {
2252 struct constraint_expr rhs = t->rhs;
2253 struct constraint_expr lhs = t->lhs;
2254
2255 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2256 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2257
2258 /* ANYTHING == ANYTHING is pointless. */
2259 if (lhs.var == anything_id && rhs.var == anything_id)
2260 return;
2261
2262 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING.  */
2263 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2264 {
2265 rhs = t->lhs;
2266 t->lhs = t->rhs;
2267 t->rhs = rhs;
2268 process_constraint (t);
2269 }
2270 /* This can happen in our IR with things like n->a = *p */
2271 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2272 {
2273 /* Split into tmp = *rhs, *lhs = tmp */
2274 tree rhsdecl = get_varinfo (rhs.var)->decl;
2275 tree pointertype = TREE_TYPE (rhsdecl);
2276 tree pointedtotype = TREE_TYPE (pointertype);
2277 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2278 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2279
2280 /* If this is an aggregate of known size, we should have passed
2281 this off to do_structure_copy, and it should have broken it
2282 up. */
2283 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2284 || get_varinfo (rhs.var)->is_unknown_size_var);
2285
2286 process_constraint (new_constraint (tmplhs, rhs));
2287 process_constraint (new_constraint (lhs, tmplhs));
2288 }
2289 else if (rhs.type == ADDRESSOF)
2290 {
2291 varinfo_t vi;
2292 gcc_assert (rhs.offset == 0);
2293
2294 for (vi = get_varinfo (rhs.var); vi != NULL; vi = vi->next)
2295 vi->address_taken = true;
2296
2297 VEC_safe_push (constraint_t, heap, constraints, t);
2298 }
2299 else
2300 {
2301 if (lhs.type != DEREF && rhs.type == DEREF)
2302 get_varinfo (lhs.var)->indirect_target = true;
2303 VEC_safe_push (constraint_t, heap, constraints, t);
2304 }
2305 }
2306
2307
2308 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2309 structure. */
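
/* For example, a field whose DECL_FIELD_OFFSET is 4 bytes and whose
   DECL_FIELD_BIT_OFFSET is 16 bits sits at bit position 4 * 8 + 16 = 48.  */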
2310
2311 static unsigned HOST_WIDE_INT
2312 bitpos_of_field (const tree fdecl)
2313 {
2314
2315 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2316 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2317 return -1;
2318
2319 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2320 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2321 }
2322
2323
2324 /* Return true if an access at position ACCESSPOS with size ACCESSSIZE
2325 overlaps with a field at position FIELDPOS with size FIELDSIZE.  */
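
/* For instance, with a field occupying bits [32, 64), a 16-bit access
   starting at bit 40 overlaps it, while an access starting at bit 64
   does not.  */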
2326
2327 static bool
2328 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2329 const unsigned HOST_WIDE_INT fieldsize,
2330 const unsigned HOST_WIDE_INT accesspos,
2331 const unsigned HOST_WIDE_INT accesssize)
2332 {
2333 if (fieldpos == accesspos && fieldsize == accesssize)
2334 return true;
2335 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2336 return true;
2337 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2338 return true;
2339
2340 return false;
2341 }
2342
2343 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2344
2345 static void
2346 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2347 bool *anyoffset)
2348 {
2349 tree orig_t = t;
2350 HOST_WIDE_INT bitsize = -1;
2351 HOST_WIDE_INT bitmaxsize = -1;
2352 HOST_WIDE_INT bitpos;
2353 tree forzero;
2354 struct constraint_expr *result;
2355 unsigned int beforelength = VEC_length (ce_s, *results);
2356
2357 /* Some people like to do cute things like take the address of
2358 &0->a.b */
2359 forzero = t;
2360 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2361 forzero = TREE_OPERAND (forzero, 0);
2362
2363 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2364 {
2365 struct constraint_expr temp;
2366
2367 temp.offset = 0;
2368 temp.var = integer_id;
2369 temp.type = SCALAR;
2370 VEC_safe_push (ce_s, heap, *results, &temp);
2371 return;
2372 }
2373
2374 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2375 get_constraint_for (t, results, anyoffset);
2376 result = VEC_last (ce_s, *results);
2377
2378 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2379
2380 /* This can also happen due to weird offsetof type macros. */
2381 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2382 result->type = SCALAR;
2383
2384 /* If the position and size of the access are known exactly, record the
2385 bit offset.  Otherwise, be conservative. */
2385 if (bitmaxsize != -1
2386 && bitsize == bitmaxsize)
2387 {
2388 result->offset = bitpos;
2389 }
2390 /* FIXME: Handle the DEREF case. */
2391 else if (anyoffset && result->type != DEREF)
2392 {
2393 result->offset = 0;
2394 *anyoffset = true;
2395 }
2396 else
2397 {
2398 result->var = anything_id;
2399 result->offset = 0;
2400 }
2401
2402 if (result->type == SCALAR)
2403 {
2404 /* In languages like C, you can access one past the end of an
2405 array. You aren't allowed to dereference it, so we can
2406 ignore this constraint. When we handle pointer subtraction,
2407 we may have to do something cute here. */
2408
2409 if (result->offset < get_varinfo (result->var)->fullsize)
2410 {
2411 /* It's also not true that the constraint will actually start at the
2412 right offset; it may start in some padding.  We only care about
2413 setting the constraint to the first actual field it touches, so
2414 walk to find it. */
2415 varinfo_t curr;
2416 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2417 {
2418 if (offset_overlaps_with_access (curr->offset, curr->size,
2419 result->offset, bitsize))
2420 {
2421 result->var = curr->id;
2422 break;
2423 }
2424 }
2425 /* Assert that we found *some* field there; the user shouldn't be
2426 accessing *only* padding.  The one exception is an access one past
2427 the end of an array embedded in a struct, which can end up referring
2428 only to padding, hence the ref_contains_array_ref check. */
2429 gcc_assert (curr || ref_contains_array_ref (orig_t));
2430 }
2431 else
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2433 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2434
2435 result->offset = 0;
2436 }
2437 }
2438
2439
2440 /* Dereference each constraint expression in CONSTRAINTS, in place:
2441 DEREF (ADDRESSOF) = SCALAR
2442 DEREF (SCALAR) = DEREF
2443 DEREF (DEREF) = (temp = DEREF1; result = DEREF (temp))
2444 This is needed so that we can handle dereferencing DEREF constraints. */
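
/* For example, dereferencing &x yields the SCALAR x; dereferencing the
   SCALAR p yields *p; and dereferencing *p needs a temporary, so we emit
   tmp = *p and the result becomes *tmp.  (p, x and tmp are illustrative.)  */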
2445
2446 static void
2447 do_deref (VEC (ce_s, heap) **constraints)
2448 {
2449 struct constraint_expr *c;
2450 unsigned int i = 0;
2451 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2452 {
2453 if (c->type == SCALAR)
2454 c->type = DEREF;
2455 else if (c->type == ADDRESSOF)
2456 c->type = SCALAR;
2457 else if (c->type == DEREF)
2458 {
2459 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2460 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2461 process_constraint (new_constraint (tmplhs, *c));
2462 c->var = tmplhs.var;
2463 }
2464 else
2465 gcc_unreachable ();
2466 }
2467 }
2468
2469
2470 /* Given a tree T, return the constraint expression for it. */
2471
2472 static void
2473 get_constraint_for (tree t, VEC (ce_s, heap) **results, bool *anyoffset)
2474 {
2475 struct constraint_expr temp;
2476
2477 /* x = integer is all glommed to a single variable, which doesn't
2478 point to anything by itself. That is, of course, unless it is an
2479 integer constant being treated as a pointer, in which case, we
2480 will return that this is really the addressof anything. This
2481 happens below, since it will fall into the default case. The only
2482 case we know something about an integer treated like a pointer is
2483 when it is the NULL pointer, and then we just say it points to
2484 NULL. */
2485 if (TREE_CODE (t) == INTEGER_CST
2486 && !POINTER_TYPE_P (TREE_TYPE (t)))
2487 {
2488 temp.var = integer_id;
2489 temp.type = SCALAR;
2490 temp.offset = 0;
2491 VEC_safe_push (ce_s, heap, *results, &temp);
2492 return;
2493 }
2494 else if (TREE_CODE (t) == INTEGER_CST
2495 && integer_zerop (t))
2496 {
2497 temp.var = nothing_id;
2498 temp.type = ADDRESSOF;
2499 temp.offset = 0;
2500 VEC_safe_push (ce_s, heap, *results, &temp);
2501 return;
2502 }
2503
2504 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2505 {
2506 case tcc_expression:
2507 {
2508 switch (TREE_CODE (t))
2509 {
2510 case ADDR_EXPR:
2511 {
2512 struct constraint_expr *c;
2513 unsigned int i;
2514
2515 get_constraint_for (TREE_OPERAND (t, 0), results, anyoffset);
2516 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2517 {
2518 if (c->type == DEREF)
2519 c->type = SCALAR;
2520 else
2521 c->type = ADDRESSOF;
2522 }
2523 return;
2524 }
2525 break;
2526 case CALL_EXPR:
2527
2528 /* XXX: In interprocedural mode, if we didn't have the
2529 body, we would need to add the constraint
2530 *each pointer argument = &ANYTHING. */
2531 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2532 {
2533 varinfo_t vi;
2534 tree heapvar = heapvar_lookup (t);
2535
2536 if (heapvar == NULL)
2537 {
2538 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2539 DECL_EXTERNAL (heapvar) = 1;
2540 add_referenced_tmp_var (heapvar);
2541 heapvar_insert (t, heapvar);
2542 }
2543
2544 temp.var = create_variable_info_for (heapvar,
2545 alias_get_name (heapvar));
2546
2547 vi = get_varinfo (temp.var);
2548 vi->is_artificial_var = 1;
2549 vi->is_heap_var = 1;
2550 temp.type = ADDRESSOF;
2551 temp.offset = 0;
2552 VEC_safe_push (ce_s, heap, *results, &temp);
2553 return;
2554 }
2555 /* FALLTHRU */
2556 default:
2557 {
2558 temp.type = ADDRESSOF;
2559 temp.var = anything_id;
2560 temp.offset = 0;
2561 VEC_safe_push (ce_s, heap, *results, &temp);
2562 return;
2563 }
2564 }
2565 }
2566 case tcc_reference:
2567 {
2568 switch (TREE_CODE (t))
2569 {
2570 case INDIRECT_REF:
2571 {
2572 get_constraint_for (TREE_OPERAND (t, 0), results, anyoffset);
2573 do_deref (results);
2574 return;
2575 }
2576 case ARRAY_REF:
2577 case ARRAY_RANGE_REF:
2578 case COMPONENT_REF:
2579 get_constraint_for_component_ref (t, results, anyoffset);
2580 return;
2581 default:
2582 {
2583 temp.type = ADDRESSOF;
2584 temp.var = anything_id;
2585 temp.offset = 0;
2586 VEC_safe_push (ce_s, heap, *results, &temp);
2587 return;
2588 }
2589 }
2590 }
2591 case tcc_unary:
2592 {
2593 switch (TREE_CODE (t))
2594 {
2595 case NOP_EXPR:
2596 case CONVERT_EXPR:
2597 case NON_LVALUE_EXPR:
2598 {
2599 tree op = TREE_OPERAND (t, 0);
2600
2601 /* Casts from non-pointers to pointers are bad news for us.
2602 Anything else, we see through.  */
2603 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2604 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2605 {
2606 get_constraint_for (op, results, anyoffset);
2607 return;
2608 }
2609
2610 /* FALLTHRU */
2611 }
2612 default:
2613 {
2614 temp.type = ADDRESSOF;
2615 temp.var = anything_id;
2616 temp.offset = 0;
2617 VEC_safe_push (ce_s, heap, *results, &temp);
2618 return;
2619 }
2620 }
2621 }
2622 case tcc_exceptional:
2623 {
2624 switch (TREE_CODE (t))
2625 {
2626 case PHI_NODE:
2627 {
2628 get_constraint_for (PHI_RESULT (t), results, anyoffset);
2629 return;
2630 }
2631 break;
2632 case SSA_NAME:
2633 {
2634 struct constraint_expr temp;
2635 temp = get_constraint_exp_from_ssa_var (t);
2636 VEC_safe_push (ce_s, heap, *results, &temp);
2637 return;
2638 }
2639 break;
2640 default:
2641 {
2642 temp.type = ADDRESSOF;
2643 temp.var = anything_id;
2644 temp.offset = 0;
2645 VEC_safe_push (ce_s, heap, *results, &temp);
2646 return;
2647 }
2648 }
2649 }
2650 case tcc_declaration:
2651 {
2652 struct constraint_expr temp;
2653 temp = get_constraint_exp_from_ssa_var (t);
2654 VEC_safe_push (ce_s, heap, *results, &temp);
2655 return;
2656 }
2657 default:
2658 {
2659 temp.type = ADDRESSOF;
2660 temp.var = anything_id;
2661 temp.offset = 0;
2662 VEC_safe_push (ce_s, heap, *results, &temp);
2663 return;
2664 }
2665 }
2666 }
2667
2668
2669 /* Handle the structure copy case where we have a simple structure copy
2670 between LHS and RHS that is of SIZE (in bits)
2671
2672 For each field of the lhs variable (lhsfield)
2673 For each field of the rhs variable at lhsfield.offset (rhsfield)
2674 add the constraint lhsfield = rhsfield
2675
2676 If we fail due to some kind of type unsafety or other thing we
2677 can't handle, return false. We expect the caller to collapse the
2678 variable in that case. */
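
/* As a sketch: for a copy x = y, where both x and y have fields at bit
   offsets 0 and 32, this walks the two field chains in parallel and emits
   the field-wise constraints x.f0 = y.f0 and x.f32 = y.f32 (the field
   names here are purely illustrative).  */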
2679
2680 static bool
2681 do_simple_structure_copy (const struct constraint_expr lhs,
2682 const struct constraint_expr rhs,
2683 const unsigned HOST_WIDE_INT size)
2684 {
2685 varinfo_t p = get_varinfo (lhs.var);
2686 unsigned HOST_WIDE_INT pstart, last;
2687 pstart = p->offset;
2688 last = p->offset + size;
2689 for (; p && p->offset < last; p = p->next)
2690 {
2691 varinfo_t q;
2692 struct constraint_expr templhs = lhs;
2693 struct constraint_expr temprhs = rhs;
2694 unsigned HOST_WIDE_INT fieldoffset;
2695
2696 templhs.var = p->id;
2697 q = get_varinfo (temprhs.var);
2698 fieldoffset = p->offset - pstart;
2699 q = first_vi_for_offset (q, q->offset + fieldoffset);
2700 if (!q)
2701 return false;
2702 temprhs.var = q->id;
2703 process_constraint (new_constraint (templhs, temprhs));
2704 }
2705 return true;
2706 }
2707
2708
2709 /* Handle the structure copy case where we have a structure copy between an
2710 aggregate on the LHS and a dereference of a pointer on the RHS
2711 that is of SIZE (in bits)
2712
2713 For each field of the lhs variable (lhsfield)
2714 rhs.offset = lhsfield->offset
2715 add the constraint lhsfield = rhs
2716 */
2717
2718 static void
2719 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2720 const struct constraint_expr rhs,
2721 const unsigned HOST_WIDE_INT size)
2722 {
2723 varinfo_t p = get_varinfo (lhs.var);
2724 unsigned HOST_WIDE_INT pstart,last;
2725 pstart = p->offset;
2726 last = p->offset + size;
2727
2728 for (; p && p->offset < last; p = p->next)
2729 {
2730 varinfo_t q;
2731 struct constraint_expr templhs = lhs;
2732 struct constraint_expr temprhs = rhs;
2733 unsigned HOST_WIDE_INT fieldoffset;
2734
2735
2736 if (templhs.type == SCALAR)
2737 templhs.var = p->id;
2738 else
2739 templhs.offset = p->offset;
2740
2741 q = get_varinfo (temprhs.var);
2742 fieldoffset = p->offset - pstart;
2743 temprhs.offset += fieldoffset;
2744 process_constraint (new_constraint (templhs, temprhs));
2745 }
2746 }
2747
2748 /* Handle the structure copy case where we have a structure copy
2749 between an aggregate on the RHS and a dereference of a pointer on
2750 the LHS that is of SIZE (in bits)
2751
2752 For each field of the rhs variable (rhsfield)
2753 lhs.offset = rhsfield->offset
2754 add the constraint lhs = rhsfield
2755 */
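
/* For example, for *p = y where y has fields at bit offsets 0 and 32,
   this emits roughly *(p + 0) = y.f0 and *(p + 32) = y.f32, i.e. the same
   DEREF expression with its offset bumped once per field (field names are
   illustrative).  */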
2756
2757 static void
2758 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2759 const struct constraint_expr rhs,
2760 const unsigned HOST_WIDE_INT size)
2761 {
2762 varinfo_t p = get_varinfo (rhs.var);
2763 unsigned HOST_WIDE_INT pstart,last;
2764 pstart = p->offset;
2765 last = p->offset + size;
2766
2767 for (; p && p->offset < last; p = p->next)
2768 {
2769 varinfo_t q;
2770 struct constraint_expr templhs = lhs;
2771 struct constraint_expr temprhs = rhs;
2772 unsigned HOST_WIDE_INT fieldoffset;
2773
2774
2775 if (temprhs.type == SCALAR)
2776 temprhs.var = p->id;
2777 else
2778 temprhs.offset = p->offset;
2779
2780 q = get_varinfo (templhs.var);
2781 fieldoffset = p->offset - pstart;
2782 templhs.offset += fieldoffset;
2783 process_constraint (new_constraint (templhs, temprhs));
2784 }
2785 }
2786
2787 /* Sometimes, frontends like to give us bad type information. This
2788 function will collapse all the fields from VAR to the end of VAR,
2789 into VAR, so that we treat those fields as a single variable.
2790 We return the variable they were collapsed into. */
2791
2792 static unsigned int
2793 collapse_rest_of_var (unsigned int var)
2794 {
2795 varinfo_t currvar = get_varinfo (var);
2796 varinfo_t field;
2797
2798 for (field = currvar->next; field; field = field->next)
2799 {
2800 if (dump_file)
2801 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2802 field->name, currvar->name);
2803
2804 gcc_assert (!field->collapsed_to);
2805 field->collapsed_to = currvar;
2806 }
2807
2808 currvar->next = NULL;
2809 currvar->size = currvar->fullsize - currvar->offset;
2810
2811 return currvar->id;
2812 }
2813
2814 /* Handle aggregate copies by expanding into copies of the respective
2815 fields of the structures. */
2816
2817 static void
2818 do_structure_copy (tree lhsop, tree rhsop)
2819 {
2820 struct constraint_expr lhs, rhs, tmp;
2821 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
2822 varinfo_t p;
2823 unsigned HOST_WIDE_INT lhssize;
2824 unsigned HOST_WIDE_INT rhssize;
2825
2826 get_constraint_for (lhsop, &lhsc, NULL);
2827 get_constraint_for (rhsop, &rhsc, NULL);
2828 gcc_assert (VEC_length (ce_s, lhsc) == 1);
2829 gcc_assert (VEC_length (ce_s, rhsc) == 1);
2830 lhs = *(VEC_last (ce_s, lhsc));
2831 rhs = *(VEC_last (ce_s, rhsc));
2832
2833 VEC_free (ce_s, heap, lhsc);
2834 VEC_free (ce_s, heap, rhsc);
2835
2836 /* If we have special var = x, swap it around. */
2837 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2838 {
2839 tmp = lhs;
2840 lhs = rhs;
2841 rhs = tmp;
2842 }
2843
2844 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2845 possible it's something we could handle. However, most cases falling
2846 into this are dealing with transparent unions, which are slightly
2847 weird. */
2848 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2849 {
2850 rhs.type = ADDRESSOF;
2851 rhs.var = anything_id;
2852 }
2853
2854 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2855 that special var. */
2856 if (rhs.var <= integer_id)
2857 {
2858 for (p = get_varinfo (lhs.var); p; p = p->next)
2859 {
2860 struct constraint_expr templhs = lhs;
2861 struct constraint_expr temprhs = rhs;
2862
2863 if (templhs.type == SCALAR )
2864 templhs.var = p->id;
2865 else
2866 templhs.offset += p->offset;
2867 process_constraint (new_constraint (templhs, temprhs));
2868 }
2869 }
2870 else
2871 {
2872 tree rhstype = TREE_TYPE (rhsop);
2873 tree lhstype = TREE_TYPE (lhsop);
2874 tree rhstypesize;
2875 tree lhstypesize;
2876
2877 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
2878 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
2879
2880 /* If we have variably sized types on the rhs or lhs, and a deref
2881 constraint, add the constraint, lhsconstraint = &ANYTHING.
2882 This is conservatively correct because either the lhs is an unknown
2883 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2884 constraint, and every variable it can point to must be unknown sized
2885 anyway, so we don't need to worry about fields at all. */
2886 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2887 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2888 {
2889 rhs.var = anything_id;
2890 rhs.type = ADDRESSOF;
2891 rhs.offset = 0;
2892 process_constraint (new_constraint (lhs, rhs));
2893 return;
2894 }
2895
2896 /* The size only really matters insofar as we don't set more or less of
2897 the variable. If we hit an unknown size var, the size should be the
2898 whole darn thing. */
2899 if (get_varinfo (rhs.var)->is_unknown_size_var)
2900 rhssize = ~0;
2901 else
2902 rhssize = TREE_INT_CST_LOW (rhstypesize);
2903
2904 if (get_varinfo (lhs.var)->is_unknown_size_var)
2905 lhssize = ~0;
2906 else
2907 lhssize = TREE_INT_CST_LOW (lhstypesize);
2908
2909
2910 if (rhs.type == SCALAR && lhs.type == SCALAR)
2911 {
2912 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
2913 {
2914 lhs.var = collapse_rest_of_var (lhs.var);
2915 rhs.var = collapse_rest_of_var (rhs.var);
2916 lhs.offset = 0;
2917 rhs.offset = 0;
2918 lhs.type = SCALAR;
2919 rhs.type = SCALAR;
2920 process_constraint (new_constraint (lhs, rhs));
2921 }
2922 }
2923 else if (lhs.type != DEREF && rhs.type == DEREF)
2924 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2925 else if (lhs.type == DEREF && rhs.type != DEREF)
2926 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2927 else
2928 {
2929 tree pointedtotype = lhstype;
2930 tree tmpvar;
2931
2932 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2933 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2934 do_structure_copy (tmpvar, rhsop);
2935 do_structure_copy (lhsop, tmpvar);
2936 }
2937 }
2938 }
2939
2940 /* Update related alias information kept in AI. This is used when
2941 building name tags, alias sets and deciding grouping heuristics.
2942 STMT is the statement to process. This function also updates
2943 ADDRESSABLE_VARS. */
2944
2945 static void
2946 update_alias_info (tree stmt, struct alias_info *ai)
2947 {
2948 bitmap addr_taken;
2949 use_operand_p use_p;
2950 ssa_op_iter iter;
2951 bool stmt_escapes_p = is_escape_site (stmt, ai);
2952 tree op;
2953
2954 /* Mark all the variables whose addresses are taken by the statement. */
2955 addr_taken = addresses_taken (stmt);
2956 if (addr_taken)
2957 {
2958 bitmap_ior_into (addressable_vars, addr_taken);
2959
2960 /* If STMT is an escape point, all the addresses taken by it are
2961 call-clobbered. */
2962 if (stmt_escapes_p)
2963 {
2964 bitmap_iterator bi;
2965 unsigned i;
2966
2967 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
2968 mark_call_clobbered (referenced_var (i));
2969 }
2970 }
2971
2972 /* Process each operand use. If an operand may be aliased, keep
2973 track of how many times it's being used. For pointers, determine
2974 whether they are dereferenced by the statement, or whether their
2975 value escapes, etc. */
2976 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2977 {
2978 tree op, var;
2979 var_ann_t v_ann;
2980 struct ptr_info_def *pi;
2981 bool is_store, is_potential_deref;
2982 unsigned num_uses, num_derefs;
2983
2984 op = USE_FROM_PTR (use_p);
2985
2986 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
2987 to the set of addressable variables. */
2988 if (TREE_CODE (op) == ADDR_EXPR)
2989 {
2990 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
2991
2992 /* PHI nodes don't have annotations for pinning the set
2993 of addresses taken, so we collect them here.
2994
2995 FIXME, should we allow PHI nodes to have annotations
2996 so that they can be treated like regular statements?
2997 Currently, they are treated as second-class
2998 statements. */
2999 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3000 continue;
3001 }
3002
3003 /* Ignore constants. */
3004 if (TREE_CODE (op) != SSA_NAME)
3005 continue;
3006
3007 var = SSA_NAME_VAR (op);
3008 v_ann = var_ann (var);
3009
3010 /* The base variable of an ssa name must be a GIMPLE register, and thus
3011 it cannot be aliased. */
3012 gcc_assert (!may_be_aliased (var));
3013
3014 /* We are only interested in pointers. */
3015 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3016 continue;
3017
3018 pi = get_ptr_info (op);
3019
3020 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3021 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3022 {
3023 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3024 VARRAY_PUSH_TREE (ai->processed_ptrs, op);
3025 }
3026
3027 /* If STMT is a PHI node, then it will not have pointer
3028 dereferences and it will not be an escape point. */
3029 if (TREE_CODE (stmt) == PHI_NODE)
3030 continue;
3031
3032 /* Determine whether OP is a dereferenced pointer, and if STMT
3033 is an escape point, whether OP escapes. */
3034 count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
3035
3036 /* Handle a corner case involving address expressions of the
3037 form '&PTR->FLD'. The problem with these expressions is that
3038 they do not represent a dereference of PTR. However, if some
3039 other transformation propagates them into an INDIRECT_REF
3040 expression, we end up with '*(&PTR->FLD)' which is folded
3041 into 'PTR->FLD'.
3042
3043 So, if the original code had no other dereferences of PTR,
3044 the aliaser will not create memory tags for it, and when
3045 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3046 memory operations will receive no V_MAY_DEF/VUSE operands.
3047
3048 One solution would be to have count_uses_and_derefs consider
3049 &PTR->FLD a dereference of PTR. But that is wrong, since it
3050 is not really a dereference but an offset calculation.
3051
3052 What we do here is to recognize these special ADDR_EXPR
3053 nodes. Since these expressions are never GIMPLE values (they
3054 are not GIMPLE invariants), they can only appear on the RHS
3055 of an assignment and their base address is always an
3056 INDIRECT_REF expression. */
3057 is_potential_deref = false;
3058 if (TREE_CODE (stmt) == MODIFY_EXPR
3059 && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
3060 && !is_gimple_val (TREE_OPERAND (stmt, 1)))
3061 {
3062 /* If the RHS is of the form &PTR->FLD and PTR == OP, then
3063 this represents a potential dereference of PTR. */
3064 tree rhs = TREE_OPERAND (stmt, 1);
3065 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3066 if (TREE_CODE (base) == INDIRECT_REF
3067 && TREE_OPERAND (base, 0) == op)
3068 is_potential_deref = true;
3069 }
3070
3071 if (num_derefs > 0 || is_potential_deref)
3072 {
3073 /* Mark OP as dereferenced. In a subsequent pass,
3074 dereferenced pointers that point to a set of
3075 variables will be assigned a name tag to alias
3076 all the variables OP points to. */
3077 pi->is_dereferenced = 1;
3078
3079 /* Keep track of how many times we've dereferenced each
3080 pointer. */
3081 NUM_REFERENCES_INC (v_ann);
3082
3083 /* If this is a store operation, mark OP as being
3084 dereferenced to store, otherwise mark it as being
3085 dereferenced to load. */
3086 if (is_store)
3087 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3088 else
3089 bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
3090 }
3091
3092 if (stmt_escapes_p && num_derefs < num_uses)
3093 {
3094 /* If STMT is an escape point and STMT contains at
3095 least one direct use of OP, then the value of OP
3096 escapes and so the pointed-to variables need to
3097 be marked call-clobbered. */
3098 pi->value_escapes_p = 1;
3099
3100 /* If the statement makes a function call, assume
3101 that pointer OP will be dereferenced in a store
3102 operation inside the called function. */
3103 if (get_call_expr_in (stmt))
3104 {
3105 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3106 pi->is_dereferenced = 1;
3107 }
3108 }
3109 }
3110
3111 if (TREE_CODE (stmt) == PHI_NODE)
3112 return;
3113
3114 /* Update reference counter for definitions to any
3115 potentially aliased variable. This is used in the alias
3116 grouping heuristics. */
3117 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3118 {
3119 tree var = SSA_NAME_VAR (op);
3120 var_ann_t ann = var_ann (var);
3121 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3122 if (may_be_aliased (var))
3123 NUM_REFERENCES_INC (ann);
3124
3125 }
3126
3127 /* Mark variables in V_MAY_DEF operands as being written to. */
3128 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3129 {
3130 tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
3131 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3132 }
3133 }
3134
3135
3136 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3137 Expressions of the type PTR + CST can be handled in two ways:
3138
3139 1- If the constraint for PTR is ADDRESSOF for a non-structure
3140 variable, then we can use it directly because adding or
3141 subtracting a constant may not alter the original ADDRESSOF
3142 constraint (i.e., pointer arithmetic may not legally go outside
3143 an object's boundaries).
3144
3145 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3146 then if CST is a compile-time constant that can be used as an
3147 offset, we can determine which sub-variable will be pointed-to
3148 by the expression.
3149
3150 Return true if the expression is handled. For any other kind of
3151 expression, return false so that each operand can be added as a
3152 separate constraint by the caller. */
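
/* As an illustrative example (assuming 32-bit int and that the constant
   is a byte offset, as it is in GIMPLE), for
     struct S { int *a; int *b; } s;
   and the expression q = &s + 4, the offset is converted to 32 bits and
   resolved with first_vi_for_offset, so instead of falling back to one
   constraint per operand we emit the precise constraint q = &s.b.  */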
3153
3154 static bool
3155 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3156 {
3157 tree op0, op1;
3158 struct constraint_expr *c, *c2;
3159 unsigned int i = 0;
3160 unsigned int j = 0;
3161 VEC (ce_s, heap) *temp = NULL;
3162 unsigned int rhsoffset = 0;
3163
3164 if (TREE_CODE (expr) != PLUS_EXPR)
3165 return false;
3166
3167 op0 = TREE_OPERAND (expr, 0);
3168 op1 = TREE_OPERAND (expr, 1);
3169
3170 get_constraint_for (op0, &temp, NULL);
3171 if (POINTER_TYPE_P (TREE_TYPE (op0))
3172 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == RECORD_TYPE
3173 && TREE_CODE (op1) == INTEGER_CST)
3174 {
3175 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3176 }
3177
3178
3179 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3180 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3181 {
3182 if (c2->type == ADDRESSOF && rhsoffset != 0)
3183 {
3184 varinfo_t temp = get_varinfo (c2->var);
3185
3186 /* An access one after the end of an array is valid,
3187 so simply punt on accesses we cannot resolve. */
3188 temp = first_vi_for_offset (temp, rhsoffset);
3189 if (temp == NULL)
3190 continue;
3191 c2->var = temp->id;
3192 c2->offset = 0;
3193 }
3194 else
3195 c2->offset = rhsoffset;
3196 process_constraint (new_constraint (*c, *c2));
3197 }
3198
3199 VEC_free (ce_s, heap, temp);
3200
3201 return true;
3202 }
3203
3204
3205 /* Walk statement ORIGT, setting up aliasing constraints according to the
3206 references found in it.  This function is the main part of the
3207 constraint builder.  */
3209
3210 static void
3211 find_func_aliases (tree origt)
3212 {
3213 tree t = origt;
3214 VEC(ce_s, heap) *lhsc = NULL;
3215 VEC(ce_s, heap) *rhsc = NULL;
3216 struct constraint_expr *c;
3217
3218 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3219 t = TREE_OPERAND (t, 0);
3220
3221 /* Now build constraints expressions. */
3222 if (TREE_CODE (t) == PHI_NODE)
3223 {
3224 /* Only care about pointers and structures containing
3225 pointers. */
3226 if (POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (t)))
3227 || AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))))
3228 {
3229 int i;
3230 unsigned int j;
3231
3232 /* For a phi node, assign all the arguments to
3233 the result. */
3234 get_constraint_for (PHI_RESULT (t), &lhsc, NULL);
3235 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3236 {
3237 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc, NULL);
3238 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3239 {
3240 struct constraint_expr *c2;
3241 while (VEC_length (ce_s, rhsc) > 0)
3242 {
3243 c2 = VEC_last (ce_s, rhsc);
3244 process_constraint (new_constraint (*c, *c2));
3245 VEC_pop (ce_s, rhsc);
3246 }
3247 }
3248 }
3249 }
3250 }
3251 /* In IPA mode, we need to generate constraints to pass call
3252 arguments through their calls.  There are two cases: either a
3253 modify_expr when we are returning a value, or just a plain
3254 call_expr when we are not. */
3255 else if (in_ipa_mode
3256 && ((TREE_CODE (t) == MODIFY_EXPR
3257 && TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR
3258 && !(call_expr_flags (TREE_OPERAND (t, 1))
3259 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3260 || (TREE_CODE (t) == CALL_EXPR
3261 && !(call_expr_flags (t)
3262 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3263 {
3264 tree lhsop;
3265 tree rhsop;
3266 unsigned int varid;
3267 bool found = false;
3268 tree arglist;
3269 varinfo_t fi;
3270 int i = 1;
3271 tree decl;
3272 if (TREE_CODE (t) == MODIFY_EXPR)
3273 {
3274 lhsop = TREE_OPERAND (t, 0);
3275 rhsop = TREE_OPERAND (t, 1);
3276 }
3277 else
3278 {
3279 lhsop = NULL;
3280 rhsop = t;
3281 }
3282 decl = get_callee_fndecl (rhsop);
3283
3284 /* If we can directly resolve the function being called, do so.
3285 Otherwise, it must be some sort of indirect expression that
3286 we should still be able to handle. */
3287 if (decl)
3288 {
3289 found = lookup_id_for_tree (decl, &varid);
3290 gcc_assert (found);
3291 }
3292 else
3293 {
3294 decl = TREE_OPERAND (rhsop, 0);
3295 found = lookup_id_for_tree (decl, &varid);
3296 gcc_assert (found);
3297 }
3298
3299 /* Assign all the passed arguments to the appropriate incoming
3300 parameters of the function. */
3301 fi = get_varinfo (varid);
3302 arglist = TREE_OPERAND (rhsop, 1);
3303
3304 for (;arglist; arglist = TREE_CHAIN (arglist))
3305 {
3306 tree arg = TREE_VALUE (arglist);
3307 struct constraint_expr lhs;
3308 struct constraint_expr *rhsp;
3309
3310 get_constraint_for (arg, &rhsc, NULL);
3311 if (TREE_CODE (decl) != FUNCTION_DECL)
3312 {
3313 lhs.type = DEREF;
3314 lhs.var = fi->id;
3315 lhs.offset = i;
3316 }
3317 else
3318 {
3319 lhs.type = SCALAR;
3320 lhs.var = first_vi_for_offset (fi, i)->id;
3321 lhs.offset = 0;
3322 }
3323 while (VEC_length (ce_s, rhsc) != 0)
3324 {
3325 rhsp = VEC_last (ce_s, rhsc);
3326 process_constraint (new_constraint (lhs, *rhsp));
3327 VEC_pop (ce_s, rhsc);
3328 }
3329 i++;
3330 }
3331 /* If we are returning a value, assign it to the result. */
3332 if (lhsop)
3333 {
3334 struct constraint_expr rhs;
3335 struct constraint_expr *lhsp;
3336 unsigned int j = 0;
3337
3338 get_constraint_for (lhsop, &lhsc, NULL);
3339 if (TREE_CODE (decl) != FUNCTION_DECL)
3340 {
3341 rhs.type = DEREF;
3342 rhs.var = fi->id;
3343 rhs.offset = i;
3344 }
3345 else
3346 {
3347 rhs.type = SCALAR;
3348 rhs.var = first_vi_for_offset (fi, i)->id;
3349 rhs.offset = 0;
3350 }
3351 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3352 process_constraint (new_constraint (*lhsp, rhs));
3353 }
3354 }
3355 /* Otherwise, just a regular assignment statement. */
3356 else if (TREE_CODE (t) == MODIFY_EXPR)
3357 {
3358 tree lhsop = TREE_OPERAND (t, 0);
3359 tree rhsop = TREE_OPERAND (t, 1);
3360 int i;
3361
3362 if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3363 && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
3364 {
3365 do_structure_copy (lhsop, rhsop);
3366 }
3367 else
3368 {
3369 /* Only care about operations with pointers, structures
3370 containing pointers, dereferences, and call expressions. */
3371 if (POINTER_TYPE_P (TREE_TYPE (lhsop))
3372 || AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3373 || TREE_CODE (rhsop) == CALL_EXPR)
3374 {
3375 get_constraint_for (lhsop, &lhsc, NULL);
3376 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3377 {
3378 /* RHSs that consist of unary operations,
3379 exceptional types, or bare decls/constants get
3380 handled directly by get_constraint_for. */
3381 case tcc_reference:
3382 case tcc_declaration:
3383 case tcc_constant:
3384 case tcc_exceptional:
3385 case tcc_expression:
3386 case tcc_unary:
3387 {
3388 unsigned int j;
3389 bool need_anyoffset = false;
3390 tree strippedrhs = rhsop;
3391 tree rhstype;
3392
3393 /* XXX: Push this back into the ADDR_EXPR
3394 case, and remove anyoffset handling. */
3395 STRIP_NOPS (strippedrhs);
3396 rhstype = TREE_TYPE (strippedrhs);
3397
3398 get_constraint_for (rhsop, &rhsc, &need_anyoffset);
3399 if (TREE_CODE (strippedrhs) == ADDR_EXPR
3400 && AGGREGATE_TYPE_P (TREE_TYPE (rhstype)))
3401 {
3402 struct constraint_expr *origrhs;
3403 varinfo_t origvar;
3404 struct constraint_expr tmp;
3405
3406 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3407 origrhs = VEC_last (ce_s, rhsc);
3408 tmp = *origrhs;
3409 VEC_pop (ce_s, rhsc);
3410 origvar = get_varinfo (origrhs->var);
3411 for (; origvar; origvar = origvar->next)
3412 {
3413 tmp.var = origvar->id;
3414 VEC_safe_push (ce_s, heap, rhsc, &tmp);
3415 }
3416 }
3417
3418 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3419 {
3420 struct constraint_expr *c2;
3421 unsigned int k;
3422
3423 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3424 process_constraint (new_constraint (*c, *c2));
3425 }
3426
3427 }
3428 break;
3429
3430 case tcc_binary:
3431 {
3432 /* For pointer arithmetic of the form
3433 PTR + CST, we can simply use PTR's
3434 constraint because pointer arithmetic is
3435 not allowed to go out of bounds. */
3436 if (handle_ptr_arith (lhsc, rhsop))
3437 break;
3438 }
3439 /* FALLTHRU */
3440
3441 /* Otherwise, walk each operand. Notice that we
3442 can't use the operand interface because we need
3443 to process expressions other than simple operands
3444 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3445 default:
3446 for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
3447 {
3448 tree op = TREE_OPERAND (rhsop, i);
3449 unsigned int j;
3450
3451 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3452 get_constraint_for (op, &rhsc, NULL);
3453 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3454 {
3455 struct constraint_expr *c2;
3456 while (VEC_length (ce_s, rhsc) > 0)
3457 {
3458 c2 = VEC_last (ce_s, rhsc);
3459 process_constraint (new_constraint (*c, *c2));
3460 VEC_pop (ce_s, rhsc);
3461 }
3462 }
3463 }
3464 }
3465 }
3466 }
3467 }
3468
3469 /* After promoting variables and computing aliasing we will
3470 need to re-scan most statements. FIXME: Try to minimize the
3471 number of statements re-scanned. It's not really necessary to
3472 re-scan *all* statements. */
3473 mark_stmt_modified (origt);
3474 VEC_free (ce_s, heap, rhsc);
3475 VEC_free (ce_s, heap, lhsc);
3476 }
3477
3478
3479 /* Find the first varinfo in the same variable as START that overlaps with
3480 OFFSET.
3481 Effectively, walk the chain of fields for the variable START to find the
3482 first field that overlaps with OFFSET.
3483 Return NULL if we can't find one. */
3484
3485 static varinfo_t
3486 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3487 {
3488 varinfo_t curr = start;
3489 while (curr)
3490 {
3491 /* We may not find a variable in the field list with the actual
3492 offset when we have glommed a structure to a variable.
3493 In that case, however, offset should still be within the size
3494 of the variable. */
3495 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3496 return curr;
3497 curr = curr->next;
3498 }
3499 return NULL;
3500 }
3501
3502
3503 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3504 offset. */
3505
3506 static void
3507 insert_into_field_list (varinfo_t base, varinfo_t field)
3508 {
3509 varinfo_t prev = base;
3510 varinfo_t curr = base->next;
3511
3512 if (curr == NULL)
3513 {
3514 prev->next = field;
3515 field->next = NULL;
3516 }
3517 else
3518 {
3519 while (curr)
3520 {
3521 if (field->offset <= curr->offset)
3522 break;
3523 prev = curr;
3524 curr = curr->next;
3525 }
3526 field->next = prev->next;
3527 prev->next = field;
3528 }
3529 }
3530
3531 /* qsort comparison function for two fieldoff structures PA and PB.  */
3532
3533 static int
3534 fieldoff_compare (const void *pa, const void *pb)
3535 {
3536 const fieldoff_s *foa = (const fieldoff_s *)pa;
3537 const fieldoff_s *fob = (const fieldoff_s *)pb;
3538 HOST_WIDE_INT foasize, fobsize;
3539
3540 if (foa->offset != fob->offset)
3541 return foa->offset - fob->offset;
3542
3543 foasize = TREE_INT_CST_LOW (DECL_SIZE (foa->field));
3544 fobsize = TREE_INT_CST_LOW (DECL_SIZE (fob->field));
3545 return foasize - fobsize;
3546 }
3547
3548 /* Sort a fieldstack according to the field offset and sizes. */
3549 void
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3550 {
3551 qsort (VEC_address (fieldoff_s, fieldstack),
3552 VEC_length (fieldoff_s, fieldstack),
3553 sizeof (fieldoff_s),
3554 fieldoff_compare);
3555 }
3556
3557 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3558 of TYPE onto fieldstack, recording their offsets along the way.
3559 OFFSET is used to keep track of the offset in this entire structure, rather
3560 than just the immediately containing structure. Returns the number
3561 of fields pushed.
3562 HAS_UNION is set to true if we find a union type as a field of
3563 TYPE. */
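
/* As an illustrative example (assuming 32-bit int), for
     struct inner { int x; int y; };
     struct outer { struct inner i; int z; };
   the recursion pushes three entries onto the fieldstack: x at offset 0,
   y at offset 32 and z at offset 64, flattening the nested structure into
   a single offset-annotated field list.  */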
3564
3565 int
3566 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3567 HOST_WIDE_INT offset, bool *has_union)
3568 {
3569 tree field;
3570 int count = 0;
3571
3572 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3573 if (TREE_CODE (field) == FIELD_DECL)
3574 {
3575 bool push = false;
3576 int pushed = 0;
3577
3578 if (has_union
3579 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3580 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3581 *has_union = true;
3582
3583 if (!var_can_have_subvars (field))
3584 push = true;
3585 else if (!(pushed = push_fields_onto_fieldstack
3586 (TREE_TYPE (field), fieldstack,
3587 offset + bitpos_of_field (field), has_union))
3588 && DECL_SIZE (field)
3589 && !integer_zerop (DECL_SIZE (field)))
3590 /* Empty structures may have actual size, like in C++.  So
3591 if we didn't push any subfields and the size is
3592 nonzero, push the field itself onto the stack.  */
3593 push = true;
3594
3595 if (push)
3596 {
3597 fieldoff_s *pair;
3598
3599 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3600 pair->field = field;
3601 pair->offset = offset + bitpos_of_field (field);
3602 count++;
3603 }
3604 else
3605 count += pushed;
3606 }
3607
3608 return count;
3609 }
3610
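/* Make a constraint from VI to the special ANYTHING variable, i.e.
   VI = &ANYTHING, so that VI is considered to point to anything.  */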
3611 static void
3612 make_constraint_to_anything (varinfo_t vi)
3613 {
3614 struct constraint_expr lhs, rhs;
3615
3616 lhs.var = vi->id;
3617 lhs.offset = 0;
3618 lhs.type = SCALAR;
3619
3620 rhs.var = anything_id;
3621 rhs.offset = 0;
3622 rhs.type = ADDRESSOF;
3623 process_constraint (new_constraint (lhs, rhs));
3624 }
3625
3626 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3627 if it is a varargs function. */
3628
3629 static unsigned int
3630 count_num_arguments (tree decl, bool *is_varargs)
3631 {
3632 unsigned int i = 0;
3633 tree t;
3634
3635 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3636 t;
3637 t = TREE_CHAIN (t))
3638 {
3639 if (TREE_VALUE (t) == void_type_node)
3640 break;
3641 i++;
3642 }
3643
3644 if (!t)
3645 *is_varargs = true;
3646 return i;
3647 }
3648
3649 /* Create the variable info structure for the function DECL, using NAME,
3650 and return the index of the variable we've created for the function. */
3651
3652 static unsigned int
3653 create_function_info_for (tree decl, const char *name)
3654 {
3655 unsigned int index = VEC_length (varinfo_t, varmap);
3656 varinfo_t vi;
3657 tree arg;
3658 unsigned int i;
3659 bool is_varargs = false;
3660
3661 /* Create the variable info. */
3662
3663 vi = new_var_info (decl, index, name, index);
3664 vi->decl = decl;
3665 vi->offset = 0;
3666 vi->has_union = 0;
3667 vi->size = 1;
3668 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
3669 insert_id_for_tree (vi->decl, index);
3670 VEC_safe_push (varinfo_t, heap, varmap, vi);
3671
3672 stats.total_vars++;
3673
3674 /* If it's varargs, we don't know how many arguments it has, so we
3675 can't do much.
3676 */
3677 if (is_varargs)
3678 {
3679 vi->fullsize = ~0;
3680 vi->size = ~0;
3681 vi->is_unknown_size_var = true;
3682 return index;
3683 }
3684
3685
3686 arg = DECL_ARGUMENTS (decl);
3687
3688 /* Set up variables for each argument. */
3689 for (i = 1; i < vi->fullsize; i++)
3690 {
3691 varinfo_t argvi;
3692 const char *newname;
3693 char *tempname;
3694 unsigned int newindex;
3695 tree argdecl = decl;
3696
3697 if (arg)
3698 argdecl = arg;
3699
3700 newindex = VEC_length (varinfo_t, varmap);
3701 asprintf (&tempname, "%s.arg%d", name, i-1);
3702 newname = ggc_strdup (tempname);
3703 free (tempname);
3704
3705 argvi = new_var_info (argdecl, newindex, newname, newindex);
3706 argvi->decl = argdecl;
3707 VEC_safe_push (varinfo_t, heap, varmap, argvi);
3708 argvi->offset = i;
3709 argvi->size = 1;
3710 argvi->fullsize = vi->fullsize;
3711 argvi->has_union = false;
3712 insert_into_field_list (vi, argvi);
3713 stats.total_vars ++;
3714 if (arg)
3715 {
3716 insert_id_for_tree (arg, newindex);
3717 arg = TREE_CHAIN (arg);
3718 }
3719 }
3720
3721 /* Create a variable for the return var. */
3722 if (DECL_RESULT (decl) != NULL
3723 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3724 {
3725 varinfo_t resultvi;
3726 const char *newname;
3727 char *tempname;
3728 unsigned int newindex;
3729 tree resultdecl = decl;
3730
3731 vi->fullsize ++;
3732
3733
3734 if (DECL_RESULT (decl))
3735 resultdecl = DECL_RESULT (decl);
3736
3737 newindex = VEC_length (varinfo_t, varmap);
3738 asprintf (&tempname, "%s.result", name);
3739 newname = ggc_strdup (tempname);
3740 free (tempname);
3741
3742 resultvi = new_var_info (resultdecl, newindex, newname, newindex);
3743 resultvi->decl = resultdecl;
3744 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
3745 resultvi->offset = i;
3746 resultvi->size = 1;
3747 resultvi->fullsize = vi->fullsize;
3748 resultvi->has_union = false;
3749 insert_into_field_list (vi, resultvi);
3750 stats.total_vars ++;
3751 if (DECL_RESULT (decl))
3752 insert_id_for_tree (DECL_RESULT (decl), newindex);
3753 }
3754 return index;
3755 }
3756
3757
3758 /* Return true if FIELDSTACK contains fields that overlap.
3759 FIELDSTACK is assumed to be sorted by offset. */
3760
3761 static bool
3762 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3763 {
3764 fieldoff_s *fo = NULL;
3765 unsigned int i;
3766 HOST_WIDE_INT lastoffset = -1;
3767
3768 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3769 {
3770 if (fo->offset == lastoffset)
3771 return true;
3772 lastoffset = fo->offset;
3773 }
3774 return false;
3775 }
3776
3777 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3778 This will also create any varinfo structures necessary for fields
3779 of DECL. */
3780
3781 static unsigned int
3782 create_variable_info_for (tree decl, const char *name)
3783 {
3784 unsigned int index = VEC_length (varinfo_t, varmap);
3785 varinfo_t vi;
3786 tree decltype = TREE_TYPE (decl);
3787 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
3788 bool notokay = false;
3789 bool hasunion;
3790 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
3791 VEC (fieldoff_s,heap) *fieldstack = NULL;
3792
3793 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
3794 return create_function_info_for (decl, name);
3795
3796 hasunion = TREE_CODE (decltype) == UNION_TYPE
3797 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
3798 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
3799 {
3800 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
3801 if (hasunion)
3802 {
3803 VEC_free (fieldoff_s, heap, fieldstack);
3804 notokay = true;
3805 }
3806 }
3807
3808
3809 /* If the variable doesn't have subvars, we may end up needing to
3810 sort the field list and create fake variables for all the
3811 fields. */
3812 vi = new_var_info (decl, index, name, index);
3813 vi->decl = decl;
3814 vi->offset = 0;
3815 vi->has_union = hasunion;
3816 if (!declsize
3817 || TREE_CODE (declsize) != INTEGER_CST
3818 || TREE_CODE (decltype) == UNION_TYPE
3819 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
3820 {
3821 vi->is_unknown_size_var = true;
3822 vi->fullsize = ~0;
3823 vi->size = ~0;
3824 }
3825 else
3826 {
3827 vi->fullsize = TREE_INT_CST_LOW (declsize);
3828 vi->size = vi->fullsize;
3829 }
3830
3831 insert_id_for_tree (vi->decl, index);
3832 VEC_safe_push (varinfo_t, heap, varmap, vi);
3833 if (is_global && (!flag_whole_program || !in_ipa_mode))
3834 make_constraint_to_anything (vi);
3835
3836 stats.total_vars++;
3837 if (use_field_sensitive
3838 && !notokay
3839 && !vi->is_unknown_size_var
3840 && var_can_have_subvars (decl))
3841 {
3842 unsigned int newindex = VEC_length (varinfo_t, varmap);
3843 fieldoff_s *fo = NULL;
3844 unsigned int i;
3845 tree field;
3846
3847 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3848 {
3849 if (!DECL_SIZE (fo->field)
3850 || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
3851 || fo->offset < 0)
3852 {
3853 notokay = true;
3854 break;
3855 }
3856 }
3857
3858 /* We can't sort the fields if one of them has a variable-sized type,
3859 which sets notokay = true. In that case, we are going to return
3860 without creating varinfos for the fields anyway, so sorting them
3861 would be wasted work. */
3862 if (!notokay)
3863 {
3864 sort_fieldstack (fieldstack);
3865 /* Due to some C++ FE issues, like PR 22488, we might end up with
3866 what appear to be overlapping fields even though they do not
3867 really overlap. Until the C++ FE is fixed, we simply disable
3868 field-sensitivity for these cases. */
3869 notokay = check_for_overlaps (fieldstack);
3870 }
3871
3872
3873 if (VEC_length (fieldoff_s, fieldstack) != 0)
3874 fo = VEC_index (fieldoff_s, fieldstack, 0);
3875
3876 if (fo == NULL || notokay)
3877 {
3878 vi->is_unknown_size_var = 1;
3879 vi->fullsize = ~0;
3880 vi->size = ~0;
3881 VEC_free (fieldoff_s, heap, fieldstack);
3882 return index;
3883 }
3884
3885 field = fo->field;
3886 vi->size = TREE_INT_CST_LOW (DECL_SIZE (field));
3887 vi->offset = fo->offset;
3888 for (i = 1; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3889 {
3890 varinfo_t newvi;
3891 const char *newname;
3892 char *tempname;
3893
3894 field = fo->field;
3895 newindex = VEC_length (varinfo_t, varmap);
3896 asprintf (&tempname, "%s.%s", vi->name, alias_get_name (field));
3897 newname = ggc_strdup (tempname);
3898 free (tempname);
3899 newvi = new_var_info (decl, newindex, newname, newindex);
3900 newvi->offset = fo->offset;
3901 newvi->size = TREE_INT_CST_LOW (DECL_SIZE (field));
3902 newvi->fullsize = vi->fullsize;
3903 insert_into_field_list (vi, newvi);
3904 VEC_safe_push (varinfo_t, heap, varmap, newvi);
3905 if (is_global && (!flag_whole_program || !in_ipa_mode))
3906 make_constraint_to_anything (newvi);
3907
3908 stats.total_vars++;
3909 }
3910 VEC_free (fieldoff_s, heap, fieldstack);
3911 }
3912 return index;
3913 }
3914
3915 /* Print out the points-to solution for VAR to FILE. */
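/* The output has the form "var = { member1 member2 ... }", listing the
   names of the variables in VAR's points-to solution. */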
3916
3917 void
3918 dump_solution_for_var (FILE *file, unsigned int var)
3919 {
3920 varinfo_t vi = get_varinfo (var);
3921 unsigned int i;
3922 bitmap_iterator bi;
3923
3924 fprintf (file, "%s = { ", vi->name);
3925 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (vi->node)->solution, 0, i, bi)
3926 {
3927 fprintf (file, "%s ", get_varinfo (i)->name);
3928 }
3929 fprintf (file, "}\n");
3930 }
3931
3932 /* Print the points-to solution for VAR to stdout. */
3933
3934 void
3935 debug_solution_for_var (unsigned int var)
3936 {
3937 dump_solution_for_var (stdout, var);
3938 }
3939
3940
3941 /* Create varinfo structures for all of the variables in the
3942 function for intraprocedural mode. */
3943
3944 static void
3945 intra_create_variable_infos (void)
3946 {
3947 tree t;
3948
3949 /* For each incoming argument arg, ARG = &ANYTHING. */
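/* Without interprocedural information we know nothing about our callers,
   so each field of each incoming argument is constrained to point to
   ANYTHING. */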
3950 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
3951 {
3952 struct constraint_expr lhs;
3953 varinfo_t p;
3954
3955 lhs.offset = 0;
3956 lhs.type = SCALAR;
3957 lhs.var = create_variable_info_for (t, alias_get_name (t));
3958
3959 for (p = get_varinfo (lhs.var); p; p = p->next)
3960 make_constraint_to_anything (p);
3961 }
3962
3963 }
3964
3965 /* Set bits in INTO corresponding to the variable uids in solution set
3966 FROM. */
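/* FROM is a bitmap over constraint-variable ids; INTO is filled with
   DECL_UIDs, so each member of the solution is translated back to its
   decl (or to the corresponding structure field tags). */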
3967
3968 static void
3969 set_uids_in_ptset (bitmap into, bitmap from)
3970 {
3971 unsigned int i;
3972 bitmap_iterator bi;
3973 subvar_t sv;
3974
3975 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
3976 {
3977 varinfo_t vi = get_varinfo (i);
3978
3979 /* The only artificial variables that are allowed in a may-alias
3980 set are heap variables. */
3981 if (vi->is_artificial_var && !vi->is_heap_var)
3982 continue;
3983
3984 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
3985 {
3986 /* Variables containing unions may need to be converted to
3987 their SFTs, because SFTs can represent unions while varinfos cannot. */
3988 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
3989 bitmap_set_bit (into, DECL_UID (sv->var));
3990 }
3991 else if (TREE_CODE (vi->decl) == VAR_DECL
3992 || TREE_CODE (vi->decl) == PARM_DECL)
3993 {
3994 if (var_can_have_subvars (vi->decl)
3995 && get_subvars_for_var (vi->decl))
3996 {
3997 /* If VI->DECL is an aggregate for which we created
3998 SFTs, add the SFT corresponding to VI->OFFSET. */
3999 tree sft = get_subvar_at (vi->decl, vi->offset);
4000 if (sft)
4001 bitmap_set_bit (into, DECL_UID (sft));
4002 }
4003 else
4004 {
4005 /* Otherwise, just add VI->DECL to the alias set. */
4006 bitmap_set_bit (into, DECL_UID (vi->decl));
4007 }
4008 }
4009 }
4010 }
4011
4012
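/* True once compute_points_to_sets has computed valid solutions; queries
   like find_what_p_points_to fail until then. Reset by
   delete_points_to_sets. */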
4013 static bool have_alias_info = false;
4014
4015 /* Given a pointer variable P, fill in its points-to set, or return
4016 false if we can't. */
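/* The result is recorded in P's SSA_NAME_PTR_INFO: artificial variables
   in the solution are translated into the pt_null, pt_anything and
   pt_global_mem flags, and the concrete members into the pt_vars bitmap
   of DECL_UIDs. */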
4017
4018 bool
4019 find_what_p_points_to (tree p)
4020 {
4021 unsigned int id = 0;
4022
4023 if (!have_alias_info)
4024 return false;
4025
4026 if (lookup_id_for_tree (p, &id))
4027 {
4028 varinfo_t vi = get_varinfo (id);
4029
4030 if (vi->is_artificial_var)
4031 return false;
4032
4033 /* See if this is a field of a structure rather than the whole variable. */
4034 if (vi->size != vi->fullsize)
4035 {
4036 /* Nothing currently asks about structure fields directly,
4037 but when something does, we will need code here to hand
4038 back the points-to set. */
4039 if (!var_can_have_subvars (vi->decl)
4040 || get_subvars_for_var (vi->decl) == NULL)
4041 return false;
4042 }
4043 else
4044 {
4045 struct ptr_info_def *pi = get_ptr_info (p);
4046 unsigned int i;
4047 bitmap_iterator bi;
4048
4049 /* This variable may have been collapsed; get the real
4050 variable. */
4051 vi = get_varinfo (vi->node);
4052
4053 /* Translate artificial variables into SSA_NAME_PTR_INFO
4054 attributes. */
4055 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4056 {
4057 varinfo_t vi = get_varinfo (i);
4058
4059 if (vi->is_artificial_var)
4060 {
4061 /* FIXME. READONLY should be handled better so that
4062 flow-insensitive aliasing can disregard writable
4063 aliases. */
4064 if (vi->id == nothing_id)
4065 pi->pt_null = 1;
4066 else if (vi->id == anything_id)
4067 pi->pt_anything = 1;
4068 else if (vi->id == readonly_id)
4069 pi->pt_anything = 1;
4070 else if (vi->id == integer_id)
4071 pi->pt_anything = 1;
4072 else if (vi->is_heap_var)
4073 pi->pt_global_mem = 1;
4074 }
4075 }
4076
4077 if (pi->pt_anything)
4078 return false;
4079
4080 if (!pi->pt_vars)
4081 pi->pt_vars = BITMAP_GGC_ALLOC ();
4082
4083 set_uids_in_ptset (pi->pt_vars, vi->solution);
4084
4085 if (bitmap_empty_p (pi->pt_vars))
4086 pi->pt_vars = NULL;
4087
4088 return true;
4089 }
4090 }
4091
4092 return false;
4093 }
4094
4095
4096
4097 /* Dump points-to information to OUTFILE. */
4098
4099 void
4100 dump_sa_points_to_info (FILE *outfile)
4101 {
4102 unsigned int i;
4103
4104 fprintf (outfile, "\nPoints-to sets\n\n");
4105
4106 if (dump_flags & TDF_STATS)
4107 {
4108 fprintf (outfile, "Stats:\n");
4109 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4110 fprintf (outfile, "Statically unified vars: %d\n",
4111 stats.unified_vars_static);
4112 fprintf (outfile, "Collapsed vars: %d\n", stats.collapsed_vars);
4113 fprintf (outfile, "Dynamically unified vars: %d\n",
4114 stats.unified_vars_dynamic);
4115 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4116 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4117 }
4118
4119 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4120 dump_solution_for_var (outfile, i);
4121 }
4122
4123
4124 /* Debug points-to information to stderr. */
4125
4126 void
4127 debug_sa_points_to_info (void)
4128 {
4129 dump_sa_points_to_info (stderr);
4130 }
4131
4132
4133 /* Initialize the always-existing constraint variables for NULL,
4134 ANYTHING, READONLY, and INTEGER. */
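/* They are created in this order and always occupy ids 0 through 3
   (nothing_id, anything_id, readonly_id and integer_id), and each is
   marked as an artificial, special variable. */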
4135
4136 static void
4137 init_base_vars (void)
4138 {
4139 struct constraint_expr lhs, rhs;
4140
4141 /* Create the NULL variable, used to represent that a variable points
4142 to NULL. */
4143 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4144 var_nothing = new_var_info (nothing_tree, 0, "NULL", 0);
4145 insert_id_for_tree (nothing_tree, 0);
4146 var_nothing->is_artificial_var = 1;
4147 var_nothing->offset = 0;
4148 var_nothing->size = ~0;
4149 var_nothing->fullsize = ~0;
4150 var_nothing->is_special_var = 1;
4151 nothing_id = 0;
4152 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4153
4154 /* Create the ANYTHING variable, used to represent that a variable
4155 points to some unknown piece of memory. */
4156 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4157 var_anything = new_var_info (anything_tree, 1, "ANYTHING", 1);
4158 insert_id_for_tree (anything_tree, 1);
4159 var_anything->is_artificial_var = 1;
4160 var_anything->size = ~0;
4161 var_anything->offset = 0;
4162 var_anything->next = NULL;
4163 var_anything->fullsize = ~0;
4164 var_anything->is_special_var = 1;
4165 anything_id = 1;
4166
4167 /* Anything points to anything. This makes deref constraints just
4168 work in the presence of linked lists and other p = *p type loops,
4169 by saying that *ANYTHING = ANYTHING. */
4170 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4171 lhs.type = SCALAR;
4172 lhs.var = anything_id;
4173 lhs.offset = 0;
4174 rhs.type = ADDRESSOF;
4175 rhs.var = anything_id;
4176 rhs.offset = 0;
4177 var_anything->address_taken = true;
4178
4179 /* This specifically does not use process_constraint because
4180 process_constraint ignores all anything = anything constraints, since all
4181 but this one are redundant. */
4182 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4183
4184 /* Create the READONLY variable, used to represent that a variable
4185 points to readonly memory. */
4186 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4187 var_readonly = new_var_info (readonly_tree, 2, "READONLY", 2);
4188 var_readonly->is_artificial_var = 1;
4189 var_readonly->offset = 0;
4190 var_readonly->size = ~0;
4191 var_readonly->fullsize = ~0;
4192 var_readonly->next = NULL;
4193 var_readonly->is_special_var = 1;
4194 insert_id_for_tree (readonly_tree, 2);
4195 readonly_id = 2;
4196 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4197
4198 /* Readonly memory points to anything, in order to make deref
4199 constraints easier. In reality, it points to anything the particular
4200 readonly variable can point to, but we don't track this
4201 separately. */
4202 lhs.type = SCALAR;
4203 lhs.var = readonly_id;
4204 lhs.offset = 0;
4205 rhs.type = ADDRESSOF;
4206 rhs.var = anything_id;
4207 rhs.offset = 0;
4208
4209 process_constraint (new_constraint (lhs, rhs));
4210
4211 /* Create the INTEGER variable, used to represent that a variable points
4212 to an INTEGER. */
4213 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4214 var_integer = new_var_info (integer_tree, 3, "INTEGER", 3);
4215 insert_id_for_tree (integer_tree, 3);
4216 var_integer->is_artificial_var = 1;
4217 var_integer->size = ~0;
4218 var_integer->fullsize = ~0;
4219 var_integer->offset = 0;
4220 var_integer->next = NULL;
4221 var_integer->is_special_var = 1;
4222 integer_id = 3;
4223 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4224
4225 /* *INTEGER = ANYTHING, because we don't know where a dereference of a
4226 random integer will point. */
4227 lhs.type = SCALAR;
4228 lhs.var = integer_id;
4229 lhs.offset = 0;
4230 rhs.type = ADDRESSOF;
4231 rhs.var = anything_id;
4232 rhs.offset = 0;
4233 process_constraint (new_constraint (lhs, rhs));
4234 }
4235
4236 /* Return true if we actually need to solve the constraint graph in order to
4237 get our points-to sets. This is false when, for example, no address is
4238 taken of anything other than the special variables, or when every non-empty
4239 points-to set already contains the ANYTHING variable and the empty sets
4240 have no predecessor edges. */
4241
4242 static bool
4243 need_to_solve (void)
4244 {
4245 int i;
4246 varinfo_t v;
4247 bool found_address_taken = false;
4248 bool found_non_anything = false;
4249
4250 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4251 {
4252 if (v->is_special_var)
4253 continue;
4254
4255 if (v->address_taken)
4256 found_address_taken = true;
4257
4258 if (v->solution
4259 && !bitmap_empty_p (v->solution)
4260 && !bitmap_bit_p (v->solution, anything_id))
4261 found_non_anything = true;
4262 else if (bitmap_empty_p (v->solution)
4263 && (VEC_length (constraint_edge_t, graph->preds[v->id]) != 0
4264 || (graph->zero_weight_preds[v->id] && !bitmap_empty_p (graph->zero_weight_preds[v->id]))))
4265 found_non_anything = true;
4266
4267 if (found_address_taken && found_non_anything)
4268 return true;
4269 }
4270
4271 return false;
4272 }
4273
4274 /* Initialize things necessary to perform PTA. */
4275
4276 static void
4277 init_alias_vars (void)
4278 {
4279 bitmap_obstack_initialize (&ptabitmap_obstack);
4280 bitmap_obstack_initialize (&predbitmap_obstack);
4281
4282 constraint_pool = create_alloc_pool ("Constraint pool",
4283 sizeof (struct constraint), 30);
4284 variable_info_pool = create_alloc_pool ("Variable info pool",
4285 sizeof (struct variable_info), 30);
4286 constraint_edge_pool = create_alloc_pool ("Constraint edges",
4287 sizeof (struct constraint_edge), 30);
4288
4289 constraints = VEC_alloc (constraint_t, heap, 8);
4290 varmap = VEC_alloc (varinfo_t, heap, 8);
4291 id_for_tree = htab_create (10, tree_id_hash, tree_id_eq, free);
4292 memset (&stats, 0, sizeof (stats));
4293
4294 init_base_vars ();
4295 }
4296
4297
4298 /* Create points-to sets for the current function. See the comments
4299 at the start of the file for an algorithmic overview. */
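/* The steps are: create the base constraint variables and a varinfo for
   each variable in the function, walk every PHI node and statement to
   generate constraints, build the constraint graph, and then, if
   solving is needed, collapse cycles, substitute equivalent variables
   and run the solver. */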
4300
4301 void
4302 compute_points_to_sets (struct alias_info *ai)
4303 {
4304 basic_block bb;
4305
4306 timevar_push (TV_TREE_PTA);
4307
4308 init_alias_vars ();
4309
4310 intra_create_variable_infos ();
4311
4312 /* Now walk all statements and derive aliases. */
4313 FOR_EACH_BB (bb)
4314 {
4315 block_stmt_iterator bsi;
4316 tree phi;
4317
4318 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4319 {
4320 if (is_gimple_reg (PHI_RESULT (phi)))
4321 {
4322 find_func_aliases (phi);
4323 /* Update various related attributes like escaped
4324 addresses, pointer dereferences for loads and stores.
4325 This is used when creating name tags and alias
4326 sets. */
4327 update_alias_info (phi, ai);
4328 }
4329 }
4330
4331 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4332 {
4333 tree stmt = bsi_stmt (bsi);
4334 find_func_aliases (stmt);
4335 /* Update various related attributes like escaped
4336 addresses, pointer dereferences for loads and stores.
4337 This is used when creating name tags and alias
4338 sets. */
4339 update_alias_info (stmt, ai);
4340 }
4341 }
4342
4343 build_constraint_graph ();
4344
4345 if (dump_file)
4346 {
4347 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4348 dump_constraints (dump_file);
4349 }
4350
4351 if (need_to_solve ())
4352 {
4353 if (dump_file)
4354 fprintf (dump_file,
4355 "\nCollapsing static cycles and doing variable "
4356 "substitution:\n");
4357
4358 find_and_collapse_graph_cycles (graph, false);
4359 perform_var_substitution (graph);
4360
4361 if (dump_file)
4362 fprintf (dump_file, "\nSolving graph:\n");
4363
4364 solve_graph (graph);
4365 }
4366
4367 if (dump_file)
4368 dump_sa_points_to_info (dump_file);
4369
4370 have_alias_info = true;
4371
4372 timevar_pop (TV_TREE_PTA);
4373 }
4374
4375
4376 /* Delete created points-to sets. */
4377
4378 void
4379 delete_points_to_sets (void)
4380 {
4381 varinfo_t v;
4382 int i;
4383
4384 htab_delete (id_for_tree);
4385 bitmap_obstack_release (&ptabitmap_obstack);
4386 bitmap_obstack_release (&predbitmap_obstack);
4387 VEC_free (constraint_t, heap, constraints);
4388
4389 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4390 {
4391 VEC_free (constraint_edge_t, heap, graph->succs[i]);
4392 VEC_free (constraint_edge_t, heap, graph->preds[i]);
4393 VEC_free (constraint_t, heap, v->complex);
4394 }
4395 free (graph->zero_weight_preds);
4396 free (graph->zero_weight_succs);
4397 free (graph->succs);
4398 free (graph->preds);
4399 free (graph);
4400
4401 VEC_free (varinfo_t, heap, varmap);
4402 free_alloc_pool (variable_info_pool);
4403 free_alloc_pool (constraint_pool);
4404 free_alloc_pool (constraint_edge_pool);
4405
4406 have_alias_info = false;
4407 }
4408
4409 /* Return true if we should execute IPA PTA. */
4410 static bool
4411 gate_ipa_pta (void)
4412 {
4413 return (flag_unit_at_a_time != 0
4414 /* Don't bother doing anything if the program has errors. */
4415 && !(errorcount || sorrycount));
4416 }
4417
4418 /* Execute the driver for IPA PTA. */
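/* This walks the call graph twice: the first pass creates a function
   info (with argument and result variables) for every unanalyzed node
   and every analyzed master clone, and constrains externally visible
   functions to point to anything; the second pass generates constraints
   from the bodies of the analyzed master clones. The combined constraint
   graph is then built and solved once for the whole program. */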
4419 static void
4420 ipa_pta_execute (void)
4421 {
4422 struct cgraph_node *node;
4423 in_ipa_mode = 1;
4424
4425 init_alias_vars ();
4426
4427 for (node = cgraph_nodes; node; node = node->next)
4428 {
4429 if (!node->analyzed || cgraph_is_master_clone (node))
4430 {
4431 unsigned int varid;
4432
4433 varid = create_function_info_for (node->decl,
4434 cgraph_node_name (node));
4435 if (node->local.externally_visible)
4436 {
4437 varinfo_t fi = get_varinfo (varid);
4438 for (; fi; fi = fi->next)
4439 make_constraint_to_anything (fi);
4440 }
4441 }
4442 }
4443 for (node = cgraph_nodes; node; node = node->next)
4444 {
4445 if (node->analyzed && cgraph_is_master_clone (node))
4446 {
4447 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
4448 basic_block bb;
4449 tree old_func_decl = current_function_decl;
4450 if (dump_file)
4451 fprintf (dump_file,
4452 "Generating constraints for %s\n",
4453 cgraph_node_name (node));
4454 push_cfun (cfun);
4455 current_function_decl = node->decl;
4456
4457 FOR_EACH_BB_FN (bb, cfun)
4458 {
4459 block_stmt_iterator bsi;
4460 tree phi;
4461
4462 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4463 {
4464 if (is_gimple_reg (PHI_RESULT (phi)))
4465 {
4466 find_func_aliases (phi);
4467 }
4468 }
4469
4470 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4471 {
4472 tree stmt = bsi_stmt (bsi);
4473 find_func_aliases (stmt);
4474 }
4475 }
4476 current_function_decl = old_func_decl;
4477 pop_cfun ();
4478 }
4479 else
4480 {
4481 /* Make it point to anything. */
4482 }
4483 }
4484
4485 build_constraint_graph ();
4486
4487 if (dump_file)
4488 {
4489 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4490 dump_constraints (dump_file);
4491 }
4492
4493 if (need_to_solve ())
4494 {
4495 if (dump_file)
4496 fprintf (dump_file,
4497 "\nCollapsing static cycles and doing variable "
4498 "substitution:\n");
4499
4500 find_and_collapse_graph_cycles (graph, false);
4501 perform_var_substitution (graph);
4502
4503 if (dump_file)
4504 fprintf (dump_file, "\nSolving graph:\n");
4505
4506 solve_graph (graph);
4507 }
4508
4509 if (dump_file)
4510 dump_sa_points_to_info (dump_file);
4511 in_ipa_mode = 0;
4512 }
4513
4514 struct tree_opt_pass pass_ipa_pta =
4515 {
4516 "pta", /* name */
4517 gate_ipa_pta, /* gate */
4518 ipa_pta_execute, /* execute */
4519 NULL, /* sub */
4520 NULL, /* next */
4521 0, /* static_pass_number */
4522 TV_IPA_PTA, /* tv_id */
4523 0, /* properties_required */
4524 0, /* properties_provided */
4525 0, /* properties_destroyed */
4526 0, /* todo_flags_start */
4527 0, /* todo_flags_finish */
4528 0 /* letter */
4529 };
4530
4531 /* Initialize the heapvar-for-statement mapping. */
4532 void
4533 init_alias_heapvars (void)
4534 {
4535 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq, NULL);
4536 }
4537
4538 void
4539 delete_alias_heapvars (void)
4540 {
4541 htab_delete (heapvar_for_stmt);
4542 }
4543
4544
4545 #include "gt-tree-ssa-structalias.h"