gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "params.h"
52 #include "tree-ssa-structalias.h"
53 #include "cgraph.h"
54
55 /* The idea behind this analyzer is to generate set constraints from the
56 program, then solve the resulting constraints in order to generate the
57 points-to sets.
58
59 Set constraints are a way of modeling program analysis problems that
60 involve sets. They consist of an inclusion constraint language,
61 describing the variables (each variable is a set) and operations that
62 are performed on the variables, and a set of rules that derive facts
63 from these operations. To solve a system of set constraints, you derive
64 all possible facts under the rules, which gives you the correct sets
65 as a consequence.
66
67 See "Efficient Field-sensitive pointer analysis for C" by "David
68 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
69 http://citeseer.ist.psu.edu/pearce04efficient.html
70
71 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
72 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
73 http://citeseer.ist.psu.edu/heintze01ultrafast.html
74
75 There are three types of constraint expressions, DEREF, ADDRESSOF, and
76 SCALAR. Each constraint expression consists of a constraint type,
77 a variable, and an offset.
78
79 SCALAR is a constraint expression type used to represent x, whether
80 it appears on the LHS or the RHS of a statement.
81 DEREF is a constraint expression type used to represent *x, whether
82 it appears on the LHS or the RHS of a statement.
83 ADDRESSOF is a constraint expression used to represent &x, whether
84 it appears on the LHS or the RHS of a statement.
85
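For instance (a sketch, using hypothetical pointer variables p and q),
simple statements map onto these expression types as follows:

  p = q;     SCALAR    = SCALAR
  p = *q;    SCALAR    = DEREF
  *p = q;    DEREF     = SCALAR
  p = &q;    SCALAR    = ADDRESSOF
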
86 Each pointer variable in the program is assigned an integer id, and
87 each field of a structure variable is assigned an integer id as well.
88
89 Structure variables are linked to their list of fields through a "next
90 field" in each variable that points to the next field in offset
91 order.
92 Each variable for a structure field has
93
94 1. "size", that tells the size in bits of that field.
95 2. "fullsize, that tells the size in bits of the entire structure.
96 3. "offset", that tells the offset in bits from the beginning of the
97 structure to this field.
98
99 Thus,
100 struct f
101 {
102 int a;
103 int b;
104 } foo;
105 int *bar;
106
107 looks like
108
109 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
110 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
111 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
112
113
114 In order to solve the system of set constraints, the following is
115 done:
116
117 1. Each constraint variable x has a solution set associated with it,
118 Sol(x).
119
120 2. Constraints are separated into direct, copy, and complex.
121 Direct constraints are ADDRESSOF constraints that require no extra
122 processing, such as P = &Q
123 Copy constraints are those of the form P = Q.
124 Complex constraints are all the constraints involving dereferences.
125
126 3. All direct constraints of the form P = &Q are processed, such
127 that Q is added to Sol(P)
128
129 4. All complex constraints for a given constraint variable are stored in a
130 linked list attached to that variable's node.
131
132 5. A directed graph is built out of the copy constraints. Each
133 constraint variable is a node in the graph, and an edge from
134 Q to P is added for each copy constraint of the form P = Q
135
136 6. The graph is then walked, and solution sets are
137 propagated along the copy edges, such that an edge from Q to P
138 causes Sol(P) <- Sol(P) union Sol(Q).
139
140 7. As we visit each node, all complex constraints associated with
141 that node are processed by adding appropriate copy edges to the graph, or the
142 appropriate variables to the solution set.
143
144 8. The process of walking the graph is iterated until no solution
145 sets change.
146
147 Prior to walking the graph in steps 6 and 7, we perform static
148 cycle elimination on the constraint graph, as well
149 as off-line variable substitution.
150
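As a small worked example of these steps (a sketch; the variables are
hypothetical), the fragment

  p = &a;
  q = p;
  *q = &b;

generates the constraints p = &a (direct), q = p (copy), and
*q = &b (complex).  Step 3 places a in Sol(p).  Step 5 adds a copy
edge from p to q, so propagation in step 6 makes Sol(q) = {a}.
Processing the complex constraint in step 7 then adds b to Sol(j) for
every j in Sol(q), i.e. Sol(a) = {b}, and step 8 terminates once no
solution set changes.
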
151 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
152 on and turned into anything), but isn't. You can just see what offset
153 inside the pointed-to struct it's going to access.
154
155 TODO: Constant bounded arrays can be handled as if they were structs of the
156 same number of elements.
157
158 TODO: Modeling heap and incoming pointers becomes much better if we
159 add fields to them as we discover them, which we could do.
160
161 TODO: We could handle unions, but to be honest, it's probably not
162 worth the pain or slowdown. */
163
164 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
165 htab_t heapvar_for_stmt;
166 static bool use_field_sensitive = true;
167 static int in_ipa_mode = 0;
168 static bitmap_obstack predbitmap_obstack;
169 static bitmap_obstack ptabitmap_obstack;
170 static bitmap_obstack iteration_obstack;
171
172 static unsigned int create_variable_info_for (tree, const char *);
173 static void build_constraint_graph (void);
174
175 DEF_VEC_P(constraint_t);
176 DEF_VEC_ALLOC_P(constraint_t,heap);
177
178 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
179 if (a) \
180 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
181
182 static struct constraint_stats
183 {
184 unsigned int total_vars;
185 unsigned int collapsed_vars;
186 unsigned int unified_vars_static;
187 unsigned int unified_vars_dynamic;
188 unsigned int iterations;
189 unsigned int num_edges;
190 } stats;
191
192 struct variable_info
193 {
194 /* ID of this variable */
195 unsigned int id;
196
197 /* Name of this variable */
198 const char *name;
199
200 /* Tree that this variable is associated with. */
201 tree decl;
202
203 /* Offset of this variable, in bits, from the base variable */
204 unsigned HOST_WIDE_INT offset;
205
206 /* Size of the variable, in bits. */
207 unsigned HOST_WIDE_INT size;
208
209 /* Full size of the base variable, in bits. */
210 unsigned HOST_WIDE_INT fullsize;
211
212 /* A link to the variable for the next field in this structure. */
213 struct variable_info *next;
214
215 /* Node in the graph that represents the constraints and points-to
216 solution for the variable. */
217 unsigned int node;
218
219 /* True if the address of this variable is taken. Needed for
220 variable substitution. */
221 unsigned int address_taken:1;
222
223 /* True if this variable is the target of a dereference. Needed for
224 variable substitution. */
225 unsigned int indirect_target:1;
226
227 /* True if this is a variable created by the constraint analysis, such as
228 heap variables and constraints we had to break up. */
229 unsigned int is_artificial_var:1;
230
231 /* True if this is a special variable whose solution set should not be
232 changed. */
233 unsigned int is_special_var:1;
234
235 /* True for variables whose size is not known or variable. */
236 unsigned int is_unknown_size_var:1;
237
238 /* True for variables that have unions somewhere in them. */
239 unsigned int has_union:1;
240
241 /* True if this is a heap variable. */
242 unsigned int is_heap_var:1;
243
244 /* Points-to set for this variable. */
245 bitmap solution;
246
247 /* Variable ids represented by this node. */
248 bitmap variables;
249
250 /* Vector of complex constraints for this node. Complex
251 constraints are those involving dereferences. */
252 VEC(constraint_t,heap) *complex;
253
254 /* Variable id this was collapsed to due to type unsafety.
255 This should be unused completely after build_constraint_graph, or
256 something is broken. */
257 struct variable_info *collapsed_to;
258 };
259 typedef struct variable_info *varinfo_t;
260
261 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
262
263 /* Pool of variable info structures. */
264 static alloc_pool variable_info_pool;
265
266 DEF_VEC_P(varinfo_t);
267
268 DEF_VEC_ALLOC_P(varinfo_t, heap);
269
270 /* Table of variable info structures for constraint variables. Indexed directly
271 by variable info id. */
272 static VEC(varinfo_t,heap) *varmap;
273
274 /* Return the varmap element N */
275
276 static inline varinfo_t
277 get_varinfo (unsigned int n)
278 {
279 return VEC_index(varinfo_t, varmap, n);
280 }
281
282 /* Return the varmap element N, following the collapsed_to link. */
283
284 static inline varinfo_t
285 get_varinfo_fc (unsigned int n)
286 {
287 varinfo_t v = VEC_index(varinfo_t, varmap, n);
288
289 if (v->collapsed_to)
290 return v->collapsed_to;
291 return v;
292 }
293
294 /* Variable that represents the unknown pointer. */
295 static varinfo_t var_anything;
296 static tree anything_tree;
297 static unsigned int anything_id;
298
299 /* Variable that represents the NULL pointer. */
300 static varinfo_t var_nothing;
301 static tree nothing_tree;
302 static unsigned int nothing_id;
303
304 /* Variable that represents read only memory. */
305 static varinfo_t var_readonly;
306 static tree readonly_tree;
307 static unsigned int readonly_id;
308
309 /* Variable that represents integers. This is used when people do things
310 like &0->a.b. */
311 static varinfo_t var_integer;
312 static tree integer_tree;
313 static unsigned int integer_id;
314
315
316 /* Lookup a heap var for FROM, and return it if we find one. */
317
318 static tree
319 heapvar_lookup (tree from)
320 {
321 struct tree_map *h, in;
322 in.from = from;
323
324 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
325 if (h)
326 return h->to;
327 return NULL_TREE;
328 }
329
330 /* Insert a mapping FROM->TO in the heap var for statement
331 hashtable. */
332
333 static void
334 heapvar_insert (tree from, tree to)
335 {
336 struct tree_map *h;
337 void **loc;
338
339 h = ggc_alloc (sizeof (struct tree_map));
340 h->hash = htab_hash_pointer (from);
341 h->from = from;
342 h->to = to;
343 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
344 *(struct tree_map **) loc = h;
345 }
346
347 /* Return a new variable info structure for a variable named NAME,
348 using constraint graph node NODE. */
349
350 static varinfo_t
351 new_var_info (tree t, unsigned int id, const char *name, unsigned int node)
352 {
353 varinfo_t ret = pool_alloc (variable_info_pool);
354
355 ret->id = id;
356 ret->name = name;
357 ret->decl = t;
358 ret->node = node;
359 ret->address_taken = false;
360 ret->indirect_target = false;
361 ret->is_artificial_var = false;
362 ret->is_heap_var = false;
363 ret->is_special_var = false;
364 ret->is_unknown_size_var = false;
365 ret->has_union = false;
366 ret->solution = BITMAP_ALLOC (&ptabitmap_obstack);
367 ret->variables = BITMAP_ALLOC (&ptabitmap_obstack);
368 ret->complex = NULL;
369 ret->next = NULL;
370 ret->collapsed_to = NULL;
371 return ret;
372 }
373
374 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
375
376 /* An expression that appears in a constraint. */
377
378 struct constraint_expr
379 {
380 /* Constraint type. */
381 constraint_expr_type type;
382
383 /* Variable we are referring to in the constraint. */
384 unsigned int var;
385
386 /* Offset, in bits, of this constraint from the beginning of
387 variables it ends up referring to.
388
389 IOW, in a deref constraint, we would deref, get the result set,
390 then add OFFSET to each member. */
391 unsigned HOST_WIDE_INT offset;
392 };
393
394 typedef struct constraint_expr ce_s;
395 DEF_VEC_O(ce_s);
396 DEF_VEC_ALLOC_O(ce_s, heap);
397 static void get_constraint_for (tree, VEC(ce_s, heap) **);
398 static void do_deref (VEC (ce_s, heap) **);
399
400 /* Our set constraints are made up of two constraint expressions, one
401 LHS, and one RHS.
402
403 As described in the introduction, our set constraints each represent an
404 operation between set valued variables.
405 */
406 struct constraint
407 {
408 struct constraint_expr lhs;
409 struct constraint_expr rhs;
410 };
411
412 /* List of constraints that we use to build the constraint graph from. */
413
414 static VEC(constraint_t,heap) *constraints;
415 static alloc_pool constraint_pool;
416
417 /* An edge in the weighted constraint graph. The edges are weighted,
418 with a bit set in weights meaning there is an edge with that
419 weight.
420 We don't keep the src in the edge, because we always know what it
421 is. */
422
423 struct constraint_edge
424 {
425 unsigned int dest;
426 bitmap weights;
427 };
428
429 typedef struct constraint_edge *constraint_edge_t;
430 static alloc_pool constraint_edge_pool;
431
432 /* Return a new constraint edge from SRC to DEST. */
433
434 static constraint_edge_t
435 new_constraint_edge (unsigned int dest)
436 {
437 constraint_edge_t ret = pool_alloc (constraint_edge_pool);
438 ret->dest = dest;
439 ret->weights = NULL;
440 return ret;
441 }
442
443 DEF_VEC_P(constraint_edge_t);
444 DEF_VEC_ALLOC_P(constraint_edge_t,heap);
445
446
447 /* The constraint graph is represented internally in two different
448 ways. The overwhelming majority of edges in the constraint graph
449 are zero weight edges, and thus, using a vector of constraint_edge_t
450 is a waste of time and memory, since they have no weights. We
451 simply use a bitmap to store the preds and succs for each node.
452 The weighted edges are stored as a set of adjacency vectors, one
453 per variable. succs[x] is the vector of successors for variable x,
454 and preds[x] is the vector of predecessors for variable x. IOW,
455 all edges are "forward" edges, which is not like our CFG. So
456 remember that preds[x]->src == x, and succs[x]->src == x. */
457
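/* For instance (a sketch): a copy constraint p = q with offset 0 is
   recorded by setting bit q in zero_weight_preds[p] and bit p in
   zero_weight_succs[q]; a constraint p = q + 32 instead gets a
   constraint_edge with dest q in preds[p] whose weights bitmap has
   bit 32 set, mirrored by an edge with dest p in succs[q] that shares
   the same weights bitmap.  */
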
458 struct constraint_graph
459 {
460 bitmap *zero_weight_succs;
461 bitmap *zero_weight_preds;
462 VEC(constraint_edge_t,heap) **succs;
463 VEC(constraint_edge_t,heap) **preds;
464 };
465
466 typedef struct constraint_graph *constraint_graph_t;
467
468 static constraint_graph_t graph;
469
470 /* Create a new constraint consisting of LHS and RHS expressions. */
471
472 static constraint_t
473 new_constraint (const struct constraint_expr lhs,
474 const struct constraint_expr rhs)
475 {
476 constraint_t ret = pool_alloc (constraint_pool);
477 ret->lhs = lhs;
478 ret->rhs = rhs;
479 return ret;
480 }
481
482 /* Print out constraint C to FILE. */
483
484 void
485 dump_constraint (FILE *file, constraint_t c)
486 {
487 if (c->lhs.type == ADDRESSOF)
488 fprintf (file, "&");
489 else if (c->lhs.type == DEREF)
490 fprintf (file, "*");
491 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
492 if (c->lhs.offset != 0)
493 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
494 fprintf (file, " = ");
495 if (c->rhs.type == ADDRESSOF)
496 fprintf (file, "&");
497 else if (c->rhs.type == DEREF)
498 fprintf (file, "*");
499 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
500 if (c->rhs.offset != 0)
501 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
502 fprintf (file, "\n");
503 }
504
505 /* Print out constraint C to stderr. */
506
507 void
508 debug_constraint (constraint_t c)
509 {
510 dump_constraint (stderr, c);
511 }
512
513 /* Print out all constraints to FILE */
514
515 void
516 dump_constraints (FILE *file)
517 {
518 int i;
519 constraint_t c;
520 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
521 dump_constraint (file, c);
522 }
523
524 /* Print out all constraints to stderr. */
525
526 void
527 debug_constraints (void)
528 {
529 dump_constraints (stderr);
530 }
531
532 /* SOLVER FUNCTIONS
533
534 The solver is a simple worklist solver, that works on the following
535 algorithm:
536
537 sbitmap changed_nodes = all ones;
538 changed_count = number of nodes;
539 For each node that was already collapsed:
540 changed_count--;
541
542 while (changed_count > 0)
543 {
544 compute topological ordering for constraint graph
545
546 find and collapse cycles in the constraint graph (updating
547 changed if necessary)
548
549 for each node (n) in the graph in topological order:
550 changed_count--;
551
552 Process each complex constraint associated with the node,
553 updating changed if necessary.
554
555 For each outgoing edge from n, propagate the solution from n to
556 the destination of the edge, updating changed as necessary.
557
558 } */
559
560 /* Return true if two constraint expressions A and B are equal. */
561
562 static bool
563 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
564 {
565 return a.type == b.type && a.var == b.var && a.offset == b.offset;
566 }
567
568 /* Return true if constraint expression A is less than constraint expression
569 B. This is just arbitrary, but consistent, in order to give them an
570 ordering. */
571
572 static bool
573 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
574 {
575 if (a.type == b.type)
576 {
577 if (a.var == b.var)
578 return a.offset < b.offset;
579 else
580 return a.var < b.var;
581 }
582 else
583 return a.type < b.type;
584 }
585
586 /* Return true if constraint A is less than constraint B. This is just
587 arbitrary, but consistent, in order to give them an ordering. */
588
589 static bool
590 constraint_less (const constraint_t a, const constraint_t b)
591 {
592 if (constraint_expr_less (a->lhs, b->lhs))
593 return true;
594 else if (constraint_expr_less (b->lhs, a->lhs))
595 return false;
596 else
597 return constraint_expr_less (a->rhs, b->rhs);
598 }
599
600 /* Return true if two constraints A and B are equal. */
601
602 static bool
603 constraint_equal (struct constraint a, struct constraint b)
604 {
605 return constraint_expr_equal (a.lhs, b.lhs)
606 && constraint_expr_equal (a.rhs, b.rhs);
607 }
608
609
610 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
611
612 static constraint_t
613 constraint_vec_find (VEC(constraint_t,heap) *vec,
614 struct constraint lookfor)
615 {
616 unsigned int place;
617 constraint_t found;
618
619 if (vec == NULL)
620 return NULL;
621
622 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
623 if (place >= VEC_length (constraint_t, vec))
624 return NULL;
625 found = VEC_index (constraint_t, vec, place);
626 if (!constraint_equal (*found, lookfor))
627 return NULL;
628 return found;
629 }
630
631 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
632
633 static void
634 constraint_set_union (VEC(constraint_t,heap) **to,
635 VEC(constraint_t,heap) **from)
636 {
637 int i;
638 constraint_t c;
639
640 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
641 {
642 if (constraint_vec_find (*to, *c) == NULL)
643 {
644 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
645 constraint_less);
646 VEC_safe_insert (constraint_t, heap, *to, place, c);
647 }
648 }
649 }
650
651 /* Take a solution set SET, add OFFSET to each member of the set, and
652 overwrite SET with the result when done. */
653
654 static void
655 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
656 {
657 bitmap result = BITMAP_ALLOC (&iteration_obstack);
658 unsigned int i;
659 bitmap_iterator bi;
660
661 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
662 {
663 /* If this is a properly sized variable, only add offset if it's
664 less than end. Otherwise, it is globbed to a single
665 variable. */
666
667 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
668 {
669 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
670 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
671 if (!v)
672 continue;
673 bitmap_set_bit (result, v->id);
674 }
675 else if (get_varinfo (i)->is_artificial_var
676 || get_varinfo (i)->has_union
677 || get_varinfo (i)->is_unknown_size_var)
678 {
679 bitmap_set_bit (result, i);
680 }
681 }
682
683 bitmap_copy (set, result);
684 BITMAP_FREE (result);
685 }
686
687 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
688 process. */
689
690 static bool
691 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
692 {
693 if (inc == 0)
694 return bitmap_ior_into (to, from);
695 else
696 {
697 bitmap tmp;
698 bool res;
699
700 tmp = BITMAP_ALLOC (&iteration_obstack);
701 bitmap_copy (tmp, from);
702 solution_set_add (tmp, inc);
703 res = bitmap_ior_into (to, tmp);
704 BITMAP_FREE (tmp);
705 return res;
706 }
707 }
708
709 /* Insert constraint C into the list of complex constraints for VAR. */
710
711 static void
712 insert_into_complex (unsigned int var, constraint_t c)
713 {
714 varinfo_t vi = get_varinfo (var);
715 unsigned int place = VEC_lower_bound (constraint_t, vi->complex, c,
716 constraint_less);
717 VEC_safe_insert (constraint_t, heap, vi->complex, place, c);
718 }
719
720
721 /* Compare two constraint edges A and B, return true if they are equal. */
722
723 static bool
724 constraint_edge_equal (struct constraint_edge a, struct constraint_edge b)
725 {
726 return a.dest == b.dest;
727 }
728
729 /* Compare two constraint edges, return true if A is less than B */
730
731 static bool
732 constraint_edge_less (const constraint_edge_t a, const constraint_edge_t b)
733 {
734 if (a->dest < b->dest)
735 return true;
736 return false;
737 }
738
739 /* Find the constraint edge that matches LOOKFOR, in VEC.
740 Return the edge, if found, NULL otherwise. */
741
742 static constraint_edge_t
743 constraint_edge_vec_find (VEC(constraint_edge_t,heap) *vec,
744 struct constraint_edge lookfor)
745 {
746 unsigned int place;
747 constraint_edge_t edge = NULL;
748
749 place = VEC_lower_bound (constraint_edge_t, vec, &lookfor,
750 constraint_edge_less);
751 if (place >= VEC_length (constraint_edge_t, vec))
752 return NULL;
753 edge = VEC_index (constraint_edge_t, vec, place);
754 if (!constraint_edge_equal (*edge, lookfor))
755 return NULL;
756 return edge;
757 }
758
759 /* Condense two variable nodes into a single variable node, by moving
760 all associated info from SRC to TO. */
761
762 static void
763 condense_varmap_nodes (unsigned int to, unsigned int src)
764 {
765 varinfo_t tovi = get_varinfo (to);
766 varinfo_t srcvi = get_varinfo (src);
767 unsigned int i;
768 constraint_t c;
769 bitmap_iterator bi;
770
771 /* the src node, and all its variables, are now the to node. */
772 srcvi->node = to;
773 EXECUTE_IF_SET_IN_BITMAP (srcvi->variables, 0, i, bi)
774 get_varinfo (i)->node = to;
775
776 /* Merge the src node variables and the to node variables. */
777 bitmap_set_bit (tovi->variables, src);
778 bitmap_ior_into (tovi->variables, srcvi->variables);
779 bitmap_clear (srcvi->variables);
780
781 /* Move all complex constraints from src node into to node */
782 for (i = 0; VEC_iterate (constraint_t, srcvi->complex, i, c); i++)
783 {
784 /* In complex constraints for node src, we may have either
785 a = *src, or *src = a.  */
786
787 if (c->rhs.type == DEREF)
788 c->rhs.var = to;
789 else
790 c->lhs.var = to;
791 }
792 constraint_set_union (&tovi->complex, &srcvi->complex);
793 VEC_free (constraint_t, heap, srcvi->complex);
794 srcvi->complex = NULL;
795 }
796
797 /* Erase an edge from SRC to SRC from GRAPH. This routine only
798 handles self-edges (e.g. an edge from a to a). */
799
800 static void
801 erase_graph_self_edge (constraint_graph_t graph, unsigned int src)
802 {
803 VEC(constraint_edge_t,heap) *predvec = graph->preds[src];
804 VEC(constraint_edge_t,heap) *succvec = graph->succs[src];
805 struct constraint_edge edge;
806 unsigned int place;
807
808 edge.dest = src;
809
810 /* Remove from the successors. */
811 place = VEC_lower_bound (constraint_edge_t, succvec, &edge,
812 constraint_edge_less);
813
814 /* Make sure we found the edge. */
815 #ifdef ENABLE_CHECKING
816 {
817 constraint_edge_t tmp = VEC_index (constraint_edge_t, succvec, place);
818 gcc_assert (constraint_edge_equal (*tmp, edge));
819 }
820 #endif
821 VEC_ordered_remove (constraint_edge_t, succvec, place);
822
823 /* Remove from the predecessors. */
824 place = VEC_lower_bound (constraint_edge_t, predvec, &edge,
825 constraint_edge_less);
826
827 /* Make sure we found the edge. */
828 #ifdef ENABLE_CHECKING
829 {
830 constraint_edge_t tmp = VEC_index (constraint_edge_t, predvec, place);
831 gcc_assert (constraint_edge_equal (*tmp, edge));
832 }
833 #endif
834 VEC_ordered_remove (constraint_edge_t, predvec, place);
835 }
836
837 /* Remove edges involving NODE from GRAPH. */
838
839 static void
840 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
841 {
842 VEC(constraint_edge_t,heap) *succvec = graph->succs[node];
843 VEC(constraint_edge_t,heap) *predvec = graph->preds[node];
844 bitmap_iterator bi;
845 unsigned int j;
846 constraint_edge_t c = NULL;
847 int i;
848
849 /* Walk the successors, erase the associated preds. */
850
851 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[node], 0, j, bi)
852 if (j != node)
853 bitmap_clear_bit (graph->zero_weight_preds[j], node);
854
855 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
856 if (c->dest != node)
857 {
858 unsigned int place;
859 struct constraint_edge lookfor;
860 constraint_edge_t result;
861
862 lookfor.dest = node;
863 place = VEC_lower_bound (constraint_edge_t, graph->preds[c->dest],
864 &lookfor, constraint_edge_less);
865 result = VEC_ordered_remove (constraint_edge_t,
866 graph->preds[c->dest], place);
867 pool_free (constraint_edge_pool, result);
868 }
869
870 /* Walk the preds, erase the associated succs. */
871
872 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_preds[node], 0, j, bi)
873 if (j != node)
874 bitmap_clear_bit (graph->zero_weight_succs[j], node);
875
876 for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
877 if (c->dest != node)
878 {
879 unsigned int place;
880 struct constraint_edge lookfor;
881 constraint_edge_t result;
882
883 lookfor.dest = node;
884 place = VEC_lower_bound (constraint_edge_t, graph->succs[c->dest],
885 &lookfor, constraint_edge_less);
886 result = VEC_ordered_remove (constraint_edge_t,
887 graph->succs[c->dest], place);
888 pool_free (constraint_edge_pool, result);
889
890 }
891
892 if (graph->zero_weight_preds[node])
893 {
894 BITMAP_FREE (graph->zero_weight_preds[node]);
895 graph->zero_weight_preds[node] = NULL;
896 }
897
898 if (graph->zero_weight_succs[node])
899 {
900 BITMAP_FREE (graph->zero_weight_succs[node]);
901 graph->zero_weight_succs[node] = NULL;
902 }
903
904 VEC_free (constraint_edge_t, heap, graph->preds[node]);
905 VEC_free (constraint_edge_t, heap, graph->succs[node]);
906 graph->preds[node] = NULL;
907 graph->succs[node] = NULL;
908 }
909
910 static bool edge_added = false;
911
912 /* Add edge (src, dest) to the graph. */
913
914 static bool
915 add_graph_edge (constraint_graph_t graph, unsigned int src, unsigned int dest)
916 {
917 unsigned int place;
918 VEC(constraint_edge_t,heap) *vec;
919 struct constraint_edge newe;
920 newe.dest = dest;
921
922 vec = graph->preds[src];
923 place = VEC_lower_bound (constraint_edge_t, vec, &newe,
924 constraint_edge_less);
925 if (place == VEC_length (constraint_edge_t, vec)
926 || VEC_index (constraint_edge_t, vec, place)->dest != dest)
927 {
928 constraint_edge_t edge = new_constraint_edge (dest);
929
930 VEC_safe_insert (constraint_edge_t, heap, graph->preds[src],
931 place, edge);
932 edge = new_constraint_edge (src);
933
934 place = VEC_lower_bound (constraint_edge_t, graph->succs[dest],
935 edge, constraint_edge_less);
936 VEC_safe_insert (constraint_edge_t, heap, graph->succs[dest],
937 place, edge);
938 edge_added = true;
939 stats.num_edges++;
940 return true;
941 }
942 else
943 return false;
944 }
945
946
947 /* Return the bitmap representing the weights of edge (SRC, DEST). */
948
949 static bitmap *
950 get_graph_weights (constraint_graph_t graph, unsigned int src,
951 unsigned int dest)
952 {
953 constraint_edge_t edge;
954 VEC(constraint_edge_t,heap) *vec;
955 struct constraint_edge lookfor;
956
957 lookfor.dest = dest;
958
959 vec = graph->preds[src];
960 edge = constraint_edge_vec_find (vec, lookfor);
961 gcc_assert (edge != NULL);
962 return &edge->weights;
963 }
964
965 /* Allocate graph weight bitmap for the edges associated with SRC and
966 DEST in GRAPH. Both the pred and the succ edges share a single
967 bitmap, so we need to set both edges to that bitmap. */
968
969 static bitmap
970 allocate_graph_weights (constraint_graph_t graph, unsigned int src,
971 unsigned int dest)
972 {
973 bitmap result;
974 constraint_edge_t edge;
975 VEC(constraint_edge_t,heap) *vec;
976 struct constraint_edge lookfor;
977
978 result = BITMAP_ALLOC (&ptabitmap_obstack);
979
980 /* Set the pred weight. */
981 lookfor.dest = dest;
982 vec = graph->preds[src];
983 edge = constraint_edge_vec_find (vec, lookfor);
984 gcc_assert (edge != NULL);
985 edge->weights = result;
986
987 /* Set the succ weight. */
988 lookfor.dest = src;
989 vec = graph->succs[dest];
990 edge = constraint_edge_vec_find (vec, lookfor);
991 gcc_assert (edge != NULL);
992 edge->weights = result;
993
994 return result;
995 }
996
997
998 /* Merge GRAPH nodes FROM and TO into node TO. */
999
1000 static void
1001 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1002 unsigned int from)
1003 {
1004 VEC(constraint_edge_t,heap) *succvec = graph->succs[from];
1005 VEC(constraint_edge_t,heap) *predvec = graph->preds[from];
1006 int i;
1007 constraint_edge_t c;
1008 unsigned int j;
1009 bitmap_iterator bi;
1010
1011 /* Merge all the zero weighted predecessor edges. */
1012 if (graph->zero_weight_preds[from])
1013 {
1014 if (!graph->zero_weight_preds[to])
1015 graph->zero_weight_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1016
1017 EXECUTE_IF_SET_IN_BITMAP (graph->zero_weight_preds[from], 0, j, bi)
1018 {
1019 if (j != to)
1020 {
1021 bitmap_clear_bit (graph->zero_weight_succs[j], from);
1022 bitmap_set_bit (graph->zero_weight_succs[j], to);
1023 }
1024 }
1025 bitmap_ior_into (graph->zero_weight_preds[to],
1026 graph->zero_weight_preds[from]);
1027 }
1028
1029 /* Merge all the zero weighted successor edges. */
1030 if (graph->zero_weight_succs[from])
1031 {
1032 if (!graph->zero_weight_succs[to])
1033 graph->zero_weight_succs[to] = BITMAP_ALLOC (&ptabitmap_obstack);
1034 EXECUTE_IF_SET_IN_BITMAP (graph->zero_weight_succs[from], 0, j, bi)
1035 {
1036 bitmap_clear_bit (graph->zero_weight_preds[j], from);
1037 bitmap_set_bit (graph->zero_weight_preds[j], to);
1038 }
1039 bitmap_ior_into (graph->zero_weight_succs[to],
1040 graph->zero_weight_succs[from]);
1041 }
1042
1043 /* Merge all the nonzero weighted predecessor edges. */
1044 for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
1045 {
1046 unsigned int d = c->dest;
1047 bitmap temp;
1048 bitmap *weights;
1049
1050 if (c->dest == from)
1051 d = to;
1052
1053 add_graph_edge (graph, to, d);
1054
1055 temp = *(get_graph_weights (graph, from, c->dest));
1056 if (temp)
1057 {
1058 weights = get_graph_weights (graph, to, d);
1059 if (!*weights)
1060 *weights = allocate_graph_weights (graph, to, d);
1061
1062 bitmap_ior_into (*weights, temp);
1063 }
1064
1065 }
1066
1067 /* Merge all the nonzero weighted successor edges. */
1068 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
1069 {
1070 unsigned int d = c->dest;
1071 bitmap temp;
1072 bitmap *weights;
1073
1074 if (c->dest == from)
1075 d = to;
1076
1077 add_graph_edge (graph, d, to);
1078
1079 temp = *(get_graph_weights (graph, c->dest, from));
1080 if (temp)
1081 {
1082 weights = get_graph_weights (graph, d, to);
1083 if (!*weights)
1084 *weights = allocate_graph_weights (graph, d, to);
1085 bitmap_ior_into (*weights, temp);
1086 }
1087 }
1088 clear_edges_for_node (graph, from);
1089 }
1090
1091 /* Add a graph edge to GRAPH, going from TO to FROM, with WEIGHT, if
1092 it doesn't exist in the graph already.
1093 Return false if the edge already existed, true otherwise. */
1094
1095 static bool
1096 int_add_graph_edge (constraint_graph_t graph, unsigned int to,
1097 unsigned int from, unsigned HOST_WIDE_INT weight)
1098 {
1099 if (to == from && weight == 0)
1100 {
1101 return false;
1102 }
1103 else
1104 {
1105 bool r = false;
1106
1107 if (weight == 0)
1108 {
1109 if (!graph->zero_weight_preds[to])
1110 graph->zero_weight_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1111 if (!graph->zero_weight_succs[from])
1112 graph->zero_weight_succs[from] = BITMAP_ALLOC (&ptabitmap_obstack);
1113 if (!bitmap_bit_p (graph->zero_weight_succs[from], to))
1114 {
1115 edge_added = true;
1116 r = true;
1117 stats.num_edges++;
1118 bitmap_set_bit (graph->zero_weight_preds[to], from);
1119 bitmap_set_bit (graph->zero_weight_succs[from], to);
1120 }
1121 }
1122 else
1123 {
1124 bitmap *weights;
1125
1126 r = add_graph_edge (graph, to, from);
1127 weights = get_graph_weights (graph, to, from);
1128
1129 if (!*weights)
1130 {
1131 r = true;
1132 *weights = allocate_graph_weights (graph, to, from);
1133 bitmap_set_bit (*weights, weight);
1134 }
1135 else
1136 {
1137 r |= !bitmap_bit_p (*weights, weight);
1138 bitmap_set_bit (*weights, weight);
1139 }
1140 }
1141
1142 return r;
1143 }
1144 }
1145
1146
1147 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1148
1149 static bool
1150 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1151 unsigned int dest)
1152 {
1153 struct constraint_edge lookfor;
1154 lookfor.dest = src;
1155
1156 return (graph->zero_weight_succs[dest]
1157 && bitmap_bit_p (graph->zero_weight_succs[dest], src))
1158 || constraint_edge_vec_find (graph->succs[dest], lookfor) != NULL;
1159 }
1160
1161 /* Return true if {DEST, SRC} is an existing weighted graph edge (IE has
1162 a weight other than 0) in GRAPH. */
1163 static bool
1164 valid_weighted_graph_edge (constraint_graph_t graph, unsigned int src,
1165 unsigned int dest)
1166 {
1167 struct constraint_edge lookfor;
1168 lookfor.dest = src;
1169
1170 return graph->preds[src]
1171 && constraint_edge_vec_find (graph->succs[dest], lookfor) != NULL;
1172 }
1173
1174
1175 /* Build the constraint graph. */
1176
1177 static void
1178 build_constraint_graph (void)
1179 {
1180 int i = 0;
1181 constraint_t c;
1182
1183 graph = XNEW (struct constraint_graph);
1184 graph->succs = XCNEWVEC (VEC(constraint_edge_t,heap) *, VEC_length (varinfo_t, varmap) + 1);
1185 graph->preds = XCNEWVEC (VEC(constraint_edge_t,heap) *, VEC_length (varinfo_t, varmap) + 1);
1186 graph->zero_weight_succs = XCNEWVEC (bitmap, VEC_length (varinfo_t, varmap) + 1);
1187 graph->zero_weight_preds = XCNEWVEC (bitmap, VEC_length (varinfo_t, varmap) + 1);
1188
1189 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1190 {
1191 struct constraint_expr lhs = c->lhs;
1192 struct constraint_expr rhs = c->rhs;
1193 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1194 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1195
1196 if (lhs.type == DEREF)
1197 {
1198 /* *x = y or *x = &y (complex) */
1199 if (rhs.type == ADDRESSOF || rhsvar > anything_id)
1200 insert_into_complex (lhsvar, c);
1201 }
1202 else if (rhs.type == DEREF)
1203 {
1204 /* !special var = *y */
1205 if (!(get_varinfo (lhsvar)->is_special_var))
1206 insert_into_complex (rhsvar, c);
1207 }
1208 else if (rhs.type == ADDRESSOF)
1209 {
1210 /* x = &y */
1211 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1212 }
1213 else if (lhsvar > anything_id)
1214 {
1215 /* Ignore 0 weighted self edges, as they can't possibly contribute
1216 anything */
1217 if (lhsvar != rhsvar || rhs.offset != 0 || lhs.offset != 0)
1218 {
1219 /* x = y (simple) */
1220 int_add_graph_edge (graph, lhs.var, rhs.var, rhs.offset);
1221 }
1222
1223 }
1224 }
1225 }
1226
1227
1228 /* Changed variables on the last iteration. */
1229 static unsigned int changed_count;
1230 static sbitmap changed;
1231
1232 DEF_VEC_I(unsigned);
1233 DEF_VEC_ALLOC_I(unsigned,heap);
1234
1235
1236 /* Strongly Connected Component visitation info. */
1237
1238 struct scc_info
1239 {
1240 sbitmap visited;
1241 sbitmap in_component;
1242 int current_index;
1243 unsigned int *visited_index;
1244 VEC(unsigned,heap) *scc_stack;
1245 VEC(unsigned,heap) *unification_queue;
1246 };
1247
1248
1249 /* Recursive routine to find strongly connected components in GRAPH.
1250 SI is the SCC info to store the information in, and N is the id of the current
1251 graph node we are processing.
1252
1253 This is Tarjan's strongly connected component finding algorithm, as
1254 modified by Nuutila to keep only non-root nodes on the stack.
1255 The algorithm can be found in "On finding the strongly connected
1256 components in a directed graph" by Esko Nuutila and Eljas
1257 Soisalon-Soininen, in Information Processing Letters volume 49,
1258 number 1, pages 9-14. */
1259
1260 static void
1261 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1262 {
1263 unsigned int i;
1264 bitmap_iterator bi;
1265
1266 gcc_assert (get_varinfo (n)->node == n);
1267 SET_BIT (si->visited, n);
1268 RESET_BIT (si->in_component, n);
1269 si->visited_index[n] = si->current_index ++;
1270
1271 /* Visit all the successors. */
1272 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[n], 0, i, bi)
1273 {
1274 unsigned int w = i;
1275 if (!TEST_BIT (si->visited, w))
1276 scc_visit (graph, si, w);
1277 if (!TEST_BIT (si->in_component, w))
1278 {
1279 unsigned int t = get_varinfo (w)->node;
1280 unsigned int nnode = get_varinfo (n)->node;
1281 if (si->visited_index[t] < si->visited_index[nnode])
1282 get_varinfo (n)->node = t;
1283 }
1284 }
1285
1286 /* See if any components have been identified. */
1287 if (get_varinfo (n)->node == n)
1288 {
1289 unsigned int t = si->visited_index[n];
1290 SET_BIT (si->in_component, n);
1291 while (VEC_length (unsigned, si->scc_stack) != 0
1292 && t < si->visited_index[VEC_last (unsigned, si->scc_stack)])
1293 {
1294 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1295 get_varinfo (w)->node = n;
1296 SET_BIT (si->in_component, w);
1297 /* Mark this node for collapsing. */
1298 VEC_safe_push (unsigned, heap, si->unification_queue, w);
1299 }
1300 }
1301 else
1302 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1303 }
1304
1305
1306 /* Collapse two variables into one variable. */
1307
1308 static void
1309 collapse_nodes (constraint_graph_t graph, unsigned int to, unsigned int from)
1310 {
1311 bitmap tosol, fromsol;
1312
1313 condense_varmap_nodes (to, from);
1314 tosol = get_varinfo (to)->solution;
1315 fromsol = get_varinfo (from)->solution;
1316 bitmap_ior_into (tosol, fromsol);
1317 merge_graph_nodes (graph, to, from);
1318
1319 if (valid_graph_edge (graph, to, to))
1320 {
1321 if (graph->zero_weight_preds[to])
1322 {
1323 bitmap_clear_bit (graph->zero_weight_preds[to], to);
1324 bitmap_clear_bit (graph->zero_weight_succs[to], to);
1325 }
1326 if (valid_weighted_graph_edge (graph, to, to))
1327 {
1328 bitmap weights = *(get_graph_weights (graph, to, to));
1329 if (!weights || bitmap_empty_p (weights))
1330 erase_graph_self_edge (graph, to);
1331 }
1332 }
1333 BITMAP_FREE (fromsol);
1334 get_varinfo (to)->address_taken |= get_varinfo (from)->address_taken;
1335 get_varinfo (to)->indirect_target |= get_varinfo (from)->indirect_target;
1336 }
1337
1338
1339 /* Unify nodes in GRAPH that we have found to be part of a cycle.
1340 SI is the Strongly Connected Components information structure that tells us
1341 what components to unify.
1342 UPDATE_CHANGED should be set to true if the changed sbitmap and changed
1343 count should be updated to reflect the unification. */
1344
1345 static void
1346 process_unification_queue (constraint_graph_t graph, struct scc_info *si,
1347 bool update_changed)
1348 {
1349 size_t i = 0;
1350 bitmap tmp = BITMAP_ALLOC (update_changed ? &iteration_obstack : NULL);
1351 bitmap_clear (tmp);
1352
1353 /* We proceed as follows:
1354
1355 For each component in the queue (components are delineated by
1356 when current_queue_element->node != next_queue_element->node):
1357
1358 rep = representative node for component
1359
1360 For each node (tounify) to be unified in the component,
1361 merge the solution for tounify into tmp bitmap
1362
1363 clear solution for tounify
1364
1365 merge edges from tounify into rep
1366
1367 merge complex constraints from tounify into rep
1368
1369 update changed count to note that tounify will never change
1370 again
1371
1372 Merge tmp into solution for rep, marking rep changed if this
1373 changed rep's solution.
1374
1375 Delete any 0 weighted self-edges we now have for rep. */
1376 while (i != VEC_length (unsigned, si->unification_queue))
1377 {
1378 unsigned int tounify = VEC_index (unsigned, si->unification_queue, i);
1379 unsigned int n = get_varinfo (tounify)->node;
1380
1381 if (dump_file && (dump_flags & TDF_DETAILS))
1382 fprintf (dump_file, "Unifying %s to %s\n",
1383 get_varinfo (tounify)->name,
1384 get_varinfo (n)->name);
1385 if (update_changed)
1386 stats.unified_vars_dynamic++;
1387 else
1388 stats.unified_vars_static++;
1389 bitmap_ior_into (tmp, get_varinfo (tounify)->solution);
1390 merge_graph_nodes (graph, n, tounify);
1391 condense_varmap_nodes (n, tounify);
1392
1393 if (update_changed && TEST_BIT (changed, tounify))
1394 {
1395 RESET_BIT (changed, tounify);
1396 if (!TEST_BIT (changed, n))
1397 SET_BIT (changed, n);
1398 else
1399 {
1400 gcc_assert (changed_count > 0);
1401 changed_count--;
1402 }
1403 }
1404
1405 bitmap_clear (get_varinfo (tounify)->solution);
1406 ++i;
1407
1408 /* If we've either finished processing the entire queue, or
1409 finished processing all nodes for component n, update the solution for
1410 n. */
1411 if (i == VEC_length (unsigned, si->unification_queue)
1412 || get_varinfo (VEC_index (unsigned, si->unification_queue, i))->node != n)
1413 {
1414 /* If the solution changes because of the merging, we need to mark
1415 the variable as changed. */
1416 if (bitmap_ior_into (get_varinfo (n)->solution, tmp))
1417 {
1418 if (update_changed && !TEST_BIT (changed, n))
1419 {
1420 SET_BIT (changed, n);
1421 changed_count++;
1422 }
1423 }
1424 bitmap_clear (tmp);
1425
1426 if (valid_graph_edge (graph, n, n))
1427 {
1428 if (graph->zero_weight_succs[n])
1429 {
1430 if (graph->zero_weight_preds[n])
1431 bitmap_clear_bit (graph->zero_weight_preds[n], n);
1432 bitmap_clear_bit (graph->zero_weight_succs[n], n);
1433 }
1434 if (valid_weighted_graph_edge (graph, n, n))
1435 {
1436 bitmap weights = *(get_graph_weights (graph, n, n));
1437 if (!weights || bitmap_empty_p (weights))
1438 erase_graph_self_edge (graph, n);
1439 }
1440 }
1441 }
1442 }
1443 BITMAP_FREE (tmp);
1444 }
1445
1446
1447 /* Information needed to compute the topological ordering of a graph. */
1448
1449 struct topo_info
1450 {
1451 /* sbitmap of visited nodes. */
1452 sbitmap visited;
1453 /* Array that stores the topological order of the graph, *in
1454 reverse*. */
1455 VEC(unsigned,heap) *topo_order;
1456 };
1457
1458
1459 /* Initialize and return a topological info structure. */
1460
1461 static struct topo_info *
1462 init_topo_info (void)
1463 {
1464 size_t size = VEC_length (varinfo_t, varmap);
1465 struct topo_info *ti = XNEW (struct topo_info);
1466 ti->visited = sbitmap_alloc (size);
1467 sbitmap_zero (ti->visited);
1468 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1469 return ti;
1470 }
1471
1472
1473 /* Free the topological sort info pointed to by TI. */
1474
1475 static void
1476 free_topo_info (struct topo_info *ti)
1477 {
1478 sbitmap_free (ti->visited);
1479 VEC_free (unsigned, heap, ti->topo_order);
1480 free (ti);
1481 }
1482
1483 /* Visit the graph in topological order, and store the order in the
1484 topo_info structure. */
1485
1486 static void
1487 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1488 unsigned int n)
1489 {
1490 VEC(constraint_edge_t,heap) *succs = graph->succs[n];
1491 bitmap temp;
1492 bitmap_iterator bi;
1493 constraint_edge_t c;
1494 int i;
1495 unsigned int j;
1496
1497 SET_BIT (ti->visited, n);
1498 if (VEC_length (constraint_edge_t, succs) != 0)
1499 {
1500 temp = BITMAP_ALLOC (&iteration_obstack);
1501 if (graph->zero_weight_succs[n])
1502 bitmap_ior_into (temp, graph->zero_weight_succs[n]);
1503 for (i = 0; VEC_iterate (constraint_edge_t, succs, i, c); i++)
1504 bitmap_set_bit (temp, c->dest);
1505 }
1506 else
1507 temp = graph->zero_weight_succs[n];
1508
1509 if (temp)
1510 EXECUTE_IF_SET_IN_BITMAP (temp, 0, j, bi)
1511 {
1512 if (!TEST_BIT (ti->visited, j))
1513 topo_visit (graph, ti, j);
1514 }
1515 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1516 }
1517
1518 /* Return true if variable N + OFFSET is a legal field of N. */
1519
1520 static bool
1521 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1522 {
1523 varinfo_t ninfo = get_varinfo (n);
1524
1525 /* For things we've globbed to single variables, any offset into the
1526 variable acts like the entire variable, so that it becomes offset
1527 0. */
1528 if (ninfo->is_special_var
1529 || ninfo->is_artificial_var
1530 || ninfo->is_unknown_size_var)
1531 {
1532 *offset = 0;
1533 return true;
1534 }
1535 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1536 }
1537
1538 /* Process a constraint C that represents *x = &y. */
1539
1540 static void
1541 do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
1542 constraint_t c, bitmap delta)
1543 {
1544 unsigned int rhs = c->rhs.var;
1545 unsigned int j;
1546 bitmap_iterator bi;
1547
1548 /* For each member j of Delta (Sol(x)), add y to Sol(j) */
1549 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1550 {
1551 unsigned HOST_WIDE_INT offset = c->lhs.offset;
1552 if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
1553 {
1554 /* *x != NULL && *x != ANYTHING */
1555 varinfo_t v;
1556 unsigned int t;
1557 bitmap sol;
1558 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;
1559
1560 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1561 if (!v)
1562 continue;
1563 t = v->node;
1564 sol = get_varinfo (t)->solution;
1565 if (!bitmap_bit_p (sol, rhs))
1566 {
1567 bitmap_set_bit (sol, rhs);
1568 if (!TEST_BIT (changed, t))
1569 {
1570 SET_BIT (changed, t);
1571 changed_count++;
1572 }
1573 }
1574 }
1575 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1576 fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");
1577
1578 }
1579 }
1580
1581 /* Process a constraint C that represents x = *y, using DELTA as the
1582 starting solution. */
1583
1584 static void
1585 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1586 bitmap delta)
1587 {
1588 unsigned int lhs = get_varinfo (c->lhs.var)->node;
1589 bool flag = false;
1590 bitmap sol = get_varinfo (lhs)->solution;
1591 unsigned int j;
1592 bitmap_iterator bi;
1593
1594 if (bitmap_bit_p (delta, anything_id))
1595 {
1596 flag = !bitmap_bit_p (sol, anything_id);
1597 if (flag)
1598 bitmap_set_bit (sol, anything_id);
1599 goto done;
1600 }
1601 /* For each variable j in delta (Sol(y)), add
1602 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1603 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1604 {
1605 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1606 if (type_safe (j, &roffset))
1607 {
1608 varinfo_t v;
1609 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1610 unsigned int t;
1611
1612 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1613 if (!v)
1614 continue;
1615 t = v->node;
1616
1617 /* Adding edges from the special vars is pointless.
1618 They don't have sets that can change. */
1619 if (get_varinfo (t) ->is_special_var)
1620 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1621 else if (int_add_graph_edge (graph, lhs, t, 0))
1622 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1623 }
1624 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1625 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1626
1627 }
1628
1629 done:
1630 /* If the LHS solution changed, mark the var as changed. */
1631 if (flag)
1632 {
1633 get_varinfo (lhs)->solution = sol;
1634 if (!TEST_BIT (changed, lhs))
1635 {
1636 SET_BIT (changed, lhs);
1637 changed_count++;
1638 }
1639 }
1640 }
1641
1642 /* Process a constraint C that represents *x = y. */
1643
1644 static void
1645 do_ds_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1646 {
1647 unsigned int rhs = get_varinfo (c->rhs.var)->node;
1648 unsigned HOST_WIDE_INT roff = c->rhs.offset;
1649 bitmap sol = get_varinfo (rhs)->solution;
1650 unsigned int j;
1651 bitmap_iterator bi;
1652
1653 if (bitmap_bit_p (sol, anything_id))
1654 {
1655 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1656 {
1657 varinfo_t jvi = get_varinfo (j);
1658 unsigned int t;
1659 unsigned int loff = c->lhs.offset;
1660 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1661 varinfo_t v;
1662
1663 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1664 if (!v)
1665 continue;
1666 t = v->node;
1667
1668 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1669 {
1670 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1671 if (!TEST_BIT (changed, t))
1672 {
1673 SET_BIT (changed, t);
1674 changed_count++;
1675 }
1676 }
1677 }
1678 return;
1679 }
1680
1681 /* For each member j of delta (Sol(x)), add an edge from y to j and
1682 union Sol(y) into Sol(j) */
1683 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1684 {
1685 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1686 if (type_safe (j, &loff) && !(get_varinfo(j)->is_special_var))
1687 {
1688 varinfo_t v;
1689 unsigned int t;
1690 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1691
1692 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1693 if (!v)
1694 continue;
1695 t = v->node;
1696 if (int_add_graph_edge (graph, t, rhs, roff))
1697 {
1698 bitmap tmp = get_varinfo (t)->solution;
1699 if (set_union_with_increment (tmp, sol, roff))
1700 {
1701 get_varinfo (t)->solution = tmp;
1702 if (t == rhs)
1703 sol = get_varinfo (rhs)->solution;
1704 if (!TEST_BIT (changed, t))
1705 {
1706 SET_BIT (changed, t);
1707 changed_count++;
1708 }
1709 }
1710 }
1711 }
1712 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1713 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1714 }
1715 }
1716
1717 /* Handle a non-simple (simple meaning requires no iteration), non-copy
1718 constraint (IE *x = &y, x = *y, and *x = y). */
1719
1720 static void
1721 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1722 {
1723 if (c->lhs.type == DEREF)
1724 {
1725 if (c->rhs.type == ADDRESSOF)
1726 {
1727 /* *x = &y */
1728 do_da_constraint (graph, c, delta);
1729 }
1730 else
1731 {
1732 /* *x = y */
1733 do_ds_constraint (graph, c, delta);
1734 }
1735 }
1736 else
1737 {
1738 /* x = *y */
1739 if (!(get_varinfo (c->lhs.var)->is_special_var))
1740 do_sd_constraint (graph, c, delta);
1741 }
1742 }
1743
1744 /* Initialize and return a new SCC info structure. */
1745
1746 static struct scc_info *
1747 init_scc_info (void)
1748 {
1749 struct scc_info *si = XNEW (struct scc_info);
1750 size_t size = VEC_length (varinfo_t, varmap);
1751
1752 si->current_index = 0;
1753 si->visited = sbitmap_alloc (size);
1754 sbitmap_zero (si->visited);
1755 si->in_component = sbitmap_alloc (size);
1756 sbitmap_ones (si->in_component);
1757 si->visited_index = XCNEWVEC (unsigned int, size + 1);
1758 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1759 si->unification_queue = VEC_alloc (unsigned, heap, 1);
1760 return si;
1761 }
1762
1763 /* Free an SCC info structure pointed to by SI */
1764
1765 static void
1766 free_scc_info (struct scc_info *si)
1767 {
1768 sbitmap_free (si->visited);
1769 sbitmap_free (si->in_component);
1770 free (si->visited_index);
1771 VEC_free (unsigned, heap, si->scc_stack);
1772 VEC_free (unsigned, heap, si->unification_queue);
1773 free (si);
1774 }
1775
1776
1777 /* Find cycles that occur in GRAPH, using strongly connected components, and
1778 collapse the cycles into a single representative node. If UPDATE_CHANGED
1779 is true, then update the changed sbitmap to note those nodes whose
1780 solutions have changed as a result of collapsing. */
1781
1782 static void
1783 find_and_collapse_graph_cycles (constraint_graph_t graph, bool update_changed)
1784 {
1785 unsigned int i;
1786 unsigned int size = VEC_length (varinfo_t, varmap);
1787 struct scc_info *si = init_scc_info ();
1788
1789 for (i = 0; i != size; ++i)
1790 if (!TEST_BIT (si->visited, i) && get_varinfo (i)->node == i)
1791 scc_visit (graph, si, i);
1792
1793 process_unification_queue (graph, si, update_changed);
1794 free_scc_info (si);
1795 }
1796
1797 /* Compute a topological ordering for GRAPH, and store the result in the
1798 topo_info structure TI. */
1799
1800 static void
1801 compute_topo_order (constraint_graph_t graph,
1802 struct topo_info *ti)
1803 {
1804 unsigned int i;
1805 unsigned int size = VEC_length (varinfo_t, varmap);
1806
1807 for (i = 0; i != size; ++i)
1808 if (!TEST_BIT (ti->visited, i) && get_varinfo (i)->node == i)
1809 topo_visit (graph, ti, i);
1810 }
1811
1812 /* Return true if a bit other than bit 0 is set in bitmap B. */
1813
1814 static bool
1815 bitmap_other_than_zero_bit_set (bitmap b)
1816 {
1817 unsigned int i;
1818 bitmap_iterator bi;
1819
1820 if (bitmap_empty_p (b))
1821 return false;
1822 EXECUTE_IF_SET_IN_BITMAP (b, 1, i, bi)
1823 return true;
1824 return false;
1825 }
1826
1827 /* Perform offline variable substitution.
1828
1829 This is a linear time way of identifying variables that must have
1830 equivalent points-to sets, including those caused by static cycles,
1831 and single entry subgraphs, in the constraint graph.
1832
1833 The technique is described in "Off-line variable substitution for
1834 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1835 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56. */
1836
1837 static void
1838 perform_var_substitution (constraint_graph_t graph)
1839 {
1840 struct topo_info *ti = init_topo_info ();
1841
1842 bitmap_obstack_initialize (&iteration_obstack);
1843 /* Compute the topological ordering of the graph, then visit each
1844 node in topological order. */
1845 compute_topo_order (graph, ti);
1846
1847 while (VEC_length (unsigned, ti->topo_order) != 0)
1848 {
1849 unsigned int i = VEC_pop (unsigned, ti->topo_order);
1850 unsigned int pred;
1851 varinfo_t vi = get_varinfo (i);
1852 bool okay_to_elim = false;
1853 unsigned int root = VEC_length (varinfo_t, varmap);
1854 VEC(constraint_edge_t,heap) *predvec = graph->preds[i];
1855 constraint_edge_t ce = NULL;
1856 bitmap tmp;
1857 unsigned int k;
1858 bitmap_iterator bi;
1859
1860 /* We can't eliminate things whose address is taken, or which are
1861 the target of a dereference. */
1862 if (vi->address_taken || vi->indirect_target)
1863 continue;
1864
1865 /* See if all predecessors of I are ripe for elimination */
1866 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_preds[i], 0, k, bi)
1867 {
1868 unsigned int w;
1869 w = get_varinfo (k)->node;
1870
1871 /* We can't eliminate the node if one of the predecessors is
1872 part of a different strongly connected component. */
1873 if (!okay_to_elim)
1874 {
1875 root = w;
1876 okay_to_elim = true;
1877 }
1878 else if (w != root)
1879 {
1880 okay_to_elim = false;
1881 break;
1882 }
1883
1884 /* Theorem 4 in Rountev and Chandra: If i is a direct node,
1885 then Solution(i) is a subset of Solution (w), where w is a
1886 predecessor in the graph.
1887 Corollary: If all predecessors of i have the same
1888 points-to set, then i has that same points-to set as
1889 those predecessors. */
1890 tmp = BITMAP_ALLOC (NULL);
1891 bitmap_and_compl (tmp, get_varinfo (i)->solution,
1892 get_varinfo (w)->solution);
1893 if (!bitmap_empty_p (tmp))
1894 {
1895 okay_to_elim = false;
1896 BITMAP_FREE (tmp);
1897 break;
1898 }
1899 BITMAP_FREE (tmp);
1900 }
1901
1902 if (okay_to_elim)
1903 for (pred = 0;
1904 VEC_iterate (constraint_edge_t, predvec, pred, ce);
1905 pred++)
1906 {
1907 bitmap weight;
1908 unsigned int w;
1909 weight = *(get_graph_weights (graph, i, ce->dest));
1910
1911 /* We can't eliminate variables that have nonzero weighted
1912 edges between them. */
1913 if (weight && bitmap_other_than_zero_bit_set (weight))
1914 {
1915 okay_to_elim = false;
1916 break;
1917 }
1918 w = get_varinfo (ce->dest)->node;
1919
1920 /* We can't eliminate the node if one of the predecessors is
1921 part of a different strongly connected component. */
1922 if (!okay_to_elim)
1923 {
1924 root = w;
1925 okay_to_elim = true;
1926 }
1927 else if (w != root)
1928 {
1929 okay_to_elim = false;
1930 break;
1931 }
1932
1933 /* Theorem 4 in Rountev and Chandra: If i is a direct node,
1934 then Solution(i) is a subset of Solution (w), where w is a
1935 predecessor in the graph.
1936 Corollary: If all predecessors of i have the same
1937 points-to set, then i has that same points-to set as
1938 those predecessors. */
1939 tmp = BITMAP_ALLOC (NULL);
1940 bitmap_and_compl (tmp, get_varinfo (i)->solution,
1941 get_varinfo (w)->solution);
1942 if (!bitmap_empty_p (tmp))
1943 {
1944 okay_to_elim = false;
1945 BITMAP_FREE (tmp);
1946 break;
1947 }
1948 BITMAP_FREE (tmp);
1949 }
1950
1951 /* See if the root is different than the original node.
1952 If so, we've found an equivalence. */
1953 if (root != get_varinfo (i)->node && okay_to_elim)
1954 {
1955 /* Found an equivalence */
1956 get_varinfo (i)->node = root;
1957 collapse_nodes (graph, root, i);
1958 if (dump_file && (dump_flags & TDF_DETAILS))
1959 fprintf (dump_file, "Collapsing %s into %s\n",
1960 get_varinfo (i)->name,
1961 get_varinfo (root)->name);
1962 stats.collapsed_vars++;
1963 }
1964 }
1965
1966 bitmap_obstack_release (&iteration_obstack);
1967 free_topo_info (ti);
1968 }
1969
1970 /* Solve the constraint graph GRAPH using our worklist solver.
1971 This is based on the PW* family of solvers from the "Efficient Field
1972 Sensitive Pointer Analysis for C" paper.
1973 It works by iterating over all the graph nodes, processing the complex
1974 constraints and propagating the copy constraints, until everything stops
1975 changing. This corresponds to steps 6-8 in the solving list given above. */
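/* Roughly, one pass of the loop below does the following for each node
   i whose solution has changed since it was last visited:

     process each complex constraint attached to i against Sol (i);
     for each zero-weight successor j:    Sol (j) |= Sol (i);
     for each successor j with weight k:  Sol (j) |= { v + k | v in Sol (i) };

   Any successor whose solution grows is marked changed again, and the
   loop terminates once no solution changes.  */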
1976
1977 static void
1978 solve_graph (constraint_graph_t graph)
1979 {
1980 unsigned int size = VEC_length (varinfo_t, varmap);
1981 unsigned int i;
1982
1983 changed_count = size;
1984 changed = sbitmap_alloc (size);
1985 sbitmap_ones (changed);
1986
1987 /* The already collapsed/unreachable nodes will never change, so we
1988 need to account for them in changed_count. */
1989 for (i = 0; i < size; i++)
1990 if (get_varinfo (i)->node != i)
1991 changed_count--;
1992
1993 while (changed_count > 0)
1994 {
1995 unsigned int i;
1996 struct topo_info *ti = init_topo_info ();
1997 stats.iterations++;
1998
1999 bitmap_obstack_initialize (&iteration_obstack);
2000
2001 if (edge_added)
2002 {
2003 /* We already did cycle elimination once, when we did
2004 variable substitution, so we don't need it again for the
2005 first iteration. */
2006 if (stats.iterations > 1)
2007 find_and_collapse_graph_cycles (graph, true);
2008
2009 edge_added = false;
2010 }
2011
2012 compute_topo_order (graph, ti);
2013
2014 while (VEC_length (unsigned, ti->topo_order) != 0)
2015 {
2016 i = VEC_pop (unsigned, ti->topo_order);
2017 gcc_assert (get_varinfo (i)->node == i);
2018
2019 /* If the node has changed, we need to process the
2020 complex constraints and outgoing edges again. */
2021 if (TEST_BIT (changed, i))
2022 {
2023 unsigned int j;
2024 constraint_t c;
2025 constraint_edge_t e = NULL;
2026 bitmap solution;
2027 bitmap_iterator bi;
2028 VEC(constraint_t,heap) *complex = get_varinfo (i)->complex;
2029 VEC(constraint_edge_t,heap) *succs;
2030
2031 RESET_BIT (changed, i);
2032 changed_count--;
2033
2034 /* Process the complex constraints */
2035 solution = get_varinfo (i)->solution;
2036 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2037 do_complex_constraint (graph, c, solution);
2038
2039 /* Propagate solution to all successors. */
2040 succs = graph->succs[i];
2041
2042 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[i], 0, j, bi)
2043 {
2044 bitmap tmp = get_varinfo (j)->solution;
2045 bool flag = false;
2046
2047 flag = set_union_with_increment (tmp, solution, 0);
2048
2049 if (flag)
2050 {
2051 get_varinfo (j)->solution = tmp;
2052 if (!TEST_BIT (changed, j))
2053 {
2054 SET_BIT (changed, j);
2055 changed_count++;
2056 }
2057 }
2058 }
2059 for (j = 0; VEC_iterate (constraint_edge_t, succs, j, e); j++)
2060 {
2061 bitmap tmp = get_varinfo (e->dest)->solution;
2062 bool flag = false;
2063 unsigned int k;
2064 bitmap weights = e->weights;
2065 bitmap_iterator bi;
2066
2067 gcc_assert (weights && !bitmap_empty_p (weights));
2068 EXECUTE_IF_SET_IN_BITMAP (weights, 0, k, bi)
2069 flag |= set_union_with_increment (tmp, solution, k);
2070
2071 if (flag)
2072 {
2073 get_varinfo (e->dest)->solution = tmp;
2074 if (!TEST_BIT (changed, e->dest))
2075 {
2076 SET_BIT (changed, e->dest);
2077 changed_count++;
2078 }
2079 }
2080 }
2081 }
2082 }
2083 free_topo_info (ti);
2084 bitmap_obstack_release (&iteration_obstack);
2085 }
2086
2087 sbitmap_free (changed);
2088 }
2089
2090
2091 /* CONSTRAINT AND VARIABLE GENERATION FUNCTIONS */
2092
2093 /* Map from trees to variable ids. */
2094 static htab_t id_for_tree;
2095
2096 typedef struct tree_id
2097 {
2098 tree t;
2099 unsigned int id;
2100 } *tree_id_t;
2101
2102 /* Hash a tree id structure. */
2103
2104 static hashval_t
2105 tree_id_hash (const void *p)
2106 {
2107 const tree_id_t ta = (tree_id_t) p;
2108 return htab_hash_pointer (ta->t);
2109 }
2110
2111 /* Return true if the tree in P1 and the tree in P2 are the same. */
2112
2113 static int
2114 tree_id_eq (const void *p1, const void *p2)
2115 {
2116 const tree_id_t ta1 = (tree_id_t) p1;
2117 const tree_id_t ta2 = (tree_id_t) p2;
2118 return ta1->t == ta2->t;
2119 }
2120
2121 /* Insert ID as the variable id for tree T in the hashtable. */
2122
2123 static void
2124 insert_id_for_tree (tree t, int id)
2125 {
2126 void **slot;
2127 struct tree_id finder;
2128 tree_id_t new_pair;
2129
2130 finder.t = t;
2131 slot = htab_find_slot (id_for_tree, &finder, INSERT);
2132 gcc_assert (*slot == NULL);
2133 new_pair = XNEW (struct tree_id);
2134 new_pair->t = t;
2135 new_pair->id = id;
2136 *slot = (void *)new_pair;
2137 }
2138
2139 /* Find the variable id for tree T in ID_FOR_TREE. If T does not
2140 exist in the hash table, return false, otherwise, return true and
2141 set *ID to the id we found. */
2142
2143 static bool
2144 lookup_id_for_tree (tree t, unsigned int *id)
2145 {
2146 tree_id_t pair;
2147 struct tree_id finder;
2148
2149 finder.t = t;
2150 pair = htab_find (id_for_tree, &finder);
2151 if (pair == NULL)
2152 return false;
2153 *id = pair->id;
2154 return true;
2155 }
2156
2157 /* Return a printable name for DECL */
2158
2159 static const char *
2160 alias_get_name (tree decl)
2161 {
2162 const char *res = get_name (decl);
2163 char *temp;
2164 int num_printed = 0;
2165
2166 if (res != NULL)
2167 return res;
2168
2169 res = "NULL";
2170 if (TREE_CODE (decl) == SSA_NAME)
2171 {
2172 num_printed = asprintf (&temp, "%s_%u",
2173 alias_get_name (SSA_NAME_VAR (decl)),
2174 SSA_NAME_VERSION (decl));
2175 }
2176 else if (DECL_P (decl))
2177 {
2178 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2179 }
2180 if (num_printed > 0)
2181 {
2182 res = ggc_strdup (temp);
2183 free (temp);
2184 }
2185 return res;
2186 }
2187
2188 /* Find the variable id for tree T in the hashtable.
2189 If T doesn't exist in the hash table, create an entry for it. */
2190
2191 static unsigned int
2192 get_id_for_tree (tree t)
2193 {
2194 tree_id_t pair;
2195 struct tree_id finder;
2196
2197 finder.t = t;
2198 pair = htab_find (id_for_tree, &finder);
2199 if (pair == NULL)
2200 return create_variable_info_for (t, alias_get_name (t));
2201
2202 return pair->id;
2203 }
2204
2205 /* Get a constraint expression from an SSA_VAR_P node. */
2206
2207 static struct constraint_expr
2208 get_constraint_exp_from_ssa_var (tree t)
2209 {
2210 struct constraint_expr cexpr;
2211
2212 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2213
2214 /* For parameters, get at the points-to set for the actual parm
2215 decl. */
2216 if (TREE_CODE (t) == SSA_NAME
2217 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2218 && default_def (SSA_NAME_VAR (t)) == t)
2219 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2220
2221 cexpr.type = SCALAR;
2222
2223 cexpr.var = get_id_for_tree (t);
2224 /* If we determine the result is "anything", and we know this is readonly,
2225 say it points to readonly memory instead. */
2226 if (cexpr.var == anything_id && TREE_READONLY (t))
2227 {
2228 cexpr.type = ADDRESSOF;
2229 cexpr.var = readonly_id;
2230 }
2231
2232 cexpr.offset = 0;
2233 return cexpr;
2234 }
2235
2236 /* Process a completed constraint T, and add it to the constraint
2237 list. */
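/* Informally, the rewrites performed here before a constraint is
   queued are:

     ANYTHING = ANYTHING   discarded, it adds no information
     &ANYTHING = x         flipped into x = &ANYTHING
     *x = *y               split into tmp = *y; *x = tmp through a
                           temporary, since a single constraint can
                           only express one dereference per side.  */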
2238
2239 static void
2240 process_constraint (constraint_t t)
2241 {
2242 struct constraint_expr rhs = t->rhs;
2243 struct constraint_expr lhs = t->lhs;
2244
2245 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2246 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2247
2248 /* ANYTHING == ANYTHING is pointless. */
2249 if (lhs.var == anything_id && rhs.var == anything_id)
2250 return;
2251
2252 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2253 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2254 {
2255 rhs = t->lhs;
2256 t->lhs = t->rhs;
2257 t->rhs = rhs;
2258 process_constraint (t);
2259 }
2260 /* This can happen in our IR with things like n->a = *p */
2261 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2262 {
2263 /* Split into tmp = *rhs, *lhs = tmp */
2264 tree rhsdecl = get_varinfo (rhs.var)->decl;
2265 tree pointertype = TREE_TYPE (rhsdecl);
2266 tree pointedtotype = TREE_TYPE (pointertype);
2267 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2268 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2269
2270 /* If this is an aggregate of known size, we should have passed
2271 this off to do_structure_copy, and it should have broken it
2272 up. */
2273 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2274 || get_varinfo (rhs.var)->is_unknown_size_var);
2275
2276 process_constraint (new_constraint (tmplhs, rhs));
2277 process_constraint (new_constraint (lhs, tmplhs));
2278 }
2279 else if (rhs.type == ADDRESSOF)
2280 {
2281 varinfo_t vi;
2282 gcc_assert (rhs.offset == 0);
2283
2284 for (vi = get_varinfo (rhs.var); vi != NULL; vi = vi->next)
2285 vi->address_taken = true;
2286
2287 VEC_safe_push (constraint_t, heap, constraints, t);
2288 }
2289 else
2290 {
2291 if (lhs.type != DEREF && rhs.type == DEREF)
2292 get_varinfo (lhs.var)->indirect_target = true;
2293 VEC_safe_push (constraint_t, heap, constraints, t);
2294 }
2295 }
2296
2297
2298 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2299 structure. */
2300
2301 static unsigned HOST_WIDE_INT
2302 bitpos_of_field (const tree fdecl)
2303 {
2304
2305 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2306 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2307 return -1;
2308
2309 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2310 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2311 }
2312
2313
2314 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2315 overlaps with a field at [FIELDPOS, FIELDSIZE] */
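/* For example, a 32 bit access starting at bit 16 overlaps both a
   field occupying bits [0, 32) and one occupying bits [32, 64).  */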
2316
2317 static bool
2318 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2319 const unsigned HOST_WIDE_INT fieldsize,
2320 const unsigned HOST_WIDE_INT accesspos,
2321 const unsigned HOST_WIDE_INT accesssize)
2322 {
2323 if (fieldpos == accesspos && fieldsize == accesssize)
2324 return true;
2325 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2326 return true;
2327 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2328 return true;
2329
2330 return false;
2331 }
2332
2333 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
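/* For example (a sketch): for an access like s.b, where field b lives
   at bit offset 32 within s, the SCALAR constraint computed for s is
   narrowed below to the sub-variable covering bit 32, so the access
   only constrains that field rather than all of s.  */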
2334
2335 static void
2336 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2337 {
2338 tree orig_t = t;
2339 HOST_WIDE_INT bitsize = -1;
2340 HOST_WIDE_INT bitmaxsize = -1;
2341 HOST_WIDE_INT bitpos;
2342 tree forzero;
2343 struct constraint_expr *result;
2344 unsigned int beforelength = VEC_length (ce_s, *results);
2345
2346 /* Some people like to do cute things like take the address of
2347 &0->a.b */
2348 forzero = t;
2349 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2350 forzero = TREE_OPERAND (forzero, 0);
2351
2352 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2353 {
2354 struct constraint_expr temp;
2355
2356 temp.offset = 0;
2357 temp.var = integer_id;
2358 temp.type = SCALAR;
2359 VEC_safe_push (ce_s, heap, *results, &temp);
2360 return;
2361 }
2362
2363 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2364 get_constraint_for (t, results);
2365 result = VEC_last (ce_s, *results);
2366 result->offset = bitpos;
2367
2368 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2369
2370 /* This can also happen due to weird offsetof type macros. */
2371 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2372 result->type = SCALAR;
2373
2374 if (result->type == SCALAR)
2375 {
2376 /* In languages like C, you can access one past the end of an
2377 array. You aren't allowed to dereference it, so we can
2378 ignore this constraint. When we handle pointer subtraction,
2379 we may have to do something cute here. */
2380
2381 if (result->offset < get_varinfo (result->var)->fullsize)
2382 {
2383 /* It's also not true that the constraint will actually start at the
2384 right offset; it may start in some padding. We only care about
2385 setting the constraint to the first actual field it touches, so
2386 walk to find it. */
2387 varinfo_t curr;
2388 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2389 {
2390 if (offset_overlaps_with_access (curr->offset, curr->size,
2391 result->offset, bitmaxsize))
2392 {
2393 result->var = curr->id;
2394 break;
2395 }
2396 }
2397 /* Assert that we found *some* field there; the user shouldn't normally
2398 be accessing *only* padding. The one exception is an access one past
2399 the end of an array embedded in a struct, which can land entirely in
2400 padding, hence the array-ref escape hatch in the assertion below. */
2401 gcc_assert (curr || ref_contains_array_ref (orig_t));
2402 }
2403 else
2404 if (dump_file && (dump_flags & TDF_DETAILS))
2405 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2406
2407 result->offset = 0;
2408 }
2409 }
2410
2411
2412 /* Dereference the constraint expression CONS, and return the result.
2413 DEREF (ADDRESSOF) = SCALAR
2414 DEREF (SCALAR) = DEREF
2415 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2416 This is needed so that we can handle dereferencing DEREF constraints. */
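/* For example, dereferencing the constraint for &x yields the SCALAR
   constraint for x, and dereferencing a DEREF constraint (as for **p)
   first copies it into a temporary t via t = *p and then returns the
   DEREF constraint *t.  */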
2417
2418 static void
2419 do_deref (VEC (ce_s, heap) **constraints)
2420 {
2421 struct constraint_expr *c;
2422 unsigned int i = 0;
2423 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2424 {
2425 if (c->type == SCALAR)
2426 c->type = DEREF;
2427 else if (c->type == ADDRESSOF)
2428 c->type = SCALAR;
2429 else if (c->type == DEREF)
2430 {
2431 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2432 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2433 process_constraint (new_constraint (tmplhs, *c));
2434 c->var = tmplhs.var;
2435 }
2436 else
2437 gcc_unreachable ();
2438 }
2439 }
2440
2441
2442 /* Given a tree T, return the constraint expression for it. */
2443
2444 static void
2445 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2446 {
2447 struct constraint_expr temp;
2448
2449 /* x = integer is all glommed to a single variable, which doesn't
2450 point to anything by itself. That is, of course, unless it is an
2451 integer constant being treated as a pointer, in which case, we
2452 will return that this is really the addressof anything. This
2453 happens below, since it will fall into the default case. The only
2454 case we know something about an integer treated like a pointer is
2455 when it is the NULL pointer, and then we just say it points to
2456 NULL. */
2457 if (TREE_CODE (t) == INTEGER_CST
2458 && !POINTER_TYPE_P (TREE_TYPE (t)))
2459 {
2460 temp.var = integer_id;
2461 temp.type = SCALAR;
2462 temp.offset = 0;
2463 VEC_safe_push (ce_s, heap, *results, &temp);
2464 return;
2465 }
2466 else if (TREE_CODE (t) == INTEGER_CST
2467 && integer_zerop (t))
2468 {
2469 temp.var = nothing_id;
2470 temp.type = ADDRESSOF;
2471 temp.offset = 0;
2472 VEC_safe_push (ce_s, heap, *results, &temp);
2473 return;
2474 }
2475
2476 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2477 {
2478 case tcc_expression:
2479 {
2480 switch (TREE_CODE (t))
2481 {
2482 case ADDR_EXPR:
2483 {
2484 struct constraint_expr *c;
2485 unsigned int i;
2486 tree exp = TREE_OPERAND (t, 0);
2487 tree pttype = TREE_TYPE (TREE_TYPE (t));
2488
2489 get_constraint_for (exp, results);
2490 /* Make sure we capture constraints to all elements
2491 of an array. */
2492 if ((handled_component_p (exp)
2493 && ref_contains_array_ref (exp))
2494 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
2495 {
2496 struct constraint_expr *origrhs;
2497 varinfo_t origvar;
2498 struct constraint_expr tmp;
2499
2500 gcc_assert (VEC_length (ce_s, *results) == 1);
2501 origrhs = VEC_last (ce_s, *results);
2502 tmp = *origrhs;
2503 VEC_pop (ce_s, *results);
2504 origvar = get_varinfo (origrhs->var);
2505 for (; origvar; origvar = origvar->next)
2506 {
2507 tmp.var = origvar->id;
2508 VEC_safe_push (ce_s, heap, *results, &tmp);
2509 }
2510 }
2511 else if (VEC_length (ce_s, *results) == 1
2512 && (AGGREGATE_TYPE_P (pttype)
2513 || TREE_CODE (pttype) == COMPLEX_TYPE))
2514 {
2515 struct constraint_expr *origrhs;
2516 varinfo_t origvar;
2517 struct constraint_expr tmp;
2518
2519 gcc_assert (VEC_length (ce_s, *results) == 1);
2520 origrhs = VEC_last (ce_s, *results);
2521 tmp = *origrhs;
2522 VEC_pop (ce_s, *results);
2523 origvar = get_varinfo (origrhs->var);
2524 for (; origvar; origvar = origvar->next)
2525 {
2526 tmp.var = origvar->id;
2527 VEC_safe_push (ce_s, heap, *results, &tmp);
2528 }
2529 }
2530
2531 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2532 {
2533 if (c->type == DEREF)
2534 c->type = SCALAR;
2535 else
2536 c->type = ADDRESSOF;
2537 }
2538 return;
2539 }
2540 break;
2541 case CALL_EXPR:
2542
2543 /* XXX: In interprocedural mode, if we didn't have the
2544 function body, we would need to add the constraint
2545 *each pointer argument = &ANYTHING. */
2546 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2547 {
2548 varinfo_t vi;
2549 tree heapvar = heapvar_lookup (t);
2550
2551 if (heapvar == NULL)
2552 {
2553 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2554 DECL_EXTERNAL (heapvar) = 1;
2555 if (referenced_vars)
2556 add_referenced_tmp_var (heapvar);
2557 heapvar_insert (t, heapvar);
2558 }
2559
2560 temp.var = create_variable_info_for (heapvar,
2561 alias_get_name (heapvar));
2562
2563 vi = get_varinfo (temp.var);
2564 vi->is_artificial_var = 1;
2565 vi->is_heap_var = 1;
2566 temp.type = ADDRESSOF;
2567 temp.offset = 0;
2568 VEC_safe_push (ce_s, heap, *results, &temp);
2569 return;
2570 }
2571 /* FALLTHRU */
2572 default:
2573 {
2574 temp.type = ADDRESSOF;
2575 temp.var = anything_id;
2576 temp.offset = 0;
2577 VEC_safe_push (ce_s, heap, *results, &temp);
2578 return;
2579 }
2580 }
2581 }
2582 case tcc_reference:
2583 {
2584 switch (TREE_CODE (t))
2585 {
2586 case INDIRECT_REF:
2587 {
2588 get_constraint_for (TREE_OPERAND (t, 0), results);
2589 do_deref (results);
2590 return;
2591 }
2592 case ARRAY_REF:
2593 case ARRAY_RANGE_REF:
2594 case COMPONENT_REF:
2595 get_constraint_for_component_ref (t, results);
2596 return;
2597 default:
2598 {
2599 temp.type = ADDRESSOF;
2600 temp.var = anything_id;
2601 temp.offset = 0;
2602 VEC_safe_push (ce_s, heap, *results, &temp);
2603 return;
2604 }
2605 }
2606 }
2607 case tcc_unary:
2608 {
2609 switch (TREE_CODE (t))
2610 {
2611 case NOP_EXPR:
2612 case CONVERT_EXPR:
2613 case NON_LVALUE_EXPR:
2614 {
2615 tree op = TREE_OPERAND (t, 0);
2616
2617 /* Casts from non-pointers to pointers are bad news for us.
2618 Anything else, we see through. */
2619 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2620 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2621 {
2622 get_constraint_for (op, results);
2623 return;
2624 }
2625
2626 /* FALLTHRU */
2627 }
2628 default:
2629 {
2630 temp.type = ADDRESSOF;
2631 temp.var = anything_id;
2632 temp.offset = 0;
2633 VEC_safe_push (ce_s, heap, *results, &temp);
2634 return;
2635 }
2636 }
2637 }
2638 case tcc_exceptional:
2639 {
2640 switch (TREE_CODE (t))
2641 {
2642 case PHI_NODE:
2643 {
2644 get_constraint_for (PHI_RESULT (t), results);
2645 return;
2646 }
2647 break;
2648 case SSA_NAME:
2649 {
2650 struct constraint_expr temp;
2651 temp = get_constraint_exp_from_ssa_var (t);
2652 VEC_safe_push (ce_s, heap, *results, &temp);
2653 return;
2654 }
2655 break;
2656 default:
2657 {
2658 temp.type = ADDRESSOF;
2659 temp.var = anything_id;
2660 temp.offset = 0;
2661 VEC_safe_push (ce_s, heap, *results, &temp);
2662 return;
2663 }
2664 }
2665 }
2666 case tcc_declaration:
2667 {
2668 struct constraint_expr temp;
2669 temp = get_constraint_exp_from_ssa_var (t);
2670 VEC_safe_push (ce_s, heap, *results, &temp);
2671 return;
2672 }
2673 default:
2674 {
2675 temp.type = ADDRESSOF;
2676 temp.var = anything_id;
2677 temp.offset = 0;
2678 VEC_safe_push (ce_s, heap, *results, &temp);
2679 return;
2680 }
2681 }
2682 }
2683
2684
2685 /* Handle the structure copy case where we have a simple structure copy
2686 between LHS and RHS that is of SIZE (in bits)
2687
2688 For each field of the lhs variable (lhsfield)
2689 For each field of the rhs variable at lhsfield.offset (rhsfield)
2690 add the constraint lhsfield = rhsfield
2691
2692 If we fail due to some kind of type unsafety or other thing we
2693 can't handle, return false. We expect the caller to collapse the
2694 variable in that case. */
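/* A minimal sketch of what this produces: assuming

     struct S { int *a; int *b; } x, y;
     x = y;

   the copy is expanded into the field-wise constraints

     x.a = y.a
     x.b = y.b

   where x.a, x.b, y.a and y.b denote the sub-variables created for the
   fields of x and y.  */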
2695
2696 static bool
2697 do_simple_structure_copy (const struct constraint_expr lhs,
2698 const struct constraint_expr rhs,
2699 const unsigned HOST_WIDE_INT size)
2700 {
2701 varinfo_t p = get_varinfo (lhs.var);
2702 unsigned HOST_WIDE_INT pstart, last;
2703 pstart = p->offset;
2704 last = p->offset + size;
2705 for (; p && p->offset < last; p = p->next)
2706 {
2707 varinfo_t q;
2708 struct constraint_expr templhs = lhs;
2709 struct constraint_expr temprhs = rhs;
2710 unsigned HOST_WIDE_INT fieldoffset;
2711
2712 templhs.var = p->id;
2713 q = get_varinfo (temprhs.var);
2714 fieldoffset = p->offset - pstart;
2715 q = first_vi_for_offset (q, q->offset + fieldoffset);
2716 if (!q)
2717 return false;
2718 temprhs.var = q->id;
2719 process_constraint (new_constraint (templhs, temprhs));
2720 }
2721 return true;
2722 }
2723
2724
2725 /* Handle the structure copy case where we have a structure copy between an
2726 aggregate on the LHS and a dereference of a pointer on the RHS
2727 that is of SIZE (in bits)
2728
2729 For each field of the lhs variable (lhsfield)
2730 rhs.offset = lhsfield->offset
2731 add the constraint lhsfield = rhs
2732 */
2733
2734 static void
2735 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2736 const struct constraint_expr rhs,
2737 const unsigned HOST_WIDE_INT size)
2738 {
2739 varinfo_t p = get_varinfo (lhs.var);
2740 unsigned HOST_WIDE_INT pstart,last;
2741 pstart = p->offset;
2742 last = p->offset + size;
2743
2744 for (; p && p->offset < last; p = p->next)
2745 {
2746 varinfo_t q;
2747 struct constraint_expr templhs = lhs;
2748 struct constraint_expr temprhs = rhs;
2749 unsigned HOST_WIDE_INT fieldoffset;
2750
2751
2752 if (templhs.type == SCALAR)
2753 templhs.var = p->id;
2754 else
2755 templhs.offset = p->offset;
2756
2757 q = get_varinfo (temprhs.var);
2758 fieldoffset = p->offset - pstart;
2759 temprhs.offset += fieldoffset;
2760 process_constraint (new_constraint (templhs, temprhs));
2761 }
2762 }
2763
2764 /* Handle the structure copy case where we have a structure copy
2765 between an aggregate on the RHS and a dereference of a pointer on
2766 the LHS that is of SIZE (in bits)
2767
2768 For each field of the rhs variable (rhsfield)
2769 lhs.offset = rhsfield->offset
2770 add the constraint lhs = rhsfield
2771 */
2772
2773 static void
2774 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2775 const struct constraint_expr rhs,
2776 const unsigned HOST_WIDE_INT size)
2777 {
2778 varinfo_t p = get_varinfo (rhs.var);
2779 unsigned HOST_WIDE_INT pstart,last;
2780 pstart = p->offset;
2781 last = p->offset + size;
2782
2783 for (; p && p->offset < last; p = p->next)
2784 {
2785 varinfo_t q;
2786 struct constraint_expr templhs = lhs;
2787 struct constraint_expr temprhs = rhs;
2788 unsigned HOST_WIDE_INT fieldoffset;
2789
2790
2791 if (temprhs.type == SCALAR)
2792 temprhs.var = p->id;
2793 else
2794 temprhs.offset = p->offset;
2795
2796 q = get_varinfo (templhs.var);
2797 fieldoffset = p->offset - pstart;
2798 templhs.offset += fieldoffset;
2799 process_constraint (new_constraint (templhs, temprhs));
2800 }
2801 }
2802
2803 /* Sometimes, frontends like to give us bad type information. This
2804 function will collapse all the fields from VAR to the end of VAR,
2805 into VAR, so that we treat those fields as a single variable.
2806 We return the variable they were collapsed into. */
2807
2808 static unsigned int
2809 collapse_rest_of_var (unsigned int var)
2810 {
2811 varinfo_t currvar = get_varinfo (var);
2812 varinfo_t field;
2813
2814 for (field = currvar->next; field; field = field->next)
2815 {
2816 if (dump_file)
2817 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2818 field->name, currvar->name);
2819
2820 gcc_assert (!field->collapsed_to);
2821 field->collapsed_to = currvar;
2822 }
2823
2824 currvar->next = NULL;
2825 currvar->size = currvar->fullsize - currvar->offset;
2826
2827 return currvar->id;
2828 }
2829
2830 /* Handle aggregate copies by expanding into copies of the respective
2831 fields of the structures. */
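/* Roughly, the dispatch below is: if both sides are plain SCALAR
   variables, copy field by field (do_simple_structure_copy); if
   exactly one side is a DEREF, offset that side once per field of the
   other (do_rhs_deref_structure_copy / do_lhs_deref_structure_copy);
   and if both sides are DEREFs, as for *p = *q on structures, a
   temporary is introduced and the copy is done in two steps through
   it.  */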
2832
2833 static void
2834 do_structure_copy (tree lhsop, tree rhsop)
2835 {
2836 struct constraint_expr lhs, rhs, tmp;
2837 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
2838 varinfo_t p;
2839 unsigned HOST_WIDE_INT lhssize;
2840 unsigned HOST_WIDE_INT rhssize;
2841
2842 get_constraint_for (lhsop, &lhsc);
2843 get_constraint_for (rhsop, &rhsc);
2844 gcc_assert (VEC_length (ce_s, lhsc) == 1);
2845 gcc_assert (VEC_length (ce_s, rhsc) == 1);
2846 lhs = *(VEC_last (ce_s, lhsc));
2847 rhs = *(VEC_last (ce_s, rhsc));
2848
2849 VEC_free (ce_s, heap, lhsc);
2850 VEC_free (ce_s, heap, rhsc);
2851
2852 /* If we have special var = x, swap it around. */
2853 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2854 {
2855 tmp = lhs;
2856 lhs = rhs;
2857 rhs = tmp;
2858 }
2859
2860 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2861 possible it's something we could handle. However, most cases falling
2862 into this are dealing with transparent unions, which are slightly
2863 weird. */
2864 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2865 {
2866 rhs.type = ADDRESSOF;
2867 rhs.var = anything_id;
2868 }
2869
2870 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2871 that special var. */
2872 if (rhs.var <= integer_id)
2873 {
2874 for (p = get_varinfo (lhs.var); p; p = p->next)
2875 {
2876 struct constraint_expr templhs = lhs;
2877 struct constraint_expr temprhs = rhs;
2878
2879 if (templhs.type == SCALAR )
2880 templhs.var = p->id;
2881 else
2882 templhs.offset += p->offset;
2883 process_constraint (new_constraint (templhs, temprhs));
2884 }
2885 }
2886 else
2887 {
2888 tree rhstype = TREE_TYPE (rhsop);
2889 tree lhstype = TREE_TYPE (lhsop);
2890 tree rhstypesize;
2891 tree lhstypesize;
2892
2893 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
2894 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
2895
2896 /* If we have variably sized types on the rhs or lhs, and a deref
2897 constraint, add the constraint, lhsconstraint = &ANYTHING.
2898 This is conservatively correct because either the lhs is an unknown
2899 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2900 constraint, and every variable it can point to must be unknown sized
2901 anyway, so we don't need to worry about fields at all. */
2902 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2903 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2904 {
2905 rhs.var = anything_id;
2906 rhs.type = ADDRESSOF;
2907 rhs.offset = 0;
2908 process_constraint (new_constraint (lhs, rhs));
2909 return;
2910 }
2911
2912 /* The size only really matters insofar as we don't set more or less of
2913 the variable. If we hit an unknown size var, the size should be the
2914 whole darn thing. */
2915 if (get_varinfo (rhs.var)->is_unknown_size_var)
2916 rhssize = ~0;
2917 else
2918 rhssize = TREE_INT_CST_LOW (rhstypesize);
2919
2920 if (get_varinfo (lhs.var)->is_unknown_size_var)
2921 lhssize = ~0;
2922 else
2923 lhssize = TREE_INT_CST_LOW (lhstypesize);
2924
2925
2926 if (rhs.type == SCALAR && lhs.type == SCALAR)
2927 {
2928 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
2929 {
2930 lhs.var = collapse_rest_of_var (lhs.var);
2931 rhs.var = collapse_rest_of_var (rhs.var);
2932 lhs.offset = 0;
2933 rhs.offset = 0;
2934 lhs.type = SCALAR;
2935 rhs.type = SCALAR;
2936 process_constraint (new_constraint (lhs, rhs));
2937 }
2938 }
2939 else if (lhs.type != DEREF && rhs.type == DEREF)
2940 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2941 else if (lhs.type == DEREF && rhs.type != DEREF)
2942 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2943 else
2944 {
2945 tree pointedtotype = lhstype;
2946 tree tmpvar;
2947
2948 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2949 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2950 do_structure_copy (tmpvar, rhsop);
2951 do_structure_copy (lhsop, tmpvar);
2952 }
2953 }
2954 }
2955
2956 /* Update related alias information kept in AI. This is used when
2957 building name tags, alias sets and deciding grouping heuristics.
2958 STMT is the statement to process. This function also updates
2959 ADDRESSABLE_VARS. */
2960
2961 static void
2962 update_alias_info (tree stmt, struct alias_info *ai)
2963 {
2964 bitmap addr_taken;
2965 use_operand_p use_p;
2966 ssa_op_iter iter;
2967 enum escape_type stmt_escape_type = is_escape_site (stmt, ai);
2968 tree op;
2969
2970 /* Mark all the variables whose address are taken by the statement. */
2971 addr_taken = addresses_taken (stmt);
2972 if (addr_taken)
2973 {
2974 bitmap_ior_into (addressable_vars, addr_taken);
2975
2976 /* If STMT is an escape point, all the addresses taken by it are
2977 call-clobbered. */
2978 if (stmt_escape_type != NO_ESCAPE)
2979 {
2980 bitmap_iterator bi;
2981 unsigned i;
2982
2983 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
2984 {
2985 tree rvar = referenced_var (i);
2986 if (!unmodifiable_var_p (rvar))
2987 mark_call_clobbered (rvar, stmt_escape_type);
2988 }
2989 }
2990 }
2991
2992 /* Process each operand use. If an operand may be aliased, keep
2993 track of how many times it's being used. For pointers, determine
2994 whether they are dereferenced by the statement, or whether their
2995 value escapes, etc. */
2996 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2997 {
2998 tree op, var;
2999 var_ann_t v_ann;
3000 struct ptr_info_def *pi;
3001 bool is_store, is_potential_deref;
3002 unsigned num_uses, num_derefs;
3003
3004 op = USE_FROM_PTR (use_p);
3005
3006 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3007 to the set of addressable variables. */
3008 if (TREE_CODE (op) == ADDR_EXPR)
3009 {
3010 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3011
3012 /* PHI nodes don't have annotations for pinning the set
3013 of addresses taken, so we collect them here.
3014
3015 FIXME, should we allow PHI nodes to have annotations
3016 so that they can be treated like regular statements?
3017 Currently, they are treated as second-class
3018 statements. */
3019 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3020 continue;
3021 }
3022
3023 /* Ignore constants. */
3024 if (TREE_CODE (op) != SSA_NAME)
3025 continue;
3026
3027 var = SSA_NAME_VAR (op);
3028 v_ann = var_ann (var);
3029
3030 /* The base variable of an ssa name must be a GIMPLE register, and thus
3031 it cannot be aliased. */
3032 gcc_assert (!may_be_aliased (var));
3033
3034 /* We are only interested in pointers. */
3035 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3036 continue;
3037
3038 pi = get_ptr_info (op);
3039
3040 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3041 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3042 {
3043 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3044 VARRAY_PUSH_TREE (ai->processed_ptrs, op);
3045 }
3046
3047 /* If STMT is a PHI node, then it will not have pointer
3048 dereferences and it will not be an escape point. */
3049 if (TREE_CODE (stmt) == PHI_NODE)
3050 continue;
3051
3052 /* Determine whether OP is a dereferenced pointer, and if STMT
3053 is an escape point, whether OP escapes. */
3054 count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
3055
3056 /* Handle a corner case involving address expressions of the
3057 form '&PTR->FLD'. The problem with these expressions is that
3058 they do not represent a dereference of PTR. However, if some
3059 other transformation propagates them into an INDIRECT_REF
3060 expression, we end up with '*(&PTR->FLD)' which is folded
3061 into 'PTR->FLD'.
3062
3063 So, if the original code had no other dereferences of PTR,
3064 the aliaser will not create memory tags for it, and when
3065 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3066 memory operations will receive no V_MAY_DEF/VUSE operands.
3067
3068 One solution would be to have count_uses_and_derefs consider
3069 &PTR->FLD a dereference of PTR. But that is wrong, since it
3070 is not really a dereference but an offset calculation.
3071
3072 What we do here is to recognize these special ADDR_EXPR
3073 nodes. Since these expressions are never GIMPLE values (they
3074 are not GIMPLE invariants), they can only appear on the RHS
3075 of an assignment and their base address is always an
3076 INDIRECT_REF expression. */
3077 is_potential_deref = false;
3078 if (TREE_CODE (stmt) == MODIFY_EXPR
3079 && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
3080 && !is_gimple_val (TREE_OPERAND (stmt, 1)))
3081 {
3082 /* If the RHS is of the form &PTR->FLD and PTR == OP, then
3083 this represents a potential dereference of PTR. */
3084 tree rhs = TREE_OPERAND (stmt, 1);
3085 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3086 if (TREE_CODE (base) == INDIRECT_REF
3087 && TREE_OPERAND (base, 0) == op)
3088 is_potential_deref = true;
3089 }
3090
3091 if (num_derefs > 0 || is_potential_deref)
3092 {
3093 /* Mark OP as dereferenced. In a subsequent pass,
3094 dereferenced pointers that point to a set of
3095 variables will be assigned a name tag to alias
3096 all the variables OP points to. */
3097 pi->is_dereferenced = 1;
3098
3099 /* Keep track of how many times we've dereferenced each
3100 pointer. */
3101 NUM_REFERENCES_INC (v_ann);
3102
3103 /* If this is a store operation, mark OP as being
3104 dereferenced to store, otherwise mark it as being
3105 dereferenced to load. */
3106 if (is_store)
3107 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3108 else
3109 bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
3110 }
3111
3112 if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
3113 {
3114 /* If STMT is an escape point and STMT contains at
3115 least one direct use of OP, then the value of OP
3116 escapes and so the pointed-to variables need to
3117 be marked call-clobbered. */
3118 pi->value_escapes_p = 1;
3119 pi->escape_mask |= stmt_escape_type;
3120
3121 /* If the statement makes a function call, assume
3122 that pointer OP will be dereferenced in a store
3123 operation inside the called function. */
3124 if (get_call_expr_in (stmt))
3125 {
3126 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3127 pi->is_dereferenced = 1;
3128 }
3129 }
3130 }
3131
3132 if (TREE_CODE (stmt) == PHI_NODE)
3133 return;
3134
3135 /* Update reference counter for definitions to any
3136 potentially aliased variable. This is used in the alias
3137 grouping heuristics. */
3138 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3139 {
3140 tree var = SSA_NAME_VAR (op);
3141 var_ann_t ann = var_ann (var);
3142 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3143 if (may_be_aliased (var))
3144 NUM_REFERENCES_INC (ann);
3145
3146 }
3147
3148 /* Mark variables in V_MAY_DEF operands as being written to. */
3149 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3150 {
3151 tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
3152 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3153 }
3154 }
3155
3156
3157 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3158 Expressions of the type PTR + CST can be handled in two ways:
3159
3160 1- If the constraint for PTR is ADDRESSOF for a non-structure
3161 variable, then we can use it directly because adding or
3162 subtracting a constant may not alter the original ADDRESSOF
3163 constraint (i.e., pointer arithmetic may not legally go outside
3164 an object's boundaries).
3165
3166 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3167 then if CST is a compile-time constant that can be used as an
3168 offset, we can determine which sub-variable will be pointed-to
3169 by the expression.
3170
3171 Return true if the expression is handled. For any other kind of
3172 expression, return false so that each operand can be added as a
3173 separate constraint by the caller. */
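/* For example (a sketch): given a statement of the form

     p = q + 4;

   where the constraint computed for q is &s (s a structure) and 4 is a
   compile-time constant, the 4 byte offset is converted to a bit
   offset, the sub-variable of s living at that offset is looked up,
   and p is made to point to that sub-variable directly instead of to
   all of s. If q's constraint is not an ADDRESSOF, the constant is
   simply recorded as an offset on the resulting copy constraint.  */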
3174
3175 static bool
3176 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3177 {
3178 tree op0, op1;
3179 struct constraint_expr *c, *c2;
3180 unsigned int i = 0;
3181 unsigned int j = 0;
3182 VEC (ce_s, heap) *temp = NULL;
3183 unsigned int rhsoffset = 0;
3184
3185 if (TREE_CODE (expr) != PLUS_EXPR
3186 && TREE_CODE (expr) != MINUS_EXPR)
3187 return false;
3188
3189 op0 = TREE_OPERAND (expr, 0);
3190 op1 = TREE_OPERAND (expr, 1);
3191
3192 get_constraint_for (op0, &temp);
3193 if (POINTER_TYPE_P (TREE_TYPE (op0))
3194 && TREE_CODE (op1) == INTEGER_CST
3195 && TREE_CODE (expr) == PLUS_EXPR)
3196 {
3197 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3198 }
3199
3200
3201 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3202 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3203 {
3204 if (c2->type == ADDRESSOF && rhsoffset != 0)
3205 {
3206 varinfo_t temp = get_varinfo (c2->var);
3207
3208 /* An access one after the end of an array is valid,
3209 so simply punt on accesses we cannot resolve. */
3210 temp = first_vi_for_offset (temp, rhsoffset);
3211 if (temp == NULL)
3212 continue;
3213 c2->var = temp->id;
3214 c2->offset = 0;
3215 }
3216 else
3217 c2->offset = rhsoffset;
3218 process_constraint (new_constraint (*c, *c2));
3219 }
3220
3221 VEC_free (ce_s, heap, temp);
3222
3223 return true;
3224 }
3225
3226
3227 /* Walk statement T setting up aliasing constraints according to the
3228 references found in T, and queue the resulting constraints via
3229 process_constraint. This function is the main part of the
3230 constraint builder. */
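/* Informally: a pointer copy such as p = q produces the constraint
   p = q; p = &x produces p = &x; p = *q and *p = q produce the
   corresponding DEREF constraints; a PHI node p_3 = PHI <p_1, p_2>
   produces p_3 = p_1 and p_3 = p_2; and aggregate assignments are
   expanded field-wise through do_structure_copy.  */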
3231
3232 static void
3233 find_func_aliases (tree origt)
3234 {
3235 tree t = origt;
3236 VEC(ce_s, heap) *lhsc = NULL;
3237 VEC(ce_s, heap) *rhsc = NULL;
3238 struct constraint_expr *c;
3239
3240 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3241 t = TREE_OPERAND (t, 0);
3242
3243 /* Now build constraints expressions. */
3244 if (TREE_CODE (t) == PHI_NODE)
3245 {
3246 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3247
3248 /* Only care about pointers and structures containing
3249 pointers. */
3250 if (POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (t)))
3251 || TREE_CODE (TREE_TYPE (PHI_RESULT (t))) == COMPLEX_TYPE)
3252 {
3253 int i;
3254 unsigned int j;
3255
3256 /* For a phi node, assign all the arguments to
3257 the result. */
3258 get_constraint_for (PHI_RESULT (t), &lhsc);
3259 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3260 {
3261 tree rhstype;
3262 tree strippedrhs = PHI_ARG_DEF (t, i);
3263
3264 STRIP_NOPS (strippedrhs);
3265 rhstype = TREE_TYPE (strippedrhs);
3266 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3267
3268 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3269 {
3270 struct constraint_expr *c2;
3271 while (VEC_length (ce_s, rhsc) > 0)
3272 {
3273 c2 = VEC_last (ce_s, rhsc);
3274 process_constraint (new_constraint (*c, *c2));
3275 VEC_pop (ce_s, rhsc);
3276 }
3277 }
3278 }
3279 }
3280 }
3281 /* In IPA mode, we need to generate constraints to pass call
3282 arguments through their calls. There are two cases, either a
3283 modify_expr when we are returning a value, or just a plain
3284 call_expr when we are not. */
3285 else if (in_ipa_mode
3286 && ((TREE_CODE (t) == MODIFY_EXPR
3287 && TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR
3288 && !(call_expr_flags (TREE_OPERAND (t, 1))
3289 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3290 || (TREE_CODE (t) == CALL_EXPR
3291 && !(call_expr_flags (t)
3292 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3293 {
3294 tree lhsop;
3295 tree rhsop;
3296 unsigned int varid;
3297 tree arglist;
3298 varinfo_t fi;
3299 int i = 1;
3300 tree decl;
3301 if (TREE_CODE (t) == MODIFY_EXPR)
3302 {
3303 lhsop = TREE_OPERAND (t, 0);
3304 rhsop = TREE_OPERAND (t, 1);
3305 }
3306 else
3307 {
3308 lhsop = NULL;
3309 rhsop = t;
3310 }
3311 decl = get_callee_fndecl (rhsop);
3312
3313 /* If we can directly resolve the function being called, do so.
3314 Otherwise, it must be some sort of indirect expression that
3315 we should still be able to handle. */
3316 if (decl)
3317 {
3318 varid = get_id_for_tree (decl);
3319 }
3320 else
3321 {
3322 decl = TREE_OPERAND (rhsop, 0);
3323 varid = get_id_for_tree (decl);
3324 }
3325
3326 /* Assign all the passed arguments to the appropriate incoming
3327 parameters of the function. */
3328 fi = get_varinfo (varid);
3329 arglist = TREE_OPERAND (rhsop, 1);
3330
3331 for (;arglist; arglist = TREE_CHAIN (arglist))
3332 {
3333 tree arg = TREE_VALUE (arglist);
3334 struct constraint_expr lhs ;
3335 struct constraint_expr *rhsp;
3336
3337 get_constraint_for (arg, &rhsc);
3338 if (TREE_CODE (decl) != FUNCTION_DECL)
3339 {
3340 lhs.type = DEREF;
3341 lhs.var = fi->id;
3342 lhs.offset = i;
3343 }
3344 else
3345 {
3346 lhs.type = SCALAR;
3347 lhs.var = first_vi_for_offset (fi, i)->id;
3348 lhs.offset = 0;
3349 }
3350 while (VEC_length (ce_s, rhsc) != 0)
3351 {
3352 rhsp = VEC_last (ce_s, rhsc);
3353 process_constraint (new_constraint (lhs, *rhsp));
3354 VEC_pop (ce_s, rhsc);
3355 }
3356 i++;
3357 }
3358 /* If we are returning a value, assign it to the result. */
3359 if (lhsop)
3360 {
3361 struct constraint_expr rhs;
3362 struct constraint_expr *lhsp;
3363 unsigned int j = 0;
3364
3365 get_constraint_for (lhsop, &lhsc);
3366 if (TREE_CODE (decl) != FUNCTION_DECL)
3367 {
3368 rhs.type = DEREF;
3369 rhs.var = fi->id;
3370 rhs.offset = i;
3371 }
3372 else
3373 {
3374 rhs.type = SCALAR;
3375 rhs.var = first_vi_for_offset (fi, i)->id;
3376 rhs.offset = 0;
3377 }
3378 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3379 process_constraint (new_constraint (*lhsp, rhs));
3380 }
3381 }
3382 /* Otherwise, just a regular assignment statement. */
3383 else if (TREE_CODE (t) == MODIFY_EXPR)
3384 {
3385 tree lhsop = TREE_OPERAND (t, 0);
3386 tree rhsop = TREE_OPERAND (t, 1);
3387 int i;
3388
3389 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3390 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3391 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3392 || TREE_CODE (TREE_TYPE (rhsop)) == COMPLEX_TYPE))
3393 {
3394 do_structure_copy (lhsop, rhsop);
3395 }
3396 else
3397 {
3398 /* Only care about operations with pointers, structures
3399 containing pointers, dereferences, and call expressions. */
3400 if (POINTER_TYPE_P (TREE_TYPE (lhsop))
3401 || AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3402 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE
3403 || TREE_CODE (rhsop) == CALL_EXPR)
3404 {
3405 get_constraint_for (lhsop, &lhsc);
3406 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3407 {
3408 /* RHS that consist of unary operations,
3409 exceptional types, or bare decls/constants, get
3410 handled directly by get_constraint_for. */
3411 case tcc_reference:
3412 case tcc_declaration:
3413 case tcc_constant:
3414 case tcc_exceptional:
3415 case tcc_expression:
3416 case tcc_unary:
3417 {
3418 unsigned int j;
3419
3420 get_constraint_for (rhsop, &rhsc);
3421 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3422 {
3423 struct constraint_expr *c2;
3424 unsigned int k;
3425
3426 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3427 process_constraint (new_constraint (*c, *c2));
3428 }
3429
3430 }
3431 break;
3432
3433 case tcc_binary:
3434 {
3435 /* For pointer arithmetic of the form
3436 PTR + CST, we can simply use PTR's
3437 constraint because pointer arithmetic is
3438 not allowed to go out of bounds. */
3439 if (handle_ptr_arith (lhsc, rhsop))
3440 break;
3441 }
3442 /* FALLTHRU */
3443
3444 /* Otherwise, walk each operand. Notice that we
3445 can't use the operand interface because we need
3446 to process expressions other than simple operands
3447 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3448 default:
3449 for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
3450 {
3451 tree op = TREE_OPERAND (rhsop, i);
3452 unsigned int j;
3453
3454 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3455 get_constraint_for (op, &rhsc);
3456 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3457 {
3458 struct constraint_expr *c2;
3459 while (VEC_length (ce_s, rhsc) > 0)
3460 {
3461 c2 = VEC_last (ce_s, rhsc);
3462 process_constraint (new_constraint (*c, *c2));
3463 VEC_pop (ce_s, rhsc);
3464 }
3465 }
3466 }
3467 }
3468 }
3469 }
3470 }
3471
3472 /* After promoting variables and computing aliasing we will
3473 need to re-scan most statements. FIXME: Try to minimize the
3474 number of statements re-scanned. It's not really necessary to
3475 re-scan *all* statements. */
3476 mark_stmt_modified (origt);
3477 VEC_free (ce_s, heap, rhsc);
3478 VEC_free (ce_s, heap, lhsc);
3479 }
3480
3481
3482 /* Find the first varinfo in the same variable as START that overlaps with
3483 OFFSET.
3484 Effectively, walk the chain of fields for the variable START to find the
3485 first field that overlaps with OFFSET.
3486 Return NULL if we can't find one. */
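/* For example, if START heads a field list with fields at offsets 0
   and 32, each 32 bits in size, an OFFSET of 40 returns the second
   field, while an OFFSET of 64 or more returns NULL.  */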
3487
3488 static varinfo_t
3489 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3490 {
3491 varinfo_t curr = start;
3492 while (curr)
3493 {
3494 /* We may not find a variable in the field list with the actual
3495 offset when we have glommed a structure to a variable.
3496 In that case, however, offset should still be within the size
3497 of the variable. */
3498 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3499 return curr;
3500 curr = curr->next;
3501 }
3502 return NULL;
3503 }
3504
3505
3506 /* Insert the varinfo FIELD into the field list for BASE, at the front
3507 of the list. */
3508
3509 static void
3510 insert_into_field_list (varinfo_t base, varinfo_t field)
3511 {
3512 varinfo_t prev = base;
3513 varinfo_t curr = base->next;
3514
3515 field->next = curr;
3516 prev->next = field;
3517 }
3518
3519 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3520 offset. */
3521
3522 static void
3523 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3524 {
3525 varinfo_t prev = base;
3526 varinfo_t curr = base->next;
3527
3528 if (curr == NULL)
3529 {
3530 prev->next = field;
3531 field->next = NULL;
3532 }
3533 else
3534 {
3535 while (curr)
3536 {
3537 if (field->offset <= curr->offset)
3538 break;
3539 prev = curr;
3540 curr = curr->next;
3541 }
3542 field->next = prev->next;
3543 prev->next = field;
3544 }
3545 }
3546
3547 /* qsort comparison function for two fieldoff's PA and PB */
3548
3549 static int
3550 fieldoff_compare (const void *pa, const void *pb)
3551 {
3552 const fieldoff_s *foa = (const fieldoff_s *)pa;
3553 const fieldoff_s *fob = (const fieldoff_s *)pb;
3554 HOST_WIDE_INT foasize, fobsize;
3555
3556 if (foa->offset != fob->offset)
3557 return foa->offset - fob->offset;
3558
3559 foasize = TREE_INT_CST_LOW (foa->size);
3560 fobsize = TREE_INT_CST_LOW (fob->size);
3561 return foasize - fobsize;
3562 }
3563
3564 /* Sort a fieldstack according to the field offset and sizes. */
3565 void sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3566 {
3567 qsort (VEC_address (fieldoff_s, fieldstack),
3568 VEC_length (fieldoff_s, fieldstack),
3569 sizeof (fieldoff_s),
3570 fieldoff_compare);
3571 }
3572
3573 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3574 of TYPE onto fieldstack, recording their offsets along the way.
3575 OFFSET is used to keep track of the offset in this entire structure, rather
3576 than just the immediately containing structure. Returns the number
3577 of fields pushed.
3578 HAS_UNION is set to true if we find a union type as a field of
3579 TYPE. */
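/* For example (a sketch, assuming 32 bit pointers):

     struct S { int *a; struct { int *b; int *c; } inner; };

   pushes three entries onto the fieldstack, at bit offsets 0 (a),
   32 (inner.b) and 64 (inner.c), since nested structures are flattened
   recursively rather than pushed as single fields.  */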
3580
3581 int
3582 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3583 HOST_WIDE_INT offset, bool *has_union)
3584 {
3585 tree field;
3586 int count = 0;
3587
3588 if (TREE_CODE (type) == COMPLEX_TYPE)
3589 {
3590 fieldoff_s *real_part, *img_part;
3591 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3592 real_part->type = TREE_TYPE (type);
3593 real_part->size = TYPE_SIZE (TREE_TYPE (type));
3594 real_part->offset = offset;
3595 real_part->decl = NULL_TREE;
3596
3597 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3598 img_part->type = TREE_TYPE (type);
3599 img_part->size = TYPE_SIZE (TREE_TYPE (type));
3600 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
3601 img_part->decl = NULL_TREE;
3602
3603 return 2;
3604 }
3605
3606 if (TREE_CODE (type) == ARRAY_TYPE)
3607 {
3608 tree sz = TYPE_SIZE (type);
3609 tree elsz = TYPE_SIZE (TREE_TYPE (type));
3610 HOST_WIDE_INT nr;
3611 int i;
3612
3613 if (! sz
3614 || ! host_integerp (sz, 1)
3615 || TREE_INT_CST_LOW (sz) == 0
3616 || ! elsz
3617 || ! host_integerp (elsz, 1)
3618 || TREE_INT_CST_LOW (elsz) == 0)
3619 return 0;
3620
3621 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
3622 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
3623 return 0;
3624
3625 for (i = 0; i < nr; ++i)
3626 {
3627 bool push = false;
3628 int pushed = 0;
3629
3630 if (has_union
3631 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
3632 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
3633 *has_union = true;
3634
3635 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
3636 push = true;
3637 else if (!(pushed = push_fields_onto_fieldstack
3638 (TREE_TYPE (type), fieldstack,
3639 offset + i * TREE_INT_CST_LOW (elsz), has_union)))
3640 /* Empty structures may have actual size, like in C++. So
3641 if we didn't push any subfields and the size is
3642 nonzero, push the field onto the stack. */
3643 push = true;
3644
3645 if (push)
3646 {
3647 fieldoff_s *pair;
3648
3649 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3650 pair->type = TREE_TYPE (type);
3651 pair->size = elsz;
3652 pair->decl = NULL_TREE;
3653 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
3654 count++;
3655 }
3656 else
3657 count += pushed;
3658 }
3659
3660 return count;
3661 }
3662
3663 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3664 if (TREE_CODE (field) == FIELD_DECL)
3665 {
3666 bool push = false;
3667 int pushed = 0;
3668
3669 if (has_union
3670 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3671 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3672 *has_union = true;
3673
3674 if (!var_can_have_subvars (field))
3675 push = true;
3676 else if (!(pushed = push_fields_onto_fieldstack
3677 (TREE_TYPE (field), fieldstack,
3678 offset + bitpos_of_field (field), has_union))
3679 && DECL_SIZE (field)
3680 && !integer_zerop (DECL_SIZE (field)))
3681 /* Empty structures may have actual size, like in C++. So
3682 if we didn't push any subfields and the size is
3683 nonzero, push the field onto the stack. */
3684 push = true;
3685
3686 if (push)
3687 {
3688 fieldoff_s *pair;
3689
3690 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3691 pair->type = TREE_TYPE (field);
3692 pair->size = DECL_SIZE (field);
3693 pair->decl = field;
3694 pair->offset = offset + bitpos_of_field (field);
3695 count++;
3696 }
3697 else
3698 count += pushed;
3699 }
3700
3701 return count;
3702 }
3703
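/* Make a constraint from the variable VI to the special ANYTHING
   variable, i.e. record that VI may point to anything.  */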
3704 static void
3705 make_constraint_to_anything (varinfo_t vi)
3706 {
3707 struct constraint_expr lhs, rhs;
3708
3709 lhs.var = vi->id;
3710 lhs.offset = 0;
3711 lhs.type = SCALAR;
3712
3713 rhs.var = anything_id;
3714 rhs.offset = 0;
3715 rhs.type = ADDRESSOF;
3716 process_constraint (new_constraint (lhs, rhs));
3717 }
3718
3719 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3720 if it is a varargs function. */
3721
3722 static unsigned int
3723 count_num_arguments (tree decl, bool *is_varargs)
3724 {
3725 unsigned int i = 0;
3726 tree t;
3727
3728 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3729 t;
3730 t = TREE_CHAIN (t))
3731 {
3732 if (TREE_VALUE (t) == void_type_node)
3733 break;
3734 i++;
3735 }
3736
3737 if (!t)
3738 *is_varargs = true;
3739 return i;
3740 }
3741
3742 /* Create a function info node for DECL, using NAME, and return the index
3743 of the variable we've created for the function. */
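/* For example (a sketch): for a declaration such as

     int *foo (int *a, int *b);

   this creates the variable foo plus the sub-variables foo.arg0 at
   offset 1, foo.arg1 at offset 2 and foo.result at offset 3, so that
   call sites can wire actual arguments and returned values to the
   corresponding offsets within foo.  */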
3744
3745 static unsigned int
3746 create_function_info_for (tree decl, const char *name)
3747 {
3748 unsigned int index = VEC_length (varinfo_t, varmap);
3749 varinfo_t vi;
3750 tree arg;
3751 unsigned int i;
3752 bool is_varargs = false;
3753
3754 /* Create the variable info. */
3755
3756 vi = new_var_info (decl, index, name, index);
3757 vi->decl = decl;
3758 vi->offset = 0;
3759 vi->has_union = 0;
3760 vi->size = 1;
3761 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
3762 insert_id_for_tree (vi->decl, index);
3763 VEC_safe_push (varinfo_t, heap, varmap, vi);
3764
3765 stats.total_vars++;
3766
3767 /* If it's varargs, we don't know how many arguments it has, so we
3768 can't do much.
3769 */
3770 if (is_varargs)
3771 {
3772 vi->fullsize = ~0;
3773 vi->size = ~0;
3774 vi->is_unknown_size_var = true;
3775 return index;
3776 }
3777
3778
3779 arg = DECL_ARGUMENTS (decl);
3780
3781 /* Set up variables for each argument. */
3782 for (i = 1; i < vi->fullsize; i++)
3783 {
3784 varinfo_t argvi;
3785 const char *newname;
3786 char *tempname;
3787 unsigned int newindex;
3788 tree argdecl = decl;
3789
3790 if (arg)
3791 argdecl = arg;
3792
3793 newindex = VEC_length (varinfo_t, varmap);
3794 asprintf (&tempname, "%s.arg%d", name, i-1);
3795 newname = ggc_strdup (tempname);
3796 free (tempname);
3797
3798 argvi = new_var_info (argdecl, newindex, newname, newindex);
3799 argvi->decl = argdecl;
3800 VEC_safe_push (varinfo_t, heap, varmap, argvi);
3801 argvi->offset = i;
3802 argvi->size = 1;
3803 argvi->fullsize = vi->fullsize;
3804 argvi->has_union = false;
3805 insert_into_field_list_sorted (vi, argvi);
3806 stats.total_vars ++;
3807 if (arg)
3808 {
3809 insert_id_for_tree (arg, newindex);
3810 arg = TREE_CHAIN (arg);
3811 }
3812 }
3813
3814 /* Create a variable for the return var. */
3815 if (DECL_RESULT (decl) != NULL
3816 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3817 {
3818 varinfo_t resultvi;
3819 const char *newname;
3820 char *tempname;
3821 unsigned int newindex;
3822 tree resultdecl = decl;
3823
3824 vi->fullsize ++;
3825
3826 if (DECL_RESULT (decl))
3827 resultdecl = DECL_RESULT (decl);
3828
3829 newindex = VEC_length (varinfo_t, varmap);
3830 asprintf (&tempname, "%s.result", name);
3831 newname = ggc_strdup (tempname);
3832 free (tempname);
3833
3834 resultvi = new_var_info (resultdecl, newindex, newname, newindex);
3835 resultvi->decl = resultdecl;
3836 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
3837 resultvi->offset = i;
3838 resultvi->size = 1;
3839 resultvi->fullsize = vi->fullsize;
3840 resultvi->has_union = false;
3841 insert_into_field_list_sorted (vi, resultvi);
3842 stats.total_vars++;
3843 if (DECL_RESULT (decl))
3844 insert_id_for_tree (DECL_RESULT (decl), newindex);
3845 }
3846 return index;
3847 }
3848
3849
3850 /* Return true if FIELDSTACK contains fields that overlap.
3851 FIELDSTACK is assumed to be sorted by offset. */
3852
3853 static bool
3854 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3855 {
3856 fieldoff_s *fo = NULL;
3857 unsigned int i;
3858 HOST_WIDE_INT lastoffset = -1;
3859
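  /* Since FIELDSTACK is sorted by offset, two fields at the same offset
     will be adjacent, so comparing each field's offset with that of the
     previous field is enough.  */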
3860 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3861 {
3862 if (fo->offset == lastoffset)
3863 return true;
3864 lastoffset = fo->offset;
3865 }
3866 return false;
3867 }
3868 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3869 This will also create any varinfo structures necessary for fields
3870 of DECL. */
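/* For illustration: given a hypothetical declaration

     struct { int a; int b; } x;

   with field-sensitivity enabled, the variable created for "x" ends up
   representing the first field, and a second variable named "x.b" is
   created for the remaining field, linked to it through the next-field
   chain.  */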
3871
3872 static unsigned int
3873 create_variable_info_for (tree decl, const char *name)
3874 {
3875 unsigned int index = VEC_length (varinfo_t, varmap);
3876 varinfo_t vi;
3877 tree decltype = TREE_TYPE (decl);
3878 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
3879 bool notokay = false;
3880 bool hasunion;
3881 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
3882 VEC (fieldoff_s,heap) *fieldstack = NULL;
3883
3884 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
3885 return create_function_info_for (decl, name);
3886
3887 hasunion = TREE_CODE (decltype) == UNION_TYPE
3888 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
3889 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
3890 {
3891 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
3892 if (hasunion)
3893 {
3894 VEC_free (fieldoff_s, heap, fieldstack);
3895 notokay = true;
3896 }
3897 }
3898
3899
3900 /* If the variable doesn't have subvars, we may end up needing to
3901 sort the field list and create fake variables for all the
3902 fields. */
3903 vi = new_var_info (decl, index, name, index);
3904 vi->decl = decl;
3905 vi->offset = 0;
3906 vi->has_union = hasunion;
3907 if (!declsize
3908 || TREE_CODE (declsize) != INTEGER_CST
3909 || TREE_CODE (decltype) == UNION_TYPE
3910 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
3911 {
3912 vi->is_unknown_size_var = true;
3913 vi->fullsize = ~0;
3914 vi->size = ~0;
3915 }
3916 else
3917 {
3918 vi->fullsize = TREE_INT_CST_LOW (declsize);
3919 vi->size = vi->fullsize;
3920 }
3921
3922 insert_id_for_tree (vi->decl, index);
3923 VEC_safe_push (varinfo_t, heap, varmap, vi);
3924 if (is_global && (!flag_whole_program || !in_ipa_mode))
3925 make_constraint_to_anything (vi);
3926
3927 stats.total_vars++;
3928 if (use_field_sensitive
3929 && !notokay
3930 && !vi->is_unknown_size_var
3931 && var_can_have_subvars (decl)
3932 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
3933 {
3934 unsigned int newindex = VEC_length (varinfo_t, varmap);
3935 fieldoff_s *fo = NULL;
3936 unsigned int i;
3937
3938 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3939 {
3940 if (! fo->size
3941 || TREE_CODE (fo->size) != INTEGER_CST
3942 || fo->offset < 0)
3943 {
3944 notokay = true;
3945 break;
3946 }
3947 }
3948
3949 /* We can't sort them if we have a field with a variable sized type,
3950 which will make notokay = true. In that case, we are going to return
3951 without creating varinfos for the fields anyway, so sorting them is a
3952 waste to boot. */
3953 if (!notokay)
3954 {
3955 sort_fieldstack (fieldstack);
3956 /* Due to some C++ FE issues, like PR 22488, we might end up with
3957 what appear to be overlapping fields even though they,
3958 in reality, do not overlap. Until the C++ FE is fixed,
3959 we will simply disable field-sensitivity for these cases. */
3960 notokay = check_for_overlaps (fieldstack);
3961 }
3962
3963
3964 if (VEC_length (fieldoff_s, fieldstack) != 0)
3965 fo = VEC_index (fieldoff_s, fieldstack, 0);
3966
3967 if (fo == NULL || notokay)
3968 {
3969 vi->is_unknown_size_var = true;
3970 vi->fullsize = ~0;
3971 vi->size = ~0;
3972 VEC_free (fieldoff_s, heap, fieldstack);
3973 return index;
3974 }
3975
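      /* The first field is represented by VI itself; create separate
         variables only for the remaining fields.  */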
3976 vi->size = TREE_INT_CST_LOW (fo->size);
3977 vi->offset = fo->offset;
3978 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
3979 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
3980 i--)
3981 {
3982 varinfo_t newvi;
3983 const char *newname;
3984 char *tempname;
3985
3986 newindex = VEC_length (varinfo_t, varmap);
3987 if (fo->decl)
3988 asprintf (&tempname, "%s.%s", vi->name, alias_get_name (fo->decl));
3989 else
3990 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC, vi->name, fo->offset);
3991 newname = ggc_strdup (tempname);
3992 free (tempname);
3993 newvi = new_var_info (decl, newindex, newname, newindex);
3994 newvi->offset = fo->offset;
3995 newvi->size = TREE_INT_CST_LOW (fo->size);
3996 newvi->fullsize = vi->fullsize;
3997 insert_into_field_list (vi, newvi);
3998 VEC_safe_push (varinfo_t, heap, varmap, newvi);
3999 if (is_global && (!flag_whole_program || !in_ipa_mode))
4000 make_constraint_to_anything (newvi);
4001
4002 stats.total_vars++;
4003 }
4004 VEC_free (fieldoff_s, heap, fieldstack);
4005 }
4006 return index;
4007 }
4008
4009 /* Print out the points-to solution for VAR to FILE. */
4010
4011 void
4012 dump_solution_for_var (FILE *file, unsigned int var)
4013 {
4014 varinfo_t vi = get_varinfo (var);
4015 unsigned int i;
4016 bitmap_iterator bi;
4017
4018 fprintf (file, "%s = { ", vi->name);
4019 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (vi->node)->solution, 0, i, bi)
4020 {
4021 fprintf (file, "%s ", get_varinfo (i)->name);
4022 }
4023 fprintf (file, "}\n");
4024 }
4025
4026 /* Print the points-to solution for VAR to stdout. */
4027
4028 void
4029 debug_solution_for_var (unsigned int var)
4030 {
4031 dump_solution_for_var (stdout, var);
4032 }
4033
4034
4035 /* Create varinfo structures for all of the variables in the
4036 function for intraprocedural mode. */
4037
4038 static void
4039 intra_create_variable_infos (void)
4040 {
4041 tree t;
4042
4043 /* For each incoming argument ARG, add the constraint ARG = &ANYTHING, or make
4044 it point to a dummy heap variable if ARG is a pointer and flag_argument_noalias > 2. */
4045 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4046 {
4047 struct constraint_expr lhs;
4048 varinfo_t p;
4049
4050 lhs.offset = 0;
4051 lhs.type = SCALAR;
4052 lhs.var = create_variable_info_for (t, alias_get_name (t));
4053
4054 /* A flag_argument_noalias value greater than two means that the incoming
4055 argument cannot alias anything except for itself, so create a HEAP
4056 variable for it to point to. */
4057 if (POINTER_TYPE_P (TREE_TYPE (t))
4058 && flag_argument_noalias > 2)
4059 {
4060 varinfo_t vi;
4061 struct constraint_expr rhs;
4062 tree heapvar = heapvar_lookup (t);
4063 unsigned int id;
4064 if (heapvar == NULL_TREE)
4065 {
4066 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4067 "PARM_NOALIAS");
4068 DECL_EXTERNAL (heapvar) = 1;
4069 if (referenced_vars)
4070 add_referenced_tmp_var (heapvar);
4071 heapvar_insert (t, heapvar);
4072 }
4073 id = create_variable_info_for (heapvar,
4074 alias_get_name (heapvar));
4075 vi = get_varinfo (id);
4076 vi->is_artificial_var = 1;
4077 vi->is_heap_var = 1;
4078 rhs.var = id;
4079 rhs.type = ADDRESSOF;
4080 rhs.offset = 0;
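          /* Make every field of the parameter point to the new heap
             variable.  */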
4081 for (p = get_varinfo (lhs.var); p; p = p->next)
4082 {
4083 struct constraint_expr temp = lhs;
4084 temp.var = p->id;
4085 process_constraint (new_constraint (temp, rhs));
4086 }
4087 }
4088 else
4089 for (p = get_varinfo (lhs.var); p; p = p->next)
4090 make_constraint_to_anything (p);
4091 }
4092 }
4093
4094 /* Set bits in INTO corresponding to the variable uids in solution set
4095 FROM. */
4096
4097 static void
4098 set_uids_in_ptset (bitmap into, bitmap from)
4099 {
4100 unsigned int i;
4101 bitmap_iterator bi;
4102 subvar_t sv;
4103
4104 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4105 {
4106 varinfo_t vi = get_varinfo (i);
4107
4108 /* The only artificial variables that are allowed in a may-alias
4109 set are heap variables. */
4110 if (vi->is_artificial_var && !vi->is_heap_var)
4111 continue;
4112
4113 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4114 {
4115 /* Variables containing unions may need to be converted to their SFTs,
4116 because SFTs can handle unions and constraint variables cannot. */
4117 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4118 bitmap_set_bit (into, DECL_UID (sv->var));
4119 }
4120 else if (TREE_CODE (vi->decl) == VAR_DECL
4121 || TREE_CODE (vi->decl) == PARM_DECL)
4122 {
4123 if (var_can_have_subvars (vi->decl)
4124 && get_subvars_for_var (vi->decl))
4125 {
4126 /* If VI->DECL is an aggregate for which we created
4127 SFTs, add the SFT corresponding to VI->OFFSET. */
4128 tree sft = get_subvar_at (vi->decl, vi->offset);
4129 if (sft)
4130 bitmap_set_bit (into, DECL_UID (sft));
4131 }
4132 else
4133 {
4134 /* Otherwise, just add VI->DECL to the alias set. */
4135 bitmap_set_bit (into, DECL_UID (vi->decl));
4136 }
4137 }
4138 }
4139 }
4140
4141
4142 static bool have_alias_info = false;
4143
4144 /* Given a pointer variable P, fill in its points-to set, or return
4145 false if we can't. */
4146
4147 bool
4148 find_what_p_points_to (tree p)
4149 {
4150 unsigned int id = 0;
4151 tree lookup_p = p;
4152
4153 if (!have_alias_info)
4154 return false;
4155
4156 /* For parameters, get at the points-to set for the actual parm
4157 decl. */
4158 if (TREE_CODE (p) == SSA_NAME
4159 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4160 && default_def (SSA_NAME_VAR (p)) == p)
4161 lookup_p = SSA_NAME_VAR (p);
4162
4163 if (lookup_id_for_tree (lookup_p, &id))
4164 {
4165 varinfo_t vi = get_varinfo (id);
4166
4167 if (vi->is_artificial_var)
4168 return false;
4169
4170 /* See if this is a field or a structure. */
4171 if (vi->size != vi->fullsize)
4172 {
4173 /* Nothing currently asks about structure fields directly,
4174 but when they do, we need code here to hand back the
4175 points-to set. */
4176 if (!var_can_have_subvars (vi->decl)
4177 || get_subvars_for_var (vi->decl) == NULL)
4178 return false;
4179 }
4180 else
4181 {
4182 struct ptr_info_def *pi = get_ptr_info (p);
4183 unsigned int i;
4184 bitmap_iterator bi;
4185
4186 /* This variable may have been collapsed, let's get the real
4187 variable. */
4188 vi = get_varinfo (vi->node);
4189
4190 /* Translate artificial variables into SSA_NAME_PTR_INFO
4191 attributes. */
4192 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4193 {
4194 varinfo_t vi = get_varinfo (i);
4195
4196 if (vi->is_artificial_var)
4197 {
4198 /* FIXME. READONLY should be handled better so that
4199 flow insensitive aliasing can disregard writable
4200 aliases. */
4201 if (vi->id == nothing_id)
4202 pi->pt_null = 1;
4203 else if (vi->id == anything_id)
4204 pi->pt_anything = 1;
4205 else if (vi->id == readonly_id)
4206 pi->pt_anything = 1;
4207 else if (vi->id == integer_id)
4208 pi->pt_anything = 1;
4209 else if (vi->is_heap_var)
4210 pi->pt_global_mem = 1;
4211 }
4212 }
4213
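          /* If the solution includes ANYTHING we have no useful points-to
             information to record, so tell the caller we could not compute
             a set.  */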
4214 if (pi->pt_anything)
4215 return false;
4216
4217 if (!pi->pt_vars)
4218 pi->pt_vars = BITMAP_GGC_ALLOC ();
4219
4220 set_uids_in_ptset (pi->pt_vars, vi->solution);
4221
4222 if (bitmap_empty_p (pi->pt_vars))
4223 pi->pt_vars = NULL;
4224
4225 return true;
4226 }
4227 }
4228
4229 return false;
4230 }
4231
4232
4233
4234 /* Dump points-to information to OUTFILE. */
4235
4236 void
4237 dump_sa_points_to_info (FILE *outfile)
4238 {
4239 unsigned int i;
4240
4241 fprintf (outfile, "\nPoints-to sets\n\n");
4242
4243 if (dump_flags & TDF_STATS)
4244 {
4245 fprintf (outfile, "Stats:\n");
4246 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4247 fprintf (outfile, "Statically unified vars: %d\n",
4248 stats.unified_vars_static);
4249 fprintf (outfile, "Collapsed vars: %d\n", stats.collapsed_vars);
4250 fprintf (outfile, "Dynamically unified vars: %d\n",
4251 stats.unified_vars_dynamic);
4252 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4253 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4254 }
4255
4256 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4257 dump_solution_for_var (outfile, i);
4258 }
4259
4260
4261 /* Debug points-to information to stderr. */
4262
4263 void
4264 debug_sa_points_to_info (void)
4265 {
4266 dump_sa_points_to_info (stderr);
4267 }
4268
4269
4270 /* Initialize the always-existing constraint variables for NULL,
4271 ANYTHING, READONLY, and INTEGER. */
4272
4273 static void
4274 init_base_vars (void)
4275 {
4276 struct constraint_expr lhs, rhs;
4277
4278 /* Create the NULL variable, used to represent that a variable points
4279 to NULL. */
4280 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4281 var_nothing = new_var_info (nothing_tree, 0, "NULL", 0);
4282 insert_id_for_tree (nothing_tree, 0);
4283 var_nothing->is_artificial_var = 1;
4284 var_nothing->offset = 0;
4285 var_nothing->size = ~0;
4286 var_nothing->fullsize = ~0;
4287 var_nothing->is_special_var = 1;
4288 nothing_id = 0;
4289 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4290
4291 /* Create the ANYTHING variable, used to represent that a variable
4292 points to some unknown piece of memory. */
4293 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4294 var_anything = new_var_info (anything_tree, 1, "ANYTHING", 1);
4295 insert_id_for_tree (anything_tree, 1);
4296 var_anything->is_artificial_var = 1;
4297 var_anything->size = ~0;
4298 var_anything->offset = 0;
4299 var_anything->next = NULL;
4300 var_anything->fullsize = ~0;
4301 var_anything->is_special_var = 1;
4302 anything_id = 1;
4303
4304 /* Anything points to anything. This makes deref constraints just
4305 work in the presence of linked lists and other p = *p type loops,
4306 by saying that *ANYTHING = ANYTHING. */
4307 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4308 lhs.type = SCALAR;
4309 lhs.var = anything_id;
4310 lhs.offset = 0;
4311 rhs.type = ADDRESSOF;
4312 rhs.var = anything_id;
4313 rhs.offset = 0;
4314 var_anything->address_taken = true;
4315
4316 /* This specifically does not use process_constraint because
4317 process_constraint ignores all anything = anything constraints, since all
4318 but this one are redundant. */
4319 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4320
4321 /* Create the READONLY variable, used to represent that a variable
4322 points to readonly memory. */
4323 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4324 var_readonly = new_var_info (readonly_tree, 2, "READONLY", 2);
4325 var_readonly->is_artificial_var = 1;
4326 var_readonly->offset = 0;
4327 var_readonly->size = ~0;
4328 var_readonly->fullsize = ~0;
4329 var_readonly->next = NULL;
4330 var_readonly->is_special_var = 1;
4331 insert_id_for_tree (readonly_tree, 2);
4332 readonly_id = 2;
4333 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4334
4335 /* readonly memory points to anything, in order to make deref
4336 easier. In reality, it points to anything the particular
4337 readonly variable can point to, but we don't track this
4338 separately. */
4339 lhs.type = SCALAR;
4340 lhs.var = readonly_id;
4341 lhs.offset = 0;
4342 rhs.type = ADDRESSOF;
4343 rhs.var = anything_id;
4344 rhs.offset = 0;
4345
4346 process_constraint (new_constraint (lhs, rhs));
4347
4348 /* Create the INTEGER variable, used to represent that a variable points
4349 to an INTEGER. */
4350 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4351 var_integer = new_var_info (integer_tree, 3, "INTEGER", 3);
4352 insert_id_for_tree (integer_tree, 3);
4353 var_integer->is_artificial_var = 1;
4354 var_integer->size = ~0;
4355 var_integer->fullsize = ~0;
4356 var_integer->offset = 0;
4357 var_integer->next = NULL;
4358 var_integer->is_special_var = 1;
4359 integer_id = 3;
4360 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4361
4362 /* *INTEGER = ANYTHING, because we don't know where a dereference of a random
4363 integer will point to. */
4364 lhs.type = SCALAR;
4365 lhs.var = integer_id;
4366 lhs.offset = 0;
4367 rhs.type = ADDRESSOF;
4368 rhs.var = anything_id;
4369 rhs.offset = 0;
4370 process_constraint (new_constraint (lhs, rhs));
4371 }
4372
4373 /* Return true if we actually need to solve the constraint graph in order to
4374 get our points-to sets. This is false when, for example, no addresses are
4375 taken other than special vars, or all points-to sets with members already
4376 contain the anything variable and there are no predecessors for other
4377 sets. */
4378
4379 static bool
4380 need_to_solve (void)
4381 {
4382 int i;
4383 varinfo_t v;
4384 bool found_address_taken = false;
4385 bool found_non_anything = false;
4386
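  /* We only need to solve if some non-special variable has its address
     taken, and some variable's solution is either non-empty without
     containing ANYTHING, or empty but has predecessor edges in the
     constraint graph.  */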
4387 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4388 {
4389 if (v->is_special_var)
4390 continue;
4391
4392 if (v->address_taken)
4393 found_address_taken = true;
4394
4395 if (v->solution
4396 && !bitmap_empty_p (v->solution)
4397 && !bitmap_bit_p (v->solution, anything_id))
4398 found_non_anything = true;
4399 else if (bitmap_empty_p (v->solution)
4400 && (VEC_length (constraint_edge_t, graph->preds[v->id]) != 0
4401 || (graph->zero_weight_preds[v->id] && !bitmap_empty_p (graph->zero_weight_preds[v->id]))))
4402 found_non_anything = true;
4403
4404 if (found_address_taken && found_non_anything)
4405 return true;
4406 }
4407
4408 return false;
4409 }
4410
4411 /* Initialize things necessary to perform PTA. */
4412
4413 static void
4414 init_alias_vars (void)
4415 {
4416 bitmap_obstack_initialize (&ptabitmap_obstack);
4417 bitmap_obstack_initialize (&predbitmap_obstack);
4418
4419 constraint_pool = create_alloc_pool ("Constraint pool",
4420 sizeof (struct constraint), 30);
4421 variable_info_pool = create_alloc_pool ("Variable info pool",
4422 sizeof (struct variable_info), 30);
4423 constraint_edge_pool = create_alloc_pool ("Constraint edges",
4424 sizeof (struct constraint_edge), 30);
4425
4426 constraints = VEC_alloc (constraint_t, heap, 8);
4427 varmap = VEC_alloc (varinfo_t, heap, 8);
4428 id_for_tree = htab_create (10, tree_id_hash, tree_id_eq, free);
4429 memset (&stats, 0, sizeof (stats));
4430
4431 init_base_vars ();
4432 }
4433
4434
4435 /* Create points-to sets for the current function. See the comments
4436 at the start of the file for an algorithmic overview. */
4437
4438 void
4439 compute_points_to_sets (struct alias_info *ai)
4440 {
4441 basic_block bb;
4442
4443 timevar_push (TV_TREE_PTA);
4444
4445 init_alias_vars ();
4446
4447 intra_create_variable_infos ();
4448
4449 /* Now walk all statements and derive aliases. */
4450 FOR_EACH_BB (bb)
4451 {
4452 block_stmt_iterator bsi;
4453 tree phi;
4454
4455 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4456 {
4457 if (is_gimple_reg (PHI_RESULT (phi)))
4458 {
4459 find_func_aliases (phi);
4460 /* Update various related attributes like escaped
4461 addresses, pointer dereferences for loads and stores.
4462 This is used when creating name tags and alias
4463 sets. */
4464 update_alias_info (phi, ai);
4465 }
4466 }
4467
4468 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4469 {
4470 tree stmt = bsi_stmt (bsi);
4471 find_func_aliases (stmt);
4472 /* Update various related attributes like escaped
4473 addresses, pointer dereferences for loads and stores.
4474 This is used when creating name tags and alias
4475 sets. */
4476 update_alias_info (stmt, ai);
4477 }
4478 }
4479
4480 build_constraint_graph ();
4481
4482 if (dump_file)
4483 {
4484 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4485 dump_constraints (dump_file);
4486 }
4487
4488 if (need_to_solve ())
4489 {
4490 if (dump_file)
4491 fprintf (dump_file,
4492 "\nCollapsing static cycles and doing variable "
4493 "substitution:\n");
4494
4495 find_and_collapse_graph_cycles (graph, false);
4496 perform_var_substitution (graph);
4497
4498 if (dump_file)
4499 fprintf (dump_file, "\nSolving graph:\n");
4500
4501 solve_graph (graph);
4502 }
4503
4504 if (dump_file)
4505 dump_sa_points_to_info (dump_file);
4506
4507 have_alias_info = true;
4508
4509 timevar_pop (TV_TREE_PTA);
4510 }
4511
4512
4513 /* Delete created points-to sets. */
4514
4515 void
4516 delete_points_to_sets (void)
4517 {
4518 varinfo_t v;
4519 int i;
4520
4521 htab_delete (id_for_tree);
4522 bitmap_obstack_release (&ptabitmap_obstack);
4523 bitmap_obstack_release (&predbitmap_obstack);
4524 VEC_free (constraint_t, heap, constraints);
4525
4526 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4527 {
4528 VEC_free (constraint_edge_t, heap, graph->succs[i]);
4529 VEC_free (constraint_edge_t, heap, graph->preds[i]);
4530 VEC_free (constraint_t, heap, v->complex);
4531 }
4532 free (graph->zero_weight_preds);
4533 free (graph->zero_weight_succs);
4534 free (graph->succs);
4535 free (graph->preds);
4536 free (graph);
4537
4538 VEC_free (varinfo_t, heap, varmap);
4539 free_alloc_pool (variable_info_pool);
4540 free_alloc_pool (constraint_pool);
4541 free_alloc_pool (constraint_edge_pool);
4542
4543 have_alias_info = false;
4544 }
4545
4546 /* Return true if we should execute IPA PTA. */
4547 static bool
4548 gate_ipa_pta (void)
4549 {
4550 return (flag_unit_at_a_time != 0
4551 && flag_ipa_pta
4552 /* Don't bother doing anything if the program has errors. */
4553 && !(errorcount || sorrycount));
4554 }
4555
4556 /* Execute the driver for IPA PTA. */
4557 static unsigned int
4558 ipa_pta_execute (void)
4559 {
4560 struct cgraph_node *node;
4561 in_ipa_mode = 1;
4562 init_alias_heapvars ();
4563 init_alias_vars ();
4564
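  /* Create constraint variables for each function in the compilation
     unit.  Externally visible functions are conservatively made to
     point to anything, since we cannot see all of their callers.  */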
4565 for (node = cgraph_nodes; node; node = node->next)
4566 {
4567 if (!node->analyzed || cgraph_is_master_clone (node))
4568 {
4569 unsigned int varid;
4570
4571 varid = create_function_info_for (node->decl,
4572 cgraph_node_name (node));
4573 if (node->local.externally_visible)
4574 {
4575 varinfo_t fi = get_varinfo (varid);
4576 for (; fi; fi = fi->next)
4577 make_constraint_to_anything (fi);
4578 }
4579 }
4580 }
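  /* Now walk the bodies of all analyzed master-clone functions and
     generate constraints from their statements.  */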
4581 for (node = cgraph_nodes; node; node = node->next)
4582 {
4583 if (node->analyzed && cgraph_is_master_clone (node))
4584 {
4585 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
4586 basic_block bb;
4587 tree old_func_decl = current_function_decl;
4588 if (dump_file)
4589 fprintf (dump_file,
4590 "Generating constraints for %s\n",
4591 cgraph_node_name (node));
4592 push_cfun (cfun);
4593 current_function_decl = node->decl;
4594
4595 FOR_EACH_BB_FN (bb, cfun)
4596 {
4597 block_stmt_iterator bsi;
4598 tree phi;
4599
4600 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4601 {
4602 if (is_gimple_reg (PHI_RESULT (phi)))
4603 {
4604 find_func_aliases (phi);
4605 }
4606 }
4607
4608 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4609 {
4610 tree stmt = bsi_stmt (bsi);
4611 find_func_aliases (stmt);
4612 }
4613 }
4614 current_function_decl = old_func_decl;
4615 pop_cfun ();
4616 }
4617 else
4618 {
4619 /* Make it point to anything. */
4620 }
4621 }
4622
4623 build_constraint_graph ();
4624
4625 if (dump_file)
4626 {
4627 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4628 dump_constraints (dump_file);
4629 }
4630
4631 if (need_to_solve ())
4632 {
4633 if (dump_file)
4634 fprintf (dump_file,
4635 "\nCollapsing static cycles and doing variable "
4636 "substitution:\n");
4637
4638 find_and_collapse_graph_cycles (graph, false);
4639 perform_var_substitution (graph);
4640
4641 if (dump_file)
4642 fprintf (dump_file, "\nSolving graph:\n");
4643
4644 solve_graph (graph);
4645 }
4646
4647 if (dump_file)
4648 dump_sa_points_to_info (dump_file);
4649 in_ipa_mode = 0;
4650 delete_alias_heapvars ();
4651 delete_points_to_sets ();
4652 return 0;
4653 }
4654
4655 struct tree_opt_pass pass_ipa_pta =
4656 {
4657 "pta", /* name */
4658 gate_ipa_pta, /* gate */
4659 ipa_pta_execute, /* execute */
4660 NULL, /* sub */
4661 NULL, /* next */
4662 0, /* static_pass_number */
4663 TV_IPA_PTA, /* tv_id */
4664 0, /* properties_required */
4665 0, /* properties_provided */
4666 0, /* properties_destroyed */
4667 0, /* todo_flags_start */
4668 0, /* todo_flags_finish */
4669 0 /* letter */
4670 };
4671
4672 /* Initialize the heapvar-for-statement mapping. */
4673 void
4674 init_alias_heapvars (void)
4675 {
4676 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq, NULL);
4677 }
4678
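/* Delete the heapvar-for-statement mapping.  */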
4679 void
4680 delete_alias_heapvars (void)
4681 {
4682 htab_delete (heapvar_for_stmt);
4683 }
4684
4685
4686 #include "gt-tree-ssa-structalias.h"