Update Copyright years for files modified in 2011 and/or 2012.
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "tree.h"
32 #include "tree-flow.h"
33 #include "tree-inline.h"
34 #include "diagnostic-core.h"
35 #include "gimple.h"
36 #include "hashtab.h"
37 #include "function.h"
38 #include "cgraph.h"
39 #include "tree-pass.h"
40 #include "alloc-pool.h"
41 #include "splay-tree.h"
42 #include "params.h"
43 #include "cgraph.h"
44 #include "alias.h"
45 #include "pointer-set.h"
46
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
49 points-to sets.
50
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
57 as a consequence.
58
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
62
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
70
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
77
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
80
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
83 order.
84 Each variable for a structure field has
85
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
90
91 Thus,
92 struct f
93 {
94 int a;
95 int b;
96 } foo;
97 int *bar;
98
99 looks like
100
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
104
105
106 In order to solve the system of set constraints, the following is
107 done:
108
109 1. Each constraint variable x has a solution set associated with it,
110 Sol(x).
111
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
118
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
121
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
124
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
128
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
132
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
136
137 8. The process of walking the graph is iterated until no solution
138 sets change.
139
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
143
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
147
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
150
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
153
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
156
157 /* IPA-PTA optimizations possible.
158
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
163
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode. This is
167 also needed to fix the pt_solution_includes_global predicate
168 (and thus ptr_deref_may_alias_global_p).
169
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
177
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
188
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
193
194 We never put function decls in points-to sets so we do not
195 keep the set of called functions for indirect calls.
196
197 And probably more. */
198
/* Whether to track individual fields of structure variables as
   separate constraint variables (field-sensitive analysis).  */
static bool use_field_sensitive = true;

/* Nonzero when computing points-to information for the whole
   translation unit (IPA-PTA) rather than a single function.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;
213
/* Forward declarations.  */
static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Iterate over the set bits of bitmap A like EXECUTE_IF_SET_IN_BITMAP,
   but tolerate a NULL bitmap (in which case the body is skipped).
   Note this expands to an iteration header, not a statement, so it
   cannot use the do { } while (0) idiom.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225
/* Statistics about the constraint solving process, collected for
   dumping.  */
static struct constraint_stats
{
  /* Number of constraint variables created.  */
  unsigned int total_vars;
  /* Number of variables found not to contain pointers.  */
  unsigned int nonpointer_vars;
  /* Variables unified before solving (static unification).  */
  unsigned int unified_vars_static;
  /* Variables unified while the solver was running.  */
  unsigned int unified_vars_dynamic;
  /* Number of solver iterations performed.  */
  unsigned int iterations;
  /* Number of explicit graph edges.  */
  unsigned int num_edges;
  /* Number of implicit (predecessor) graph edges.  */
  unsigned int num_implicit_edges;
  /* Number of points-to sets created.  */
  unsigned int points_to_sets_created;
} stats;
237
/* A constraint variable.  Each program variable, and each field of a
   structure variable, is represented by one of these.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* A link to the variable for the next field in this structure, in
     offset order (see the overview comment at the top of the file).  */
  struct variable_info *next;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  Lazily allocated: NULL
     until first needed (see new_var_info).  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
297
/* Lookup helpers over the field chain of a variable, defined below.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;
306
307
308
309 /* Table of variable info structures for constraint variables.
310 Indexed directly by variable info id. */
311 static vec<varinfo_t> varmap;
312
313 /* Return the varmap element N */
314
315 static inline varinfo_t
316 get_varinfo (unsigned int n)
317 {
318 return varmap[n];
319 }
320
/* Static IDs for the special variables.  These assume the special
   variables are the first ones created, in this exact order
   (note nothing_id == 0).  */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
       escaped_id = 3, nonlocal_id = 4,
       storedanything_id = 5, integer_id = 6 };
325
/* Return a new variable info structure for a variable named NAME
   associated with tree T (NULL_TREE for artificial variables).
   Append it to the vector of variable info structures.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  /* The id is simply the next free index in varmap.  */
  unsigned index = varmap.length ();
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  /* The oldsolution bitmap is allocated lazily during solving.  */
  ret->oldsolution = NULL;
  ret->next = NULL;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}
365
366
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
struct pointer_map_t *call_stmt_vars;

/* Lookup or create the variable for the call statement CALL.  Creates
   a linked pair of variables: "CALLUSED" (offset 0) for the uses and
   "CALLCLOBBERED" (offset 1) for the clobbers, shaped like two
   adjacent one-bit fields of a single two-bit variable.  Returns the
   uses variable.  */

static varinfo_t
get_call_vi (gimple call)
{
  void **slot_p;
  varinfo_t vi, vi2;

  /* Return the existing pair if we created one for CALL already.  */
  slot_p = pointer_map_insert (call_stmt_vars, call);
  if (*slot_p)
    return (varinfo_t) *slot_p;

  vi = new_var_info (NULL_TREE, "CALLUSED");
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;

  /* Chain the clobber variable right after the uses variable.  */
  vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;

  *slot_p = (void *) vi;
  return vi;
}
398
399 /* Lookup the variable for the call statement CALL representing
400 the uses. Returns NULL if there is nothing special about this call. */
401
402 static varinfo_t
403 lookup_call_use_vi (gimple call)
404 {
405 void **slot_p;
406
407 slot_p = pointer_map_contains (call_stmt_vars, call);
408 if (slot_p)
409 return (varinfo_t) *slot_p;
410
411 return NULL;
412 }
413
414 /* Lookup the variable for the call statement CALL representing
415 the clobbers. Returns NULL if there is nothing special about this call. */
416
417 static varinfo_t
418 lookup_call_clobber_vi (gimple call)
419 {
420 varinfo_t uses = lookup_call_use_vi (call);
421 if (!uses)
422 return NULL;
423
424 return uses->next;
425 }
426
/* Lookup or create the variable for the call statement CALL representing
   the uses.  This is the first variable of the pair created by
   get_call_vi.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  return get_call_vi (call);
}
435
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  This is the second variable of the pair created by
   get_call_vi, chained after the uses variable.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  return get_call_vi (call)->next;
}
444
445
/* The three kinds of constraint expression: x, *x and &x.  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.
   NOTE(review): left-shifting a negative value is undefined behavior
   in C/C++; consider expressing the minimum HOST_WIDE_INT value
   without the shift.  */
#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
474
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
/* Allocation pool for constraint structures.  */
static alloc_pool constraint_pool;
491
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph operated on by this pass.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  The REF node for variable I is FIRST_REF_NODE + I.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
574
575 /* Return the representative node for NODE, if NODE has been unioned
576 with another NODE.
577 This function performs path compression along the way to finding
578 the representative. */
579
580 static unsigned int
581 find (unsigned int node)
582 {
583 gcc_assert (node < graph->size);
584 if (graph->rep[node] != node)
585 return graph->rep[node] = find (graph->rep[node]);
586 return node;
587 }
588
589 /* Union the TO and FROM nodes to the TO nodes.
590 Note that at some point in the future, we may want to do
591 union-by-rank, in which case we are going to have to return the
592 node we unified to. */
593
594 static bool
595 unite (unsigned int to, unsigned int from)
596 {
597 gcc_assert (to < graph->size && from < graph->size);
598 if (to != from && graph->rep[from] != to)
599 {
600 graph->rep[from] = to;
601 return true;
602 }
603 return false;
604 }
605
606 /* Create a new constraint consisting of LHS and RHS expressions. */
607
608 static constraint_t
609 new_constraint (const struct constraint_expr lhs,
610 const struct constraint_expr rhs)
611 {
612 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
613 ret->lhs = lhs;
614 ret->rhs = rhs;
615 return ret;
616 }
617
618 /* Print out constraint C to FILE. */
619
620 static void
621 dump_constraint (FILE *file, constraint_t c)
622 {
623 if (c->lhs.type == ADDRESSOF)
624 fprintf (file, "&");
625 else if (c->lhs.type == DEREF)
626 fprintf (file, "*");
627 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
628 if (c->lhs.offset == UNKNOWN_OFFSET)
629 fprintf (file, " + UNKNOWN");
630 else if (c->lhs.offset != 0)
631 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
632 fprintf (file, " = ");
633 if (c->rhs.type == ADDRESSOF)
634 fprintf (file, "&");
635 else if (c->rhs.type == DEREF)
636 fprintf (file, "*");
637 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
638 if (c->rhs.offset == UNKNOWN_OFFSET)
639 fprintf (file, " + UNKNOWN");
640 else if (c->rhs.offset != 0)
641 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
642 }
643
644
/* Forward declarations of the debug dump routines, kept extern so
   they are callable from the debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);

/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}
659
660 /* Print out all constraints to FILE */
661
662 static void
663 dump_constraints (FILE *file, int from)
664 {
665 int i;
666 constraint_t c;
667 for (i = from; constraints.iterate (i, &c); i++)
668 if (c)
669 {
670 dump_constraint (file, c);
671 fprintf (file, "\n");
672 }
673 }
674
/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
682
/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 0; i < graph->size; i++)
    {
      /* Skip nodes unified into another representative.  */
      if (find (i) != i)
	continue;
      /* Nodes at or above FIRST_REF_NODE represent dereferences and
	 are printed with a '*' prefix.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the complex constraints as the node label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 0; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after unification) are not printed.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
755
/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
763
764 /* SOLVER FUNCTIONS
765
766 The solver is a simple worklist solver, that works on the following
767 algorithm:
768
769 sbitmap changed_nodes = all zeroes;
770 changed_count = 0;
771 For each node that is not already collapsed:
772 changed_count++;
773 set bit in changed nodes
774
775 while (changed_count > 0)
776 {
777 compute topological ordering for constraint graph
778
779 find and collapse cycles in the constraint graph (updating
780 changed if necessary)
781
782 for each node (n) in the graph in topological order:
783 changed_count--;
784
785 Process each complex constraint associated with the node,
786 updating changed if necessary.
787
788 For each outgoing edge from n, propagate the solution from n to
789 the destination of the edge, updating changed as necessary.
790
791 } */
792
793 /* Return true if two constraint expressions A and B are equal. */
794
795 static bool
796 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
797 {
798 return a.type == b.type && a.var == b.var && a.offset == b.offset;
799 }
800
801 /* Return true if constraint expression A is less than constraint expression
802 B. This is just arbitrary, but consistent, in order to give them an
803 ordering. */
804
805 static bool
806 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
807 {
808 if (a.type == b.type)
809 {
810 if (a.var == b.var)
811 return a.offset < b.offset;
812 else
813 return a.var < b.var;
814 }
815 else
816 return a.type < b.type;
817 }
818
819 /* Return true if constraint A is less than constraint B. This is just
820 arbitrary, but consistent, in order to give them an ordering. */
821
822 static bool
823 constraint_less (const constraint_t &a, const constraint_t &b)
824 {
825 if (constraint_expr_less (a->lhs, b->lhs))
826 return true;
827 else if (constraint_expr_less (b->lhs, a->lhs))
828 return false;
829 else
830 return constraint_expr_less (a->rhs, b->rhs);
831 }
832
833 /* Return true if two constraints A and B are equal. */
834
835 static bool
836 constraint_equal (struct constraint a, struct constraint b)
837 {
838 return constraint_expr_equal (a.lhs, b.lhs)
839 && constraint_expr_equal (a.rhs, b.rhs);
840 }
841
842
843 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
844
845 static constraint_t
846 constraint_vec_find (vec<constraint_t> vec,
847 struct constraint lookfor)
848 {
849 unsigned int place;
850 constraint_t found;
851
852 if (!vec.exists ())
853 return NULL;
854
855 place = vec.lower_bound (&lookfor, constraint_less);
856 if (place >= vec.length ())
857 return NULL;
858 found = vec[place];
859 if (!constraint_equal (*found, lookfor))
860 return NULL;
861 return found;
862 }
863
864 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
865
866 static void
867 constraint_set_union (vec<constraint_t> *to,
868 vec<constraint_t> *from)
869 {
870 int i;
871 constraint_t c;
872
873 FOR_EACH_VEC_ELT (*from, i, c)
874 {
875 if (constraint_vec_find (*to, *c) == NULL)
876 {
877 unsigned int place = to->lower_bound (c, constraint_less);
878 to->safe_insert (place, c);
879 }
880 }
881 }
882
/* Expands the solution in SET to all sub-fields of variables included.
   Union the expanded result into RESULT.  */

static void
solution_set_expand (bitmap result, bitmap set)
{
  bitmap_iterator bi;
  bitmap vars = NULL;
  unsigned j;

  /* In a first pass record all variables we need to add all
     sub-fields of.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      /* Map the field back to its base variable, whose field chain
	 covers all sub-fields.  */
      v = lookup_vi_for_tree (v->decl);
      /* Allocate the scratch bitmap only if we actually need it.  */
      if (vars == NULL)
	vars = BITMAP_ALLOC (NULL);
      bitmap_set_bit (vars, v->id);
    }

  /* In the second pass now do the addition to the solution and
     to speed up solving add it to the delta as well.  */
  if (vars != NULL)
    {
      EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
	{
	  varinfo_t v = get_varinfo (j);
	  /* Walk the field chain, adding every sub-field.  */
	  for (; v != NULL; v = v->next)
	    bitmap_set_bit (result, v->id);
	}
      BITMAP_FREE (vars);
    }
}
920
921 /* Take a solution set SET, add OFFSET to each member of the set, and
922 overwrite SET with the result when done. */
923
924 static void
925 solution_set_add (bitmap set, HOST_WIDE_INT offset)
926 {
927 bitmap result = BITMAP_ALLOC (&iteration_obstack);
928 unsigned int i;
929 bitmap_iterator bi;
930
931 /* If the offset is unknown we have to expand the solution to
932 all subfields. */
933 if (offset == UNKNOWN_OFFSET)
934 {
935 solution_set_expand (set, set);
936 return;
937 }
938
939 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
940 {
941 varinfo_t vi = get_varinfo (i);
942
943 /* If this is a variable with just one field just set its bit
944 in the result. */
945 if (vi->is_artificial_var
946 || vi->is_unknown_size_var
947 || vi->is_full_var)
948 bitmap_set_bit (result, i);
949 else
950 {
951 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
952
953 /* If the offset makes the pointer point to before the
954 variable use offset zero for the field lookup. */
955 if (offset < 0
956 && fieldoffset > vi->offset)
957 fieldoffset = 0;
958
959 if (offset != 0)
960 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
961
962 bitmap_set_bit (result, vi->id);
963 /* If the result is not exactly at fieldoffset include the next
964 field as well. See get_constraint_for_ptr_offset for more
965 rationale. */
966 if (vi->offset != fieldoffset
967 && vi->next != NULL)
968 bitmap_set_bit (result, vi->next->id);
969 }
970 }
971
972 bitmap_copy (set, result);
973 BITMAP_FREE (result);
974 }
975
976 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
977 process. */
978
979 static bool
980 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
981 {
982 if (inc == 0)
983 return bitmap_ior_into (to, from);
984 else
985 {
986 bitmap tmp;
987 bool res;
988
989 tmp = BITMAP_ALLOC (&iteration_obstack);
990 bitmap_copy (tmp, from);
991 solution_set_add (tmp, inc);
992 res = bitmap_ior_into (to, tmp);
993 BITMAP_FREE (tmp);
994 return res;
995 }
996 }
997
998 /* Insert constraint C into the list of complex constraints for graph
999 node VAR. */
1000
1001 static void
1002 insert_into_complex (constraint_graph_t graph,
1003 unsigned int var, constraint_t c)
1004 {
1005 vec<constraint_t> complex = graph->complex[var];
1006 unsigned int place = complex.lower_bound (c, constraint_less);
1007
1008 /* Only insert constraints that do not already exist. */
1009 if (place >= complex.length ()
1010 || !constraint_equal (*c, *complex[place]))
1011 graph->complex[var].safe_insert (place, c);
1012 }
1013
1014
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  FROM must already have been
   unified into TO.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Merge the (sorted, duplicate-free) vectors and release the
     now-unused FROM vector.  */
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  graph->complex[from].release ();
}
1044
1045
1046 /* Remove edges involving NODE from GRAPH. */
1047
1048 static void
1049 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1050 {
1051 if (graph->succs[node])
1052 BITMAP_FREE (graph->succs[node]);
1053 }
1054
1055 /* Merge GRAPH nodes FROM and TO into node TO. */
1056
1057 static void
1058 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1059 unsigned int from)
1060 {
1061 if (graph->indirect_cycles[from] != -1)
1062 {
1063 /* If we have indirect cycles with the from node, and we have
1064 none on the to node, the to node has indirect cycles from the
1065 from node now that they are unified.
1066 If indirect cycles exist on both, unify the nodes that they
1067 are in a cycle with, since we know they are in a cycle with
1068 each other. */
1069 if (graph->indirect_cycles[to] == -1)
1070 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1071 }
1072
1073 /* Merge all the successor edges. */
1074 if (graph->succs[from])
1075 {
1076 if (!graph->succs[to])
1077 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1078 bitmap_ior_into (graph->succs[to],
1079 graph->succs[from]);
1080 }
1081
1082 clear_edges_for_node (graph, from);
1083 }
1084
1085
1086 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1087 it doesn't exist in the graph already. */
1088
1089 static void
1090 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1091 unsigned int from)
1092 {
1093 if (to == from)
1094 return;
1095
1096 if (!graph->implicit_preds[to])
1097 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1098
1099 if (bitmap_set_bit (graph->implicit_preds[to], from))
1100 stats.num_implicit_edges++;
1101 }
1102
/* Record FROM as a predecessor of TO in GRAPH, allocating TO's
   predecessor bitmap on first use.  Unlike add_graph_edge, nothing is
   returned and no statistics are kept for predecessor edges.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1115
1116 /* Add a graph edge to GRAPH, going from FROM to TO if
1117 it doesn't exist in the graph already.
1118 Return false if the edge already existed, true otherwise. */
1119
1120 static bool
1121 add_graph_edge (constraint_graph_t graph, unsigned int to,
1122 unsigned int from)
1123 {
1124 if (to == from)
1125 {
1126 return false;
1127 }
1128 else
1129 {
1130 bool r = false;
1131
1132 if (!graph->succs[from])
1133 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1134 if (bitmap_set_bit (graph->succs[from], to))
1135 {
1136 r = true;
1137 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1138 stats.num_edges++;
1139 }
1140 return r;
1141 }
1142 }
1143
1144
1145 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
1146
1147 static bool
1148 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1149 unsigned int dest)
1150 {
1151 return (graph->succs[dest]
1152 && bitmap_bit_p (graph->succs[dest], src));
1153 }
1154
1155 /* Initialize the constraint graph structure to contain SIZE nodes. */
1156
1157 static void
1158 init_graph (unsigned int size)
1159 {
1160 unsigned int j;
1161
1162 graph = XCNEW (struct constraint_graph);
1163 graph->size = size;
1164 graph->succs = XCNEWVEC (bitmap, graph->size);
1165 graph->indirect_cycles = XNEWVEC (int, graph->size);
1166 graph->rep = XNEWVEC (unsigned int, graph->size);
1167 /* ??? Macros do not support template types with multiple arguments,
1168 so we use a typedef to work around it. */
1169 typedef vec<constraint_t> vec_constraint_t_heap;
1170 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1171 graph->pe = XCNEWVEC (unsigned int, graph->size);
1172 graph->pe_rep = XNEWVEC (int, graph->size);
1173
1174 for (j = 0; j < graph->size; j++)
1175 {
1176 graph->rep[j] = j;
1177 graph->pe_rep[j] = -1;
1178 graph->indirect_cycles[j] = -1;
1179 }
1180 }
1181
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the side arrays used by offline variable substitution.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Classify each constraint and record the corresponding edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* Any other dereference form makes x non-direct.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* For a field of a decl, every sibling field of the
		 decl is affected as well.  */
	      v = lookup_vi_for_tree (v->decl);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = v->next;
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* An offsetted copy makes the affected variable non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1284
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      /* Work on the representative nodes.  */
      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node of y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed x's solution directly instead of adding an
	     edge.  */
	  gcc_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* Simple copy x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1344
1345
1346 /* Changed variables on the last iteration. */
1347 static bitmap changed;
1348
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already reached by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has been completely identified.  */
  sbitmap deleted;
  /* Per-node DFS number; also holds the running "lowlink" value
     during visitation.  */
  unsigned int *dfs;
  /* Per-node representative mapping, collapsed as SCCs are found
     (used by condense_visit/label_visit; scc_visit uses find ()
     instead).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of non-root candidate SCC members (Nuutila's variant).  */
  vec<unsigned> scc_stack;
};
1360
1361
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  /* Hand N its DFS number; si->dfs[n] doubles as N's running
     lowlink value below.  */
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE do not denote graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      /* Skip nodes whose SCC is already fully determined.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);
      /* Whether or not W was just visited, fold its (possibly
	 updated) lowlink into N's.  This block intentionally runs
	 unconditionally.  */
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of an SCC; collect its members off the stack.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* The lowest-numbered member represents the SCC; it must
	     be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle instead of being
		     unified directly.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N is not an SCC root yet; keep it for a later root.  */
    si->scc_stack.safe_push (n);
}
1452
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move FROM's graph edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_bit_p (changed, from))
    {
      bitmap_clear_bit (changed, from);
      bitmap_set_bit (changed, to);
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution bitmaps are no longer needed.  */
      BITMAP_FREE (get_varinfo (from)->solution);
      if (get_varinfo (from)->oldsolution)
	BITMAP_FREE (get_varinfo (from)->oldsolution);

      /* TO's old solution is stale after the merge once iteration
	 has started; drop it so it gets recomputed.  */
      if (stats.iterations > 0
	  && get_varinfo (to)->oldsolution)
	BITMAP_FREE (get_varinfo (to)->oldsolution);
    }
  /* Remove any self edge on TO the merge may have created.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1509
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse* (filled by topo_visit in DFS postorder, so successors
     appear before their predecessors).  */
  vec<unsigned> topo_order;
};
1520
1521
1522 /* Initialize and return a topological info structure. */
1523
1524 static struct topo_info *
1525 init_topo_info (void)
1526 {
1527 size_t size = graph->size;
1528 struct topo_info *ti = XNEW (struct topo_info);
1529 ti->visited = sbitmap_alloc (size);
1530 bitmap_clear (ti->visited);
1531 ti->topo_order.create (1);
1532 return ti;
1533 }
1534
1535
1536 /* Free the topological sort info pointed to by TI. */
1537
1538 static void
1539 free_topo_info (struct topo_info *ti)
1540 {
1541 sbitmap_free (ti->visited);
1542 ti->topo_order.release ();
1543 free (ti);
1544 }
1545
1546 /* Visit the graph in topological order, and store the order in the
1547 topo_info structure. */
1548
1549 static void
1550 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1551 unsigned int n)
1552 {
1553 bitmap_iterator bi;
1554 unsigned int j;
1555
1556 bitmap_set_bit (ti->visited, n);
1557
1558 if (graph->succs[n])
1559 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1560 {
1561 if (!bitmap_bit_p (ti->visited, j))
1562 topo_visit (graph, ti, j);
1563 }
1564
1565 ti->topo_order.safe_push (n);
1566 }
1567
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at with offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      /* Full variables have no sub-fields to resolve.  */
      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the sub-fields of V starting at FIELDOFFSET.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1655
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at with offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      /* Full variables have no sub-fields to resolve.  */
      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the sub-fields of V starting at FIELDOFFSET.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1753
1754 /* Handle a non-simple (simple meaning requires no iteration),
1755 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1756
1757 static void
1758 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1759 {
1760 if (c->lhs.type == DEREF)
1761 {
1762 if (c->rhs.type == ADDRESSOF)
1763 {
1764 gcc_unreachable();
1765 }
1766 else
1767 {
1768 /* *x = y */
1769 do_ds_constraint (c, delta);
1770 }
1771 }
1772 else if (c->rhs.type == DEREF)
1773 {
1774 /* x = *y */
1775 if (!(get_varinfo (c->lhs.var)->is_special_var))
1776 do_sd_constraint (graph, c, delta);
1777 }
1778 else
1779 {
1780 bitmap tmp;
1781 bitmap solution;
1782 bool flag = false;
1783
1784 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1785 solution = get_varinfo (c->rhs.var)->solution;
1786 tmp = get_varinfo (c->lhs.var)->solution;
1787
1788 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1789
1790 if (flag)
1791 {
1792 get_varinfo (c->lhs.var)->solution = tmp;
1793 bitmap_set_bit (changed, c->lhs.var);
1794 }
1795 }
1796 }
1797
1798 /* Initialize and return a new SCC info structure. */
1799
1800 static struct scc_info *
1801 init_scc_info (size_t size)
1802 {
1803 struct scc_info *si = XNEW (struct scc_info);
1804 size_t i;
1805
1806 si->current_index = 0;
1807 si->visited = sbitmap_alloc (size);
1808 bitmap_clear (si->visited);
1809 si->deleted = sbitmap_alloc (size);
1810 bitmap_clear (si->deleted);
1811 si->node_mapping = XNEWVEC (unsigned int, size);
1812 si->dfs = XCNEWVEC (unsigned int, size);
1813
1814 for (i = 0; i < size; i++)
1815 si->node_mapping[i] = i;
1816
1817 si->scc_stack.create (1);
1818 return si;
1819 }
1820
1821 /* Free an SCC info structure pointed to by SI */
1822
1823 static void
1824 free_scc_info (struct scc_info *si)
1825 {
1826 sbitmap_free (si->visited);
1827 sbitmap_free (si->deleted);
1828 free (si->node_mapping);
1829 free (si->dfs);
1830 si->scc_stack.release ();
1831 free (si);
1832 }
1833
1834
1835 /* Find indirect cycles in GRAPH that occur, using strongly connected
1836 components, and note them in the indirect cycles map.
1837
1838 This technique comes from Ben Hardekopf and Calvin Lin,
1839 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1840 Lines of Code", submitted to PLDI 2007. */
1841
1842 static void
1843 find_indirect_cycles (constraint_graph_t graph)
1844 {
1845 unsigned int i;
1846 unsigned int size = graph->size;
1847 struct scc_info *si = init_scc_info (size);
1848
1849 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1850 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1851 scc_visit (graph, si, i);
1852
1853 free_scc_info (si);
1854 }
1855
1856 /* Compute a topological ordering for GRAPH, and store the result in the
1857 topo_info structure TI. */
1858
1859 static void
1860 compute_topo_order (constraint_graph_t graph,
1861 struct topo_info *ti)
1862 {
1863 unsigned int i;
1864 unsigned int size = graph->size;
1865
1866 for (i = 0; i != size; ++i)
1867 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1868 topo_visit (graph, ti, i);
1869 }
1870
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS, used as the hashtable hash.  */
  hashval_t hashcode;
  /* The id assigned to this set of labels.  */
  unsigned int equivalence_class;
  /* The set of labels this class stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1881
1882 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1883 classes. */
1884 static htab_t pointer_equiv_class_table;
1885
1886 /* A hashtable for mapping a bitmap of labels->location equivalence
1887 classes. */
1888 static htab_t location_equiv_class_table;
1889
1890 /* Hash function for a equiv_class_label_t */
1891
1892 static hashval_t
1893 equiv_class_label_hash (const void *p)
1894 {
1895 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1896 return ecl->hashcode;
1897 }
1898
1899 /* Equality function for two equiv_class_label_t's. */
1900
1901 static int
1902 equiv_class_label_eq (const void *p1, const void *p2)
1903 {
1904 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1905 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1906 return (eql1->hashcode == eql2->hashcode
1907 && bitmap_equal_p (eql1->labels, eql2->labels));
1908 }
1909
1910 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1911 contains. */
1912
1913 static unsigned int
1914 equiv_class_lookup (htab_t table, bitmap labels)
1915 {
1916 void **slot;
1917 struct equiv_class_label ecl;
1918
1919 ecl.labels = labels;
1920 ecl.hashcode = bitmap_hash (labels);
1921
1922 slot = htab_find_slot_with_hash (table, &ecl,
1923 ecl.hashcode, NO_INSERT);
1924 if (!slot)
1925 return 0;
1926 else
1927 return ((equiv_class_label_t) *slot)->equivalence_class;
1928 }
1929
1930
1931 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1932 to TABLE. */
1933
1934 static void
1935 equiv_class_add (htab_t table, unsigned int equivalence_class,
1936 bitmap labels)
1937 {
1938 void **slot;
1939 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1940
1941 ecl->labels = labels;
1942 ecl->equivalence_class = equivalence_class;
1943 ecl->hashcode = bitmap_hash (labels);
1944
1945 slot = htab_find_slot_with_hash (table, ecl,
1946 ecl->hashcode, INSERT);
1947 gcc_assert (!*slot);
1948 *slot = (void *) ecl;
1949 }
1950
1951 /* Perform offline variable substitution.
1952
1953 This is a worst case quadratic time way of identifying variables
1954 that must have equivalent points-to sets, including those caused by
1955 static cycles, and single entry subgraphs, in the constraint graph.
1956
1957 The technique is described in "Exploiting Pointer and Location
1958 Equivalence to Optimize Pointer Analysis. In the 14th International
1959 Static Analysis Symposium (SAS), August 2007." It is known as the
1960 "HU" algorithm, and is equivalent to value numbering the collapsed
1961 constraint graph including evaluating unions.
1962
1963 The general method of finding equivalence classes is as follows:
1964 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1965 Initialize all non-REF nodes to be direct nodes.
1966 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1967 variable}
1968 For each constraint containing the dereference, we also do the same
1969 thing.
1970
1971 We then compute SCC's in the graph and unify nodes in the same SCC,
1972 including pts sets.
1973
1974 For each non-collapsed node x:
1975 Visit all unvisited explicit incoming edges.
1976 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1977 where y->x.
1978 Lookup the equivalence class for pts(x).
1979 If we found one, equivalence_class(x) = found class.
1980 Otherwise, equivalence_class(x) = new class, and new_class is
1981 added to the lookup table.
1982
1983 All direct nodes with the same equivalence class can be replaced
1984 with a single representative node.
1985 All unlabeled nodes (label == 0) are not pointers and all edges
1986 involving them can be eliminated.
1987 We perform these optimizations during rewrite_constraints
1988
1989 In addition to pointer equivalence class finding, we also perform
1990 location equivalence class finding. This is the set of variables
1991 that always appear together in points-to sets. We use this to
1992 compress the size of the points-to sets. */
1993
1994 /* Current maximum pointer equivalence class id. */
1995 static int pointer_equiv_class;
1996
1997 /* Current maximum location equivalence class id. */
1998 static int location_equiv_class;
1999
/* Recursive routine to find strongly connected components in GRAPH,
   and label it's nodes with DFS numbers.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  /* Hand N its DFS number; si->dfs[n] doubles as the running
     lowlink value below.  */
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      /* Skip nodes whose SCC is fully determined already.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);
      /* Whether or not W was just visited, fold its (possibly
	 updated) lowlink into N's.  This block intentionally runs
	 unconditionally.  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);
      /* Same unconditional lowlink update as for the explicit
	 predecessors above.  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N roots an SCC; pop its members and merge them into N.  */
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* A single indirect member makes the whole SCC indirect.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N is not an SCC root yet; keep it for a later root.  */
    si->scc_stack.safe_push (n);
}
2094
/* Label pointer equivalences.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_set_bit (si->visited, n);

  if (!graph->points_to[n])
    graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);

  /* Label and union our incoming edges's points to sets.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      /* Predecessors must be labeled before N.  */
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges (label 0 denotes a non-pointer node).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
    }
  /* Indirect nodes get fresh variables.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);

  /* Reuse the equivalence class of an identical points-to set, or
     assign a fresh one; an empty set keeps label 0 (non-pointer).  */
  if (!bitmap_empty_p (graph->points_to[n]))
    {
      unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
					       graph->points_to[n]);
      if (!label)
	{
	  label = pointer_equiv_class++;
	  equiv_class_add (pointer_equiv_class_table,
			   label, graph->points_to[n]);
	}
      graph->pointer_label[n] = label;
    }
}
2138
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Runs the condensation (SCC discovery), pointer-equivalence labeling
   and location-equivalence labeling passes over GRAPH, then removes
   edges of variables proven to be non-pointers.  Returns the SCC info
   structure, which the caller later releases with
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Tables map equivalence-class labels to the bitmap sets that
     define them; freed wholesale in free_var_substitution_info.  */
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  /* Label 0 is reserved to mean "non-pointer", so start classes at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;
      unsigned int label;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      label = equiv_class_lookup (location_equiv_class_table,
				  pointed_by);
      if (label == 0)
	{
	  label = location_equiv_class++;
	  equiv_class_add (location_equiv_class_table,
			   label, pointed_by);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The set already exists in the table; this copy is unneeded.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = label;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = bitmap_bit_p (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence classes for %s node id %d:%s are pointer: %d"
		 ", location:%d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->pointer_label[si->node_mapping[i]],
		 graph->loc_label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means the points-to set was empty, i.e. the
	 variable provably points to nothing.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2244
/* Free information that was only necessary for variable
   substitution.

   Releases SI (from perform_var_substitution), the per-node label and
   points-to arrays of the global constraint GRAPH, the two
   equivalence-class hash tables, and the iteration obstack.  After
   this, GRAPH no longer carries substitution-phase data.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  /* The tables own their bitmap-set keys (freed via the htab's
     `free' callback registered at creation).  */
  htab_delete (pointer_equiv_class_table);
  htab_delete (location_equiv_class_table);
  bitmap_obstack_release (&iteration_obstack);
}
2262
2263 /* Return an existing node that is equivalent to NODE, which has
2264 equivalence class LABEL, if one exists. Return NODE otherwise. */
2265
2266 static unsigned int
2267 find_equivalent_node (constraint_graph_t graph,
2268 unsigned int node, unsigned int label)
2269 {
2270 /* If the address version of this variable is unused, we can
2271 substitute it for anything else with the same label.
2272 Otherwise, we know the pointers are equivalent, but not the
2273 locations, and we can unite them later. */
2274
2275 if (!bitmap_bit_p (graph->address_taken, node))
2276 {
2277 gcc_assert (label < graph->size);
2278
2279 if (graph->eq_rep[label] != -1)
2280 {
2281 /* Unify the two variables since we know they are equivalent. */
2282 if (unite (graph->eq_rep[label], node))
2283 unify_nodes (graph, graph->eq_rep[label], node, false);
2284 return graph->eq_rep[label];
2285 }
2286 else
2287 {
2288 graph->eq_rep[label] = node;
2289 graph->pe_rep[label] = node;
2290 }
2291 }
2292 else
2293 {
2294 gcc_assert (label < graph->size);
2295 graph->pe[node] = label;
2296 if (graph->pe_rep[label] == -1)
2297 graph->pe_rep[label] = node;
2298 }
2299
2300 return node;
2301 }
2302
2303 /* Unite pointer equivalent but not location equivalent nodes in
2304 GRAPH. This may only be performed once variable substitution is
2305 finished. */
2306
2307 static void
2308 unite_pointer_equivalences (constraint_graph_t graph)
2309 {
2310 unsigned int i;
2311
2312 /* Go through the pointer equivalences and unite them to their
2313 representative, if they aren't already. */
2314 for (i = 0; i < FIRST_REF_NODE; i++)
2315 {
2316 unsigned int label = graph->pe[i];
2317 if (label)
2318 {
2319 int label_rep = graph->pe_rep[label];
2320
2321 if (label_rep == -1)
2322 continue;
2323
2324 label_rep = find (label_rep);
2325 if (label_rep >= 0 && unite (label_rep, find (i)))
2326 unify_nodes (graph, label_rep, i, false);
2327 }
2328 }
2329 }
2330
2331 /* Move complex constraints to the GRAPH nodes they belong to. */
2332
2333 static void
2334 move_complex_constraints (constraint_graph_t graph)
2335 {
2336 int i;
2337 constraint_t c;
2338
2339 FOR_EACH_VEC_ELT (constraints, i, c)
2340 {
2341 if (c)
2342 {
2343 struct constraint_expr lhs = c->lhs;
2344 struct constraint_expr rhs = c->rhs;
2345
2346 if (lhs.type == DEREF)
2347 {
2348 insert_into_complex (graph, lhs.var, c);
2349 }
2350 else if (rhs.type == DEREF)
2351 {
2352 if (!(get_varinfo (lhs.var)->is_special_var))
2353 insert_into_complex (graph, rhs.var, c);
2354 }
2355 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2356 && (lhs.offset != 0 || rhs.offset != 0))
2357 {
2358 insert_into_complex (graph, rhs.var, c);
2359 }
2360 }
2361 }
2362 }
2363
2364
2365 /* Optimize and rewrite complex constraints while performing
2366 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2367 result of perform_variable_substitution. */
2368
2369 static void
2370 rewrite_constraints (constraint_graph_t graph,
2371 struct scc_info *si)
2372 {
2373 int i;
2374 unsigned int j;
2375 constraint_t c;
2376
2377 for (j = 0; j < graph->size; j++)
2378 gcc_assert (find (j) == j);
2379
2380 FOR_EACH_VEC_ELT (constraints, i, c)
2381 {
2382 struct constraint_expr lhs = c->lhs;
2383 struct constraint_expr rhs = c->rhs;
2384 unsigned int lhsvar = find (lhs.var);
2385 unsigned int rhsvar = find (rhs.var);
2386 unsigned int lhsnode, rhsnode;
2387 unsigned int lhslabel, rhslabel;
2388
2389 lhsnode = si->node_mapping[lhsvar];
2390 rhsnode = si->node_mapping[rhsvar];
2391 lhslabel = graph->pointer_label[lhsnode];
2392 rhslabel = graph->pointer_label[rhsnode];
2393
2394 /* See if it is really a non-pointer variable, and if so, ignore
2395 the constraint. */
2396 if (lhslabel == 0)
2397 {
2398 if (dump_file && (dump_flags & TDF_DETAILS))
2399 {
2400
2401 fprintf (dump_file, "%s is a non-pointer variable,"
2402 "ignoring constraint:",
2403 get_varinfo (lhs.var)->name);
2404 dump_constraint (dump_file, c);
2405 fprintf (dump_file, "\n");
2406 }
2407 constraints[i] = NULL;
2408 continue;
2409 }
2410
2411 if (rhslabel == 0)
2412 {
2413 if (dump_file && (dump_flags & TDF_DETAILS))
2414 {
2415
2416 fprintf (dump_file, "%s is a non-pointer variable,"
2417 "ignoring constraint:",
2418 get_varinfo (rhs.var)->name);
2419 dump_constraint (dump_file, c);
2420 fprintf (dump_file, "\n");
2421 }
2422 constraints[i] = NULL;
2423 continue;
2424 }
2425
2426 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2427 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2428 c->lhs.var = lhsvar;
2429 c->rhs.var = rhsvar;
2430
2431 }
2432 }
2433
2434 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2435 part of an SCC, false otherwise. */
2436
2437 static bool
2438 eliminate_indirect_cycles (unsigned int node)
2439 {
2440 if (graph->indirect_cycles[node] != -1
2441 && !bitmap_empty_p (get_varinfo (node)->solution))
2442 {
2443 unsigned int i;
2444 vec<unsigned> queue = vNULL;
2445 int queuepos;
2446 unsigned int to = find (graph->indirect_cycles[node]);
2447 bitmap_iterator bi;
2448
2449 /* We can't touch the solution set and call unify_nodes
2450 at the same time, because unify_nodes is going to do
2451 bitmap unions into it. */
2452
2453 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2454 {
2455 if (find (i) == i && i != to)
2456 {
2457 if (unite (to, i))
2458 queue.safe_push (i);
2459 }
2460 }
2461
2462 for (queuepos = 0;
2463 queue.iterate (queuepos, &i);
2464 queuepos++)
2465 {
2466 unify_nodes (graph, to, i, true);
2467 }
2468 queue.release ();
2469 return true;
2470 }
2471 return false;
2472 }
2473
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  /* Worklist of nodes whose solution changed since last processed.  */
  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate to a fixed point: each round visits changed nodes in
     topological order and propagates their new solution bits.  */
  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  Only the
		 delta since the last visit needs to be propagated.  */
	      if (vi->oldsolution)
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember the bits we are about to propagate.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = set_union_with_increment (tmp, pts, 0);

		      /* Re-queue the successor if its solution grew.  */
		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  bitmap_set_bit (changed, to);
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2622
2623 /* Map from trees to variable infos. */
2624 static struct pointer_map_t *vi_for_tree;
2625
2626
2627 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2628
2629 static void
2630 insert_vi_for_tree (tree t, varinfo_t vi)
2631 {
2632 void **slot = pointer_map_insert (vi_for_tree, t);
2633 gcc_assert (vi);
2634 gcc_assert (*slot == NULL);
2635 *slot = vi;
2636 }
2637
2638 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2639 exist in the map, return NULL, otherwise, return the varinfo we found. */
2640
2641 static varinfo_t
2642 lookup_vi_for_tree (tree t)
2643 {
2644 void **slot = pointer_map_contains (vi_for_tree, t);
2645 if (slot == NULL)
2646 return NULL;
2647
2648 return (varinfo_t) *slot;
2649 }
2650
2651 /* Return a printable name for DECL */
2652
2653 static const char *
2654 alias_get_name (tree decl)
2655 {
2656 const char *res = NULL;
2657 char *temp;
2658 int num_printed = 0;
2659
2660 if (!dump_file)
2661 return "NULL";
2662
2663 if (TREE_CODE (decl) == SSA_NAME)
2664 {
2665 res = get_name (decl);
2666 if (res)
2667 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2668 else
2669 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2670 if (num_printed > 0)
2671 {
2672 res = ggc_strdup (temp);
2673 free (temp);
2674 }
2675 }
2676 else if (DECL_P (decl))
2677 {
2678 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2679 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2680 else
2681 {
2682 res = get_name (decl);
2683 if (!res)
2684 {
2685 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2686 if (num_printed > 0)
2687 {
2688 res = ggc_strdup (temp);
2689 free (temp);
2690 }
2691 }
2692 }
2693 }
2694 if (res != NULL)
2695 return res;
2696
2697 return "NULL";
2698 }
2699
2700 /* Find the variable id for tree T in the map.
2701 If T doesn't exist in the map, create an entry for it and return it. */
2702
2703 static varinfo_t
2704 get_vi_for_tree (tree t)
2705 {
2706 void **slot = pointer_map_contains (vi_for_tree, t);
2707 if (slot == NULL)
2708 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2709
2710 return (varinfo_t) *slot;
2711 }
2712
2713 /* Get a scalar constraint expression for a new temporary variable. */
2714
2715 static struct constraint_expr
2716 new_scalar_tmp_constraint_exp (const char *name)
2717 {
2718 struct constraint_expr tmp;
2719 varinfo_t vi;
2720
2721 vi = new_var_info (NULL_TREE, name);
2722 vi->offset = 0;
2723 vi->size = -1;
2724 vi->fullsize = -1;
2725 vi->is_full_var = 1;
2726
2727 tmp.var = vi->id;
2728 tmp.type = SCALAR;
2729 tmp.offset = 0;
2730
2731 return tmp;
2732 }
2733
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.

   Appends one or more constraint expressions for T to *RESULTS; when
   ADDRESS_P is false and T has subfields, one expression per subfield
   is pushed.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias)
	{
	  /* Follow the alias chain to the ultimate variable.  */
	  node = varpool_variable_node (node, NULL);
	  t = node->symbol.decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* This case is believed impossible; the code below the
	 assertion is intentionally kept as dead defensive code.  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Push one SCALAR expression per subfield, in field order.  */
      for (; vi; vi = vi->next)
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2797
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.

   Constraints of the forms *lhs = *rhs and *lhs = &rhs are split into
   two constraints through a fresh temporary (recursively processed);
   everything else is pushed directly onto the global CONSTRAINTS
   vector.  Constraints involving variables that cannot contain
   pointers are dropped.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Taking the address of a field (non-zero offset) is invalid
	 here; such cases must have been lowered by the callers.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
2853
2854
2855 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2856 structure. */
2857
2858 static HOST_WIDE_INT
2859 bitpos_of_field (const tree fdecl)
2860 {
2861 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2862 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2863 return -1;
2864
2865 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2866 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2867 }
2868
2869
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.

   OFFSET may be NULL_TREE or non-constant, in which case the offset
   is treated as unknown and all subfields of the pointed-to variables
   are included conservatively.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  /* Detect overflow of the byte-to-bit conversion.  */
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  /* Offset zero does not change the solution.  */
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the subfield chain from the head variable and add
	     every field other than the one already present.  */
	  varinfo_t temp = lookup_vi_for_tree (curr->decl);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = temp->next;
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ??? If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != NULL)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	/* SCALAR/DEREF expressions carry the offset to solving time.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
2988
2989
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If address_p is true the result will be taken its address of.
   If lhs_p is true then the constraint expression is assumed to be used
   as the lhs.

   Computes the accessed bit range of T with get_ref_base_and_extent
   and narrows the base's constraint expressions to the overlapping
   subfields.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  /* A reference rooted at literal zero is just an integer.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  /* Handle type-punning through unions.  If we are extracting a pointer
     from a union via a possibly type-punning access that pointer
     points to anything, similar to a conversion of an integer to
     a pointer.  */
  if (!lhs_p)
    {
      tree u;
      for (u = t;
	   TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
	   u = TREE_OPERAND (u, 0))
	if (TREE_CODE (u) == COMPONENT_REF
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
	  {
	    struct constraint_expr temp;

	    temp.offset = 0;
	    temp.var = anything_id;
	    temp.type = ADDRESSOF;
	    results->safe_push (temp);
	    return;
	  }
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true, lhs_p);
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  /* Push every subfield overlapping the accessed bit range;
	     for address-taken results the first one suffices.  */
	  for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
	    {
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != NULL)
		curr = curr->next;
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      if (bitpos == -1
	  || bitsize != bitmaxsize
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on a
	 VIEW_CONVERT_EXPR <>(&foobar).  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3147
3148
3149 /* Dereference the constraint expression CONS, and return the result.
3150 DEREF (ADDRESSOF) = SCALAR
3151 DEREF (SCALAR) = DEREF
3152 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3153 This is needed so that we can handle dereferencing DEREF constraints. */
3154
3155 static void
3156 do_deref (vec<ce_s> *constraints)
3157 {
3158 struct constraint_expr *c;
3159 unsigned int i = 0;
3160
3161 FOR_EACH_VEC_ELT (*constraints, i, c)
3162 {
3163 if (c->type == SCALAR)
3164 c->type = DEREF;
3165 else if (c->type == ADDRESSOF)
3166 c->type = SCALAR;
3167 else if (c->type == DEREF)
3168 {
3169 struct constraint_expr tmplhs;
3170 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3171 process_constraint (new_constraint (tmplhs, *c));
3172 c->var = tmplhs.var;
3173 }
3174 else
3175 gcc_unreachable ();
3176 }
3177 }
3178
3179 /* Given a tree T, return the constraint expression for taking the
3180 address of it. */
3181
3182 static void
3183 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3184 {
3185 struct constraint_expr *c;
3186 unsigned int i;
3187
3188 get_constraint_for_1 (t, results, true, true);
3189
3190 FOR_EACH_VEC_ELT (*results, i, c)
3191 {
3192 if (c->type == DEREF)
3193 c->type = SCALAR;
3194 else
3195 c->type = ADDRESSOF;
3196 }
3197 }
3198
/* Given a tree T, fill *RESULTS with the constraint expressions for it.
   ADDRESS_P is true when we are computing the constraints for taking
   the address of T (sub-variable expansion can then be skipped),
   LHS_P is true when T appears in lhs position of an assignment.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the class of tree code.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      /* Constraints for base pointer plus constant offset,
		 then dereference the result.  */
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* Push all field sub-variables covered by the access.  */
	      vi = get_varinfo (cs.var);
	      curr = vi->next;
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = curr->next)
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change the pointed-to objects.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      /* Union the constraints of all initializer elements.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3383
3384 /* Given a gimple tree T, return the constraint expression vector for it. */
3385
3386 static void
3387 get_constraint_for (tree t, vec<ce_s> *results)
3388 {
3389 gcc_assert (results->length () == 0);
3390
3391 get_constraint_for_1 (t, results, false, true);
3392 }
3393
3394 /* Given a gimple tree T, return the constraint expression vector for it
3395 to be used as the rhs of a constraint. */
3396
3397 static void
3398 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3399 {
3400 gcc_assert (results->length () == 0);
3401
3402 get_constraint_for_1 (t, results, false, false);
3403 }
3404
3405
3406 /* Efficiently generates constraints from all entries in *RHSC to all
3407 entries in *LHSC. */
3408
3409 static void
3410 process_all_all_constraints (vec<ce_s> lhsc,
3411 vec<ce_s> rhsc)
3412 {
3413 struct constraint_expr *lhsp, *rhsp;
3414 unsigned i, j;
3415
3416 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3417 {
3418 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3419 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3420 process_constraint (new_constraint (*lhsp, *rhsp));
3421 }
3422 else
3423 {
3424 struct constraint_expr tmp;
3425 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3426 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3427 process_constraint (new_constraint (tmp, *rhsp));
3428 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3429 process_constraint (new_constraint (*lhsp, tmp));
3430 }
3431 }
3432
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the lhs and rhs
   trees of the aggregate assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is an indirect access (or the lhs is ANYTHING)
     we cannot match fields; fall back to an all-to-all copy with
     unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      /* Both sides are direct: walk the two field lists in parallel
	 with cursors J (lhs) and K (rhs), emitting a copy constraint
	 whenever the field ranges overlap.  */
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  /* NOTE(review): the cross-terms (lhsv->offset + rhsoffset
	     vs. rhsv->offset + lhsoffset) compare both fields in a
	     common coordinate system - verify against the original
	     commit before touching this.  */
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance whichever cursor's field ends first.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3504
3505 /* Create constraints ID = { rhsc }. */
3506
3507 static void
3508 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3509 {
3510 struct constraint_expr *c;
3511 struct constraint_expr includes;
3512 unsigned int j;
3513
3514 includes.var = id;
3515 includes.offset = 0;
3516 includes.type = SCALAR;
3517
3518 FOR_EACH_VEC_ELT (rhsc, j, c)
3519 process_constraint (new_constraint (includes, *c));
3520 }
3521
3522 /* Create a constraint ID = OP. */
3523
3524 static void
3525 make_constraint_to (unsigned id, tree op)
3526 {
3527 vec<ce_s> rhsc = vNULL;
3528 get_constraint_for_rhs (op, &rhsc);
3529 make_constraints_to (id, rhsc);
3530 rhsc.release ();
3531 }
3532
3533 /* Create a constraint ID = &FROM. */
3534
3535 static void
3536 make_constraint_from (varinfo_t vi, int from)
3537 {
3538 struct constraint_expr lhs, rhs;
3539
3540 lhs.var = vi->id;
3541 lhs.offset = 0;
3542 lhs.type = SCALAR;
3543
3544 rhs.var = from;
3545 rhs.offset = 0;
3546 rhs.type = ADDRESSOF;
3547 process_constraint (new_constraint (lhs, rhs));
3548 }
3549
3550 /* Create a constraint ID = FROM. */
3551
3552 static void
3553 make_copy_constraint (varinfo_t vi, int from)
3554 {
3555 struct constraint_expr lhs, rhs;
3556
3557 lhs.var = vi->id;
3558 lhs.offset = 0;
3559 lhs.type = SCALAR;
3560
3561 rhs.var = from;
3562 rhs.offset = 0;
3563 rhs.type = SCALAR;
3564 process_constraint (new_constraint (lhs, rhs));
3565 }
3566
3567 /* Make constraints necessary to make OP escape. */
3568
3569 static void
3570 make_escape_constraint (tree op)
3571 {
3572 make_constraint_to (escaped_id, op);
3573 }
3574
3575 /* Add constraints to that the solution of VI is transitively closed. */
3576
3577 static void
3578 make_transitive_closure_constraints (varinfo_t vi)
3579 {
3580 struct constraint_expr lhs, rhs;
3581
3582 /* VAR = *VAR; */
3583 lhs.type = SCALAR;
3584 lhs.var = vi->id;
3585 lhs.offset = 0;
3586 rhs.type = DEREF;
3587 rhs.var = vi->id;
3588 rhs.offset = 0;
3589 process_constraint (new_constraint (lhs, rhs));
3590
3591 /* VAR = VAR + UNKNOWN; */
3592 lhs.type = SCALAR;
3593 lhs.var = vi->id;
3594 lhs.offset = 0;
3595 rhs.type = SCALAR;
3596 rhs.var = vi->id;
3597 rhs.offset = UNKNOWN_OFFSET;
3598 process_constraint (new_constraint (lhs, rhs));
3599 }
3600
/* Temporary storage for the fake VAR_DECLs created by
   build_fake_var_decl.  */
struct obstack fake_var_decl_obstack;
3603
/* Build a fake VAR_DECL of type TYPE acting as referrer to a DECL_UID.
   The decl is allocated on fake_var_decl_obstack, zero-initialized,
   given a fresh DECL_UID and laid out.  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* Mark the points-to UID as not yet assigned.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3618
3619 /* Create a new artificial heap variable with NAME.
3620 Return the created variable. */
3621
3622 static varinfo_t
3623 make_heapvar (const char *name)
3624 {
3625 varinfo_t vi;
3626 tree heapvar;
3627
3628 heapvar = build_fake_var_decl (ptr_type_node);
3629 DECL_EXTERNAL (heapvar) = 1;
3630
3631 vi = new_var_info (heapvar, name);
3632 vi->is_artificial_var = true;
3633 vi->is_heap_var = true;
3634 vi->is_unknown_size_var = true;
3635 vi->offset = 0;
3636 vi->fullsize = ~0;
3637 vi->size = ~0;
3638 vi->is_full_var = true;
3639 insert_vi_for_tree (heapvar, vi);
3640
3641 return vi;
3642 }
3643
3644 /* Create a new artificial heap variable with NAME and make a
3645 constraint from it to LHS. Set flags according to a tag used
3646 for tracking restrict pointers. */
3647
3648 static varinfo_t
3649 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3650 {
3651 varinfo_t vi = make_heapvar (name);
3652 vi->is_global_var = 1;
3653 vi->may_have_pointers = 1;
3654 make_constraint_from (lhs, vi->id);
3655 return vi;
3656 }
3657
3658 /* Create a new artificial heap variable with NAME and make a
3659 constraint from it to LHS. Set flags according to a tag used
3660 for tracking restrict pointers and make the artificial heap
3661 point to global memory. */
3662
3663 static varinfo_t
3664 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3665 {
3666 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3667 make_copy_constraint (vi, nonlocal_id);
3668 return vi;
3669 }
3670
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  These offsets select the
   respective sub-variable via get_function_part_constraint.  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3679
3680 /* Get a constraint for the requested part of a function designator FI
3681 when operating in IPA mode. */
3682
3683 static struct constraint_expr
3684 get_function_part_constraint (varinfo_t fi, unsigned part)
3685 {
3686 struct constraint_expr c;
3687
3688 gcc_assert (in_ipa_mode);
3689
3690 if (fi->id == anything_id)
3691 {
3692 /* ??? We probably should have a ANYFN special variable. */
3693 c.var = anything_id;
3694 c.offset = 0;
3695 c.type = SCALAR;
3696 }
3697 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3698 {
3699 varinfo_t ai = first_vi_for_offset (fi, part);
3700 if (ai)
3701 c.var = ai->id;
3702 else
3703 c.var = anything_id;
3704 c.offset = 0;
3705 c.type = SCALAR;
3706 }
3707 else
3708 {
3709 c.var = fi->id;
3710 c.offset = part;
3711 c.type = DEREF;
3712 }
3713
3714 return c;
3715 }
3716
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS: model the escape of the arguments according to their EAF_*
   flags and push the constraint expressions describing what the call
   result may point to onto *RESULTS.  */

static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      /* Indirect use: close the argument's solution
		 transitively before adding it to the call-use set.  */
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Used and clobbered but not escaping to other arguments.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3818
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  RHSC contains the
   rhs constraint expressions for the call result; FNDECL is the
   callee decl if known, NULL_TREE otherwise.
   NOTE(review): the incoming FLAGS parameter is dead - it is
   overwritten below by gimple_call_return_flags before first use;
   confirm at the callers whether it can be dropped.  */

static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* The result points to freshly allocated heap memory.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
         it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
3885
3886 /* For non-IPA mode, generate constraints necessary for a call of a
3887 const function that returns a pointer in the statement STMT. */
3888
3889 static void
3890 handle_const_call (gimple stmt, vec<ce_s> *results)
3891 {
3892 struct constraint_expr rhsc;
3893 unsigned int k;
3894
3895 /* Treat nested const functions the same as pure functions as far
3896 as the static chain is concerned. */
3897 if (gimple_call_chain (stmt))
3898 {
3899 varinfo_t uses = get_call_use_vi (stmt);
3900 make_transitive_closure_constraints (uses);
3901 make_constraint_to (uses->id, gimple_call_chain (stmt));
3902 rhsc.var = uses->id;
3903 rhsc.offset = 0;
3904 rhsc.type = SCALAR;
3905 results->safe_push (rhsc);
3906 }
3907
3908 /* May return arguments. */
3909 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3910 {
3911 tree arg = gimple_call_arg (stmt, k);
3912 vec<ce_s> argc = vNULL;
3913 unsigned i;
3914 struct constraint_expr *argp;
3915 get_constraint_for_rhs (arg, &argc);
3916 FOR_EACH_VEC_ELT (argc, i, argp)
3917 results->safe_push (*argp);
3918 argc.release ();
3919 }
3920
3921 /* May return addresses of globals. */
3922 rhsc.var = nonlocal_id;
3923 rhsc.offset = 0;
3924 rhsc.type = ADDRESSOF;
3925 results->safe_push (rhsc);
3926 }
3927
3928 /* For non-IPA mode, generate constraints necessary for a call to a
3929 pure function in statement STMT. */
3930
3931 static void
3932 handle_pure_call (gimple stmt, vec<ce_s> *results)
3933 {
3934 struct constraint_expr rhsc;
3935 unsigned i;
3936 varinfo_t uses = NULL;
3937
3938 /* Memory reached from pointer arguments is call-used. */
3939 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3940 {
3941 tree arg = gimple_call_arg (stmt, i);
3942 if (!uses)
3943 {
3944 uses = get_call_use_vi (stmt);
3945 make_transitive_closure_constraints (uses);
3946 }
3947 make_constraint_to (uses->id, arg);
3948 }
3949
3950 /* The static chain is used as well. */
3951 if (gimple_call_chain (stmt))
3952 {
3953 if (!uses)
3954 {
3955 uses = get_call_use_vi (stmt);
3956 make_transitive_closure_constraints (uses);
3957 }
3958 make_constraint_to (uses->id, gimple_call_chain (stmt));
3959 }
3960
3961 /* Pure functions may return call-used and nonlocal memory. */
3962 if (uses)
3963 {
3964 rhsc.var = uses->id;
3965 rhsc.offset = 0;
3966 rhsc.type = SCALAR;
3967 results->safe_push (rhsc);
3968 }
3969 rhsc.var = nonlocal_id;
3970 rhsc.offset = 0;
3971 rhsc.type = SCALAR;
3972 results->safe_push (rhsc);
3973 }
3974
3975
3976 /* Return the varinfo for the callee of CALL. */
3977
3978 static varinfo_t
3979 get_fi_for_callee (gimple call)
3980 {
3981 tree decl, fn = gimple_call_fn (call);
3982
3983 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
3984 fn = OBJ_TYPE_REF_EXPR (fn);
3985
3986 /* If we can directly resolve the function being called, do so.
3987 Otherwise, it must be some sort of indirect expression that
3988 we should still be able to handle. */
3989 decl = gimple_call_addr_fndecl (fn);
3990 if (decl)
3991 return get_vi_for_tree (decl);
3992
3993 /* If the function is anything other than a SSA name pointer we have no
3994 clue and should be getting ANYFN (well, ANYTHING for now). */
3995 if (!fn || TREE_CODE (fn) != SSA_NAME)
3996 return get_varinfo (anything_id);
3997
3998 if (SSA_NAME_IS_DEFAULT_DEF (fn)
3999 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4000 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4001 fn = SSA_NAME_VAR (fn);
4002
4003 return get_vi_for_tree (fn);
4004 }
4005
/* Create constraints for the builtin call T.  Return true if the call
   was handled (no further generic call handling needed), otherwise
   false so the caller falls through to the generic code.  */

static bool
find_func_aliases_for_builtin_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  if (fndecl != NULL_TREE
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly! */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy has src and dest swapped relative to the others.  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The *pcpy variants return dest plus an offset.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  /* *dest = *src for the pointed-to memory.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset with zero makes the memory point to NOTHING (when
	     NULL-pointer checks may be deleted), otherwise treat the
	     filled value as an arbitrary integer.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, ac));
	  lhsc.release ();
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* Result aliases the first argument.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	if (gimple_call_lhs (t))
	  {
	    /* Model the allocation like a regular noalias call, then
	       make the new string's memory point to what the source
	       string's memory points to.  */
	    handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
			     vNULL, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.release ();
	    rhsc.release ();
	    return true;
	  }
	break;
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function receives the frame as its
		     static chain.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.release ();

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);
		  rhsc.release ();
		  lhsc.release ();

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      /* The result points to the function stored in the
		 trampoline memory by INIT_TRAMPOLINE above.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      rhsc.release ();
	      lhsc.release ();
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A transactional store is *addr = src.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A transactional load is dest = *addr.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for (valist, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (cfun->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  lhsc.release ();
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (cfun->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later excercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4330
4331 /* Create constraints for the call T. */
4332
4333 static void
4334 find_func_aliases_for_call (gimple t)
4335 {
4336 tree fndecl = gimple_call_fndecl (t);
4337 vec<ce_s> lhsc = vNULL;
4338 vec<ce_s> rhsc = vNULL;
4339 varinfo_t fi;
4340
4341 if (fndecl != NULL_TREE
4342 && DECL_BUILT_IN (fndecl)
4343 && find_func_aliases_for_builtin_call (t))
4344 return;
4345
4346 fi = get_fi_for_callee (t);
4347 if (!in_ipa_mode
4348 || (fndecl && !fi->is_fn_info))
4349 {
4350 vec<ce_s> rhsc = vNULL;
4351 int flags = gimple_call_flags (t);
4352
4353 /* Const functions can return their arguments and addresses
4354 of global memory but not of escaped memory. */
4355 if (flags & (ECF_CONST|ECF_NOVOPS))
4356 {
4357 if (gimple_call_lhs (t))
4358 handle_const_call (t, &rhsc);
4359 }
4360 /* Pure functions can return addresses in and of memory
4361 reachable from their arguments, but they are not an escape
4362 point for reachable memory of their arguments. */
4363 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4364 handle_pure_call (t, &rhsc);
4365 else
4366 handle_rhs_call (t, &rhsc);
4367 if (gimple_call_lhs (t))
4368 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4369 rhsc.release ();
4370 }
4371 else
4372 {
4373 tree lhsop;
4374 unsigned j;
4375
4376 /* Assign all the passed arguments to the appropriate incoming
4377 parameters of the function. */
4378 for (j = 0; j < gimple_call_num_args (t); j++)
4379 {
4380 struct constraint_expr lhs ;
4381 struct constraint_expr *rhsp;
4382 tree arg = gimple_call_arg (t, j);
4383
4384 get_constraint_for_rhs (arg, &rhsc);
4385 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4386 while (rhsc.length () != 0)
4387 {
4388 rhsp = &rhsc.last ();
4389 process_constraint (new_constraint (lhs, *rhsp));
4390 rhsc.pop ();
4391 }
4392 }
4393
4394 /* If we are returning a value, assign it to the result. */
4395 lhsop = gimple_call_lhs (t);
4396 if (lhsop)
4397 {
4398 struct constraint_expr rhs;
4399 struct constraint_expr *lhsp;
4400
4401 get_constraint_for (lhsop, &lhsc);
4402 rhs = get_function_part_constraint (fi, fi_result);
4403 if (fndecl
4404 && DECL_RESULT (fndecl)
4405 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4406 {
4407 vec<ce_s> tem = vNULL;
4408 tem.safe_push (rhs);
4409 do_deref (&tem);
4410 rhs = tem[0];
4411 tem.release ();
4412 }
4413 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4414 process_constraint (new_constraint (*lhsp, rhs));
4415 }
4416
4417 /* If we pass the result decl by reference, honor that. */
4418 if (lhsop
4419 && fndecl
4420 && DECL_RESULT (fndecl)
4421 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4422 {
4423 struct constraint_expr lhs;
4424 struct constraint_expr *rhsp;
4425
4426 get_constraint_for_address_of (lhsop, &rhsc);
4427 lhs = get_function_part_constraint (fi, fi_result);
4428 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4429 process_constraint (new_constraint (lhs, *rhsp));
4430 rhsc.release ();
4431 }
4432
4433 /* If we use a static chain, pass it along. */
4434 if (gimple_call_chain (t))
4435 {
4436 struct constraint_expr lhs;
4437 struct constraint_expr *rhsp;
4438
4439 get_constraint_for (gimple_call_chain (t), &rhsc);
4440 lhs = get_function_part_constraint (fi, fi_static_chain);
4441 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4442 process_constraint (new_constraint (lhs, *rhsp));
4443 }
4444 }
4445 }
4446
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.

   Dispatches on the statement kind: PHIs merge their arguments into
   the result, calls are delegated to find_func_aliases_for_call,
   assignments are handled per rhs code, returns and asms are treated
   as (possible) escape points.  */

static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  /* Process every (lhs, rhs) pair; popping rhsc as we go
	     empties it for the next phi argument.  */
	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only set for single-rhs assignments.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  /* Conversions that do not go from non-pointer to pointer,
	     and plain copies, simply propagate the rhs constraints.  */
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.
	 In IPA mode only stores to externally visible globals escape;
	 other globals are tracked via clobber/use sets.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* In IPA mode assign the return value to the result slot
	     of the current function's info instead of letting it
	     escape.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4664
4665
4666 /* Create a constraint adding to the clobber set of FI the memory
4667 pointed to by PTR. */
4668
4669 static void
4670 process_ipa_clobber (varinfo_t fi, tree ptr)
4671 {
4672 vec<ce_s> ptrc = vNULL;
4673 struct constraint_expr *c, lhs;
4674 unsigned i;
4675 get_constraint_for_rhs (ptr, &ptrc);
4676 lhs = get_function_part_constraint (fi, fi_clobbers);
4677 FOR_EACH_VEC_ELT (ptrc, i, c)
4678 process_constraint (new_constraint (lhs, *c));
4679 ptrc.release ();
4680 }
4681
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.

   Stores to non-automatic memory feed the current function's
   fi_clobbers set, loads feed its fi_uses set; calls additionally
   link the caller's clobber/use sets to the callee's.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip component references to get at the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only stores to non-automatic memory are of interest: decls
	 not local to this function, or indirect stores that are not
	 provably through the address of a local.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Same non-automatic-memory filter as for stores above.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (decl
	  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy takes (src, dest) while the others take
		 (dest, src), hence the argument-index swap.  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants carry no memory use.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee - assume it clobbers and uses anything.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
4960
4961
4962 /* Find the first varinfo in the same variable as START that overlaps with
4963 OFFSET. Return NULL if we can't find one. */
4964
4965 static varinfo_t
4966 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4967 {
4968 /* If the offset is outside of the variable, bail out. */
4969 if (offset >= start->fullsize)
4970 return NULL;
4971
4972 /* If we cannot reach offset from start, lookup the first field
4973 and start from there. */
4974 if (start->offset > offset)
4975 start = lookup_vi_for_tree (start->decl);
4976
4977 while (start)
4978 {
4979 /* We may not find a variable in the field list with the actual
4980 offset when when we have glommed a structure to a variable.
4981 In that case, however, offset should still be within the size
4982 of the variable. */
4983 if (offset >= start->offset
4984 && (offset - start->offset) < start->size)
4985 return start;
4986
4987 start= start->next;
4988 }
4989
4990 return NULL;
4991 }
4992
4993 /* Find the first varinfo in the same variable as START that overlaps with
4994 OFFSET. If there is no such varinfo the varinfo directly preceding
4995 OFFSET is returned. */
4996
4997 static varinfo_t
4998 first_or_preceding_vi_for_offset (varinfo_t start,
4999 unsigned HOST_WIDE_INT offset)
5000 {
5001 /* If we cannot reach offset from start, lookup the first field
5002 and start from there. */
5003 if (start->offset > offset)
5004 start = lookup_vi_for_tree (start->decl);
5005
5006 /* We may not find a variable in the field list with the actual
5007 offset when when we have glommed a structure to a variable.
5008 In that case, however, offset should still be within the size
5009 of the variable.
5010 If we got beyond the offset we look for return the field
5011 directly preceding offset which may be the last field. */
5012 while (start->next
5013 && offset >= start->offset
5014 && !((offset - start->offset) < start->size))
5015 start = start->next;
5016
5017 return start;
5018 }
5019
5020
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field has no usable constant DECL_SIZE; SIZE is then
     set to -1 and must not be relied upon.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers (see
     field_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a pointer type with TYPE_RESTRICT set.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5043
5044
5045 /* qsort comparison function for two fieldoff's PA and PB */
5046
5047 static int
5048 fieldoff_compare (const void *pa, const void *pb)
5049 {
5050 const fieldoff_s *foa = (const fieldoff_s *)pa;
5051 const fieldoff_s *fob = (const fieldoff_s *)pb;
5052 unsigned HOST_WIDE_INT foasize, fobsize;
5053
5054 if (foa->offset < fob->offset)
5055 return -1;
5056 else if (foa->offset > fob->offset)
5057 return 1;
5058
5059 foasize = foa->size;
5060 fobsize = fob->size;
5061 if (foasize < fobsize)
5062 return -1;
5063 else if (foasize > fobsize)
5064 return 1;
5065 return 0;
5066 }
5067
/* Sort a fieldstack according to the field offset and sizes, using
   fieldoff_compare as the ordering.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5074
5075 /* Return true if T is a type that can have subvars. */
5076
5077 static inline bool
5078 type_can_have_subvars (const_tree t)
5079 {
5080 /* Aggregates without overlapping fields can have subvars. */
5081 return TREE_CODE (t) == RECORD_TYPE;
5082 }
5083
5084 /* Return true if V is a tree that we can have subvars for.
5085 Normally, this is any aggregate type. Also complex
5086 types which are not gimple registers can have subvars. */
5087
5088 static inline bool
5089 var_can_have_subvars (const_tree v)
5090 {
5091 /* Volatile variables should never have subvars. */
5092 if (TREE_THIS_VOLATILE (v))
5093 return false;
5094
5095 /* Non decls or memory tags can never have subvars. */
5096 if (!DECL_P (v))
5097 return false;
5098
5099 return type_can_have_subvars (TREE_TYPE (v));
5100 }
5101
5102 /* Return true if T is a type that does contain pointers. */
5103
5104 static bool
5105 type_must_have_pointers (tree type)
5106 {
5107 if (POINTER_TYPE_P (type))
5108 return true;
5109
5110 if (TREE_CODE (type) == ARRAY_TYPE)
5111 return type_must_have_pointers (TREE_TYPE (type));
5112
5113 /* A function or method can have pointers as arguments, so track
5114 those separately. */
5115 if (TREE_CODE (type) == FUNCTION_TYPE
5116 || TREE_CODE (type) == METHOD_TYPE)
5117 return true;
5118
5119 return false;
5120 }
5121
5122 static bool
5123 field_must_have_pointers (tree t)
5124 {
5125 return type_must_have_pointers (TREE_TYPE (t));
5126 }
5127
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs are decomposed into fields.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields that cannot themselves be decomposed (including
	   unions, whose members overlap) are pushed as a whole.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The filler entry has offset 0 and size OFFSET + FOFF,
	       covering everything up to this field.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.
	       PAIR must end exactly where this field starts, both must
	       have known size, and neither may require pointers.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5228
5229 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5230 if it is a varargs function. */
5231
5232 static unsigned int
5233 count_num_arguments (tree decl, bool *is_varargs)
5234 {
5235 unsigned int num = 0;
5236 tree t;
5237
5238 /* Capture named arguments for K&R functions. They do not
5239 have a prototype and thus no TYPE_ARG_TYPES. */
5240 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5241 ++num;
5242
5243 /* Check if the function has variadic arguments. */
5244 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5245 if (TREE_VALUE (t) == void_type_node)
5246 break;
5247 if (!t)
5248 *is_varargs = true;
5249
5250 return num;
5251 }
5252
/* Create function info for DECL, using NAME, and return the variable
   we've created for the function.  The function's varinfo is the head
   of a chain of sub-variables, one per "part" (clobbers, uses, static
   chain, result, each parameter, and a varargs representative), kept
   in increasing offset order.  */

static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  /* For varargs functions the number of parts is unbounded.  */
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    asprintf (&tempname, "%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    clobbervi = new_var_info (NULL, newname);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;
    /* Sub-variables must be chained in increasing offset order.  */
    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi;
    prev_vi = clobbervi;

    asprintf (&tempname, "%s.use", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    usevi = new_var_info (NULL, newname);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;
    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG may run out before NUM_ARGS (it is walked in lockstep
	 below); fall back to DECL for the varinfo's decl then.  */
      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
    }

  return vi;
}
5430
5431
5432 /* Return true if FIELDSTACK contains fields that overlap.
5433 FIELDSTACK is assumed to be sorted by offset. */
5434
5435 static bool
5436 check_for_overlaps (vec<fieldoff_s> fieldstack)
5437 {
5438 fieldoff_s *fo = NULL;
5439 unsigned int i;
5440 HOST_WIDE_INT lastoffset = -1;
5441
5442 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5443 {
5444 if (fo->offset == lastoffset)
5445 return true;
5446 lastoffset = fo->offset;
5447 }
5448 return false;
5449 }
5450
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  Returns the head of the (possibly chained) varinfo list;
   the caller is responsible for recording the tree -> varinfo
   mapping.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  /* For things that are not decls fall back to the size of the type.  */
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables with unknown or non-constant size get a single
     conservative varinfo covering everything.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* A field with unknown size or negative offset disables
	 field-sensitivity for the whole variable.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained in offset order
     via the ->next pointers; the head represents the first field.  */
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = newvi->next)
    {
      const char *newname = "NULL";
      char *tempname;

      /* Only build pretty per-field names when we are going to dump.  */
      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      if (i + 1 < fieldstack.length ())
	newvi->next = new_var_info (decl, name);
    }

  fieldstack.release ();

  return vi;
}
5564
/* Create varinfo(s) for DECL with name NAME, record the tree -> varinfo
   mapping and generate the initial constraints for global variables.
   Returns the id of the (head) variable.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only variables get initial constraints here.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi->next)
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->analyzed)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      /* VI = <initializer constraints>.  */
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5638
5639 /* Print out the points-to solution for VAR to FILE. */
5640
5641 static void
5642 dump_solution_for_var (FILE *file, unsigned int var)
5643 {
5644 varinfo_t vi = get_varinfo (var);
5645 unsigned int i;
5646 bitmap_iterator bi;
5647
5648 /* Dump the solution for unified vars anyway, this avoids difficulties
5649 in scanning dumps in the testsuite. */
5650 fprintf (file, "%s = { ", vi->name);
5651 vi = get_varinfo (find (var));
5652 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5653 fprintf (file, "%s ", get_varinfo (i)->name);
5654 fprintf (file, "}");
5655
5656 /* But note when the variable was unified. */
5657 if (vi->id != var)
5658 fprintf (file, " same as %s", vi->name);
5659
5660 fprintf (file, "\n");
5661 }
5662
/* Print the points-to solution for VAR to stdout.
   Convenience wrapper around dump_solution_for_var for use from
   the debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
5670
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* ARG = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* The fake object itself may point to nonlocal memory (or
	     global restrict tags for restrict qualified sub-fields).  */
	  for (; vi; vi = vi->next)
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  for (; p; p = p->next)
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
}
5752
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to variable bitmap.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS, computed once at insertion.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared points-to bitmaps, keyed by bitmap contents.  */
static htab_t shared_bitmap_table;
5764
5765 /* Hash function for a shared_bitmap_info_t */
5766
5767 static hashval_t
5768 shared_bitmap_hash (const void *p)
5769 {
5770 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5771 return bi->hashcode;
5772 }
5773
5774 /* Equality function for two shared_bitmap_info_t's. */
5775
5776 static int
5777 shared_bitmap_eq (const void *p1, const void *p2)
5778 {
5779 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5780 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5781 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5782 }
5783
5784 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5785 existing instance if there is one, NULL otherwise. */
5786
5787 static bitmap
5788 shared_bitmap_lookup (bitmap pt_vars)
5789 {
5790 void **slot;
5791 struct shared_bitmap_info sbi;
5792
5793 sbi.pt_vars = pt_vars;
5794 sbi.hashcode = bitmap_hash (pt_vars);
5795
5796 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5797 sbi.hashcode, NO_INSERT);
5798 if (!slot)
5799 return NULL;
5800 else
5801 return ((shared_bitmap_info_t) *slot)->pt_vars;
5802 }
5803
5804
5805 /* Add a bitmap to the shared bitmap hashtable. */
5806
5807 static void
5808 shared_bitmap_add (bitmap pt_vars)
5809 {
5810 void **slot;
5811 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5812
5813 sbi->pt_vars = pt_vars;
5814 sbi->hashcode = bitmap_hash (pt_vars);
5815
5816 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5817 sbi->hashcode, INSERT);
5818 gcc_assert (!*slot);
5819 *slot = (void *) sbi;
5820 }
5821
5822
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM.  Also update PT->vars_contains_global when a global variable
   is added to the set.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
5858
5859
/* Compute the points-to solution *PT for the variable ORIG_VI.  */

static void
find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;

  memset (pt, 0, sizeof (struct pt_solution));

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      /* In IPA mode there is a distinct, unit-wide escape
		 solution.  */
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set; register it so later identical
	 sets can share the bitmap.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the already-shared copy and discard ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }
}
5930
/* Given a pointer variable P, fill in its points-to set.  */

static void
find_what_p_points_to (tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  /* Pointers not tracked by the solver keep their conservative
     default info.  */
  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  find_what_var_points_to (vi, &pi->pt);
}
5955
5956
/* Query statistics for points-to solutions.  */

static struct {
  /* pt_solution_includes queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* pt_solution_includes queries that disambiguated.  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* pt_solutions_intersect queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* pt_solutions_intersect queries that disambiguated.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
5965
/* Dump the accumulated points-to query statistics to the file S.  */

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
5983
5984
/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  /* Clear all flags and the vars bitmap, then flag the solution as
     pointing to anything.  */
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
5994
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
6007
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  /* Allocate a fresh GC-managed bitmap holding just VAR's pt-uid.  */
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_global = is_global_var (var);
}
6018
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  /* ANYTHING subsumes all other bits of the solution.  */
  if (dest->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  /* Nothing more to merge if SRC tracks no concrete variables.  */
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6047
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED solution, check if that
     is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6073
6074 /* Return true if the points-to solution *PT only point to a single var, and
6075 return the var uid in *UID. */
6076
6077 bool
6078 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6079 {
6080 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6081 || pt->null || pt->vars == NULL
6082 || !bitmap_single_bit_set_p (pt->vars))
6083 return false;
6084
6085 *uid = bitmap_first_set_bit (pt->vars);
6086 return true;
6087 }
6088
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* ESCAPED may transitively contain globals; defer to its solution.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ???  This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
6113
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED solution, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6143
6144 bool
6145 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6146 {
6147 bool res = pt_solution_includes_1 (pt, decl);
6148 if (res)
6149 ++pta_stats.pt_solution_includes_may_alias;
6150 else
6151 ++pta_stats.pt_solution_includes_no_alias;
6152 return res;
6153 }
6154
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the IPA escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6215
6216 bool
6217 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6218 {
6219 bool res = pt_solutions_intersect_1 (pt1, pt2);
6220 if (res)
6221 ++pta_stats.pt_solutions_intersect_may_alias;
6222 else
6223 ++pta_stats.pt_solutions_intersect_no_alias;
6224 return res;
6225 }
6226
6227
6228 /* Dump points-to information to OUTFILE. */
6229
6230 static void
6231 dump_sa_points_to_info (FILE *outfile)
6232 {
6233 unsigned int i;
6234
6235 fprintf (outfile, "\nPoints-to sets\n\n");
6236
6237 if (dump_flags & TDF_STATS)
6238 {
6239 fprintf (outfile, "Stats:\n");
6240 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6241 fprintf (outfile, "Non-pointer vars: %d\n",
6242 stats.nonpointer_vars);
6243 fprintf (outfile, "Statically unified vars: %d\n",
6244 stats.unified_vars_static);
6245 fprintf (outfile, "Dynamically unified vars: %d\n",
6246 stats.unified_vars_dynamic);
6247 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6248 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6249 fprintf (outfile, "Number of implicit edges: %d\n",
6250 stats.num_implicit_edges);
6251 }
6252
6253 for (i = 0; i < varmap.length (); i++)
6254 {
6255 varinfo_t vi = get_varinfo (i);
6256 if (!vi->may_have_pointers)
6257 continue;
6258 dump_solution_for_var (outfile, i);
6259 }
6260 }
6261
6262
6263 /* Debug points-to information to stderr. */
6264
6265 DEBUG_FUNCTION void
6266 debug_sa_points_to_info (void)
6267 {
6268 dump_sa_points_to_info (stderr);
6269 }
6270
6271
6272 /* Initialize the always-existing constraint variables for NULL
6273 ANYTHING, READONLY, and INTEGER */
6274
6275 static void
6276 init_base_vars (void)
6277 {
6278 struct constraint_expr lhs, rhs;
6279 varinfo_t var_anything;
6280 varinfo_t var_nothing;
6281 varinfo_t var_readonly;
6282 varinfo_t var_escaped;
6283 varinfo_t var_nonlocal;
6284 varinfo_t var_storedanything;
6285 varinfo_t var_integer;
6286
6287 /* Create the NULL variable, used to represent that a variable points
6288 to NULL. */
6289 var_nothing = new_var_info (NULL_TREE, "NULL");
6290 gcc_assert (var_nothing->id == nothing_id);
6291 var_nothing->is_artificial_var = 1;
6292 var_nothing->offset = 0;
6293 var_nothing->size = ~0;
6294 var_nothing->fullsize = ~0;
6295 var_nothing->is_special_var = 1;
6296 var_nothing->may_have_pointers = 0;
6297 var_nothing->is_global_var = 0;
6298
6299 /* Create the ANYTHING variable, used to represent that a variable
6300 points to some unknown piece of memory. */
6301 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6302 gcc_assert (var_anything->id == anything_id);
6303 var_anything->is_artificial_var = 1;
6304 var_anything->size = ~0;
6305 var_anything->offset = 0;
6306 var_anything->next = NULL;
6307 var_anything->fullsize = ~0;
6308 var_anything->is_special_var = 1;
6309
6310 /* Anything points to anything. This makes deref constraints just
6311 work in the presence of linked list and other p = *p type loops,
6312 by saying that *ANYTHING = ANYTHING. */
6313 lhs.type = SCALAR;
6314 lhs.var = anything_id;
6315 lhs.offset = 0;
6316 rhs.type = ADDRESSOF;
6317 rhs.var = anything_id;
6318 rhs.offset = 0;
6319
6320 /* This specifically does not use process_constraint because
6321 process_constraint ignores all anything = anything constraints, since all
6322 but this one are redundant. */
6323 constraints.safe_push (new_constraint (lhs, rhs));
6324
6325 /* Create the READONLY variable, used to represent that a variable
6326 points to readonly memory. */
6327 var_readonly = new_var_info (NULL_TREE, "READONLY");
6328 gcc_assert (var_readonly->id == readonly_id);
6329 var_readonly->is_artificial_var = 1;
6330 var_readonly->offset = 0;
6331 var_readonly->size = ~0;
6332 var_readonly->fullsize = ~0;
6333 var_readonly->next = NULL;
6334 var_readonly->is_special_var = 1;
6335
6336 /* readonly memory points to anything, in order to make deref
6337 easier. In reality, it points to anything the particular
6338 readonly variable can point to, but we don't track this
6339 separately. */
6340 lhs.type = SCALAR;
6341 lhs.var = readonly_id;
6342 lhs.offset = 0;
6343 rhs.type = ADDRESSOF;
6344 rhs.var = readonly_id; /* FIXME */
6345 rhs.offset = 0;
6346 process_constraint (new_constraint (lhs, rhs));
6347
6348 /* Create the ESCAPED variable, used to represent the set of escaped
6349 memory. */
6350 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6351 gcc_assert (var_escaped->id == escaped_id);
6352 var_escaped->is_artificial_var = 1;
6353 var_escaped->offset = 0;
6354 var_escaped->size = ~0;
6355 var_escaped->fullsize = ~0;
6356 var_escaped->is_special_var = 0;
6357
6358 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6359 memory. */
6360 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6361 gcc_assert (var_nonlocal->id == nonlocal_id);
6362 var_nonlocal->is_artificial_var = 1;
6363 var_nonlocal->offset = 0;
6364 var_nonlocal->size = ~0;
6365 var_nonlocal->fullsize = ~0;
6366 var_nonlocal->is_special_var = 1;
6367
6368 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6369 lhs.type = SCALAR;
6370 lhs.var = escaped_id;
6371 lhs.offset = 0;
6372 rhs.type = DEREF;
6373 rhs.var = escaped_id;
6374 rhs.offset = 0;
6375 process_constraint (new_constraint (lhs, rhs));
6376
6377 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6378 whole variable escapes. */
6379 lhs.type = SCALAR;
6380 lhs.var = escaped_id;
6381 lhs.offset = 0;
6382 rhs.type = SCALAR;
6383 rhs.var = escaped_id;
6384 rhs.offset = UNKNOWN_OFFSET;
6385 process_constraint (new_constraint (lhs, rhs));
6386
6387 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6388 everything pointed to by escaped points to what global memory can
6389 point to. */
6390 lhs.type = DEREF;
6391 lhs.var = escaped_id;
6392 lhs.offset = 0;
6393 rhs.type = SCALAR;
6394 rhs.var = nonlocal_id;
6395 rhs.offset = 0;
6396 process_constraint (new_constraint (lhs, rhs));
6397
6398 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6399 global memory may point to global memory and escaped memory. */
6400 lhs.type = SCALAR;
6401 lhs.var = nonlocal_id;
6402 lhs.offset = 0;
6403 rhs.type = ADDRESSOF;
6404 rhs.var = nonlocal_id;
6405 rhs.offset = 0;
6406 process_constraint (new_constraint (lhs, rhs));
6407 rhs.type = ADDRESSOF;
6408 rhs.var = escaped_id;
6409 rhs.offset = 0;
6410 process_constraint (new_constraint (lhs, rhs));
6411
6412 /* Create the STOREDANYTHING variable, used to represent the set of
6413 variables stored to *ANYTHING. */
6414 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6415 gcc_assert (var_storedanything->id == storedanything_id);
6416 var_storedanything->is_artificial_var = 1;
6417 var_storedanything->offset = 0;
6418 var_storedanything->size = ~0;
6419 var_storedanything->fullsize = ~0;
6420 var_storedanything->is_special_var = 0;
6421
6422 /* Create the INTEGER variable, used to represent that a variable points
6423 to what an INTEGER "points to". */
6424 var_integer = new_var_info (NULL_TREE, "INTEGER");
6425 gcc_assert (var_integer->id == integer_id);
6426 var_integer->is_artificial_var = 1;
6427 var_integer->size = ~0;
6428 var_integer->fullsize = ~0;
6429 var_integer->offset = 0;
6430 var_integer->next = NULL;
6431 var_integer->is_special_var = 1;
6432
6433 /* INTEGER = ANYTHING, because we don't know where a dereference of
6434 a random integer will point to. */
6435 lhs.type = SCALAR;
6436 lhs.var = integer_id;
6437 lhs.offset = 0;
6438 rhs.type = ADDRESSOF;
6439 rhs.var = anything_id;
6440 rhs.offset = 0;
6441 process_constraint (new_constraint (lhs, rhs));
6442 }
6443
/* Initialize things necessary to perform PTA: the obstacks, allocation
   pools, hash tables and global vectors used during constraint
   generation and solving, plus the special variables (ANYTHING,
   ESCAPED, ...) created by init_base_vars.  Shared by the
   intraprocedural driver and the IPA driver.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity is pointless when at most one field per variable
     is allowed.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);
}
6471
6472 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6473 predecessor edges. */
6474
6475 static void
6476 remove_preds_and_fake_succs (constraint_graph_t graph)
6477 {
6478 unsigned int i;
6479
6480 /* Clear the implicit ref and address nodes from the successor
6481 lists. */
6482 for (i = 0; i < FIRST_REF_NODE; i++)
6483 {
6484 if (graph->succs[i])
6485 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6486 FIRST_REF_NODE * 2);
6487 }
6488
6489 /* Free the successor list for the non-ref nodes. */
6490 for (i = FIRST_REF_NODE; i < graph->size; i++)
6491 {
6492 if (graph->succs[i])
6493 BITMAP_FREE (graph->succs[i]);
6494 }
6495
6496 /* Now reallocate the size of the successor list as, and blow away
6497 the predecessor bitmaps. */
6498 graph->size = varmap.length ();
6499 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6500
6501 free (graph->implicit_preds);
6502 graph->implicit_preds = NULL;
6503 free (graph->preds);
6504 graph->preds = NULL;
6505 bitmap_obstack_release (&predbitmap_obstack);
6506 }
6507
/* Solve the constraint set.  Builds the constraint graph from the
   global CONSTRAINTS/VARMAP, runs the offline optimizations (variable
   substitution, pointer-equivalence unification, indirect cycle
   detection) and then the online solver; on return every variable's
   solution bitmap is final.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the number of variables: the second half of the node space
     holds the implicit "ref" nodes (see FIRST_REF_NODE uses below and
     in remove_preds_and_fake_succs).  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6580
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  Generates
   constraints from every statement, solves them, and stores the
   resulting solutions into cfun's ESCAPED set, the pointer SSA names,
   and the per-call-site use/clobber sets.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry no pointer values, so skip them.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  find_what_var_points_to (get_varinfo (escaped_id),
			   &cfun->gimple_df->escaped);

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  /* Const calls use no memory at all.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      find_what_var_points_to (vi, pt);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Const, pure and no-vops calls clobber no memory.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      find_what_var_points_to (vi, pt);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6710
6711
6712 /* Delete created points-to sets. */
6713
6714 static void
6715 delete_points_to_sets (void)
6716 {
6717 unsigned int i;
6718
6719 htab_delete (shared_bitmap_table);
6720 if (dump_file && (dump_flags & TDF_STATS))
6721 fprintf (dump_file, "Points to sets created:%d\n",
6722 stats.points_to_sets_created);
6723
6724 pointer_map_destroy (vi_for_tree);
6725 pointer_map_destroy (call_stmt_vars);
6726 bitmap_obstack_release (&pta_obstack);
6727 constraints.release ();
6728
6729 for (i = 0; i < graph->size; i++)
6730 graph->complex[i].release ();
6731 free (graph->complex);
6732
6733 free (graph->rep);
6734 free (graph->succs);
6735 free (graph->pe);
6736 free (graph->pe_rep);
6737 free (graph->indirect_cycles);
6738 free (graph);
6739
6740 varmap.release ();
6741 free_alloc_pool (variable_info_pool);
6742 free_alloc_pool (constraint_pool);
6743
6744 obstack_free (&fake_var_decl_obstack, NULL);
6745 }
6746
6747
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Entry point used via TODO_rebuild_alias; returns 0 (no extra TODOs).  */

unsigned int
compute_may_aliases (void)
{
  /* If IPA PTA already filled in the per-function solutions, keep
     them instead of recomputing weaker intraprocedural ones.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what points-to information we have left.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  /* Points-to analysis must not have left the SSA form in need of
     updating.  */
  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
6786
6787 static bool
6788 gate_tree_pta (void)
6789 {
6790 return flag_tree_pta;
6791 }
6792
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute function; the
   work happens when the pass manager processes todo_flags_finish.  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",		/* name */
  OPTGROUP_NONE,	/* optinfo_flags */
  gate_tree_pta,	/* gate */
  NULL,			/* execute */
  NULL,			/* sub */
  NULL,			/* next */
  0,			/* static_pass_number */
  TV_NONE,		/* tv_id */
  PROP_cfg | PROP_ssa,	/* properties_required */
  0,			/* properties_provided */
  0,			/* properties_destroyed */
  0,			/* todo_flags_start */
  TODO_rebuild_alias	/* todo_flags_finish */
 }
};
6815
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Like pass_build_alias, it carries no execute
   function of its own.  */

struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",		/* name */
  OPTGROUP_NONE,	/* optinfo_flags */
  gate_tree_pta,	/* gate */
  NULL,			/* execute */
  NULL,			/* sub */
  NULL,			/* next */
  0,			/* static_pass_number */
  TV_NONE,		/* tv_id */
  PROP_cfg | PROP_ssa,	/* properties_required */
  0,			/* properties_provided */
  0,			/* properties_destroyed */
  0,			/* todo_flags_start */
  TODO_rebuild_alias	/* todo_flags_finish */
 }
};
6838
6839
6840 /* Return true if we should execute IPA PTA. */
6841 static bool
6842 gate_ipa_pta (void)
6843 {
6844 return (optimize
6845 && flag_ipa_pta
6846 /* Don't bother doing anything if the program has errors. */
6847 && !seen_error ());
6848 }
6849
/* IPA PTA solutions for ESCAPED.  Overwritten by ipa_pta_execute once
   the unit-wide constraints are solved; until then the initializer
   presumably makes it point to anything (the first member is set) --
   TODO confirm field order against struct pt_solution.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
6853
6854 /* Associate node with varinfo DATA. Worker for
6855 cgraph_for_node_and_aliases. */
6856 static bool
6857 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6858 {
6859 if (node->alias || node->thunk.thunk_p)
6860 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
6861 return false;
6862 }
6863
/* Execute the driver for IPA PTA.  Builds constraints for every
   defined function and global variable in the unit, solves them, and
   writes the resulting points-to, call-use and call-clobber sets back
   into each function, marking them with gimple_df->ipa_pta.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->symbol.decl,
				     alias_get_name (node->symbol.decl));
      /* Aliases and thunks of this function share its varinfo.  */
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias)
	continue;

      get_vi_for_tree (var->symbol.decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  /* Remember where each function's constraints start, for dumping.  */
  from = constraints.length ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->symbol.decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->symbol.decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->symbol.used_from_other_partition
	  || node->symbol.externally_visible
	  || node->symbol.force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->symbol.decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  /* Add ESCAPED = <result of this function>.  */
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      /* Virtual PHIs carry no pointer values, so skip them.  */
	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.  */
      fi = lookup_vi_for_tree (node->symbol.decl);
      gcc_assert (fi->is_fn_info);
      find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
			       &clobbers);
      find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
      /* Every caller of this function gets the same use/clobber sets.  */
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  /* Const calls use no memory at all.  */
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      find_what_var_points_to (vi, pt);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  /* Const, pure and no-vops calls clobber no memory.  */
		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      find_what_var_points_to (vi, pt);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      /* Union in the use/clobber solutions of every
			 function the callee pointer may refer to.  */
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      find_what_var_points_to
				(first_vi_for_offset (vi, fi_uses), &sol);
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      find_what_var_points_to
				(first_vi_for_offset (vi, fi_clobbers), &sol);
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Record that this function's solutions came from IPA PTA, so
	 compute_may_aliases does not recompute them.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7193
/* The interprocedural points-to analysis pass, gated by gate_ipa_pta
   (-fipa-pta) and driven by ipa_pta_execute.  */
struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",		/* name */
  OPTGROUP_NONE,	/* optinfo_flags */
  gate_ipa_pta,		/* gate */
  ipa_pta_execute,	/* execute */
  NULL,			/* sub */
  NULL,			/* next */
  0,			/* static_pass_number */
  TV_IPA_PTA,		/* tv_id */
  0,			/* properties_required */
  0,			/* properties_provided */
  0,			/* properties_destroyed */
  0,			/* todo_flags_start */
  TODO_update_ssa	/* todo_flags_finish */
 }
};