gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "tree.h"
32 #include "tree-flow.h"
33 #include "tree-inline.h"
34 #include "diagnostic-core.h"
35 #include "gimple.h"
36 #include "hashtab.h"
37 #include "function.h"
38 #include "cgraph.h"
39 #include "tree-pass.h"
40 #include "alloc-pool.h"
41 #include "splay-tree.h"
42 #include "params.h"
43 #include "cgraph.h"
44 #include "alias.h"
45 #include "pointer-set.h"
46
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
49 points-to sets.
50
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are performed on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
57 as a consequence.
58
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
62
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
70
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
77
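   As an illustration, the statements below would give rise to these
   constraint expressions:

     p = &a;  ->  SCALAR (p) = ADDRESSOF (a)
     p = q;   ->  SCALAR (p) = SCALAR (q)
     p = *q;  ->  SCALAR (p) = DEREF (q)
     *p = q;  ->  DEREF (p) = SCALAR (q)
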
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
80
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
83 order.
84 Each variable for a structure field has
85
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize, that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
90
91 Thus,
92 struct f
93 {
94 int a;
95 int b;
96 } foo;
97 int *bar;
98
99 looks like
100
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
104
105
106 In order to solve the system of set constraints, the following is
107 done:
108
109 1. Each constraint variable x has a solution set associated with it,
110 Sol(x).
111
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
118
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
121
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
124
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
128
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
132
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
136
137 8. The process of walking the graph is iterated until no solution
138 sets change.
139
140 Prior to walking the graph in steps 6 and 7, we perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
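
   As a small worked example, consider the constraints

     p = &a
     q = p
     r = *q

   Step 3 places a in Sol(p); step 5 adds the copy edge from p to q, so
   propagation gives Sol(q) = {a}; processing the complex constraint
   r = *q then adds a copy edge from a to r, so Sol(r) receives Sol(a).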
143
144 TODO: Adding offsets to pointer-to-structures can be handled (i.e. not
145 punted on and turned into ANYTHING), but isn't. You can just see what
146 offset inside the pointed-to struct it's going to access.
147
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
150
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
153
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
156
157 /* IPA-PTA optimizations possible.
158
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
163
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode. This is
167 also needed to fix the pt_solution_includes_global predicate
168 (and thus ptr_deref_may_alias_global_p).
169
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
177
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
188
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
193
194 We never put function decls in points-to sets so we do not
195 keep the set of called functions for indirect calls.
196
197 And probably more. */
198
199 static bool use_field_sensitive = true;
200 static int in_ipa_mode = 0;
201
202 /* Used for predecessor bitmaps. */
203 static bitmap_obstack predbitmap_obstack;
204
205 /* Used for points-to sets. */
206 static bitmap_obstack pta_obstack;
207
208 /* Used for oldsolution members of variables. */
209 static bitmap_obstack oldpta_obstack;
210
211 /* Used for per-solver-iteration bitmaps. */
212 static bitmap_obstack iteration_obstack;
213
214 static unsigned int create_variable_info_for (tree, const char *);
215 typedef struct constraint_graph *constraint_graph_t;
216 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
217
218 struct constraint;
219 typedef struct constraint *constraint_t;
220
221
222 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
223 if (a) \
224 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225
226 static struct constraint_stats
227 {
228 unsigned int total_vars;
229 unsigned int nonpointer_vars;
230 unsigned int unified_vars_static;
231 unsigned int unified_vars_dynamic;
232 unsigned int iterations;
233 unsigned int num_edges;
234 unsigned int num_implicit_edges;
235 unsigned int points_to_sets_created;
236 } stats;
237
238 struct variable_info
239 {
240 /* ID of this variable */
241 unsigned int id;
242
243 /* True if this is a variable created by the constraint analysis, such as
244 heap variables and constraints we had to break up. */
245 unsigned int is_artificial_var : 1;
246
247 /* True if this is a special variable whose solution set should not be
248 changed. */
249 unsigned int is_special_var : 1;
250
251 /* True for variables whose size is not known or variable. */
252 unsigned int is_unknown_size_var : 1;
253
254 /* True for (sub-)fields that represent a whole variable. */
255 unsigned int is_full_var : 1;
256
257 /* True if this is a heap variable. */
258 unsigned int is_heap_var : 1;
259
260 /* True if this field may contain pointers. */
261 unsigned int may_have_pointers : 1;
262
263 /* True if this field has only restrict qualified pointers. */
264 unsigned int only_restrict_pointers : 1;
265
266 /* True if this represents a global variable. */
267 unsigned int is_global_var : 1;
268
269 /* True if this represents an IPA function info. */
270 unsigned int is_fn_info : 1;
271
272 /* A link to the variable for the next field in this structure. */
273 struct variable_info *next;
274
275 /* Offset of this variable, in bits, from the base variable */
276 unsigned HOST_WIDE_INT offset;
277
278 /* Size of the variable, in bits. */
279 unsigned HOST_WIDE_INT size;
280
281 /* Full size of the base variable, in bits. */
282 unsigned HOST_WIDE_INT fullsize;
283
284 /* Name of this variable */
285 const char *name;
286
287 /* Tree that this variable is associated with. */
288 tree decl;
289
290 /* Points-to set for this variable. */
291 bitmap solution;
292
293 /* Old points-to set for this variable. */
294 bitmap oldsolution;
295 };
296 typedef struct variable_info *varinfo_t;
297
298 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
299 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
300 unsigned HOST_WIDE_INT);
301 static varinfo_t lookup_vi_for_tree (tree);
302 static inline bool type_can_have_subvars (const_tree);
303
304 /* Pool of variable info structures. */
305 static alloc_pool variable_info_pool;
306
307
308
309 /* Table of variable info structures for constraint variables.
310 Indexed directly by variable info id. */
311 static vec<varinfo_t> varmap;
312
313 /* Return the varmap element N. */
314
315 static inline varinfo_t
316 get_varinfo (unsigned int n)
317 {
318 return varmap[n];
319 }
320
321 /* Static IDs for the special variables. */
322 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
323 escaped_id = 3, nonlocal_id = 4,
324 storedanything_id = 5, integer_id = 6 };
325
326 /* Return a new variable info structure for the variable T, named
327 NAME. Append it
328 to the vector of variable info structures. */
329
330 static varinfo_t
331 new_var_info (tree t, const char *name)
332 {
333 unsigned index = varmap.length ();
334 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
335
336 ret->id = index;
337 ret->name = name;
338 ret->decl = t;
339 /* Vars without decl are artificial and do not have sub-variables. */
340 ret->is_artificial_var = (t == NULL_TREE);
341 ret->is_special_var = false;
342 ret->is_unknown_size_var = false;
343 ret->is_full_var = (t == NULL_TREE);
344 ret->is_heap_var = false;
345 ret->may_have_pointers = true;
346 ret->only_restrict_pointers = false;
347 ret->is_global_var = (t == NULL_TREE);
348 ret->is_fn_info = false;
349 if (t && DECL_P (t))
350 ret->is_global_var = (is_global_var (t)
351 /* We have to treat even local register variables
352 as escape points. */
353 || (TREE_CODE (t) == VAR_DECL
354 && DECL_HARD_REGISTER (t)));
355 ret->solution = BITMAP_ALLOC (&pta_obstack);
356 ret->oldsolution = NULL;
357 ret->next = NULL;
358
359 stats.total_vars++;
360
361 varmap.safe_push (ret);
362
363 return ret;
364 }
365
366
367 /* A map mapping call statements to per-stmt variables for uses
368 and clobbers specific to the call. */
369 struct pointer_map_t *call_stmt_vars;
370
371 /* Lookup or create the variable for the call statement CALL. */
372
373 static varinfo_t
374 get_call_vi (gimple call)
375 {
376 void **slot_p;
377 varinfo_t vi, vi2;
378
379 slot_p = pointer_map_insert (call_stmt_vars, call);
380 if (*slot_p)
381 return (varinfo_t) *slot_p;
382
383 vi = new_var_info (NULL_TREE, "CALLUSED");
384 vi->offset = 0;
385 vi->size = 1;
386 vi->fullsize = 2;
387 vi->is_full_var = true;
388
389 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
390 vi2->offset = 1;
391 vi2->size = 1;
392 vi2->fullsize = 2;
393 vi2->is_full_var = true;
394
395 *slot_p = (void *) vi;
396 return vi;
397 }
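
/* Note how the CALLUSED and CALLCLOBBERED variables above are laid out
   as a single two-field pseudo-structure (fullsize 2, offsets 0 and 1),
   so that the clobber variable is always reachable as uses->next; see
   lookup_call_clobber_vi below. */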
398
399 /* Lookup the variable for the call statement CALL representing
400 the uses. Returns NULL if there is nothing special about this call. */
401
402 static varinfo_t
403 lookup_call_use_vi (gimple call)
404 {
405 void **slot_p;
406
407 slot_p = pointer_map_contains (call_stmt_vars, call);
408 if (slot_p)
409 return (varinfo_t) *slot_p;
410
411 return NULL;
412 }
413
414 /* Lookup the variable for the call statement CALL representing
415 the clobbers. Returns NULL if there is nothing special about this call. */
416
417 static varinfo_t
418 lookup_call_clobber_vi (gimple call)
419 {
420 varinfo_t uses = lookup_call_use_vi (call);
421 if (!uses)
422 return NULL;
423
424 return uses->next;
425 }
426
427 /* Lookup or create the variable for the call statement CALL representing
428 the uses. */
429
430 static varinfo_t
431 get_call_use_vi (gimple call)
432 {
433 return get_call_vi (call);
434 }
435
436 /* Lookup or create the variable for the call statement CALL representing
437 the clobbers. */
438
439 static varinfo_t ATTRIBUTE_UNUSED
440 get_call_clobber_vi (gimple call)
441 {
442 return get_call_vi (call)->next;
443 }
444
445
446 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
447
448 /* An expression that appears in a constraint. */
449
450 struct constraint_expr
451 {
452 /* Constraint type. */
453 constraint_expr_type type;
454
455 /* Variable we are referring to in the constraint. */
456 unsigned int var;
457
458 /* Offset, in bits, of this constraint from the beginning of the
459 variables it ends up referring to.
460
461 IOW, in a deref constraint, we would deref, get the result set,
462 then add OFFSET to each member. */
463 HOST_WIDE_INT offset;
464 };
465
466 /* Use 0x8000... as special unknown offset. */
467 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
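
/* As an illustration: for x = *(y + 4) on a target with 8-bit units,
   the RHS is a DEREF expression for y with offset 32. When the offset
   of an access cannot be determined at compile time, UNKNOWN_OFFSET is
   used instead and the solution is expanded to cover all sub-fields
   (see solution_set_expand below). */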
468
469 typedef struct constraint_expr ce_s;
470 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
471 static void get_constraint_for (tree, vec<ce_s> *);
472 static void get_constraint_for_rhs (tree, vec<ce_s> *);
473 static void do_deref (vec<ce_s> *);
474
475 /* Our set constraints are made up of two constraint expressions, one
476 LHS, and one RHS.
477
478 As described in the introduction, our set constraints each represent an
479 operation between set valued variables.
480 */
481 struct constraint
482 {
483 struct constraint_expr lhs;
484 struct constraint_expr rhs;
485 };
486
487 /* List of constraints that we use to build the constraint graph from. */
488
489 static vec<constraint_t> constraints;
490 static alloc_pool constraint_pool;
491
492 /* The constraint graph is represented as an array of bitmaps
493 containing successor nodes. */
494
495 struct constraint_graph
496 {
497 /* Size of this graph, which may be different than the number of
498 nodes in the variable map. */
499 unsigned int size;
500
501 /* Explicit successors of each node. */
502 bitmap *succs;
503
504 /* Implicit predecessors of each node (Used for variable
505 substitution). */
506 bitmap *implicit_preds;
507
508 /* Explicit predecessors of each node (Used for variable substitution). */
509 bitmap *preds;
510
511 /* Indirect cycle representatives, or -1 if the node has no indirect
512 cycles. */
513 int *indirect_cycles;
514
515 /* Representative node for a node. rep[a] == a unless the node has
516 been unified. */
517 unsigned int *rep;
518
519 /* Equivalence class representative for a label. This is used for
520 variable substitution. */
521 int *eq_rep;
522
523 /* Pointer equivalence label for a node. All nodes with the same
524 pointer equivalence label can be unified together at some point
525 (either during constraint optimization or after the constraint
526 graph is built). */
527 unsigned int *pe;
528
529 /* Pointer equivalence representative for a label. This is used to
530 handle nodes that are pointer equivalent but not location
531 equivalent. We can unite these once the addressof constraints
532 are transformed into initial points-to sets. */
533 int *pe_rep;
534
535 /* Pointer equivalence label for each node, used during variable
536 substitution. */
537 unsigned int *pointer_label;
538
539 /* Location equivalence label for each node, used during location
540 equivalence finding. */
541 unsigned int *loc_label;
542
543 /* Pointed-by set for each node, used during location equivalence
544 finding. This is pointed-by rather than pointed-to, because it
545 is constructed using the predecessor graph. */
546 bitmap *pointed_by;
547
548 /* Points to sets for pointer equivalence. This is *not* the actual
549 points-to sets for nodes. */
550 bitmap *points_to;
551
552 /* Bitmap of nodes where the bit is set if the node is a direct
553 node. Used for variable substitution. */
554 sbitmap direct_nodes;
555
556 /* Bitmap of nodes where the bit is set if the node is address
557 taken. Used for variable substitution. */
558 bitmap address_taken;
559
560 /* Vector of complex constraints for each graph node. Complex
561 constraints are those involving dereferences or offsets that are
562 not 0. */
563 vec<constraint_t> *complex;
564 };
565
566 static constraint_graph_t graph;
567
568 /* During variable substitution and the offline version of indirect
569 cycle finding, we create nodes to represent dereferences and
570 address taken constraints. These represent where these start and
571 end. */
572 #define FIRST_REF_NODE (varmap).length ()
573 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
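
/* For example: with varmap.length () == 4, nodes 0-3 are the variables
   themselves, node FIRST_REF_NODE + i (i.e. 4 + i) stands for *i, and
   the REF nodes occupy ids 4 through LAST_REF_NODE == 7. */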
574
575 /* Return the representative node for NODE, if NODE has been unioned
576 with another NODE.
577 This function performs path compression along the way to finding
578 the representative. */
579
580 static unsigned int
581 find (unsigned int node)
582 {
583 gcc_assert (node < graph->size);
584 if (graph->rep[node] != node)
585 return graph->rep[node] = find (graph->rep[node]);
586 return node;
587 }
588
589 /* Union the FROM and TO nodes into the TO node.
590 Note that at some point in the future, we may want to do
591 union-by-rank, in which case we are going to have to return the
592 node we unified to. */
593
594 static bool
595 unite (unsigned int to, unsigned int from)
596 {
597 gcc_assert (to < graph->size && from < graph->size);
598 if (to != from && graph->rep[from] != to)
599 {
600 graph->rep[from] = to;
601 return true;
602 }
603 return false;
604 }
605
606 /* Create a new constraint consisting of LHS and RHS expressions. */
607
608 static constraint_t
609 new_constraint (const struct constraint_expr lhs,
610 const struct constraint_expr rhs)
611 {
612 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
613 ret->lhs = lhs;
614 ret->rhs = rhs;
615 return ret;
616 }
617
618 /* Print out constraint C to FILE. */
619
620 static void
621 dump_constraint (FILE *file, constraint_t c)
622 {
623 if (c->lhs.type == ADDRESSOF)
624 fprintf (file, "&");
625 else if (c->lhs.type == DEREF)
626 fprintf (file, "*");
627 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
628 if (c->lhs.offset == UNKNOWN_OFFSET)
629 fprintf (file, " + UNKNOWN");
630 else if (c->lhs.offset != 0)
631 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
632 fprintf (file, " = ");
633 if (c->rhs.type == ADDRESSOF)
634 fprintf (file, "&");
635 else if (c->rhs.type == DEREF)
636 fprintf (file, "*");
637 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
638 if (c->rhs.offset == UNKNOWN_OFFSET)
639 fprintf (file, " + UNKNOWN");
640 else if (c->rhs.offset != 0)
641 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
642 }
643
644
645 void debug_constraint (constraint_t);
646 void debug_constraints (void);
647 void debug_constraint_graph (void);
648 void debug_solution_for_var (unsigned int);
649 void debug_sa_points_to_info (void);
650
651 /* Print out constraint C to stderr. */
652
653 DEBUG_FUNCTION void
654 debug_constraint (constraint_t c)
655 {
656 dump_constraint (stderr, c);
657 fprintf (stderr, "\n");
658 }
659
660 /* Print out all constraints to FILE. */
661
662 static void
663 dump_constraints (FILE *file, int from)
664 {
665 int i;
666 constraint_t c;
667 for (i = from; constraints.iterate (i, &c); i++)
668 if (c)
669 {
670 dump_constraint (file, c);
671 fprintf (file, "\n");
672 }
673 }
674
675 /* Print out all constraints to stderr. */
676
677 DEBUG_FUNCTION void
678 debug_constraints (void)
679 {
680 dump_constraints (stderr, 0);
681 }
682
683 /* Print the constraint graph in dot format. */
684
685 static void
686 dump_constraint_graph (FILE *file)
687 {
688 unsigned int i;
689
690 /* Only print the graph if it has already been initialized: */
691 if (!graph)
692 return;
693
694 /* Prints the header of the dot file: */
695 fprintf (file, "strict digraph {\n");
696 fprintf (file, " node [\n shape = box\n ]\n");
697 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
698 fprintf (file, "\n // List of nodes and complex constraints in "
699 "the constraint graph:\n");
700
701 /* The next lines print the nodes in the graph together with the
702 complex constraints attached to them. */
703 for (i = 0; i < graph->size; i++)
704 {
705 if (find (i) != i)
706 continue;
707 if (i < FIRST_REF_NODE)
708 fprintf (file, "\"%s\"", get_varinfo (i)->name);
709 else
710 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
711 if (graph->complex[i].exists ())
712 {
713 unsigned j;
714 constraint_t c;
715 fprintf (file, " [label=\"\\N\\n");
716 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
717 {
718 dump_constraint (file, c);
719 fprintf (file, "\\l");
720 }
721 fprintf (file, "\"]");
722 }
723 fprintf (file, ";\n");
724 }
725
726 /* Go over the edges. */
727 fprintf (file, "\n // Edges in the constraint graph:\n");
728 for (i = 0; i < graph->size; i++)
729 {
730 unsigned j;
731 bitmap_iterator bi;
732 if (find (i) != i)
733 continue;
734 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
735 {
736 unsigned to = find (j);
737 if (i == to)
738 continue;
739 if (i < FIRST_REF_NODE)
740 fprintf (file, "\"%s\"", get_varinfo (i)->name);
741 else
742 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
743 fprintf (file, " -> ");
744 if (to < FIRST_REF_NODE)
745 fprintf (file, "\"%s\"", get_varinfo (to)->name);
746 else
747 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
748 fprintf (file, ";\n");
749 }
750 }
751
752 /* Prints the tail of the dot file. */
753 fprintf (file, "}\n");
754 }
755
756 /* Print out the constraint graph to stderr. */
757
758 DEBUG_FUNCTION void
759 debug_constraint_graph (void)
760 {
761 dump_constraint_graph (stderr);
762 }
763
764 /* SOLVER FUNCTIONS
765
766 The solver is a simple worklist solver that works on the following
767 algorithm:
768
769 sbitmap changed_nodes = all zeroes;
770 changed_count = 0;
771 For each node that is not already collapsed:
772 changed_count++;
773 set bit in changed nodes
774
775 while (changed_count > 0)
776 {
777 compute topological ordering for constraint graph
778
779 find and collapse cycles in the constraint graph (updating
780 changed if necessary)
781
782 for each node (n) in the graph in topological order:
783 changed_count--;
784
785 Process each complex constraint associated with the node,
786 updating changed if necessary.
787
788 For each outgoing edge from n, propagate the solution from n to
789 the destination of the edge, updating changed as necessary.
790
791 } */
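
/* As a sketch of one iteration: given the constraints p = &a, q = p and
   r = *q, once Sol(p) contains {a} the copy edge from p to q propagates
   it, so Sol(q) |= Sol(p) marks q as changed; the complex constraint
   r = *q attached to q is then re-processed, adding a copy edge from a
   to r, and iteration stops once no solution set grows. */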
792
793 /* Return true if two constraint expressions A and B are equal. */
794
795 static bool
796 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
797 {
798 return a.type == b.type && a.var == b.var && a.offset == b.offset;
799 }
800
801 /* Return true if constraint expression A is less than constraint expression
802 B. This is just arbitrary, but consistent, in order to give them an
803 ordering. */
804
805 static bool
806 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
807 {
808 if (a.type == b.type)
809 {
810 if (a.var == b.var)
811 return a.offset < b.offset;
812 else
813 return a.var < b.var;
814 }
815 else
816 return a.type < b.type;
817 }
818
819 /* Return true if constraint A is less than constraint B. This is just
820 arbitrary, but consistent, in order to give them an ordering. */
821
822 static bool
823 constraint_less (const constraint_t &a, const constraint_t &b)
824 {
825 if (constraint_expr_less (a->lhs, b->lhs))
826 return true;
827 else if (constraint_expr_less (b->lhs, a->lhs))
828 return false;
829 else
830 return constraint_expr_less (a->rhs, b->rhs);
831 }
832
833 /* Return true if two constraints A and B are equal. */
834
835 static bool
836 constraint_equal (struct constraint a, struct constraint b)
837 {
838 return constraint_expr_equal (a.lhs, b.lhs)
839 && constraint_expr_equal (a.rhs, b.rhs);
840 }
841
842
843 /* Find a constraint LOOKFOR in the sorted constraint vector VEC. */
844
845 static constraint_t
846 constraint_vec_find (vec<constraint_t> vec,
847 struct constraint lookfor)
848 {
849 unsigned int place;
850 constraint_t found;
851
852 if (!vec.exists ())
853 return NULL;
854
855 place = vec.lower_bound (&lookfor, constraint_less);
856 if (place >= vec.length ())
857 return NULL;
858 found = vec[place];
859 if (!constraint_equal (*found, lookfor))
860 return NULL;
861 return found;
862 }
863
864 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
865
866 static void
867 constraint_set_union (vec<constraint_t> *to,
868 vec<constraint_t> *from)
869 {
870 int i;
871 constraint_t c;
872
873 FOR_EACH_VEC_ELT (*from, i, c)
874 {
875 if (constraint_vec_find (*to, *c) == NULL)
876 {
877 unsigned int place = to->lower_bound (c, constraint_less);
878 to->safe_insert (place, c);
879 }
880 }
881 }
882
883 /* Expand the solution in SET to include all sub-fields of the
884 variables it contains. Union the expanded result into RESULT. */
885
886 static void
887 solution_set_expand (bitmap result, bitmap set)
888 {
889 bitmap_iterator bi;
890 bitmap vars = NULL;
891 unsigned j;
892
893 /* In a first pass record all variables we need to add all
894 sub-fields of. This avoids quadratic behavior. */
895 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
896 {
897 varinfo_t v = get_varinfo (j);
898 if (v->is_artificial_var
899 || v->is_full_var)
900 continue;
901 v = lookup_vi_for_tree (v->decl);
902 if (vars == NULL)
903 vars = BITMAP_ALLOC (NULL);
904 bitmap_set_bit (vars, v->id);
905 }
906
907 /* In the second pass now do the addition to the solution and
908 to speed up solving add it to the delta as well. */
909 if (vars != NULL)
910 {
911 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
912 {
913 varinfo_t v = get_varinfo (j);
914 for (; v != NULL; v = v->next)
915 bitmap_set_bit (result, v->id);
916 }
917 BITMAP_FREE (vars);
918 }
919 }
920
921 /* Take a solution set SET, add OFFSET to each member of the set, and
922 overwrite SET with the result when done. */
923
924 static void
925 solution_set_add (bitmap set, HOST_WIDE_INT offset)
926 {
927 bitmap result = BITMAP_ALLOC (&iteration_obstack);
928 unsigned int i;
929 bitmap_iterator bi;
930
931 /* If the offset is unknown we have to expand the solution to
932 all subfields. */
933 if (offset == UNKNOWN_OFFSET)
934 {
935 solution_set_expand (set, set);
936 return;
937 }
938
939 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
940 {
941 varinfo_t vi = get_varinfo (i);
942
943 /* If this is a variable with just one field just set its bit
944 in the result. */
945 if (vi->is_artificial_var
946 || vi->is_unknown_size_var
947 || vi->is_full_var)
948 bitmap_set_bit (result, i);
949 else
950 {
951 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
952
953 /* If the offset makes the pointer point to before the
954 variable use offset zero for the field lookup. */
955 if (offset < 0
956 && fieldoffset > vi->offset)
957 fieldoffset = 0;
958
959 if (offset != 0)
960 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
961
962 bitmap_set_bit (result, vi->id);
963 /* If the result is not exactly at fieldoffset include the next
964 field as well. See get_constraint_for_ptr_offset for more
965 rationale. */
966 if (vi->offset != fieldoffset
967 && vi->next != NULL)
968 bitmap_set_bit (result, vi->next->id);
969 }
970 }
971
972 bitmap_copy (set, result);
973 BITMAP_FREE (result);
974 }
975
976 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
977 process. */
978
979 static bool
980 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
981 {
982 if (inc == 0)
983 return bitmap_ior_into (to, from);
984 else
985 {
986 bitmap tmp;
987 bool res;
988
989 tmp = BITMAP_ALLOC (&iteration_obstack);
990 bitmap_copy (tmp, from);
991 solution_set_add (tmp, inc);
992 res = bitmap_ior_into (to, tmp);
993 BITMAP_FREE (tmp);
994 return res;
995 }
996 }
997
998 /* Insert constraint C into the list of complex constraints for graph
999 node VAR. */
1000
1001 static void
1002 insert_into_complex (constraint_graph_t graph,
1003 unsigned int var, constraint_t c)
1004 {
1005 vec<constraint_t> complex = graph->complex[var];
1006 unsigned int place = complex.lower_bound (c, constraint_less);
1007
1008 /* Only insert constraints that do not already exist. */
1009 if (place >= complex.length ()
1010 || !constraint_equal (*c, *complex[place]))
1011 graph->complex[var].safe_insert (place, c);
1012 }
1013
1014
1015 /* Condense two variable nodes into a single variable node, by moving
1016 all associated info from FROM to TO. */
1017
1018 static void
1019 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1020 unsigned int from)
1021 {
1022 unsigned int i;
1023 constraint_t c;
1024
1025 gcc_assert (find (from) == to);
1026
1027 /* Move all complex constraints from node FROM into node TO. */
1028 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1029 {
1030 /* In complex constraints for node FROM, we may have either
1031 a = *FROM or *FROM = a, or an offsetted constraint, which is
1032 always added to the rhs node's constraints. */
1033
1034 if (c->rhs.type == DEREF)
1035 c->rhs.var = to;
1036 else if (c->lhs.type == DEREF)
1037 c->lhs.var = to;
1038 else
1039 c->rhs.var = to;
1040 }
1041 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1042 graph->complex[from].release ();
1043 }
1044
1045
1046 /* Remove edges involving NODE from GRAPH. */
1047
1048 static void
1049 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1050 {
1051 if (graph->succs[node])
1052 BITMAP_FREE (graph->succs[node]);
1053 }
1054
1055 /* Merge GRAPH nodes FROM and TO into node TO. */
1056
1057 static void
1058 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1059 unsigned int from)
1060 {
1061 if (graph->indirect_cycles[from] != -1)
1062 {
1063 /* If we have indirect cycles with the from node, and we have
1064 none on the to node, the to node has indirect cycles from the
1065 from node now that they are unified.
1066 If indirect cycles exist on both, unify the nodes that they
1067 are in a cycle with, since we know they are in a cycle with
1068 each other. */
1069 if (graph->indirect_cycles[to] == -1)
1070 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1071 }
1072
1073 /* Merge all the successor edges. */
1074 if (graph->succs[from])
1075 {
1076 if (!graph->succs[to])
1077 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1078 bitmap_ior_into (graph->succs[to],
1079 graph->succs[from]);
1080 }
1081
1082 clear_edges_for_node (graph, from);
1083 }
1084
1085
1086 /* Add an implicit graph edge to GRAPH, going from TO to FROM if
1087 it doesn't exist in the graph already. */
1088
1089 static void
1090 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1091 unsigned int from)
1092 {
1093 if (to == from)
1094 return;
1095
1096 if (!graph->implicit_preds[to])
1097 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1098
1099 if (bitmap_set_bit (graph->implicit_preds[to], from))
1100 stats.num_implicit_edges++;
1101 }
1102
1103 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1104 it doesn't exist in the graph already. */
1106
1107 static void
1108 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1109 unsigned int from)
1110 {
1111 if (!graph->preds[to])
1112 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1113 bitmap_set_bit (graph->preds[to], from);
1114 }
1115
1116 /* Add a graph edge to GRAPH, going from FROM to TO if
1117 it doesn't exist in the graph already.
1118 Return false if the edge already existed, true otherwise. */
1119
1120 static bool
1121 add_graph_edge (constraint_graph_t graph, unsigned int to,
1122 unsigned int from)
1123 {
1124 if (to == from)
1125 {
1126 return false;
1127 }
1128 else
1129 {
1130 bool r = false;
1131
1132 if (!graph->succs[from])
1133 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1134 if (bitmap_set_bit (graph->succs[from], to))
1135 {
1136 r = true;
1137 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1138 stats.num_edges++;
1139 }
1140 return r;
1141 }
1142 }
1143
1144
1145 /* Return true if {DEST,SRC} is an existing graph edge in GRAPH. */
1146
1147 static bool
1148 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1149 unsigned int dest)
1150 {
1151 return (graph->succs[dest]
1152 && bitmap_bit_p (graph->succs[dest], src));
1153 }
1154
1155 /* Initialize the constraint graph structure to contain SIZE nodes. */
1156
1157 static void
1158 init_graph (unsigned int size)
1159 {
1160 unsigned int j;
1161
1162 graph = XCNEW (struct constraint_graph);
1163 graph->size = size;
1164 graph->succs = XCNEWVEC (bitmap, graph->size);
1165 graph->indirect_cycles = XNEWVEC (int, graph->size);
1166 graph->rep = XNEWVEC (unsigned int, graph->size);
1167 /* ??? Macros do not support template types with multiple arguments,
1168 so we use a typedef to work around it. */
1169 typedef vec<constraint_t> vec_constraint_t_heap;
1170 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1171 graph->pe = XCNEWVEC (unsigned int, graph->size);
1172 graph->pe_rep = XNEWVEC (int, graph->size);
1173
1174 for (j = 0; j < graph->size; j++)
1175 {
1176 graph->rep[j] = j;
1177 graph->pe_rep[j] = -1;
1178 graph->indirect_cycles[j] = -1;
1179 }
1180 }
1181
1182 /* Build the constraint graph, adding only predecessor edges right now. */
1183
1184 static void
1185 build_pred_graph (void)
1186 {
1187 int i;
1188 constraint_t c;
1189 unsigned int j;
1190
1191 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1192 graph->preds = XCNEWVEC (bitmap, graph->size);
1193 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1194 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1195 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1196 graph->points_to = XCNEWVEC (bitmap, graph->size);
1197 graph->eq_rep = XNEWVEC (int, graph->size);
1198 graph->direct_nodes = sbitmap_alloc (graph->size);
1199 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1200 bitmap_clear (graph->direct_nodes);
1201
1202 for (j = 0; j < FIRST_REF_NODE; j++)
1203 {
1204 if (!get_varinfo (j)->is_special_var)
1205 bitmap_set_bit (graph->direct_nodes, j);
1206 }
1207
1208 for (j = 0; j < graph->size; j++)
1209 graph->eq_rep[j] = -1;
1210
1211 for (j = 0; j < varmap.length (); j++)
1212 graph->indirect_cycles[j] = -1;
1213
1214 FOR_EACH_VEC_ELT (constraints, i, c)
1215 {
1216 struct constraint_expr lhs = c->lhs;
1217 struct constraint_expr rhs = c->rhs;
1218 unsigned int lhsvar = lhs.var;
1219 unsigned int rhsvar = rhs.var;
1220
1221 if (lhs.type == DEREF)
1222 {
1223 /* *x = y. */
1224 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1225 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1226 }
1227 else if (rhs.type == DEREF)
1228 {
1229 /* x = *y */
1230 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1231 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1232 else
1233 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1234 }
1235 else if (rhs.type == ADDRESSOF)
1236 {
1237 varinfo_t v;
1238
1239 /* x = &y */
1240 if (graph->points_to[lhsvar] == NULL)
1241 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1242 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1243
1244 if (graph->pointed_by[rhsvar] == NULL)
1245 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1246 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1247
1248 /* Implicitly, *x = y */
1249 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1250
1251 /* All related variables are no longer direct nodes. */
1252 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1253 v = get_varinfo (rhsvar);
1254 if (!v->is_full_var)
1255 {
1256 v = lookup_vi_for_tree (v->decl);
1257 do
1258 {
1259 bitmap_clear_bit (graph->direct_nodes, v->id);
1260 v = v->next;
1261 }
1262 while (v != NULL);
1263 }
1264 bitmap_set_bit (graph->address_taken, rhsvar);
1265 }
1266 else if (lhsvar > anything_id
1267 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1268 {
1269 /* x = y */
1270 add_pred_graph_edge (graph, lhsvar, rhsvar);
1271 /* Implicitly, *x = *y */
1272 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1273 FIRST_REF_NODE + rhsvar);
1274 }
1275 else if (lhs.offset != 0 || rhs.offset != 0)
1276 {
1277 if (rhs.offset != 0)
1278 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1279 else if (lhs.offset != 0)
1280 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1281 }
1282 }
1283 }
1284
1285 /* Build the constraint graph, adding successor edges. */
1286
1287 static void
1288 build_succ_graph (void)
1289 {
1290 unsigned i, t;
1291 constraint_t c;
1292
1293 FOR_EACH_VEC_ELT (constraints, i, c)
1294 {
1295 struct constraint_expr lhs;
1296 struct constraint_expr rhs;
1297 unsigned int lhsvar;
1298 unsigned int rhsvar;
1299
1300 if (!c)
1301 continue;
1302
1303 lhs = c->lhs;
1304 rhs = c->rhs;
1305 lhsvar = find (lhs.var);
1306 rhsvar = find (rhs.var);
1307
1308 if (lhs.type == DEREF)
1309 {
1310 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1311 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1312 }
1313 else if (rhs.type == DEREF)
1314 {
1315 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1316 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1317 }
1318 else if (rhs.type == ADDRESSOF)
1319 {
1320 /* x = &y */
1321 gcc_assert (find (rhs.var) == rhs.var);
1322 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1323 }
1324 else if (lhsvar > anything_id
1325 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1326 {
1327 add_graph_edge (graph, lhsvar, rhsvar);
1328 }
1329 }
1330
1331 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1332 receive pointers. */
1333 t = find (storedanything_id);
1334 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1335 {
1336 if (!bitmap_bit_p (graph->direct_nodes, i)
1337 && get_varinfo (i)->may_have_pointers)
1338 add_graph_edge (graph, find (i), t);
1339 }
1340
1341 /* Everything stored to ANYTHING also potentially escapes. */
1342 add_graph_edge (graph, find (escaped_id), t);
1343 }
1344
1345
1346 /* Changed variables on the last iteration. */
1347 static bitmap changed;
1348
1349 /* Strongly Connected Component visitation info. */
1350
1351 struct scc_info
1352 {
1353 sbitmap visited;
1354 sbitmap deleted;
1355 unsigned int *dfs;
1356 unsigned int *node_mapping;
1357 int current_index;
1358 vec<unsigned> scc_stack;
1359 };
1360
1361
1362 /* Recursive routine to find strongly connected components in GRAPH.
1363 SI is the SCC info to store the information in, and N is the id of the
1364 current graph node we are processing.
1365
1366 This is Tarjan's strongly connected component finding algorithm, as
1367 modified by Nuutila to keep only non-root nodes on the stack.
1368 The algorithm can be found in "On finding the strongly connected
1369 components in a directed graph" by Esko Nuutila and Eljas
1370 Soisalon-Soininen, in Information Processing Letters volume 49,
1371 number 1, pages 9-14. */
1372
1373 static void
1374 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1375 {
1376 unsigned int i;
1377 bitmap_iterator bi;
1378 unsigned int my_dfs;
1379
1380 bitmap_set_bit (si->visited, n);
1381 si->dfs[n] = si->current_index ++;
1382 my_dfs = si->dfs[n];
1383
1384 /* Visit all the successors. */
1385 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1386 {
1387 unsigned int w;
1388
1389 if (i > LAST_REF_NODE)
1390 break;
1391
1392 w = find (i);
1393 if (bitmap_bit_p (si->deleted, w))
1394 continue;
1395
1396 if (!bitmap_bit_p (si->visited, w))
1397 scc_visit (graph, si, w);
1398 {
1399 unsigned int t = find (w);
1400 unsigned int nnode = find (n);
1401 gcc_assert (nnode == n);
1402
1403 if (si->dfs[t] < si->dfs[nnode])
1404 si->dfs[n] = si->dfs[t];
1405 }
1406 }
1407
1408 /* See if any components have been identified. */
1409 if (si->dfs[n] == my_dfs)
1410 {
1411 if (si->scc_stack.length () > 0
1412 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1413 {
1414 bitmap scc = BITMAP_ALLOC (NULL);
1415 unsigned int lowest_node;
1416 bitmap_iterator bi;
1417
1418 bitmap_set_bit (scc, n);
1419
1420 while (si->scc_stack.length () != 0
1421 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1422 {
1423 unsigned int w = si->scc_stack.pop ();
1424
1425 bitmap_set_bit (scc, w);
1426 }
1427
1428 lowest_node = bitmap_first_set_bit (scc);
1429 gcc_assert (lowest_node < FIRST_REF_NODE);
1430
1431 /* Collapse the SCC nodes into a single node, and mark the
1432 indirect cycles. */
1433 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1434 {
1435 if (i < FIRST_REF_NODE)
1436 {
1437 if (unite (lowest_node, i))
1438 unify_nodes (graph, lowest_node, i, false);
1439 }
1440 else
1441 {
1442 unite (lowest_node, i);
1443 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1444 }
1445 }
1446 }
1447 bitmap_set_bit (si->deleted, n);
1448 }
1449 else
1450 si->scc_stack.safe_push (n);
1451 }
1452
1453 /* Unify node FROM into node TO, updating the changed count if
1454 necessary when UPDATE_CHANGED is true. */
1455
1456 static void
1457 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1458 bool update_changed)
1459 {
1460
1461 gcc_assert (to != from && find (to) == to);
1462 if (dump_file && (dump_flags & TDF_DETAILS))
1463 fprintf (dump_file, "Unifying %s to %s\n",
1464 get_varinfo (from)->name,
1465 get_varinfo (to)->name);
1466
1467 if (update_changed)
1468 stats.unified_vars_dynamic++;
1469 else
1470 stats.unified_vars_static++;
1471
1472 merge_graph_nodes (graph, to, from);
1473 merge_node_constraints (graph, to, from);
1474
1475 /* Mark TO as changed if FROM was changed. If TO was already marked
1476 as changed, decrease the changed count. */
1477
1478 if (update_changed
1479 && bitmap_bit_p (changed, from))
1480 {
1481 bitmap_clear_bit (changed, from);
1482 bitmap_set_bit (changed, to);
1483 }
1484 if (get_varinfo (from)->solution)
1485 {
1486 /* If the solution changes because of the merging, we need to mark
1487 the variable as changed. */
1488 if (bitmap_ior_into (get_varinfo (to)->solution,
1489 get_varinfo (from)->solution))
1490 {
1491 if (update_changed)
1492 bitmap_set_bit (changed, to);
1493 }
1494
1495 BITMAP_FREE (get_varinfo (from)->solution);
1496 if (get_varinfo (from)->oldsolution)
1497 BITMAP_FREE (get_varinfo (from)->oldsolution);
1498
1499 if (stats.iterations > 0
1500 && get_varinfo (to)->oldsolution)
1501 BITMAP_FREE (get_varinfo (to)->oldsolution);
1502 }
1503 if (valid_graph_edge (graph, to, to))
1504 {
1505 if (graph->succs[to])
1506 bitmap_clear_bit (graph->succs[to], to);
1507 }
1508 }
1509
1510 /* Information needed to compute the topological ordering of a graph. */
1511
1512 struct topo_info
1513 {
1514 /* sbitmap of visited nodes. */
1515 sbitmap visited;
1516 /* Array that stores the topological order of the graph, *in
1517 reverse*. */
1518 vec<unsigned> topo_order;
1519 };
1520
1521
1522 /* Initialize and return a topological info structure. */
1523
1524 static struct topo_info *
1525 init_topo_info (void)
1526 {
1527 size_t size = graph->size;
1528 struct topo_info *ti = XNEW (struct topo_info);
1529 ti->visited = sbitmap_alloc (size);
1530 bitmap_clear (ti->visited);
1531 ti->topo_order.create (1);
1532 return ti;
1533 }
1534
1535
1536 /* Free the topological sort info pointed to by TI. */
1537
1538 static void
1539 free_topo_info (struct topo_info *ti)
1540 {
1541 sbitmap_free (ti->visited);
1542 ti->topo_order.release ();
1543 free (ti);
1544 }
1545
1546 /* Visit the graph in topological order, and store the order in the
1547 topo_info structure. */
1548
1549 static void
1550 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1551 unsigned int n)
1552 {
1553 bitmap_iterator bi;
1554 unsigned int j;
1555
1556 bitmap_set_bit (ti->visited, n);
1557
1558 if (graph->succs[n])
1559 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1560 {
1561 if (!bitmap_bit_p (ti->visited, j))
1562 topo_visit (graph, ti, j);
1563 }
1564
1565 ti->topo_order.safe_push (n);
1566 }
1567
1568 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1569 starting solution for y. */
1570
1571 static void
1572 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1573 bitmap delta)
1574 {
1575 unsigned int lhs = c->lhs.var;
1576 bool flag = false;
1577 bitmap sol = get_varinfo (lhs)->solution;
1578 unsigned int j;
1579 bitmap_iterator bi;
1580 HOST_WIDE_INT roffset = c->rhs.offset;
1581
1582 /* Our IL does not allow this. */
1583 gcc_assert (c->lhs.offset == 0);
1584
1585 /* If the solution of Y contains anything it is good enough to transfer
1586 this to the LHS. */
1587 if (bitmap_bit_p (delta, anything_id))
1588 {
1589 flag |= bitmap_set_bit (sol, anything_id);
1590 goto done;
1591 }
1592
1593 /* If we do not know at which offset the rhs is dereferenced, compute
1594 the reachability set of DELTA, conservatively assuming it is
1595 dereferenced at all valid offsets. */
1596 if (roffset == UNKNOWN_OFFSET)
1597 {
1598 solution_set_expand (delta, delta);
1599 /* No further offset processing is necessary. */
1600 roffset = 0;
1601 }
1602
1603 /* For each variable j in delta (Sol(y)), add
1604 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1605 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1606 {
1607 varinfo_t v = get_varinfo (j);
1608 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1609 unsigned int t;
1610
1611 if (v->is_full_var)
1612 fieldoffset = v->offset;
1613 else if (roffset != 0)
1614 v = first_vi_for_offset (v, fieldoffset);
1615 /* If the access is outside of the variable we can ignore it. */
1616 if (!v)
1617 continue;
1618
1619 do
1620 {
1621 t = find (v->id);
1622
1623 /* Adding edges from the special vars is pointless.
1624 They don't have sets that can change. */
1625 if (get_varinfo (t)->is_special_var)
1626 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1627 /* Merging the solution from ESCAPED needlessly increases
1628 the set. Use ESCAPED as representative instead. */
1629 else if (v->id == escaped_id)
1630 flag |= bitmap_set_bit (sol, escaped_id);
1631 else if (v->may_have_pointers
1632 && add_graph_edge (graph, lhs, t))
1633 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1634
1635 /* If the variable is not exactly at the requested offset
1636 we have to include the next one. */
1637 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1638 || v->next == NULL)
1639 break;
1640
1641 v = v->next;
1642 fieldoffset = v->offset;
1643 }
1644 while (1);
1645 }
1646
1647 done:
1648 /* If the LHS solution changed, mark the var as changed. */
1649 if (flag)
1650 {
1651 get_varinfo (lhs)->solution = sol;
1652 bitmap_set_bit (changed, lhs);
1653 }
1654 }
1655
1656 /* Process a constraint C that represents *(x + off) = y using DELTA
1657 as the starting solution for x. */
1658
1659 static void
1660 do_ds_constraint (constraint_t c, bitmap delta)
1661 {
1662 unsigned int rhs = c->rhs.var;
1663 bitmap sol = get_varinfo (rhs)->solution;
1664 unsigned int j;
1665 bitmap_iterator bi;
1666 HOST_WIDE_INT loff = c->lhs.offset;
1667 bool escaped_p = false;
1668
1669 /* Our IL does not allow this. */
1670 gcc_assert (c->rhs.offset == 0);
1671
1672 /* If the solution of y contains ANYTHING simply use the ANYTHING
1673 solution. This avoids needlessly increasing the points-to sets. */
1674 if (bitmap_bit_p (sol, anything_id))
1675 sol = get_varinfo (find (anything_id))->solution;
1676
1677 /* If the solution for x contains ANYTHING we have to merge the
1678 solution of y into all pointer variables which we do via
1679 STOREDANYTHING. */
1680 if (bitmap_bit_p (delta, anything_id))
1681 {
1682 unsigned t = find (storedanything_id);
1683 if (add_graph_edge (graph, t, rhs))
1684 {
1685 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1686 bitmap_set_bit (changed, t);
1687 }
1688 return;
1689 }
1690
1691 /* If we do not know at which offset the lhs is dereferenced, compute
1692 the reachability set of DELTA, conservatively assuming it is
1693 dereferenced at all valid offsets. */
1694 if (loff == UNKNOWN_OFFSET)
1695 {
1696 solution_set_expand (delta, delta);
1697 loff = 0;
1698 }
1699
1700 /* For each member j of delta (Sol(x)), add an edge from y to j and
1701 union Sol(y) into Sol(j) */
1702 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1703 {
1704 varinfo_t v = get_varinfo (j);
1705 unsigned int t;
1706 HOST_WIDE_INT fieldoffset = v->offset + loff;
1707
1708 if (v->is_full_var)
1709 fieldoffset = v->offset;
1710 else if (loff != 0)
1711 v = first_vi_for_offset (v, fieldoffset);
1712 /* If the access is outside of the variable we can ignore it. */
1713 if (!v)
1714 continue;
1715
1716 do
1717 {
1718 if (v->may_have_pointers)
1719 {
1720 /* If v is a global variable then this is an escape point. */
1721 if (v->is_global_var
1722 && !escaped_p)
1723 {
1724 t = find (escaped_id);
1725 if (add_graph_edge (graph, t, rhs)
1726 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1727 bitmap_set_bit (changed, t);
1728 /* Enough to let rhs escape once. */
1729 escaped_p = true;
1730 }
1731
1732 if (v->is_special_var)
1733 break;
1734
1735 t = find (v->id);
1736 if (add_graph_edge (graph, t, rhs)
1737 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1738 bitmap_set_bit (changed, t);
1739 }
1740
1741 /* If the variable is not exactly at the requested offset
1742 we have to include the next one. */
1743 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1744 || v->next == NULL)
1745 break;
1746
1747 v = v->next;
1748 fieldoffset = v->offset;
1749 }
1750 while (1);
1751 }
1752 }
1753
1754 /* Handle a non-simple constraint ("simple" meaning it requires no
1755 iteration), i.e. *x = &y, x = *y, *x = y, or x = y with offsets involved. */
1756
1757 static void
1758 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1759 {
1760 if (c->lhs.type == DEREF)
1761 {
1762 if (c->rhs.type == ADDRESSOF)
1763 {
1764 gcc_unreachable ();
1765 }
1766 else
1767 {
1768 /* *x = y */
1769 do_ds_constraint (c, delta);
1770 }
1771 }
1772 else if (c->rhs.type == DEREF)
1773 {
1774 /* x = *y */
1775 if (!(get_varinfo (c->lhs.var)->is_special_var))
1776 do_sd_constraint (graph, c, delta);
1777 }
1778 else
1779 {
1780 bitmap tmp;
1781 bitmap solution;
1782 bool flag = false;
1783
1784 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1785 solution = get_varinfo (c->rhs.var)->solution;
1786 tmp = get_varinfo (c->lhs.var)->solution;
1787
1788 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1789
1790 if (flag)
1791 {
1792 get_varinfo (c->lhs.var)->solution = tmp;
1793 bitmap_set_bit (changed, c->lhs.var);
1794 }
1795 }
1796 }
1797
1798 /* Initialize and return a new SCC info structure. */
1799
1800 static struct scc_info *
1801 init_scc_info (size_t size)
1802 {
1803 struct scc_info *si = XNEW (struct scc_info);
1804 size_t i;
1805
1806 si->current_index = 0;
1807 si->visited = sbitmap_alloc (size);
1808 bitmap_clear (si->visited);
1809 si->deleted = sbitmap_alloc (size);
1810 bitmap_clear (si->deleted);
1811 si->node_mapping = XNEWVEC (unsigned int, size);
1812 si->dfs = XCNEWVEC (unsigned int, size);
1813
1814 for (i = 0; i < size; i++)
1815 si->node_mapping[i] = i;
1816
1817 si->scc_stack.create (1);
1818 return si;
1819 }
1820
1821 /* Free an SCC info structure pointed to by SI. */
1822
1823 static void
1824 free_scc_info (struct scc_info *si)
1825 {
1826 sbitmap_free (si->visited);
1827 sbitmap_free (si->deleted);
1828 free (si->node_mapping);
1829 free (si->dfs);
1830 si->scc_stack.release ();
1831 free (si);
1832 }
1833
1834
1835 /* Find indirect cycles in GRAPH that occur, using strongly connected
1836 components, and note them in the indirect cycles map.
1837
1838 This technique comes from Ben Hardekopf and Calvin Lin,
1839 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1840 Lines of Code", submitted to PLDI 2007. */
1841
1842 static void
1843 find_indirect_cycles (constraint_graph_t graph)
1844 {
1845 unsigned int i;
1846 unsigned int size = graph->size;
1847 struct scc_info *si = init_scc_info (size);
1848
1849 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1850 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1851 scc_visit (graph, si, i);
1852
1853 free_scc_info (si);
1854 }
1855
1856 /* Compute a topological ordering for GRAPH, and store the result in the
1857 topo_info structure TI. */
1858
1859 static void
1860 compute_topo_order (constraint_graph_t graph,
1861 struct topo_info *ti)
1862 {
1863 unsigned int i;
1864 unsigned int size = graph->size;
1865
1866 for (i = 0; i != size; ++i)
1867 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1868 topo_visit (graph, ti, i);
1869 }
1870
1871 /* Structure used for hash value numbering of pointer equivalence
1872 classes. */
1873
1874 typedef struct equiv_class_label
1875 {
1876 hashval_t hashcode;
1877 unsigned int equivalence_class;
1878 bitmap labels;
1879 } *equiv_class_label_t;
1880 typedef const struct equiv_class_label *const_equiv_class_label_t;
1881
1882 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1883 classes. */
1884 static htab_t pointer_equiv_class_table;
1885
1886 /* A hashtable for mapping a bitmap of labels->location equivalence
1887 classes. */
1888 static htab_t location_equiv_class_table;
1889
1890 /* Hash function for an equiv_class_label_t. */
1891
1892 static hashval_t
1893 equiv_class_label_hash (const void *p)
1894 {
1895 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1896 return ecl->hashcode;
1897 }
1898
1899 /* Equality function for two equiv_class_label_t's. */
1900
1901 static int
1902 equiv_class_label_eq (const void *p1, const void *p2)
1903 {
1904 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1905 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1906 return (eql1->hashcode == eql2->hashcode
1907 && bitmap_equal_p (eql1->labels, eql2->labels));
1908 }
1909
1910 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1911 contains. */
1912
1913 static unsigned int
1914 equiv_class_lookup (htab_t table, bitmap labels)
1915 {
1916 void **slot;
1917 struct equiv_class_label ecl;
1918
1919 ecl.labels = labels;
1920 ecl.hashcode = bitmap_hash (labels);
1921
1922 slot = htab_find_slot_with_hash (table, &ecl,
1923 ecl.hashcode, NO_INSERT);
1924 if (!slot)
1925 return 0;
1926 else
1927 return ((equiv_class_label_t) *slot)->equivalence_class;
1928 }
1929
1930
1931 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1932 to TABLE. */
1933
1934 static void
1935 equiv_class_add (htab_t table, unsigned int equivalence_class,
1936 bitmap labels)
1937 {
1938 void **slot;
1939 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1940
1941 ecl->labels = labels;
1942 ecl->equivalence_class = equivalence_class;
1943 ecl->hashcode = bitmap_hash (labels);
1944
1945 slot = htab_find_slot_with_hash (table, ecl,
1946 ecl->hashcode, INSERT);
1947 gcc_assert (!*slot);
1948 *slot = (void *) ecl;
1949 }
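
/* A minimal usage sketch of the two routines above (assuming SET is a
   label bitmap computed by the caller); this mirrors what label_visit
   does below: look the class up first and only allocate a fresh id on
   a miss.

     unsigned int label = equiv_class_lookup (pointer_equiv_class_table, set);
     if (!label)
       {
         label = pointer_equiv_class++;
         equiv_class_add (pointer_equiv_class_table, label, set);
       }
*/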
1950
1951 /* Perform offline variable substitution.
1952
1953 This is a worst case quadratic time way of identifying variables
1954 that must have equivalent points-to sets, including those caused by
1955 static cycles, and single entry subgraphs, in the constraint graph.
1956
1957 The technique is described in "Exploiting Pointer and Location
1958 Equivalence to Optimize Pointer Analysis. In the 14th International
1959 Static Analysis Symposium (SAS), August 2007." It is known as the
1960 "HU" algorithm, and is equivalent to value numbering the collapsed
1961 constraint graph including evaluating unions.
1962
1963 The general method of finding equivalence classes is as follows:
1964 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1965 Initialize all non-REF nodes to be direct nodes.
1966 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1967 variable}.
1968 For each constraint containing the dereference, we also do the same
1969 thing.
1970
1971 We then compute SCC's in the graph and unify nodes in the same SCC,
1972 including pts sets.
1973
1974 For each non-collapsed node x:
1975 Visit all unvisited explicit incoming edges.
1976 Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
1977 where y->x.
1978 Lookup the equivalence class for pts(x).
1979 If we found one, equivalence_class(x) = found class.
1980 Otherwise, equivalence_class(x) = new class, and the new class is
1981 added to the lookup table.
1982
1983 All direct nodes with the same equivalence class can be replaced
1984 with a single representative node.
1985 All unlabeled nodes (label == 0) are not pointers and all edges
1986 involving them can be eliminated.
1987 We perform these optimizations during rewrite_constraints.
1988
1989 In addition to pointer equivalence class finding, we also perform
1990 location equivalence class finding. This is the set of variables
1991 that always appear together in points-to sets. We use this to
1992 compress the size of the points-to sets. */
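
/* An illustrative example (a sketch, not output of the code): given the
   constraints

     b = &x
     c = b
     d = b

   the direct nodes c and d each union in pts(b) from their single
   incoming copy edge, so they end up with equal label bitmaps, receive
   the same pointer equivalence class and can be collapsed to one
   representative. A node whose label stays 0 holds no pointers and all
   edges involving it are eliminated. */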
1993
1994 /* Current maximum pointer equivalence class id. */
1995 static int pointer_equiv_class;
1996
1997 /* Current maximum location equivalence class id. */
1998 static int location_equiv_class;
1999
2000 /* Recursive routine to find strongly connected components in GRAPH,
2001 and label its nodes with DFS numbers. */
2002
2003 static void
2004 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2005 {
2006 unsigned int i;
2007 bitmap_iterator bi;
2008 unsigned int my_dfs;
2009
2010 gcc_assert (si->node_mapping[n] == n);
2011 bitmap_set_bit (si->visited, n);
2012 si->dfs[n] = si->current_index ++;
2013 my_dfs = si->dfs[n];
2014
2015 /* Visit all the explicit predecessors. */
2016 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2017 {
2018 unsigned int w = si->node_mapping[i];
2019
2020 if (bitmap_bit_p (si->deleted, w))
2021 continue;
2022
2023 if (!bitmap_bit_p (si->visited, w))
2024 condense_visit (graph, si, w);
2025 {
2026 unsigned int t = si->node_mapping[w];
2027 unsigned int nnode = si->node_mapping[n];
2028 gcc_assert (nnode == n);
2029
2030 if (si->dfs[t] < si->dfs[nnode])
2031 si->dfs[n] = si->dfs[t];
2032 }
2033 }
2034
2035 /* Visit all the implicit predecessors. */
2036 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2037 {
2038 unsigned int w = si->node_mapping[i];
2039
2040 if (bitmap_bit_p (si->deleted, w))
2041 continue;
2042
2043 if (!bitmap_bit_p (si->visited, w))
2044 condense_visit (graph, si, w);
2045 {
2046 unsigned int t = si->node_mapping[w];
2047 unsigned int nnode = si->node_mapping[n];
2048 gcc_assert (nnode == n);
2049
2050 if (si->dfs[t] < si->dfs[nnode])
2051 si->dfs[n] = si->dfs[t];
2052 }
2053 }
2054
2055 /* See if any components have been identified. */
2056 if (si->dfs[n] == my_dfs)
2057 {
2058 while (si->scc_stack.length () != 0
2059 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2060 {
2061 unsigned int w = si->scc_stack.pop ();
2062 si->node_mapping[w] = n;
2063
2064 if (!bitmap_bit_p (graph->direct_nodes, w))
2065 bitmap_clear_bit (graph->direct_nodes, n);
2066
2067 /* Unify our nodes. */
2068 if (graph->preds[w])
2069 {
2070 if (!graph->preds[n])
2071 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2072 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2073 }
2074 if (graph->implicit_preds[w])
2075 {
2076 if (!graph->implicit_preds[n])
2077 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2078 bitmap_ior_into (graph->implicit_preds[n],
2079 graph->implicit_preds[w]);
2080 }
2081 if (graph->points_to[w])
2082 {
2083 if (!graph->points_to[n])
2084 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2085 bitmap_ior_into (graph->points_to[n],
2086 graph->points_to[w]);
2087 }
2088 }
2089 bitmap_set_bit (si->deleted, n);
2090 }
2091 else
2092 si->scc_stack.safe_push (n);
2093 }
2094
2095 /* Label pointer equivalences. */
2096
2097 static void
2098 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2099 {
2100 unsigned int i;
2101 bitmap_iterator bi;
2102 bitmap_set_bit (si->visited, n);
2103
2104 if (!graph->points_to[n])
2105 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2106
2107 /* Label and union our incoming edges' points-to sets. */
2108 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2109 {
2110 unsigned int w = si->node_mapping[i];
2111 if (!bitmap_bit_p (si->visited, w))
2112 label_visit (graph, si, w);
2113
2114 /* Skip unused edges. */
2115 if (w == n || graph->pointer_label[w] == 0)
2116 continue;
2117
2118 if (graph->points_to[w])
2119 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2120 }
2121 /* Indirect nodes get fresh variables. */
2122 if (!bitmap_bit_p (graph->direct_nodes, n))
2123 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2124
2125 if (!bitmap_empty_p (graph->points_to[n]))
2126 {
2127 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2128 graph->points_to[n]);
2129 if (!label)
2130 {
2131 label = pointer_equiv_class++;
2132 equiv_class_add (pointer_equiv_class_table,
2133 label, graph->points_to[n]);
2134 }
2135 graph->pointer_label[n] = label;
2136 }
2137 }
2138
2139 /* Perform offline variable substitution, discovering equivalence
2140 classes, and eliminating non-pointer variables. */
2141
2142 static struct scc_info *
2143 perform_var_substitution (constraint_graph_t graph)
2144 {
2145 unsigned int i;
2146 unsigned int size = graph->size;
2147 struct scc_info *si = init_scc_info (size);
2148
2149 bitmap_obstack_initialize (&iteration_obstack);
2150 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2151 equiv_class_label_eq, free);
2152 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2153 equiv_class_label_eq, free);
2154 pointer_equiv_class = 1;
2155 location_equiv_class = 1;
2156
2157 /* Condense the nodes, which means to find SCC's, count incoming
2158 predecessors, and unite nodes in SCC's. */
2159 for (i = 0; i < FIRST_REF_NODE; i++)
2160 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2161 condense_visit (graph, si, si->node_mapping[i]);
2162
2163 bitmap_clear (si->visited);
2164 /* Actually label the nodes for pointer equivalences. */
2165 for (i = 0; i < FIRST_REF_NODE; i++)
2166 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2167 label_visit (graph, si, si->node_mapping[i]);
2168
2169 /* Calculate location equivalence labels. */
2170 for (i = 0; i < FIRST_REF_NODE; i++)
2171 {
2172 bitmap pointed_by;
2173 bitmap_iterator bi;
2174 unsigned int j;
2175 unsigned int label;
2176
2177 if (!graph->pointed_by[i])
2178 continue;
2179 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2180
2181 /* Translate the pointed-by mapping for pointer equivalence
2182 labels. */
2183 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2184 {
2185 bitmap_set_bit (pointed_by,
2186 graph->pointer_label[si->node_mapping[j]]);
2187 }
2188 /* The original pointed_by is now dead. */
2189 BITMAP_FREE (graph->pointed_by[i]);
2190
2191 /* Look up the location equivalence label if one exists, or make
2192 one otherwise. */
2193 label = equiv_class_lookup (location_equiv_class_table,
2194 pointed_by);
2195 if (label == 0)
2196 {
2197 label = location_equiv_class++;
2198 equiv_class_add (location_equiv_class_table,
2199 label, pointed_by);
2200 }
2201 else
2202 {
2203 if (dump_file && (dump_flags & TDF_DETAILS))
2204 fprintf (dump_file, "Found location equivalence for node %s\n",
2205 get_varinfo (i)->name);
2206 BITMAP_FREE (pointed_by);
2207 }
2208 graph->loc_label[i] = label;
2209
2210 }
2211
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2213 for (i = 0; i < FIRST_REF_NODE; i++)
2214 {
2215 bool direct_node = bitmap_bit_p (graph->direct_nodes, i);
2216 fprintf (dump_file,
2217 "Equivalence classes for %s node id %d:%s are pointer: %d"
2218 ", location:%d\n",
2219 direct_node ? "Direct node" : "Indirect node", i,
2220 get_varinfo (i)->name,
2221 graph->pointer_label[si->node_mapping[i]],
2222 graph->loc_label[si->node_mapping[i]]);
2223 }
2224
2225 /* Quickly eliminate our non-pointer variables. */
2226
2227 for (i = 0; i < FIRST_REF_NODE; i++)
2228 {
2229 unsigned int node = si->node_mapping[i];
2230
2231 if (graph->pointer_label[node] == 0)
2232 {
2233 if (dump_file && (dump_flags & TDF_DETAILS))
2234 fprintf (dump_file,
2235 "%s is a non-pointer variable, eliminating edges.\n",
2236 get_varinfo (node)->name);
2237 stats.nonpointer_vars++;
2238 clear_edges_for_node (graph, node);
2239 }
2240 }
2241
2242 return si;
2243 }
2244
2245 /* Free information that was only necessary for variable
2246 substitution. */
2247
2248 static void
2249 free_var_substitution_info (struct scc_info *si)
2250 {
2251 free_scc_info (si);
2252 free (graph->pointer_label);
2253 free (graph->loc_label);
2254 free (graph->pointed_by);
2255 free (graph->points_to);
2256 free (graph->eq_rep);
2257 sbitmap_free (graph->direct_nodes);
2258 htab_delete (pointer_equiv_class_table);
2259 htab_delete (location_equiv_class_table);
2260 bitmap_obstack_release (&iteration_obstack);
2261 }
2262
2263 /* Return an existing node that is equivalent to NODE, which has
2264 equivalence class LABEL, if one exists. Return NODE otherwise. */
2265
2266 static unsigned int
2267 find_equivalent_node (constraint_graph_t graph,
2268 unsigned int node, unsigned int label)
2269 {
2270 /* If the address version of this variable is unused, we can
2271 substitute it for anything else with the same label.
2272 Otherwise, we know the pointers are equivalent, but not the
2273 locations, and we can unite them later. */
2274
2275 if (!bitmap_bit_p (graph->address_taken, node))
2276 {
2277 gcc_assert (label < graph->size);
2278
2279 if (graph->eq_rep[label] != -1)
2280 {
2281 /* Unify the two variables since we know they are equivalent. */
2282 if (unite (graph->eq_rep[label], node))
2283 unify_nodes (graph, graph->eq_rep[label], node, false);
2284 return graph->eq_rep[label];
2285 }
2286 else
2287 {
2288 graph->eq_rep[label] = node;
2289 graph->pe_rep[label] = node;
2290 }
2291 }
2292 else
2293 {
2294 gcc_assert (label < graph->size);
2295 graph->pe[node] = label;
2296 if (graph->pe_rep[label] == -1)
2297 graph->pe_rep[label] = node;
2298 }
2299
2300 return node;
2301 }
2302
2303 /* Unite pointer equivalent but not location equivalent nodes in
2304 GRAPH. This may only be performed once variable substitution is
2305 finished. */
2306
2307 static void
2308 unite_pointer_equivalences (constraint_graph_t graph)
2309 {
2310 unsigned int i;
2311
2312 /* Go through the pointer equivalences and unite them to their
2313 representative, if they aren't already. */
2314 for (i = 0; i < FIRST_REF_NODE; i++)
2315 {
2316 unsigned int label = graph->pe[i];
2317 if (label)
2318 {
2319 int label_rep = graph->pe_rep[label];
2320
2321 if (label_rep == -1)
2322 continue;
2323
2324 label_rep = find (label_rep);
2325 if (label_rep >= 0 && unite (label_rep, find (i)))
2326 unify_nodes (graph, label_rep, i, false);
2327 }
2328 }
2329 }
2330
2331 /* Move complex constraints to the GRAPH nodes they belong to. */
2332
2333 static void
2334 move_complex_constraints (constraint_graph_t graph)
2335 {
2336 int i;
2337 constraint_t c;
2338
2339 FOR_EACH_VEC_ELT (constraints, i, c)
2340 {
2341 if (c)
2342 {
2343 struct constraint_expr lhs = c->lhs;
2344 struct constraint_expr rhs = c->rhs;
2345
2346 if (lhs.type == DEREF)
2347 {
2348 insert_into_complex (graph, lhs.var, c);
2349 }
2350 else if (rhs.type == DEREF)
2351 {
2352 if (!(get_varinfo (lhs.var)->is_special_var))
2353 insert_into_complex (graph, rhs.var, c);
2354 }
2355 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2356 && (lhs.offset != 0 || rhs.offset != 0))
2357 {
2358 insert_into_complex (graph, rhs.var, c);
2359 }
2360 }
2361 }
2362 }
2363
2364
2365 /* Optimize and rewrite complex constraints while performing
2366 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2367 result of perform_variable_substitution. */
2368
2369 static void
2370 rewrite_constraints (constraint_graph_t graph,
2371 struct scc_info *si)
2372 {
2373 int i;
2374 unsigned int j;
2375 constraint_t c;
2376
2377 for (j = 0; j < graph->size; j++)
2378 gcc_assert (find (j) == j);
2379
2380 FOR_EACH_VEC_ELT (constraints, i, c)
2381 {
2382 struct constraint_expr lhs = c->lhs;
2383 struct constraint_expr rhs = c->rhs;
2384 unsigned int lhsvar = find (lhs.var);
2385 unsigned int rhsvar = find (rhs.var);
2386 unsigned int lhsnode, rhsnode;
2387 unsigned int lhslabel, rhslabel;
2388
2389 lhsnode = si->node_mapping[lhsvar];
2390 rhsnode = si->node_mapping[rhsvar];
2391 lhslabel = graph->pointer_label[lhsnode];
2392 rhslabel = graph->pointer_label[rhsnode];
2393
2394 /* See if it is really a non-pointer variable, and if so, ignore
2395 the constraint. */
2396 if (lhslabel == 0)
2397 {
2398 if (dump_file && (dump_flags & TDF_DETAILS))
2399 {
2400
2401 fprintf (dump_file, "%s is a non-pointer variable, "
2402 "ignoring constraint:",
2403 get_varinfo (lhs.var)->name);
2404 dump_constraint (dump_file, c);
2405 fprintf (dump_file, "\n");
2406 }
2407 constraints[i] = NULL;
2408 continue;
2409 }
2410
2411 if (rhslabel == 0)
2412 {
2413 if (dump_file && (dump_flags & TDF_DETAILS))
2414 {
2415
2416 fprintf (dump_file, "%s is a non-pointer variable, "
2417 "ignoring constraint:",
2418 get_varinfo (rhs.var)->name);
2419 dump_constraint (dump_file, c);
2420 fprintf (dump_file, "\n");
2421 }
2422 constraints[i] = NULL;
2423 continue;
2424 }
2425
2426 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2427 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2428 c->lhs.var = lhsvar;
2429 c->rhs.var = rhsvar;
2430
2431 }
2432 }
2433
2434 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2435 part of an SCC, false otherwise. */
2436
2437 static bool
2438 eliminate_indirect_cycles (unsigned int node)
2439 {
2440 if (graph->indirect_cycles[node] != -1
2441 && !bitmap_empty_p (get_varinfo (node)->solution))
2442 {
2443 unsigned int i;
2444 vec<unsigned> queue = vNULL;
2445 int queuepos;
2446 unsigned int to = find (graph->indirect_cycles[node]);
2447 bitmap_iterator bi;
2448
2449 /* We can't touch the solution set and call unify_nodes
2450 at the same time, because unify_nodes is going to do
2451 bitmap unions into it. */
2452
2453 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2454 {
2455 if (find (i) == i && i != to)
2456 {
2457 if (unite (to, i))
2458 queue.safe_push (i);
2459 }
2460 }
2461
2462 for (queuepos = 0;
2463 queue.iterate (queuepos, &i);
2464 queuepos++)
2465 {
2466 unify_nodes (graph, to, i, true);
2467 }
2468 queue.release ();
2469 return true;
2470 }
2471 return false;
2472 }
2473
2474 /* Solve the constraint graph GRAPH using our worklist solver.
2475 This is based on the PW* family of solvers from the "Efficient Field
2476 Sensitive Pointer Analysis for C" paper.
2477 It works by iterating over all the graph nodes, processing the complex
2478 constraints and propagating the copy constraints, until everything stops
2479 changing. This corresponds to steps 6-8 in the solving list given above. */
2480
2481 static void
2482 solve_graph (constraint_graph_t graph)
2483 {
2484 unsigned int size = graph->size;
2485 unsigned int i;
2486 bitmap pts;
2487
2488 changed = BITMAP_ALLOC (NULL);
2489
2490 /* Mark all initial non-collapsed nodes as changed. */
2491 for (i = 0; i < size; i++)
2492 {
2493 varinfo_t ivi = get_varinfo (i);
2494 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2495 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2496 || graph->complex[i].length () > 0))
2497 bitmap_set_bit (changed, i);
2498 }
2499
2500 /* Allocate a bitmap to be used to store the changed bits. */
2501 pts = BITMAP_ALLOC (&pta_obstack);
2502
2503 while (!bitmap_empty_p (changed))
2504 {
2505 unsigned int i;
2506 struct topo_info *ti = init_topo_info ();
2507 stats.iterations++;
2508
2509 bitmap_obstack_initialize (&iteration_obstack);
2510
2511 compute_topo_order (graph, ti);
2512
2513 while (ti->topo_order.length () != 0)
2514 {
2515
2516 i = ti->topo_order.pop ();
2517
2518 /* If this variable is not a representative, skip it. */
2519 if (find (i) != i)
2520 continue;
2521
2522 /* In certain indirect cycle cases, we may merge this
2523 variable into another. */
2524 if (eliminate_indirect_cycles (i) && find (i) != i)
2525 continue;
2526
2527 /* If the node has changed, we need to process the
2528 complex constraints and outgoing edges again. */
2529 if (bitmap_clear_bit (changed, i))
2530 {
2531 unsigned int j;
2532 constraint_t c;
2533 bitmap solution;
2534 vec<constraint_t> complex = graph->complex[i];
2535 varinfo_t vi = get_varinfo (i);
2536 bool solution_empty;
2537
2538 /* Compute the changed set of solution bits. */
2539 if (vi->oldsolution)
2540 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2541 else
2542 bitmap_copy (pts, vi->solution);
2543
2544 if (bitmap_empty_p (pts))
2545 continue;
2546
2547 if (vi->oldsolution)
2548 bitmap_ior_into (vi->oldsolution, pts);
2549 else
2550 {
2551 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2552 bitmap_copy (vi->oldsolution, pts);
2553 }
2554
2555 solution = vi->solution;
2556 solution_empty = bitmap_empty_p (solution);
2557
2558 /* Process the complex constraints */
2559 FOR_EACH_VEC_ELT (complex, j, c)
2560 {
2561 /* XXX: This is going to unsort the constraints in
2562 some cases, which will occasionally add duplicate
2563 constraints during unification. This does not
2564 affect correctness. */
2565 c->lhs.var = find (c->lhs.var);
2566 c->rhs.var = find (c->rhs.var);
2567
2568 /* The only complex constraint that can change our
2569 solution to non-empty, given an empty solution,
2570 is a constraint where the lhs side is receiving
2571 some set from elsewhere. */
2572 if (!solution_empty || c->lhs.type != DEREF)
2573 do_complex_constraint (graph, c, pts);
2574 }
2575
2576 solution_empty = bitmap_empty_p (solution);
2577
2578 if (!solution_empty)
2579 {
2580 bitmap_iterator bi;
2581 unsigned eff_escaped_id = find (escaped_id);
2582
2583 /* Propagate solution to all successors. */
2584 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2585 0, j, bi)
2586 {
2587 bitmap tmp;
2588 bool flag;
2589
2590 unsigned int to = find (j);
2591 tmp = get_varinfo (to)->solution;
2592 flag = false;
2593
2594 /* Don't try to propagate to ourselves. */
2595 if (to == i)
2596 continue;
2597
2598 /* If we propagate from ESCAPED use ESCAPED as
2599 placeholder. */
2600 if (i == eff_escaped_id)
2601 flag = bitmap_set_bit (tmp, escaped_id);
2602 else
2603 flag = set_union_with_increment (tmp, pts, 0);
2604
2605 if (flag)
2606 {
2607 get_varinfo (to)->solution = tmp;
2608 bitmap_set_bit (changed, to);
2609 }
2610 }
2611 }
2612 }
2613 }
2614 free_topo_info (ti);
2615 bitmap_obstack_release (&iteration_obstack);
2616 }
2617
2618 BITMAP_FREE (pts);
2619 BITMAP_FREE (changed);
2620 bitmap_obstack_release (&oldpta_obstack);
2621 }
2622
2623 /* Map from trees to variable infos. */
2624 static struct pointer_map_t *vi_for_tree;
2625
2626
2627 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2628
2629 static void
2630 insert_vi_for_tree (tree t, varinfo_t vi)
2631 {
2632 void **slot = pointer_map_insert (vi_for_tree, t);
2633 gcc_assert (vi);
2634 gcc_assert (*slot == NULL);
2635 *slot = vi;
2636 }
2637
2638 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2639 exist in the map, return NULL, otherwise, return the varinfo we found. */
2640
2641 static varinfo_t
2642 lookup_vi_for_tree (tree t)
2643 {
2644 void **slot = pointer_map_contains (vi_for_tree, t);
2645 if (slot == NULL)
2646 return NULL;
2647
2648 return (varinfo_t) *slot;
2649 }
2650
2651 /* Return a printable name for DECL. */
2652
2653 static const char *
2654 alias_get_name (tree decl)
2655 {
2656 const char *res = NULL;
2657 char *temp;
2658 int num_printed = 0;
2659
2660 if (!dump_file)
2661 return "NULL";
2662
2663 if (TREE_CODE (decl) == SSA_NAME)
2664 {
2665 res = get_name (decl);
2666 if (res)
2667 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2668 else
2669 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2670 if (num_printed > 0)
2671 {
2672 res = ggc_strdup (temp);
2673 free (temp);
2674 }
2675 }
2676 else if (DECL_P (decl))
2677 {
2678 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2679 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2680 else
2681 {
2682 res = get_name (decl);
2683 if (!res)
2684 {
2685 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2686 if (num_printed > 0)
2687 {
2688 res = ggc_strdup (temp);
2689 free (temp);
2690 }
2691 }
2692 }
2693 }
2694 if (res != NULL)
2695 return res;
2696
2697 return "NULL";
2698 }
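
/* For example (the numbers are illustrative): an SSA name for "i" with
   version 3 is printed as "i_3", an anonymous SSA name with version 5
   as "_5", and a decl without a usable name as "D.1234" built from its
   DECL_UID. */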
2699
2700 /* Find the variable id for tree T in the map.
2701 If T doesn't exist in the map, create an entry for it and return it. */
2702
2703 static varinfo_t
2704 get_vi_for_tree (tree t)
2705 {
2706 void **slot = pointer_map_contains (vi_for_tree, t);
2707 if (slot == NULL)
2708 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2709
2710 return (varinfo_t) *slot;
2711 }
2712
2713 /* Get a scalar constraint expression for a new temporary variable. */
2714
2715 static struct constraint_expr
2716 new_scalar_tmp_constraint_exp (const char *name)
2717 {
2718 struct constraint_expr tmp;
2719 varinfo_t vi;
2720
2721 vi = new_var_info (NULL_TREE, name);
2722 vi->offset = 0;
2723 vi->size = -1;
2724 vi->fullsize = -1;
2725 vi->is_full_var = 1;
2726
2727 tmp.var = vi->id;
2728 tmp.type = SCALAR;
2729 tmp.offset = 0;
2730
2731 return tmp;
2732 }
2733
2734 /* Get a constraint expression vector from an SSA_VAR_P node.
2735 If address_p is true, the result will have its address taken. */
2736
2737 static void
2738 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2739 {
2740 struct constraint_expr cexpr;
2741 varinfo_t vi;
2742
2743 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2744 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2745
2746 /* For parameters, get at the points-to set for the actual parm
2747 decl. */
2748 if (TREE_CODE (t) == SSA_NAME
2749 && SSA_NAME_IS_DEFAULT_DEF (t)
2750 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2751 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2752 {
2753 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2754 return;
2755 }
2756
2757 /* For global variables resort to the alias target. */
2758 if (TREE_CODE (t) == VAR_DECL
2759 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2760 {
2761 struct varpool_node *node = varpool_get_node (t);
2762 if (node && node->alias)
2763 {
2764 node = varpool_variable_node (node, NULL);
2765 t = node->symbol.decl;
2766 }
2767 }
2768
2769 vi = get_vi_for_tree (t);
2770 cexpr.var = vi->id;
2771 cexpr.type = SCALAR;
2772 cexpr.offset = 0;
2773 /* If we determine the result is "anything", and we know this is readonly,
2774 say it points to readonly memory instead. */
2775 if (cexpr.var == anything_id && TREE_READONLY (t))
2776 {
2777 gcc_unreachable ();
2778 cexpr.type = ADDRESSOF;
2779 cexpr.var = readonly_id;
2780 }
2781
2782 /* If we are not taking the address of the constraint expr, add all
2783 sub-fields of the variable as well. */
2784 if (!address_p
2785 && !vi->is_full_var)
2786 {
2787 for (; vi; vi = vi->next)
2788 {
2789 cexpr.var = vi->id;
2790 results->safe_push (cexpr);
2791 }
2792 return;
2793 }
2794
2795 results->safe_push (cexpr);
2796 }
2797
2798 /* Process constraint T, performing various simplifications and then
2799 adding it to our list of overall constraints. */
2800
2801 static void
2802 process_constraint (constraint_t t)
2803 {
2804 struct constraint_expr rhs = t->rhs;
2805 struct constraint_expr lhs = t->lhs;
2806
2807 gcc_assert (rhs.var < varmap.length ());
2808 gcc_assert (lhs.var < varmap.length ());
2809
2810 /* If we didn't get any useful constraint from the lhs we get
2811 &ANYTHING as fallback from get_constraint_for. Deal with
2812 it here by turning it into *ANYTHING. */
2813 if (lhs.type == ADDRESSOF
2814 && lhs.var == anything_id)
2815 lhs.type = DEREF;
2816
2817 /* ADDRESSOF on the lhs is invalid. */
2818 gcc_assert (lhs.type != ADDRESSOF);
2819
2820 /* We shouldn't add constraints from things that cannot have pointers.
2821 It's not completely trivial to avoid in the callers, so do it here. */
2822 if (rhs.type != ADDRESSOF
2823 && !get_varinfo (rhs.var)->may_have_pointers)
2824 return;
2825
2826 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2827 if (!get_varinfo (lhs.var)->may_have_pointers)
2828 return;
2829
2830 /* This can happen in our IR with things like n->a = *p */
2831 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2832 {
2833 /* Split into tmp = *rhs, *lhs = tmp */
2834 struct constraint_expr tmplhs;
2835 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2836 process_constraint (new_constraint (tmplhs, rhs));
2837 process_constraint (new_constraint (lhs, tmplhs));
2838 }
2839 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2840 {
2841 /* Split into tmp = &rhs, *lhs = tmp */
2842 struct constraint_expr tmplhs;
2843 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2844 process_constraint (new_constraint (tmplhs, rhs));
2845 process_constraint (new_constraint (lhs, tmplhs));
2846 }
2847 else
2848 {
2849 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2850 constraints.safe_push (t);
2851 }
2852 }
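
/* For example, a statement like *p = *q arrives here with both sides
   DEREF and is split through a temporary into

     doubledereftmp = *q
     *p = doubledereftmp

   while *p = &x is split the same way through "derefaddrtmp". */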
2853
2854
2855 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2856 structure. */
2857
2858 static HOST_WIDE_INT
2859 bitpos_of_field (const tree fdecl)
2860 {
2861 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2862 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2863 return -1;
2864
2865 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2866 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2867 }
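
/* For example, a field at DECL_FIELD_OFFSET 4 with DECL_FIELD_BIT_OFFSET 3
   is at bit position 4 * BITS_PER_UNIT + 3, i.e. bit 35 on a target with
   8-bit units. */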
2868
2869
2870 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2871 resulting constraint expressions in *RESULTS. */
2872
2873 static void
2874 get_constraint_for_ptr_offset (tree ptr, tree offset,
2875 vec<ce_s> *results)
2876 {
2877 struct constraint_expr c;
2878 unsigned int j, n;
2879 HOST_WIDE_INT rhsoffset;
2880
2881 /* If we do not do field-sensitive PTA, adding offsets to pointers
2882 does not change the points-to solution. */
2883 if (!use_field_sensitive)
2884 {
2885 get_constraint_for_rhs (ptr, results);
2886 return;
2887 }
2888
2889 /* If the offset is not an integer constant that fits in a
2890 HOST_WIDE_INT, we have to fall back to a conservative
2891 solution which includes all sub-fields of all pointed-to
2892 variables of ptr. */
2893 if (offset == NULL_TREE
2894 || TREE_CODE (offset) != INTEGER_CST)
2895 rhsoffset = UNKNOWN_OFFSET;
2896 else
2897 {
2898 /* Sign-extend the offset. */
2899 double_int soffset = tree_to_double_int (offset)
2900 .sext (TYPE_PRECISION (TREE_TYPE (offset)));
2901 if (!soffset.fits_shwi ())
2902 rhsoffset = UNKNOWN_OFFSET;
2903 else
2904 {
2905 /* Make sure the bit-offset also fits. */
2906 HOST_WIDE_INT rhsunitoffset = soffset.low;
2907 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2908 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2909 rhsoffset = UNKNOWN_OFFSET;
2910 }
2911 }
2912
2913 get_constraint_for_rhs (ptr, results);
2914 if (rhsoffset == 0)
2915 return;
2916
2917 /* As we are eventually appending to the solution do not use
2918 vec::iterate here. */
2919 n = results->length ();
2920 for (j = 0; j < n; j++)
2921 {
2922 varinfo_t curr;
2923 c = (*results)[j];
2924 curr = get_varinfo (c.var);
2925
2926 if (c.type == ADDRESSOF
2927 /* If this varinfo represents a full variable just use it. */
2928 && curr->is_full_var)
2929 c.offset = 0;
2930 else if (c.type == ADDRESSOF
2931 /* If we do not know the offset add all subfields. */
2932 && rhsoffset == UNKNOWN_OFFSET)
2933 {
2934 varinfo_t temp = lookup_vi_for_tree (curr->decl);
2935 do
2936 {
2937 struct constraint_expr c2;
2938 c2.var = temp->id;
2939 c2.type = ADDRESSOF;
2940 c2.offset = 0;
2941 if (c2.var != c.var)
2942 results->safe_push (c2);
2943 temp = temp->next;
2944 }
2945 while (temp);
2946 }
2947 else if (c.type == ADDRESSOF)
2948 {
2949 varinfo_t temp;
2950 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
2951
2952 /* Search the sub-field which overlaps with the
2953 pointed-to offset. If the result is outside of the variable
2954 we have to provide a conservative result, as the variable is
2955 still reachable from the resulting pointer (even though it
2956 technically cannot point to anything). The last and first
2957 sub-fields are such conservative results.
2958 ??? If we always had a sub-field for &object + 1 then
2959 we could represent this in a more precise way. */
2960 if (rhsoffset < 0
2961 && curr->offset < offset)
2962 offset = 0;
2963 temp = first_or_preceding_vi_for_offset (curr, offset);
2964
2965 /* If the found variable is not exactly at the pointed to
2966 result, we have to include the next variable in the
2967 solution as well. Otherwise two increments by offset / 2
2968 do not result in the same or a conservative superset
2969 solution. */
2970 if (temp->offset != offset
2971 && temp->next != NULL)
2972 {
2973 struct constraint_expr c2;
2974 c2.var = temp->next->id;
2975 c2.type = ADDRESSOF;
2976 c2.offset = 0;
2977 results->safe_push (c2);
2978 }
2979 c.var = temp->id;
2980 c.offset = 0;
2981 }
2982 else
2983 c.offset = rhsoffset;
2984
2985 (*results)[j] = c;
2986 }
2987 }
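
/* A sketch of the effect: for p + 4 (a constant byte offset) we get
   rhsoffset == 4 * BITS_PER_UNIT and each ADDRESSOF result is shifted
   to the sub-field overlapping that bit offset; for p + i with
   non-constant i we use UNKNOWN_OFFSET and conservatively add all
   sub-fields of the pointed-to variables. */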
2988
2989
2990 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2991 If address_p is true the result will have its address taken.
2992 If lhs_p is true then the constraint expression is assumed to be used
2993 as the lhs. */
2994
2995 static void
2996 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
2997 bool address_p, bool lhs_p)
2998 {
2999 tree orig_t = t;
3000 HOST_WIDE_INT bitsize = -1;
3001 HOST_WIDE_INT bitmaxsize = -1;
3002 HOST_WIDE_INT bitpos;
3003 tree forzero;
3004
3005 /* Some people like to do cute things like take the address of
3006 &0->a.b */
3007 forzero = t;
3008 while (handled_component_p (forzero)
3009 || INDIRECT_REF_P (forzero)
3010 || TREE_CODE (forzero) == MEM_REF)
3011 forzero = TREE_OPERAND (forzero, 0);
3012
3013 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3014 {
3015 struct constraint_expr temp;
3016
3017 temp.offset = 0;
3018 temp.var = integer_id;
3019 temp.type = SCALAR;
3020 results->safe_push (temp);
3021 return;
3022 }
3023
3024 /* Handle type-punning through unions. If we are extracting a pointer
3025 from a union via a possibly type-punning access, that pointer
3026 points to anything, similar to a conversion of an integer to
3027 a pointer. */
3028 if (!lhs_p)
3029 {
3030 tree u;
3031 for (u = t;
3032 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3033 u = TREE_OPERAND (u, 0))
3034 if (TREE_CODE (u) == COMPONENT_REF
3035 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3036 {
3037 struct constraint_expr temp;
3038
3039 temp.offset = 0;
3040 temp.var = anything_id;
3041 temp.type = ADDRESSOF;
3042 results->safe_push (temp);
3043 return;
3044 }
3045 }
3046
3047 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3048
3049 /* Pretend to take the address of the base; we'll take care of
3050 adding the required subset of sub-fields below. */
3051 get_constraint_for_1 (t, results, true, lhs_p);
3052 gcc_assert (results->length () == 1);
3053 struct constraint_expr &result = results->last ();
3054
3055 if (result.type == SCALAR
3056 && get_varinfo (result.var)->is_full_var)
3057 /* For single-field vars do not bother about the offset. */
3058 result.offset = 0;
3059 else if (result.type == SCALAR)
3060 {
3061 /* In languages like C, you can access one past the end of an
3062 array. You aren't allowed to dereference it, so we can
3063 ignore this constraint. When we handle pointer subtraction,
3064 we may have to do something cute here. */
3065
3066 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3067 && bitmaxsize != 0)
3068 {
3069 /* It's also not true that the constraint will actually start at the
3070 right offset, it may start in some padding. We only care about
3071 setting the constraint to the first actual field it touches, so
3072 walk to find it. */
3073 struct constraint_expr cexpr = result;
3074 varinfo_t curr;
3075 results->pop ();
3076 cexpr.offset = 0;
3077 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3078 {
3079 if (ranges_overlap_p (curr->offset, curr->size,
3080 bitpos, bitmaxsize))
3081 {
3082 cexpr.var = curr->id;
3083 results->safe_push (cexpr);
3084 if (address_p)
3085 break;
3086 }
3087 }
3088 /* If we are going to take the address of this field then
3089 to be able to compute reachability correctly add at least
3090 the last field of the variable. */
3091 if (address_p && results->length () == 0)
3092 {
3093 curr = get_varinfo (cexpr.var);
3094 while (curr->next != NULL)
3095 curr = curr->next;
3096 cexpr.var = curr->id;
3097 results->safe_push (cexpr);
3098 }
3099 else if (results->length () == 0)
3100 /* Assert that we found *some* field there. The user couldn't be
3101 accessing *only* padding. */
3102 /* Still the user could access one past the end of an array
3103 embedded in a struct resulting in accessing *only* padding. */
3104 /* Or accessing only padding via type-punning to a type
3105 that has a field just in padding space. */
3106 {
3107 cexpr.type = SCALAR;
3108 cexpr.var = anything_id;
3109 cexpr.offset = 0;
3110 results->safe_push (cexpr);
3111 }
3112 }
3113 else if (bitmaxsize == 0)
3114 {
3115 if (dump_file && (dump_flags & TDF_DETAILS))
3116 fprintf (dump_file, "Access to zero-sized part of variable, "
3117 "ignoring\n");
3118 }
3119 else
3120 if (dump_file && (dump_flags & TDF_DETAILS))
3121 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3122 }
3123 else if (result.type == DEREF)
3124 {
3125 /* If we do not know exactly where the access goes say so. Note
3126 that only for non-structure accesses we know that we access
3127 at most one subfield of any variable. */
3128 if (bitpos == -1
3129 || bitsize != bitmaxsize
3130 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3131 || result.offset == UNKNOWN_OFFSET)
3132 result.offset = UNKNOWN_OFFSET;
3133 else
3134 result.offset += bitpos;
3135 }
3136 else if (result.type == ADDRESSOF)
3137 {
3138 /* We can end up here for component references on a
3139 VIEW_CONVERT_EXPR <>(&foobar). */
3140 result.type = SCALAR;
3141 result.var = anything_id;
3142 result.offset = 0;
3143 }
3144 else
3145 gcc_unreachable ();
3146 }
3147
3148
3149 /* Dereference each constraint expression in *CONSTRAINTS in place.
3150 DEREF (ADDRESSOF) = SCALAR
3151 DEREF (SCALAR) = DEREF
3152 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3153 This is needed so that we can handle dereferencing DEREF constraints. */
3154
3155 static void
3156 do_deref (vec<ce_s> *constraints)
3157 {
3158 struct constraint_expr *c;
3159 unsigned int i = 0;
3160
3161 FOR_EACH_VEC_ELT (*constraints, i, c)
3162 {
3163 if (c->type == SCALAR)
3164 c->type = DEREF;
3165 else if (c->type == ADDRESSOF)
3166 c->type = SCALAR;
3167 else if (c->type == DEREF)
3168 {
3169 struct constraint_expr tmplhs;
3170 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3171 process_constraint (new_constraint (tmplhs, *c));
3172 c->var = tmplhs.var;
3173 }
3174 else
3175 gcc_unreachable ();
3176 }
3177 }
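
/* For example, dereferencing the result vector for **p twice first turns
   the SCALAR p into DEREF (*p); the second round hits the DEREF case and
   emits

     dereftmp = *p

   leaving DEREF on dereftmp, i.e. *dereftmp, as the final expression. */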
3178
3179 /* Given a tree T, return the constraint expression for taking the
3180 address of it. */
3181
3182 static void
3183 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3184 {
3185 struct constraint_expr *c;
3186 unsigned int i;
3187
3188 get_constraint_for_1 (t, results, true, true);
3189
3190 FOR_EACH_VEC_ELT (*results, i, c)
3191 {
3192 if (c->type == DEREF)
3193 c->type = SCALAR;
3194 else
3195 c->type = ADDRESSOF;
3196 }
3197 }
3198
3199 /* Given a tree T, return the constraint expression for it. */
3200
3201 static void
3202 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3203 bool lhs_p)
3204 {
3205 struct constraint_expr temp;
3206
3207 /* x = integer is all glommed to a single variable, which doesn't
3208 point to anything by itself. That is, of course, unless it is an
3209 integer constant being treated as a pointer, in which case, we
3210 will return that this is really the addressof anything. This
3211 happens below, since it will fall into the default case. The only
3212 case we know something about an integer treated like a pointer is
3213 when it is the NULL pointer, and then we just say it points to
3214 NULL.
3215
3216 Do not do that if -fno-delete-null-pointer-checks though, because
3217 in that case *NULL does not fail, so it _should_ alias *anything.
3218 It is not worth adding a new option or renaming the existing one,
3219 since this case is relatively obscure. */
3220 if ((TREE_CODE (t) == INTEGER_CST
3221 && integer_zerop (t))
3222 /* The only valid CONSTRUCTORs in gimple with pointer typed
3223 elements are zero-initializers. But in IPA mode we also
3224 process global initializers, so verify at least. */
3225 || (TREE_CODE (t) == CONSTRUCTOR
3226 && CONSTRUCTOR_NELTS (t) == 0))
3227 {
3228 if (flag_delete_null_pointer_checks)
3229 temp.var = nothing_id;
3230 else
3231 temp.var = nonlocal_id;
3232 temp.type = ADDRESSOF;
3233 temp.offset = 0;
3234 results->safe_push (temp);
3235 return;
3236 }
3237
3238 /* String constants are read-only. */
3239 if (TREE_CODE (t) == STRING_CST)
3240 {
3241 temp.var = readonly_id;
3242 temp.type = SCALAR;
3243 temp.offset = 0;
3244 results->safe_push (temp);
3245 return;
3246 }
3247
3248 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3249 {
3250 case tcc_expression:
3251 {
3252 switch (TREE_CODE (t))
3253 {
3254 case ADDR_EXPR:
3255 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3256 return;
3257 default:;
3258 }
3259 break;
3260 }
3261 case tcc_reference:
3262 {
3263 switch (TREE_CODE (t))
3264 {
3265 case MEM_REF:
3266 {
3267 struct constraint_expr cs;
3268 varinfo_t vi, curr;
3269 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3270 TREE_OPERAND (t, 1), results);
3271 do_deref (results);
3272
3273 /* If we are not taking the address then make sure to process
3274 all subvariables we might access. */
3275 if (address_p)
3276 return;
3277
3278 cs = results->last ();
3279 if (cs.type == DEREF
3280 && type_can_have_subvars (TREE_TYPE (t)))
3281 {
3282 /* For dereferences this means we have to defer it
3283 to solving time. */
3284 results->last ().offset = UNKNOWN_OFFSET;
3285 return;
3286 }
3287 if (cs.type != SCALAR)
3288 return;
3289
3290 vi = get_varinfo (cs.var);
3291 curr = vi->next;
3292 if (!vi->is_full_var
3293 && curr)
3294 {
3295 unsigned HOST_WIDE_INT size;
3296 if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
3297 size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
3298 else
3299 size = -1;
3300 for (; curr; curr = curr->next)
3301 {
3302 if (curr->offset - vi->offset < size)
3303 {
3304 cs.var = curr->id;
3305 results->safe_push (cs);
3306 }
3307 else
3308 break;
3309 }
3310 }
3311 return;
3312 }
3313 case ARRAY_REF:
3314 case ARRAY_RANGE_REF:
3315 case COMPONENT_REF:
3316 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3317 return;
3318 case VIEW_CONVERT_EXPR:
3319 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3320 lhs_p);
3321 return;
3322 /* We are missing handling for TARGET_MEM_REF here. */
3323 default:;
3324 }
3325 break;
3326 }
3327 case tcc_exceptional:
3328 {
3329 switch (TREE_CODE (t))
3330 {
3331 case SSA_NAME:
3332 {
3333 get_constraint_for_ssa_var (t, results, address_p);
3334 return;
3335 }
3336 case CONSTRUCTOR:
3337 {
3338 unsigned int i;
3339 tree val;
3340 vec<ce_s> tmp = vNULL;
3341 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3342 {
3343 struct constraint_expr *rhsp;
3344 unsigned j;
3345 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3346 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3347 results->safe_push (*rhsp);
3348 tmp.truncate (0);
3349 }
3350 tmp.release ();
3351 /* We do not know whether the constructor was complete,
3352 so technically we have to add &NOTHING or &ANYTHING
3353 like we do for an empty constructor as well. */
3354 return;
3355 }
3356 default:;
3357 }
3358 break;
3359 }
3360 case tcc_declaration:
3361 {
3362 get_constraint_for_ssa_var (t, results, address_p);
3363 return;
3364 }
3365 case tcc_constant:
3366 {
3367 /* We cannot refer to automatic variables through constants. */
3368 temp.type = ADDRESSOF;
3369 temp.var = nonlocal_id;
3370 temp.offset = 0;
3371 results->safe_push (temp);
3372 return;
3373 }
3374 default:;
3375 }
3376
3377 /* The default fallback is a constraint from anything. */
3378 temp.type = ADDRESSOF;
3379 temp.var = anything_id;
3380 temp.offset = 0;
3381 results->safe_push (temp);
3382 }
3383
3384 /* Given a gimple tree T, return the constraint expression vector for it. */
3385
3386 static void
3387 get_constraint_for (tree t, vec<ce_s> *results)
3388 {
3389 gcc_assert (results->length () == 0);
3390
3391 get_constraint_for_1 (t, results, false, true);
3392 }
3393
3394 /* Given a gimple tree T, return the constraint expression vector for it
3395 to be used as the rhs of a constraint. */
3396
3397 static void
3398 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3399 {
3400 gcc_assert (results->length () == 0);
3401
3402 get_constraint_for_1 (t, results, false, false);
3403 }
3404
3405
3406 /* Efficiently generate constraints from all entries in *RHSC to all
3407 entries in *LHSC. */
3408
3409 static void
3410 process_all_all_constraints (vec<ce_s> lhsc,
3411 vec<ce_s> rhsc)
3412 {
3413 struct constraint_expr *lhsp, *rhsp;
3414 unsigned i, j;
3415
3416 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3417 {
3418 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3419 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3420 process_constraint (new_constraint (*lhsp, *rhsp));
3421 }
3422 else
3423 {
3424 struct constraint_expr tmp;
3425 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3426 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3427 process_constraint (new_constraint (tmp, *rhsp));
3428 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3429 process_constraint (new_constraint (*lhsp, tmp));
3430 }
3431 }
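
/* The temporary keeps the number of generated constraints linear: with
   three lhs and three rhs expressions the direct product would emit
   nine constraints, while routing both sides through "allalltmp" emits
   only six. */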
3432
3433 /* Handle aggregate copies by expanding into copies of the respective
3434 fields of the structures. */
3435
3436 static void
3437 do_structure_copy (tree lhsop, tree rhsop)
3438 {
3439 struct constraint_expr *lhsp, *rhsp;
3440 vec<ce_s> lhsc = vNULL;
3441 vec<ce_s> rhsc = vNULL;
3442 unsigned j;
3443
3444 get_constraint_for (lhsop, &lhsc);
3445 get_constraint_for_rhs (rhsop, &rhsc);
3446 lhsp = &lhsc[0];
3447 rhsp = &rhsc[0];
3448 if (lhsp->type == DEREF
3449 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3450 || rhsp->type == DEREF)
3451 {
3452 if (lhsp->type == DEREF)
3453 {
3454 gcc_assert (lhsc.length () == 1);
3455 lhsp->offset = UNKNOWN_OFFSET;
3456 }
3457 if (rhsp->type == DEREF)
3458 {
3459 gcc_assert (rhsc.length () == 1);
3460 rhsp->offset = UNKNOWN_OFFSET;
3461 }
3462 process_all_all_constraints (lhsc, rhsc);
3463 }
3464 else if (lhsp->type == SCALAR
3465 && (rhsp->type == SCALAR
3466 || rhsp->type == ADDRESSOF))
3467 {
3468 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3469 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3470 unsigned k = 0;
3471 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3472 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3473 for (j = 0; lhsc.iterate (j, &lhsp);)
3474 {
3475 varinfo_t lhsv, rhsv;
3476 rhsp = &rhsc[k];
3477 lhsv = get_varinfo (lhsp->var);
3478 rhsv = get_varinfo (rhsp->var);
3479 if (lhsv->may_have_pointers
3480 && (lhsv->is_full_var
3481 || rhsv->is_full_var
3482 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3483 rhsv->offset + lhsoffset, rhsv->size)))
3484 process_constraint (new_constraint (*lhsp, *rhsp));
3485 if (!rhsv->is_full_var
3486 && (lhsv->is_full_var
3487 || (lhsv->offset + rhsoffset + lhsv->size
3488 > rhsv->offset + lhsoffset + rhsv->size)))
3489 {
3490 ++k;
3491 if (k >= rhsc.length ())
3492 break;
3493 }
3494 else
3495 ++j;
3496 }
3497 }
3498 else
3499 gcc_unreachable ();
3500
3501 lhsc.release ();
3502 rhsc.release ();
3503 }
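
/* A sketch of the field-wise expansion: for

     struct S { int *a; int *b; } x, y;
     x = y;

   the walk above pairs up the overlapping sub-fields and emits the copy
   constraints x.a = y.a and x.b = y.b, skipping fields that cannot
   contain pointers. */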
3504
3505 /* Create constraints ID = { rhsc }. */
3506
3507 static void
3508 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3509 {
3510 struct constraint_expr *c;
3511 struct constraint_expr includes;
3512 unsigned int j;
3513
3514 includes.var = id;
3515 includes.offset = 0;
3516 includes.type = SCALAR;
3517
3518 FOR_EACH_VEC_ELT (rhsc, j, c)
3519 process_constraint (new_constraint (includes, *c));
3520 }
3521
3522 /* Create a constraint ID = OP. */
3523
3524 static void
3525 make_constraint_to (unsigned id, tree op)
3526 {
3527 vec<ce_s> rhsc = vNULL;
3528 get_constraint_for_rhs (op, &rhsc);
3529 make_constraints_to (id, rhsc);
3530 rhsc.release ();
3531 }
3532
3533 /* Create a constraint ID = &FROM. */
3534
3535 static void
3536 make_constraint_from (varinfo_t vi, int from)
3537 {
3538 struct constraint_expr lhs, rhs;
3539
3540 lhs.var = vi->id;
3541 lhs.offset = 0;
3542 lhs.type = SCALAR;
3543
3544 rhs.var = from;
3545 rhs.offset = 0;
3546 rhs.type = ADDRESSOF;
3547 process_constraint (new_constraint (lhs, rhs));
3548 }
3549
3550 /* Create a constraint ID = FROM. */
3551
3552 static void
3553 make_copy_constraint (varinfo_t vi, int from)
3554 {
3555 struct constraint_expr lhs, rhs;
3556
3557 lhs.var = vi->id;
3558 lhs.offset = 0;
3559 lhs.type = SCALAR;
3560
3561 rhs.var = from;
3562 rhs.offset = 0;
3563 rhs.type = SCALAR;
3564 process_constraint (new_constraint (lhs, rhs));
3565 }
3566
3567 /* Make constraints necessary to make OP escape. */
3568
3569 static void
3570 make_escape_constraint (tree op)
3571 {
3572 make_constraint_to (escaped_id, op);
3573 }
3574
3575 /* Add constraints so that the solution of VI is transitively closed. */
3576
3577 static void
3578 make_transitive_closure_constraints (varinfo_t vi)
3579 {
3580 struct constraint_expr lhs, rhs;
3581
3582 /* VAR = *VAR; */
3583 lhs.type = SCALAR;
3584 lhs.var = vi->id;
3585 lhs.offset = 0;
3586 rhs.type = DEREF;
3587 rhs.var = vi->id;
3588 rhs.offset = 0;
3589 process_constraint (new_constraint (lhs, rhs));
3590
3591 /* VAR = VAR + UNKNOWN; */
3592 lhs.type = SCALAR;
3593 lhs.var = vi->id;
3594 lhs.offset = 0;
3595 rhs.type = SCALAR;
3596 rhs.var = vi->id;
3597 rhs.offset = UNKNOWN_OFFSET;
3598 process_constraint (new_constraint (lhs, rhs));
3599 }
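
/* For example, if VI points to a heap variable HEAP, the constraint
   VAR = *VAR also pulls everything HEAP points to into VI's solution,
   and VAR = VAR + UNKNOWN adds all sub-fields at unknown offsets; the
   solver iterates this until the solution is closed under both. */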
3600
3601 /* Temporary storage for fake var decls. */
3602 struct obstack fake_var_decl_obstack;
3603
3604 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3605
3606 static tree
3607 build_fake_var_decl (tree type)
3608 {
3609 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3610 memset (decl, 0, sizeof (struct tree_var_decl));
3611 TREE_SET_CODE (decl, VAR_DECL);
3612 TREE_TYPE (decl) = type;
3613 DECL_UID (decl) = allocate_decl_uid ();
3614 SET_DECL_PT_UID (decl, -1);
3615 layout_decl (decl, 0);
3616 return decl;
3617 }
3618
3619 /* Create a new artificial heap variable with NAME.
3620 Return the created variable. */
3621
3622 static varinfo_t
3623 make_heapvar (const char *name)
3624 {
3625 varinfo_t vi;
3626 tree heapvar;
3627
3628 heapvar = build_fake_var_decl (ptr_type_node);
3629 DECL_EXTERNAL (heapvar) = 1;
3630
3631 vi = new_var_info (heapvar, name);
3632 vi->is_artificial_var = true;
3633 vi->is_heap_var = true;
3634 vi->is_unknown_size_var = true;
3635 vi->offset = 0;
3636 vi->fullsize = ~0;
3637 vi->size = ~0;
3638 vi->is_full_var = true;
3639 insert_vi_for_tree (heapvar, vi);
3640
3641 return vi;
3642 }
3643
3644 /* Create a new artificial heap variable with NAME and make a
3645 constraint from it to LHS. Set flags according to a tag used
3646 for tracking restrict pointers. */
3647
3648 static varinfo_t
3649 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3650 {
3651 varinfo_t vi = make_heapvar (name);
3652 vi->is_global_var = 1;
3653 vi->may_have_pointers = 1;
3654 make_constraint_from (lhs, vi->id);
3655 return vi;
3656 }
3657
3658 /* Create a new artificial heap variable with NAME and make a
3659 constraint from it to LHS. Set flags according to a tag used
3660 for tracking restrict pointers and make the artificial heap
3661 point to global memory. */
3662
3663 static varinfo_t
3664 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3665 {
3666 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3667 make_copy_constraint (vi, nonlocal_id);
3668 return vi;
3669 }
3670
3671 /* In IPA mode there are varinfos for different aspects of each
3672 function designator. One for the points-to set of the return
3673 value, one for the variables that are clobbered by the function,
3674 one for its uses and one for each parameter (including a single
3675 glob for remaining variadic arguments). */
3676
3677 enum { fi_clobbers = 1, fi_uses = 2,
3678 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3679
3680 /* Get a constraint for the requested part of a function designator FI
3681 when operating in IPA mode. */
3682
3683 static struct constraint_expr
3684 get_function_part_constraint (varinfo_t fi, unsigned part)
3685 {
3686 struct constraint_expr c;
3687
3688 gcc_assert (in_ipa_mode);
3689
3690 if (fi->id == anything_id)
3691 {
3692 /* ??? We probably should have an ANYFN special variable. */
3693 c.var = anything_id;
3694 c.offset = 0;
3695 c.type = SCALAR;
3696 }
3697 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3698 {
3699 varinfo_t ai = first_vi_for_offset (fi, part);
3700 if (ai)
3701 c.var = ai->id;
3702 else
3703 c.var = anything_id;
3704 c.offset = 0;
3705 c.type = SCALAR;
3706 }
3707 else
3708 {
3709 c.var = fi->id;
3710 c.offset = part;
3711 c.type = DEREF;
3712 }
3713
3714 return c;
3715 }
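
/* For example (a sketch): for a direct call to a known FUNCTION_DECL,
   asking for fi_uses yields a SCALAR expression for the "uses" varinfo
   of that function; for an indirect call through a function pointer the
   result is the DEREF expression with offset fi_uses, resolved once the
   points-to set of the callee variable is known. */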
3716
3717 /* For non-IPA mode, generate constraints necessary for a call on the
3718 RHS. */
3719
3720 static void
3721 handle_rhs_call (gimple stmt, vec<ce_s> *results)
3722 {
3723 struct constraint_expr rhsc;
3724 unsigned i;
3725 bool returns_uses = false;
3726
3727 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3728 {
3729 tree arg = gimple_call_arg (stmt, i);
3730 int flags = gimple_call_arg_flags (stmt, i);
3731
3732 /* If the argument is not used we can ignore it. */
3733 if (flags & EAF_UNUSED)
3734 continue;
3735
3736 /* As we compute ESCAPED context-insensitive we do not gain
3737 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3738 set. The argument would still get clobbered through the
3739 escape solution. */
3740 if ((flags & EAF_NOCLOBBER)
3741 && (flags & EAF_NOESCAPE))
3742 {
3743 varinfo_t uses = get_call_use_vi (stmt);
3744 if (!(flags & EAF_DIRECT))
3745 {
3746 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3747 make_constraint_to (tem->id, arg);
3748 make_transitive_closure_constraints (tem);
3749 make_copy_constraint (uses, tem->id);
3750 }
3751 else
3752 make_constraint_to (uses->id, arg);
3753 returns_uses = true;
3754 }
3755 else if (flags & EAF_NOESCAPE)
3756 {
3757 struct constraint_expr lhs, rhs;
3758 varinfo_t uses = get_call_use_vi (stmt);
3759 varinfo_t clobbers = get_call_clobber_vi (stmt);
3760 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3761 make_constraint_to (tem->id, arg);
3762 if (!(flags & EAF_DIRECT))
3763 make_transitive_closure_constraints (tem);
3764 make_copy_constraint (uses, tem->id);
3765 make_copy_constraint (clobbers, tem->id);
3766 /* Add *tem = nonlocal, do not add *tem = callused as
3767 EAF_NOESCAPE parameters do not escape to other parameters
3768 and all other uses appear in NONLOCAL as well. */
3769 lhs.type = DEREF;
3770 lhs.var = tem->id;
3771 lhs.offset = 0;
3772 rhs.type = SCALAR;
3773 rhs.var = nonlocal_id;
3774 rhs.offset = 0;
3775 process_constraint (new_constraint (lhs, rhs));
3776 returns_uses = true;
3777 }
3778 else
3779 make_escape_constraint (arg);
3780 }
3781
3782 /* If we added to the call's uses solution make sure we account for
3783 pointers to it to be returned. */
3784 if (returns_uses)
3785 {
3786 rhsc.var = get_call_use_vi (stmt)->id;
3787 rhsc.offset = 0;
3788 rhsc.type = SCALAR;
3789 results->safe_push (rhsc);
3790 }
3791
3792 /* The static chain escapes as well. */
3793 if (gimple_call_chain (stmt))
3794 make_escape_constraint (gimple_call_chain (stmt));
3795
3796 /* And if we applied NRV the address of the return slot escapes as well. */
3797 if (gimple_call_return_slot_opt_p (stmt)
3798 && gimple_call_lhs (stmt) != NULL_TREE
3799 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3800 {
3801 vec<ce_s> tmpc = vNULL;
3802 struct constraint_expr lhsc, *c;
3803 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3804 lhsc.var = escaped_id;
3805 lhsc.offset = 0;
3806 lhsc.type = SCALAR;
3807 FOR_EACH_VEC_ELT (tmpc, i, c)
3808 process_constraint (new_constraint (lhsc, *c));
3809 tmpc.release ();
3810 }
3811
3812 /* Regular functions return nonlocal memory. */
3813 rhsc.var = nonlocal_id;
3814 rhsc.offset = 0;
3815 rhsc.type = SCALAR;
3816 results->safe_push (rhsc);
3817 }
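
/* As an example (a sketch): for a call foo (p) where the argument
   carries no EAF flags, p simply escapes via a constraint to ESCAPED;
   with EAF_NOCLOBBER and EAF_NOESCAPE it is only added to the call-use
   set, going through a transitive-closure temporary unless EAF_DIRECT
   also holds. */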
3818
3819 /* For non-IPA mode, generate constraints necessary for a call
3820 that returns a pointer and assigns it to LHS. This simply makes
3821 the LHS point to global and escaped variables. */
3822
3823 static void
3824 handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
3825 tree fndecl)
3826 {
3827 vec<ce_s> lhsc = vNULL;
3828
3829 get_constraint_for (lhs, &lhsc);
3830 /* If the store is to a global decl make sure to
3831 add proper escape constraints. */
3832 lhs = get_base_address (lhs);
3833 if (lhs
3834 && DECL_P (lhs)
3835 && is_global_var (lhs))
3836 {
3837 struct constraint_expr tmpc;
3838 tmpc.var = escaped_id;
3839 tmpc.offset = 0;
3840 tmpc.type = SCALAR;
3841 lhsc.safe_push (tmpc);
3842 }
3843
3844 /* If the call returns an argument unmodified, override the rhs
3845 constraints. */
3846 flags = gimple_call_return_flags (stmt);
3847 if (flags & ERF_RETURNS_ARG
3848 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3849 {
3850 tree arg;
3851 rhsc.create (0);
3852 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3853 get_constraint_for (arg, &rhsc);
3854 process_all_all_constraints (lhsc, rhsc);
3855 rhsc.release ();
3856 }
3857 else if (flags & ERF_NOALIAS)
3858 {
3859 varinfo_t vi;
3860 struct constraint_expr tmpc;
3861 rhsc.create (0);
3862 vi = make_heapvar ("HEAP");
3863 /* We delay marking allocated storage global until we know if
3864 it escapes. */
3865 DECL_EXTERNAL (vi->decl) = 0;
3866 vi->is_global_var = 0;
3867 /* If this is not a real malloc call, assume the memory was
3868 initialized and thus may point to global memory. All
3869 builtin functions with the malloc attribute behave in a sane way. */
3870 if (!fndecl
3871 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3872 make_constraint_from (vi, nonlocal_id);
3873 tmpc.var = vi->id;
3874 tmpc.offset = 0;
3875 tmpc.type = ADDRESSOF;
3876 rhsc.safe_push (tmpc);
3877 process_all_all_constraints (lhsc, rhsc);
3878 rhsc.release ();
3879 }
3880 else
3881 process_all_all_constraints (lhsc, rhsc);
3882
3883 lhsc.release ();
3884 }
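/* Illustrative sketch (editorial addition): for a call whose result
   does not alias anything else, e.g.

     p = malloc (n);

   ERF_NOALIAS makes the code above point p at a fresh HEAP variable,
   while for an ERF_RETURNS_ARG function such as memcpy the LHS is
   instead given the constraints of the returned argument.  "p" and
   "n" are made-up names.  */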
3885
3886 /* For non-IPA mode, generate constraints necessary for a call of a
3887 const function that returns a pointer in the statement STMT. */
3888
3889 static void
3890 handle_const_call (gimple stmt, vec<ce_s> *results)
3891 {
3892 struct constraint_expr rhsc;
3893 unsigned int k;
3894
3895 /* Treat nested const functions the same as pure functions as far
3896 as the static chain is concerned. */
3897 if (gimple_call_chain (stmt))
3898 {
3899 varinfo_t uses = get_call_use_vi (stmt);
3900 make_transitive_closure_constraints (uses);
3901 make_constraint_to (uses->id, gimple_call_chain (stmt));
3902 rhsc.var = uses->id;
3903 rhsc.offset = 0;
3904 rhsc.type = SCALAR;
3905 results->safe_push (rhsc);
3906 }
3907
3908 /* May return arguments. */
3909 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3910 {
3911 tree arg = gimple_call_arg (stmt, k);
3912 vec<ce_s> argc = vNULL;
3913 unsigned i;
3914 struct constraint_expr *argp;
3915 get_constraint_for_rhs (arg, &argc);
3916 FOR_EACH_VEC_ELT (argc, i, argp)
3917 results->safe_push (*argp);
3918 argc.release ();
3919 }
3920
3921 /* May return addresses of globals. */
3922 rhsc.var = nonlocal_id;
3923 rhsc.offset = 0;
3924 rhsc.type = ADDRESSOF;
3925 results->safe_push (rhsc);
3926 }
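/* Illustrative sketch (editorial addition): for a const function call

     q = cfn (p);

   the results pushed above allow q to point to whatever p points to
   and to the address of NONLOCAL memory, but not to ESCAPED memory.
   "cfn", "p" and "q" are made-up names.  */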
3927
3928 /* For non-IPA mode, generate constraints necessary for a call to a
3929 pure function in statement STMT. */
3930
3931 static void
3932 handle_pure_call (gimple stmt, vec<ce_s> *results)
3933 {
3934 struct constraint_expr rhsc;
3935 unsigned i;
3936 varinfo_t uses = NULL;
3937
3938 /* Memory reached from pointer arguments is call-used. */
3939 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3940 {
3941 tree arg = gimple_call_arg (stmt, i);
3942 if (!uses)
3943 {
3944 uses = get_call_use_vi (stmt);
3945 make_transitive_closure_constraints (uses);
3946 }
3947 make_constraint_to (uses->id, arg);
3948 }
3949
3950 /* The static chain is used as well. */
3951 if (gimple_call_chain (stmt))
3952 {
3953 if (!uses)
3954 {
3955 uses = get_call_use_vi (stmt);
3956 make_transitive_closure_constraints (uses);
3957 }
3958 make_constraint_to (uses->id, gimple_call_chain (stmt));
3959 }
3960
3961 /* Pure functions may return call-used and nonlocal memory. */
3962 if (uses)
3963 {
3964 rhsc.var = uses->id;
3965 rhsc.offset = 0;
3966 rhsc.type = SCALAR;
3967 results->safe_push (rhsc);
3968 }
3969 rhsc.var = nonlocal_id;
3970 rhsc.offset = 0;
3971 rhsc.type = SCALAR;
3972 results->safe_push (rhsc);
3973 }
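/* Illustrative sketch (editorial addition): for a pure function call

     q = pfn (p);

   memory reachable from p lands in the per-call use variable, and q
   may point to that call-used memory as well as to NONLOCAL memory.
   "pfn", "p" and "q" are made-up names.  */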
3974
3975
3976 /* Return the varinfo for the callee of CALL. */
3977
3978 static varinfo_t
3979 get_fi_for_callee (gimple call)
3980 {
3981 tree decl, fn = gimple_call_fn (call);
3982
3983 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
3984 fn = OBJ_TYPE_REF_EXPR (fn);
3985
3986 /* If we can directly resolve the function being called, do so.
3987 Otherwise, it must be some sort of indirect expression that
3988 we should still be able to handle. */
3989 decl = gimple_call_addr_fndecl (fn);
3990 if (decl)
3991 return get_vi_for_tree (decl);
3992
3993 /* If the function is anything other than an SSA name pointer we have no
3994 clue and should be getting ANYFN (well, ANYTHING for now). */
3995 if (!fn || TREE_CODE (fn) != SSA_NAME)
3996 return get_varinfo (anything_id);
3997
3998 if (SSA_NAME_IS_DEFAULT_DEF (fn)
3999 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4000 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4001 fn = SSA_NAME_VAR (fn);
4002
4003 return get_vi_for_tree (fn);
4004 }
4005
4006 /* Create constraints for the builtin call T. Return true if the call
4007 was handled, otherwise false. */
4008
4009 static bool
4010 find_func_aliases_for_builtin_call (gimple t)
4011 {
4012 tree fndecl = gimple_call_fndecl (t);
4013 vec<ce_s> lhsc = vNULL;
4014 vec<ce_s> rhsc = vNULL;
4015 varinfo_t fi;
4016
4017 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4018 /* ??? All builtins that are handled here need to be handled
4019 in the alias-oracle query functions explicitly! */
4020 switch (DECL_FUNCTION_CODE (fndecl))
4021 {
4022 /* All the following functions return a pointer to the same object
4023 as their first argument points to. The functions do not add
4024 to the ESCAPED solution. The functions make the memory pointed
4025 to by the first argument point to what the memory pointed to
4026 by the second argument points to. */
4027 case BUILT_IN_STRCPY:
4028 case BUILT_IN_STRNCPY:
4029 case BUILT_IN_BCOPY:
4030 case BUILT_IN_MEMCPY:
4031 case BUILT_IN_MEMMOVE:
4032 case BUILT_IN_MEMPCPY:
4033 case BUILT_IN_STPCPY:
4034 case BUILT_IN_STPNCPY:
4035 case BUILT_IN_STRCAT:
4036 case BUILT_IN_STRNCAT:
4037 case BUILT_IN_STRCPY_CHK:
4038 case BUILT_IN_STRNCPY_CHK:
4039 case BUILT_IN_MEMCPY_CHK:
4040 case BUILT_IN_MEMMOVE_CHK:
4041 case BUILT_IN_MEMPCPY_CHK:
4042 case BUILT_IN_STPCPY_CHK:
4043 case BUILT_IN_STPNCPY_CHK:
4044 case BUILT_IN_STRCAT_CHK:
4045 case BUILT_IN_STRNCAT_CHK:
4046 case BUILT_IN_TM_MEMCPY:
4047 case BUILT_IN_TM_MEMMOVE:
4048 {
4049 tree res = gimple_call_lhs (t);
4050 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4051 == BUILT_IN_BCOPY ? 1 : 0));
4052 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4053 == BUILT_IN_BCOPY ? 0 : 1));
4054 if (res != NULL_TREE)
4055 {
4056 get_constraint_for (res, &lhsc);
4057 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4058 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4059 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4060 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4061 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4062 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4063 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4064 else
4065 get_constraint_for (dest, &rhsc);
4066 process_all_all_constraints (lhsc, rhsc);
4067 lhsc.release ();
4068 rhsc.release ();
4069 }
4070 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4071 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4072 do_deref (&lhsc);
4073 do_deref (&rhsc);
4074 process_all_all_constraints (lhsc, rhsc);
4075 lhsc.release ();
4076 rhsc.release ();
4077 return true;
4078 }
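/* Illustrative sketch (editorial addition): for

     d2 = memcpy (d, s, n);

   the case above generates *d = *s for the copied memory and d2 = d
   for the returned pointer (with an unknown offset for the
   mempcpy/stpcpy-style variants that return past the copied data).
   "d", "d2", "s" and "n" are made-up names.  */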
4079 case BUILT_IN_MEMSET:
4080 case BUILT_IN_MEMSET_CHK:
4081 case BUILT_IN_TM_MEMSET:
4082 {
4083 tree res = gimple_call_lhs (t);
4084 tree dest = gimple_call_arg (t, 0);
4085 unsigned i;
4086 ce_s *lhsp;
4087 struct constraint_expr ac;
4088 if (res != NULL_TREE)
4089 {
4090 get_constraint_for (res, &lhsc);
4091 get_constraint_for (dest, &rhsc);
4092 process_all_all_constraints (lhsc, rhsc);
4093 lhsc.release ();
4094 rhsc.release ();
4095 }
4096 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4097 do_deref (&lhsc);
4098 if (flag_delete_null_pointer_checks
4099 && integer_zerop (gimple_call_arg (t, 1)))
4100 {
4101 ac.type = ADDRESSOF;
4102 ac.var = nothing_id;
4103 }
4104 else
4105 {
4106 ac.type = SCALAR;
4107 ac.var = integer_id;
4108 }
4109 ac.offset = 0;
4110 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4111 process_constraint (new_constraint (*lhsp, ac));
4112 lhsc.release ();
4113 return true;
4114 }
4115 case BUILT_IN_ASSUME_ALIGNED:
4116 {
4117 tree res = gimple_call_lhs (t);
4118 tree dest = gimple_call_arg (t, 0);
4119 if (res != NULL_TREE)
4120 {
4121 get_constraint_for (res, &lhsc);
4122 get_constraint_for (dest, &rhsc);
4123 process_all_all_constraints (lhsc, rhsc);
4124 lhsc.release ();
4125 rhsc.release ();
4126 }
4127 return true;
4128 }
4129 /* All the following functions do not return pointers, do not
4130 modify the points-to sets of memory reachable from their
4131 arguments and do not add to the ESCAPED solution. */
4132 case BUILT_IN_SINCOS:
4133 case BUILT_IN_SINCOSF:
4134 case BUILT_IN_SINCOSL:
4135 case BUILT_IN_FREXP:
4136 case BUILT_IN_FREXPF:
4137 case BUILT_IN_FREXPL:
4138 case BUILT_IN_GAMMA_R:
4139 case BUILT_IN_GAMMAF_R:
4140 case BUILT_IN_GAMMAL_R:
4141 case BUILT_IN_LGAMMA_R:
4142 case BUILT_IN_LGAMMAF_R:
4143 case BUILT_IN_LGAMMAL_R:
4144 case BUILT_IN_MODF:
4145 case BUILT_IN_MODFF:
4146 case BUILT_IN_MODFL:
4147 case BUILT_IN_REMQUO:
4148 case BUILT_IN_REMQUOF:
4149 case BUILT_IN_REMQUOL:
4150 case BUILT_IN_FREE:
4151 return true;
4152 case BUILT_IN_STRDUP:
4153 case BUILT_IN_STRNDUP:
4154 if (gimple_call_lhs (t))
4155 {
4156 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4157 vNULL, fndecl);
4158 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4159 NULL_TREE, &lhsc);
4160 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4161 NULL_TREE, &rhsc);
4162 do_deref (&lhsc);
4163 do_deref (&rhsc);
4164 process_all_all_constraints (lhsc, rhsc);
4165 lhsc.release ();
4166 rhsc.release ();
4167 return true;
4168 }
4169 break;
4170 /* Trampolines are special - they set up passing the static
4171 frame. */
4172 case BUILT_IN_INIT_TRAMPOLINE:
4173 {
4174 tree tramp = gimple_call_arg (t, 0);
4175 tree nfunc = gimple_call_arg (t, 1);
4176 tree frame = gimple_call_arg (t, 2);
4177 unsigned i;
4178 struct constraint_expr lhs, *rhsp;
4179 if (in_ipa_mode)
4180 {
4181 varinfo_t nfi = NULL;
4182 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4183 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4184 if (nfi)
4185 {
4186 lhs = get_function_part_constraint (nfi, fi_static_chain);
4187 get_constraint_for (frame, &rhsc);
4188 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4189 process_constraint (new_constraint (lhs, *rhsp));
4190 rhsc.release ();
4191
4192 /* Make the frame point to the function for
4193 the trampoline adjustment call. */
4194 get_constraint_for (tramp, &lhsc);
4195 do_deref (&lhsc);
4196 get_constraint_for (nfunc, &rhsc);
4197 process_all_all_constraints (lhsc, rhsc);
4198 rhsc.release ();
4199 lhsc.release ();
4200
4201 return true;
4202 }
4203 }
4204 /* Else fallthru to generic handling which will let
4205 the frame escape. */
4206 break;
4207 }
4208 case BUILT_IN_ADJUST_TRAMPOLINE:
4209 {
4210 tree tramp = gimple_call_arg (t, 0);
4211 tree res = gimple_call_lhs (t);
4212 if (in_ipa_mode && res)
4213 {
4214 get_constraint_for (res, &lhsc);
4215 get_constraint_for (tramp, &rhsc);
4216 do_deref (&rhsc);
4217 process_all_all_constraints (lhsc, rhsc);
4218 rhsc.release ();
4219 lhsc.release ();
4220 }
4221 return true;
4222 }
4223 CASE_BUILT_IN_TM_STORE (1):
4224 CASE_BUILT_IN_TM_STORE (2):
4225 CASE_BUILT_IN_TM_STORE (4):
4226 CASE_BUILT_IN_TM_STORE (8):
4227 CASE_BUILT_IN_TM_STORE (FLOAT):
4228 CASE_BUILT_IN_TM_STORE (DOUBLE):
4229 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4230 CASE_BUILT_IN_TM_STORE (M64):
4231 CASE_BUILT_IN_TM_STORE (M128):
4232 CASE_BUILT_IN_TM_STORE (M256):
4233 {
4234 tree addr = gimple_call_arg (t, 0);
4235 tree src = gimple_call_arg (t, 1);
4236
4237 get_constraint_for (addr, &lhsc);
4238 do_deref (&lhsc);
4239 get_constraint_for (src, &rhsc);
4240 process_all_all_constraints (lhsc, rhsc);
4241 lhsc.release ();
4242 rhsc.release ();
4243 return true;
4244 }
4245 CASE_BUILT_IN_TM_LOAD (1):
4246 CASE_BUILT_IN_TM_LOAD (2):
4247 CASE_BUILT_IN_TM_LOAD (4):
4248 CASE_BUILT_IN_TM_LOAD (8):
4249 CASE_BUILT_IN_TM_LOAD (FLOAT):
4250 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4251 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4252 CASE_BUILT_IN_TM_LOAD (M64):
4253 CASE_BUILT_IN_TM_LOAD (M128):
4254 CASE_BUILT_IN_TM_LOAD (M256):
4255 {
4256 tree dest = gimple_call_lhs (t);
4257 tree addr = gimple_call_arg (t, 0);
4258
4259 get_constraint_for (dest, &lhsc);
4260 get_constraint_for (addr, &rhsc);
4261 do_deref (&rhsc);
4262 process_all_all_constraints (lhsc, rhsc);
4263 lhsc.release ();
4264 rhsc.release ();
4265 return true;
4266 }
4267 /* Variadic arguments need to be handled in IPA
4268 mode as well. */
4269 case BUILT_IN_VA_START:
4270 {
4271 tree valist = gimple_call_arg (t, 0);
4272 struct constraint_expr rhs, *lhsp;
4273 unsigned i;
4274 get_constraint_for (valist, &lhsc);
4275 do_deref (&lhsc);
4276 /* The va_list gets access to pointers in variadic
4277 arguments, which we know in the case of IPA analysis
4278 and which otherwise are just all nonlocal variables. */
4279 if (in_ipa_mode)
4280 {
4281 fi = lookup_vi_for_tree (cfun->decl);
4282 rhs = get_function_part_constraint (fi, ~0);
4283 rhs.type = ADDRESSOF;
4284 }
4285 else
4286 {
4287 rhs.var = nonlocal_id;
4288 rhs.type = ADDRESSOF;
4289 rhs.offset = 0;
4290 }
4291 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4292 process_constraint (new_constraint (*lhsp, rhs));
4293 lhsc.release ();
4294 /* va_list is clobbered. */
4295 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4296 return true;
4297 }
4298 /* va_end doesn't have any effect that matters. */
4299 case BUILT_IN_VA_END:
4300 return true;
4301 /* Alternate return. Simply give up for now. */
4302 case BUILT_IN_RETURN:
4303 {
4304 fi = NULL;
4305 if (!in_ipa_mode
4306 || !(fi = get_vi_for_tree (cfun->decl)))
4307 make_constraint_from (get_varinfo (escaped_id), anything_id);
4308 else if (in_ipa_mode
4309 && fi != NULL)
4310 {
4311 struct constraint_expr lhs, rhs;
4312 lhs = get_function_part_constraint (fi, fi_result);
4313 rhs.var = anything_id;
4314 rhs.offset = 0;
4315 rhs.type = SCALAR;
4316 process_constraint (new_constraint (lhs, rhs));
4317 }
4318 return true;
4319 }
4320 /* printf-style functions may have hooks to set pointers to
4321 point to somewhere into the generated string. Leave them
4322 for a later exercise... */
4323 default:
4324 /* Fallthru to general call handling. */;
4325 }
4326
4327 return false;
4328 }
4329
4330 /* Create constraints for the call T. */
4331
4332 static void
4333 find_func_aliases_for_call (gimple t)
4334 {
4335 tree fndecl = gimple_call_fndecl (t);
4336 vec<ce_s> lhsc = vNULL;
4337 vec<ce_s> rhsc = vNULL;
4338 varinfo_t fi;
4339
4340 if (fndecl != NULL_TREE
4341 && DECL_BUILT_IN (fndecl)
4342 && find_func_aliases_for_builtin_call (t))
4343 return;
4344
4345 fi = get_fi_for_callee (t);
4346 if (!in_ipa_mode
4347 || (fndecl && !fi->is_fn_info))
4348 {
4349 vec<ce_s> rhsc = vNULL;
4350 int flags = gimple_call_flags (t);
4351
4352 /* Const functions can return their arguments and addresses
4353 of global memory but not of escaped memory. */
4354 if (flags & (ECF_CONST|ECF_NOVOPS))
4355 {
4356 if (gimple_call_lhs (t))
4357 handle_const_call (t, &rhsc);
4358 }
4359 /* Pure functions can return addresses in and of memory
4360 reachable from their arguments, but they are not an escape
4361 point for reachable memory of their arguments. */
4362 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4363 handle_pure_call (t, &rhsc);
4364 else
4365 handle_rhs_call (t, &rhsc);
4366 if (gimple_call_lhs (t))
4367 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4368 rhsc.release ();
4369 }
4370 else
4371 {
4372 tree lhsop;
4373 unsigned j;
4374
4375 /* Assign all the passed arguments to the appropriate incoming
4376 parameters of the function. */
4377 for (j = 0; j < gimple_call_num_args (t); j++)
4378 {
4379 struct constraint_expr lhs;
4380 struct constraint_expr *rhsp;
4381 tree arg = gimple_call_arg (t, j);
4382
4383 get_constraint_for_rhs (arg, &rhsc);
4384 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4385 while (rhsc.length () != 0)
4386 {
4387 rhsp = &rhsc.last ();
4388 process_constraint (new_constraint (lhs, *rhsp));
4389 rhsc.pop ();
4390 }
4391 }
4392
4393 /* If we are returning a value, assign it to the result. */
4394 lhsop = gimple_call_lhs (t);
4395 if (lhsop)
4396 {
4397 struct constraint_expr rhs;
4398 struct constraint_expr *lhsp;
4399
4400 get_constraint_for (lhsop, &lhsc);
4401 rhs = get_function_part_constraint (fi, fi_result);
4402 if (fndecl
4403 && DECL_RESULT (fndecl)
4404 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4405 {
4406 vec<ce_s> tem = vNULL;
4407 tem.safe_push (rhs);
4408 do_deref (&tem);
4409 rhs = tem[0];
4410 tem.release ();
4411 }
4412 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4413 process_constraint (new_constraint (*lhsp, rhs));
4414 }
4415
4416 /* If we pass the result decl by reference, honor that. */
4417 if (lhsop
4418 && fndecl
4419 && DECL_RESULT (fndecl)
4420 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4421 {
4422 struct constraint_expr lhs;
4423 struct constraint_expr *rhsp;
4424
4425 get_constraint_for_address_of (lhsop, &rhsc);
4426 lhs = get_function_part_constraint (fi, fi_result);
4427 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4428 process_constraint (new_constraint (lhs, *rhsp));
4429 rhsc.release ();
4430 }
4431
4432 /* If we use a static chain, pass it along. */
4433 if (gimple_call_chain (t))
4434 {
4435 struct constraint_expr lhs;
4436 struct constraint_expr *rhsp;
4437
4438 get_constraint_for (gimple_call_chain (t), &rhsc);
4439 lhs = get_function_part_constraint (fi, fi_static_chain);
4440 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4441 process_constraint (new_constraint (lhs, *rhsp));
4442 }
4443 }
4444 }
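/* Illustrative sketch (editorial addition): in IPA mode a direct call

     x = bar (a, b);

   is wired through bar's function info: a and b flow into the
   parameter sub-variables at fi_parm_base + 0 and + 1, and x receives
   the fi_result sub-variable (dereferenced first if the result is
   passed by reference).  "bar", "a", "b" and "x" are made-up names.  */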
4445
4446 /* Walk statement T setting up aliasing constraints according to the
4447 references found in T. This function is the main part of the
4448 constraint builder. AI points to auxiliary alias information used
4449 when building alias sets and computing alias grouping heuristics. */
4450
4451 static void
4452 find_func_aliases (gimple origt)
4453 {
4454 gimple t = origt;
4455 vec<ce_s> lhsc = vNULL;
4456 vec<ce_s> rhsc = vNULL;
4457 struct constraint_expr *c;
4458 varinfo_t fi;
4459
4460 /* Now build constraints expressions. */
4461 if (gimple_code (t) == GIMPLE_PHI)
4462 {
4463 size_t i;
4464 unsigned int j;
4465
4466 /* For a phi node, assign all the arguments to
4467 the result. */
4468 get_constraint_for (gimple_phi_result (t), &lhsc);
4469 for (i = 0; i < gimple_phi_num_args (t); i++)
4470 {
4471 tree strippedrhs = PHI_ARG_DEF (t, i);
4472
4473 STRIP_NOPS (strippedrhs);
4474 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4475
4476 FOR_EACH_VEC_ELT (lhsc, j, c)
4477 {
4478 struct constraint_expr *c2;
4479 while (rhsc.length () > 0)
4480 {
4481 c2 = &rhsc.last ();
4482 process_constraint (new_constraint (*c, *c2));
4483 rhsc.pop ();
4484 }
4485 }
4486 }
4487 }
4488 /* In IPA mode, we need to generate constraints to pass call
4489 arguments through their calls. There are two cases:
4490 either a GIMPLE_CALL returning a value, or just a plain
4491 GIMPLE_CALL that does not.
4492
4493 In non-ipa mode, we need to generate constraints for each
4494 pointer passed by address. */
4495 else if (is_gimple_call (t))
4496 find_func_aliases_for_call (t);
4497
4498 /* Otherwise, just a regular assignment statement. We only care about
4499 operations with a pointer result; others are dealt with as escape
4500 points if they have pointer operands. */
4501 else if (is_gimple_assign (t))
4502 {
4504 tree lhsop = gimple_assign_lhs (t);
4505 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4506
4507 if (rhsop && TREE_CLOBBER_P (rhsop))
4508 /* Ignore clobbers, they don't actually store anything into
4509 the LHS. */
4510 ;
4511 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4512 do_structure_copy (lhsop, rhsop);
4513 else
4514 {
4515 enum tree_code code = gimple_assign_rhs_code (t);
4516
4517 get_constraint_for (lhsop, &lhsc);
4518
4519 if (code == POINTER_PLUS_EXPR)
4520 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4521 gimple_assign_rhs2 (t), &rhsc);
4522 else if (code == BIT_AND_EXPR
4523 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4524 {
4525 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4526 the pointer. Handle it by offsetting it by UNKNOWN. */
4527 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4528 NULL_TREE, &rhsc);
4529 }
4530 else if ((CONVERT_EXPR_CODE_P (code)
4531 && !(POINTER_TYPE_P (gimple_expr_type (t))
4532 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4533 || gimple_assign_single_p (t))
4534 get_constraint_for_rhs (rhsop, &rhsc);
4535 else if (code == COND_EXPR)
4536 {
4537 /* The result is a merge of both COND_EXPR arms. */
4538 vec<ce_s> tmp = vNULL;
4539 struct constraint_expr *rhsp;
4540 unsigned i;
4541 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4542 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4543 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4544 rhsc.safe_push (*rhsp);
4545 tmp.release ();
4546 }
4547 else if (truth_value_p (code))
4548 /* Truth value results are not pointers (or parts of one), or at
4549 least would be a very unreasonable obfuscation of a part. */
4550 ;
4551 else
4552 {
4553 /* All other operations are merges. */
4554 vec<ce_s> tmp = vNULL;
4555 struct constraint_expr *rhsp;
4556 unsigned i, j;
4557 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4558 for (i = 2; i < gimple_num_ops (t); ++i)
4559 {
4560 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4561 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4562 rhsc.safe_push (*rhsp);
4563 tmp.truncate (0);
4564 }
4565 tmp.release ();
4566 }
4567 process_all_all_constraints (lhsc, rhsc);
4568 }
4569 /* If there is a store to a global variable the rhs escapes. */
4570 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4571 && DECL_P (lhsop)
4572 && is_global_var (lhsop)
4573 && (!in_ipa_mode
4574 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4575 make_escape_constraint (rhsop);
4576 }
4577 /* Handle escapes through return. */
4578 else if (gimple_code (t) == GIMPLE_RETURN
4579 && gimple_return_retval (t) != NULL_TREE)
4580 {
4581 fi = NULL;
4582 if (!in_ipa_mode
4583 || !(fi = get_vi_for_tree (cfun->decl)))
4584 make_escape_constraint (gimple_return_retval (t));
4585 else if (in_ipa_mode
4586 && fi != NULL)
4587 {
4588 struct constraint_expr lhs;
4589 struct constraint_expr *rhsp;
4590 unsigned i;
4591
4592 lhs = get_function_part_constraint (fi, fi_result);
4593 get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4594 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4595 process_constraint (new_constraint (lhs, *rhsp));
4596 }
4597 }
4598 /* Handle asms conservatively by adding escape constraints to everything. */
4599 else if (gimple_code (t) == GIMPLE_ASM)
4600 {
4601 unsigned i, noutputs;
4602 const char **oconstraints;
4603 const char *constraint;
4604 bool allows_mem, allows_reg, is_inout;
4605
4606 noutputs = gimple_asm_noutputs (t);
4607 oconstraints = XALLOCAVEC (const char *, noutputs);
4608
4609 for (i = 0; i < noutputs; ++i)
4610 {
4611 tree link = gimple_asm_output_op (t, i);
4612 tree op = TREE_VALUE (link);
4613
4614 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4615 oconstraints[i] = constraint;
4616 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4617 &allows_reg, &is_inout);
4618
4619 /* A memory constraint makes the address of the operand escape. */
4620 if (!allows_reg && allows_mem)
4621 make_escape_constraint (build_fold_addr_expr (op));
4622
4623 /* The asm may read global memory, so outputs may point to
4624 any global memory. */
4625 if (op)
4626 {
4627 vec<ce_s> lhsc = vNULL;
4628 struct constraint_expr rhsc, *lhsp;
4629 unsigned j;
4630 get_constraint_for (op, &lhsc);
4631 rhsc.var = nonlocal_id;
4632 rhsc.offset = 0;
4633 rhsc.type = SCALAR;
4634 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4635 process_constraint (new_constraint (*lhsp, rhsc));
4636 lhsc.release ();
4637 }
4638 }
4639 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4640 {
4641 tree link = gimple_asm_input_op (t, i);
4642 tree op = TREE_VALUE (link);
4643
4644 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4645
4646 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4647 &allows_mem, &allows_reg);
4648
4649 /* A memory constraint makes the address of the operand escape. */
4650 if (!allows_reg && allows_mem)
4651 make_escape_constraint (build_fold_addr_expr (op));
4652 /* Strictly we'd only need the constraint to ESCAPED if
4653 the asm clobbers memory, otherwise using something
4654 along the lines of per-call clobbers/uses would be enough. */
4655 else if (op)
4656 make_escape_constraint (op);
4657 }
4658 }
4659
4660 rhsc.release ();
4661 lhsc.release ();
4662 }
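/* Illustrative sketch (editorial addition) of the simple assignment
   cases handled above, with the constraints they generate:

     p = &a;      p = &a
     p = q;       p = q
     p = *q;      p = *q
     *p = q;      *p = q
     p = q + i;   p = q with an UNKNOWN offset, unless the offset
                  is a suitable constant and field-sensitivity
                  applies.

   All names are made up.  */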
4663
4664
4665 /* Create a constraint adding to the clobber set of FI the memory
4666 pointed to by PTR. */
4667
4668 static void
4669 process_ipa_clobber (varinfo_t fi, tree ptr)
4670 {
4671 vec<ce_s> ptrc = vNULL;
4672 struct constraint_expr *c, lhs;
4673 unsigned i;
4674 get_constraint_for_rhs (ptr, &ptrc);
4675 lhs = get_function_part_constraint (fi, fi_clobbers);
4676 FOR_EACH_VEC_ELT (ptrc, i, c)
4677 process_constraint (new_constraint (lhs, *c));
4678 ptrc.release ();
4679 }
4680
4681 /* Walk statement T setting up clobber and use constraints according to the
4682 references found in T. This function is a main part of the
4683 IPA constraint builder. */
4684
4685 static void
4686 find_func_clobbers (gimple origt)
4687 {
4688 gimple t = origt;
4689 vec<ce_s> lhsc = vNULL;
4690 vec<ce_s> rhsc = vNULL;
4691 varinfo_t fi;
4692
4693 /* Add constraints for memory clobbered/used in IPA mode.
4694 We are not interested in what automatic variables are clobbered
4695 or used, as we only use this information in the caller, to which
4696 they do not escape. */
4697 gcc_assert (in_ipa_mode);
4698
4699 /* If the stmt refers to memory in any way it had better have a VUSE. */
4700 if (gimple_vuse (t) == NULL_TREE)
4701 return;
4702
4703 /* We'd better have function information for the current function. */
4704 fi = lookup_vi_for_tree (cfun->decl);
4705 gcc_assert (fi != NULL);
4706
4707 /* Account for stores in assignments and calls. */
4708 if (gimple_vdef (t) != NULL_TREE
4709 && gimple_has_lhs (t))
4710 {
4711 tree lhs = gimple_get_lhs (t);
4712 tree tem = lhs;
4713 while (handled_component_p (tem))
4714 tem = TREE_OPERAND (tem, 0);
4715 if ((DECL_P (tem)
4716 && !auto_var_in_fn_p (tem, cfun->decl))
4717 || INDIRECT_REF_P (tem)
4718 || (TREE_CODE (tem) == MEM_REF
4719 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4720 && auto_var_in_fn_p
4721 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4722 {
4723 struct constraint_expr lhsc, *rhsp;
4724 unsigned i;
4725 lhsc = get_function_part_constraint (fi, fi_clobbers);
4726 get_constraint_for_address_of (lhs, &rhsc);
4727 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4728 process_constraint (new_constraint (lhsc, *rhsp));
4729 rhsc.release ();
4730 }
4731 }
4732
4733 /* Account for uses in assignments and returns. */
4734 if (gimple_assign_single_p (t)
4735 || (gimple_code (t) == GIMPLE_RETURN
4736 && gimple_return_retval (t) != NULL_TREE))
4737 {
4738 tree rhs = (gimple_assign_single_p (t)
4739 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4740 tree tem = rhs;
4741 while (handled_component_p (tem))
4742 tem = TREE_OPERAND (tem, 0);
4743 if ((DECL_P (tem)
4744 && !auto_var_in_fn_p (tem, cfun->decl))
4745 || INDIRECT_REF_P (tem)
4746 || (TREE_CODE (tem) == MEM_REF
4747 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4748 && auto_var_in_fn_p
4749 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4750 {
4751 struct constraint_expr lhs, *rhsp;
4752 unsigned i;
4753 lhs = get_function_part_constraint (fi, fi_uses);
4754 get_constraint_for_address_of (rhs, &rhsc);
4755 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4756 process_constraint (new_constraint (lhs, *rhsp));
4757 rhsc.release ();
4758 }
4759 }
4760
4761 if (is_gimple_call (t))
4762 {
4763 varinfo_t cfi = NULL;
4764 tree decl = gimple_call_fndecl (t);
4765 struct constraint_expr lhs, rhs;
4766 unsigned i, j;
4767
4768 /* For builtins we do not have separate function info. For those
4769 we do not generate escapes for, we have to generate clobbers/uses. */
4770 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4771 switch (DECL_FUNCTION_CODE (decl))
4772 {
4773 /* The following functions use and clobber memory pointed to
4774 by their arguments. */
4775 case BUILT_IN_STRCPY:
4776 case BUILT_IN_STRNCPY:
4777 case BUILT_IN_BCOPY:
4778 case BUILT_IN_MEMCPY:
4779 case BUILT_IN_MEMMOVE:
4780 case BUILT_IN_MEMPCPY:
4781 case BUILT_IN_STPCPY:
4782 case BUILT_IN_STPNCPY:
4783 case BUILT_IN_STRCAT:
4784 case BUILT_IN_STRNCAT:
4785 case BUILT_IN_STRCPY_CHK:
4786 case BUILT_IN_STRNCPY_CHK:
4787 case BUILT_IN_MEMCPY_CHK:
4788 case BUILT_IN_MEMMOVE_CHK:
4789 case BUILT_IN_MEMPCPY_CHK:
4790 case BUILT_IN_STPCPY_CHK:
4791 case BUILT_IN_STPNCPY_CHK:
4792 case BUILT_IN_STRCAT_CHK:
4793 case BUILT_IN_STRNCAT_CHK:
4794 {
4795 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4796 == BUILT_IN_BCOPY ? 1 : 0));
4797 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4798 == BUILT_IN_BCOPY ? 0 : 1));
4799 unsigned i;
4800 struct constraint_expr *rhsp, *lhsp;
4801 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4802 lhs = get_function_part_constraint (fi, fi_clobbers);
4803 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4804 process_constraint (new_constraint (lhs, *lhsp));
4805 lhsc.release ();
4806 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4807 lhs = get_function_part_constraint (fi, fi_uses);
4808 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4809 process_constraint (new_constraint (lhs, *rhsp));
4810 rhsc.release ();
4811 return;
4812 }
4813 /* The following function clobbers memory pointed to by
4814 its argument. */
4815 case BUILT_IN_MEMSET:
4816 case BUILT_IN_MEMSET_CHK:
4817 {
4818 tree dest = gimple_call_arg (t, 0);
4819 unsigned i;
4820 ce_s *lhsp;
4821 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4822 lhs = get_function_part_constraint (fi, fi_clobbers);
4823 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4824 process_constraint (new_constraint (lhs, *lhsp));
4825 lhsc.release ();
4826 return;
4827 }
4828 /* The following functions clobber their second and third
4829 arguments. */
4830 case BUILT_IN_SINCOS:
4831 case BUILT_IN_SINCOSF:
4832 case BUILT_IN_SINCOSL:
4833 {
4834 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4835 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4836 return;
4837 }
4838 /* The following functions clobber their second argument. */
4839 case BUILT_IN_FREXP:
4840 case BUILT_IN_FREXPF:
4841 case BUILT_IN_FREXPL:
4842 case BUILT_IN_LGAMMA_R:
4843 case BUILT_IN_LGAMMAF_R:
4844 case BUILT_IN_LGAMMAL_R:
4845 case BUILT_IN_GAMMA_R:
4846 case BUILT_IN_GAMMAF_R:
4847 case BUILT_IN_GAMMAL_R:
4848 case BUILT_IN_MODF:
4849 case BUILT_IN_MODFF:
4850 case BUILT_IN_MODFL:
4851 {
4852 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4853 return;
4854 }
4855 /* The following functions clobber their third argument. */
4856 case BUILT_IN_REMQUO:
4857 case BUILT_IN_REMQUOF:
4858 case BUILT_IN_REMQUOL:
4859 {
4860 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4861 return;
4862 }
4863 /* The following functions neither read nor clobber memory. */
4864 case BUILT_IN_ASSUME_ALIGNED:
4865 case BUILT_IN_FREE:
4866 return;
4867 /* Trampolines are of no interest to us. */
4868 case BUILT_IN_INIT_TRAMPOLINE:
4869 case BUILT_IN_ADJUST_TRAMPOLINE:
4870 return;
4871 case BUILT_IN_VA_START:
4872 case BUILT_IN_VA_END:
4873 return;
4874 /* printf-style functions may have hooks to set pointers to
4875 point to somewhere into the generated string. Leave them
4876 for a later exercise... */
4877 default:
4878 /* Fallthru to general call handling. */;
4879 }
4880
4881 /* Parameters passed by value are used. */
4882 lhs = get_function_part_constraint (fi, fi_uses);
4883 for (i = 0; i < gimple_call_num_args (t); i++)
4884 {
4885 struct constraint_expr *rhsp;
4886 tree arg = gimple_call_arg (t, i);
4887
4888 if (TREE_CODE (arg) == SSA_NAME
4889 || is_gimple_min_invariant (arg))
4890 continue;
4891
4892 get_constraint_for_address_of (arg, &rhsc);
4893 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4894 process_constraint (new_constraint (lhs, *rhsp));
4895 rhsc.release ();
4896 }
4897
4898 /* Build constraints for propagating clobbers/uses along the
4899 callgraph edges. */
4900 cfi = get_fi_for_callee (t);
4901 if (cfi->id == anything_id)
4902 {
4903 if (gimple_vdef (t))
4904 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4905 anything_id);
4906 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4907 anything_id);
4908 return;
4909 }
4910
4911 /* For callees without function info (that's external functions),
4912 ESCAPED is clobbered and used. */
4913 if (gimple_call_fndecl (t)
4914 && !cfi->is_fn_info)
4915 {
4916 varinfo_t vi;
4917
4918 if (gimple_vdef (t))
4919 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4920 escaped_id);
4921 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4922
4923 /* Also honor the call statement use/clobber info. */
4924 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4925 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4926 vi->id);
4927 if ((vi = lookup_call_use_vi (t)) != NULL)
4928 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4929 vi->id);
4930 return;
4931 }
4932
4933 /* Otherwise the caller clobbers and uses what the callee does.
4934 ??? This should use a new complex constraint that filters
4935 local variables of the callee. */
4936 if (gimple_vdef (t))
4937 {
4938 lhs = get_function_part_constraint (fi, fi_clobbers);
4939 rhs = get_function_part_constraint (cfi, fi_clobbers);
4940 process_constraint (new_constraint (lhs, rhs));
4941 }
4942 lhs = get_function_part_constraint (fi, fi_uses);
4943 rhs = get_function_part_constraint (cfi, fi_uses);
4944 process_constraint (new_constraint (lhs, rhs));
4945 }
4946 else if (gimple_code (t) == GIMPLE_ASM)
4947 {
4948 /* ??? Ick. We can do better. */
4949 if (gimple_vdef (t))
4950 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4951 anything_id);
4952 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4953 anything_id);
4954 }
4955
4956 rhsc.release ();
4957 }
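/* Illustrative sketch (editorial addition): within a function fn, a
   store through a pointer (*p = ...) adds the memory the store may
   touch to fn's fi_clobbers set, a read (... = *q) adds to fi_uses,
   and a call to a known callee copies the callee's clobbers/uses
   into fn's; automatic variables of fn itself are skipped.  "fn",
   "p" and "q" are made-up names.  */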
4958
4959
4960 /* Find the first varinfo in the same variable as START that overlaps with
4961 OFFSET. Return NULL if we can't find one. */
4962
4963 static varinfo_t
4964 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4965 {
4966 /* If the offset is outside of the variable, bail out. */
4967 if (offset >= start->fullsize)
4968 return NULL;
4969
4970 /* If we cannot reach offset from start, look up the first field
4971 and start from there. */
4972 if (start->offset > offset)
4973 start = lookup_vi_for_tree (start->decl);
4974
4975 while (start)
4976 {
4977 /* We may not find a variable in the field list with the actual
4978 offset when we have glommed a structure to a variable.
4979 In that case, however, offset should still be within the size
4980 of the variable. */
4981 if (offset >= start->offset
4982 && (offset - start->offset) < start->size)
4983 return start;
4984
4985 start = start->next;
4986 }
4987
4988 return NULL;
4989 }
4990
4991 /* Find the first varinfo in the same variable as START that overlaps with
4992 OFFSET. If there is no such varinfo the varinfo directly preceding
4993 OFFSET is returned. */
4994
4995 static varinfo_t
4996 first_or_preceding_vi_for_offset (varinfo_t start,
4997 unsigned HOST_WIDE_INT offset)
4998 {
4999 /* If we cannot reach offset from start, look up the first field
5000 and start from there. */
5001 if (start->offset > offset)
5002 start = lookup_vi_for_tree (start->decl);
5003
5004 /* We may not find a variable in the field list with the actual
5005 offset when we have glommed a structure to a variable.
5006 In that case, however, offset should still be within the size
5007 of the variable.
5008 If we get beyond the offset we look for, return the field
5009 directly preceding it, which may be the last field. */
5010 while (start->next
5011 && offset >= start->offset
5012 && !((offset - start->offset) < start->size))
5013 start = start->next;
5014
5015 return start;
5016 }
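/* Illustrative sketch (editorial addition): for a variable with
   sub-fields at bit offsets 0, 32 and 64,
   first_vi_for_offset (v, 40) returns the field at offset 32 when 40
   falls inside it and NULL when the offset is past the variable,
   while first_or_preceding_vi_for_offset falls back to the field
   directly preceding the queried offset.  "v" is a made-up name.  */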
5017
5018
5019 /* This structure is used during pushing fields onto the fieldstack
5020 to track the offset of the field, since bitpos_of_field gives it
5021 relative to its immediate containing type, and we want it relative
5022 to the ultimate containing object. */
5023
5024 struct fieldoff
5025 {
5026 /* Offset from the base of the base containing object to this field. */
5027 HOST_WIDE_INT offset;
5028
5029 /* Size, in bits, of the field. */
5030 unsigned HOST_WIDE_INT size;
5031
5032 unsigned has_unknown_size : 1;
5033
5034 unsigned must_have_pointers : 1;
5035
5036 unsigned may_have_pointers : 1;
5037
5038 unsigned only_restrict_pointers : 1;
5039 };
5040 typedef struct fieldoff fieldoff_s;
5041
5042
5043 /* qsort comparison function for two fieldoff's PA and PB. */
5044
5045 static int
5046 fieldoff_compare (const void *pa, const void *pb)
5047 {
5048 const fieldoff_s *foa = (const fieldoff_s *)pa;
5049 const fieldoff_s *fob = (const fieldoff_s *)pb;
5050 unsigned HOST_WIDE_INT foasize, fobsize;
5051
5052 if (foa->offset < fob->offset)
5053 return -1;
5054 else if (foa->offset > fob->offset)
5055 return 1;
5056
5057 foasize = foa->size;
5058 fobsize = fob->size;
5059 if (foasize < fobsize)
5060 return -1;
5061 else if (foasize > fobsize)
5062 return 1;
5063 return 0;
5064 }
5065
5066 /* Sort a fieldstack according to the field offset and sizes. */
5067 static void
5068 sort_fieldstack (vec<fieldoff_s> fieldstack)
5069 {
5070 fieldstack.qsort (fieldoff_compare);
5071 }
5072
5073 /* Return true if T is a type that can have subvars. */
5074
5075 static inline bool
5076 type_can_have_subvars (const_tree t)
5077 {
5078 /* Aggregates without overlapping fields can have subvars. */
5079 return TREE_CODE (t) == RECORD_TYPE;
5080 }
5081
5082 /* Return true if V is a tree that we can have subvars for.
5083 Normally, this is any aggregate type. Also complex
5084 types which are not gimple registers can have subvars. */
5085
5086 static inline bool
5087 var_can_have_subvars (const_tree v)
5088 {
5089 /* Volatile variables should never have subvars. */
5090 if (TREE_THIS_VOLATILE (v))
5091 return false;
5092
5093 /* Non decls or memory tags can never have subvars. */
5094 if (!DECL_P (v))
5095 return false;
5096
5097 return type_can_have_subvars (TREE_TYPE (v));
5098 }
5099
5100 /* Return true if T is a type that must contain pointers. */
5101
5102 static bool
5103 type_must_have_pointers (tree type)
5104 {
5105 if (POINTER_TYPE_P (type))
5106 return true;
5107
5108 if (TREE_CODE (type) == ARRAY_TYPE)
5109 return type_must_have_pointers (TREE_TYPE (type));
5110
5111 /* A function or method can have pointers as arguments, so track
5112 those separately. */
5113 if (TREE_CODE (type) == FUNCTION_TYPE
5114 || TREE_CODE (type) == METHOD_TYPE)
5115 return true;
5116
5117 return false;
5118 }
5119
5120 static bool
5121 field_must_have_pointers (tree t)
5122 {
5123 return type_must_have_pointers (TREE_TYPE (t));
5124 }
5125
5126 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5127 the fields of TYPE onto fieldstack, recording their offsets along
5128 the way.
5129
5130 OFFSET is used to keep track of the offset in this entire
5131 structure, rather than just the immediately containing structure.
5132 Returns false if the caller is supposed to handle the field we
5133 recursed for. */
5134
5135 static bool
5136 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5137 HOST_WIDE_INT offset)
5138 {
5139 tree field;
5140 bool empty_p = true;
5141
5142 if (TREE_CODE (type) != RECORD_TYPE)
5143 return false;
5144
5145 /* If the vector of fields is growing too big, bail out early.
5146 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
5147 make sure this check fails. */
5148 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5149 return false;
5150
5151 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5152 if (TREE_CODE (field) == FIELD_DECL)
5153 {
5154 bool push = false;
5155 HOST_WIDE_INT foff = bitpos_of_field (field);
5156
5157 if (!var_can_have_subvars (field)
5158 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5159 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5160 push = true;
5161 else if (!push_fields_onto_fieldstack
5162 (TREE_TYPE (field), fieldstack, offset + foff)
5163 && (DECL_SIZE (field)
5164 && !integer_zerop (DECL_SIZE (field))))
5165 /* Empty structures may have actual size, like in C++. So
5166 if we didn't push any subfields and the size is
5167 nonzero, push the field itself onto the stack. */
5168 push = true;
5169
5170 if (push)
5171 {
5172 fieldoff_s *pair = NULL;
5173 bool has_unknown_size = false;
5174 bool must_have_pointers_p;
5175
5176 if (!fieldstack->is_empty ())
5177 pair = &fieldstack->last ();
5178
5179 /* If there isn't anything at offset zero, create something. */
5180 if (!pair
5181 && offset + foff != 0)
5182 {
5183 fieldoff_s e = {0, offset + foff, false, false, false, false};
5184 pair = fieldstack->safe_push (e);
5185 }
5186
5187 if (!DECL_SIZE (field)
5188 || !host_integerp (DECL_SIZE (field), 1))
5189 has_unknown_size = true;
5190
5191 /* If adjacent fields do not contain pointers merge them. */
5192 must_have_pointers_p = field_must_have_pointers (field);
5193 if (pair
5194 && !has_unknown_size
5195 && !must_have_pointers_p
5196 && !pair->must_have_pointers
5197 && !pair->has_unknown_size
5198 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5199 {
5200 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5201 }
5202 else
5203 {
5204 fieldoff_s e;
5205 e.offset = offset + foff;
5206 e.has_unknown_size = has_unknown_size;
5207 if (!has_unknown_size)
5208 e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
5209 else
5210 e.size = -1;
5211 e.must_have_pointers = must_have_pointers_p;
5212 e.may_have_pointers = true;
5213 e.only_restrict_pointers
5214 = (!has_unknown_size
5215 && POINTER_TYPE_P (TREE_TYPE (field))
5216 && TYPE_RESTRICT (TREE_TYPE (field)));
5217 fieldstack->safe_push (e);
5218 }
5219 }
5220
5221 empty_p = false;
5222 }
5223
5224 return !empty_p;
5225 }
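/* Illustrative sketch (editorial addition): for

     struct S { int *p; int i; int j; };

   the walk above pushes a field for p (which must have pointers)
   and, because adjacent fields without pointers are merged, a single
   field covering both i and j.  "S", "p", "i" and "j" are made-up
   names.  */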
5226
5227 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5228 if it is a varargs function. */
5229
5230 static unsigned int
5231 count_num_arguments (tree decl, bool *is_varargs)
5232 {
5233 unsigned int num = 0;
5234 tree t;
5235
5236 /* Capture named arguments for K&R functions. They do not
5237 have a prototype and thus no TYPE_ARG_TYPES. */
5238 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5239 ++num;
5240
5241 /* Check if the function has variadic arguments. */
5242 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5243 if (TREE_VALUE (t) == void_type_node)
5244 break;
5245 if (!t)
5246 *is_varargs = true;
5247
5248 return num;
5249 }
5250
5251 /* Create function information for DECL, using NAME, and return the
5252 varinfo we've created for the function. */
5253
5254 static varinfo_t
5255 create_function_info_for (tree decl, const char *name)
5256 {
5257 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5258 varinfo_t vi, prev_vi;
5259 tree arg;
5260 unsigned int i;
5261 bool is_varargs = false;
5262 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5263
5264 /* Create the variable info. */
5265
5266 vi = new_var_info (decl, name);
5267 vi->offset = 0;
5268 vi->size = 1;
5269 vi->fullsize = fi_parm_base + num_args;
5270 vi->is_fn_info = 1;
5271 vi->may_have_pointers = false;
5272 if (is_varargs)
5273 vi->fullsize = ~0;
5274 insert_vi_for_tree (vi->decl, vi);
5275
5276 prev_vi = vi;
5277
5278 /* Create a variable for things the function clobbers and one for
5279 things the function uses. */
5280 {
5281 varinfo_t clobbervi, usevi;
5282 const char *newname;
5283 char *tempname;
5284
5285 asprintf (&tempname, "%s.clobber", name);
5286 newname = ggc_strdup (tempname);
5287 free (tempname);
5288
5289 clobbervi = new_var_info (NULL, newname);
5290 clobbervi->offset = fi_clobbers;
5291 clobbervi->size = 1;
5292 clobbervi->fullsize = vi->fullsize;
5293 clobbervi->is_full_var = true;
5294 clobbervi->is_global_var = false;
5295 gcc_assert (prev_vi->offset < clobbervi->offset);
5296 prev_vi->next = clobbervi;
5297 prev_vi = clobbervi;
5298
5299 asprintf (&tempname, "%s.use", name);
5300 newname = ggc_strdup (tempname);
5301 free (tempname);
5302
5303 usevi = new_var_info (NULL, newname);
5304 usevi->offset = fi_uses;
5305 usevi->size = 1;
5306 usevi->fullsize = vi->fullsize;
5307 usevi->is_full_var = true;
5308 usevi->is_global_var = false;
5309 gcc_assert (prev_vi->offset < usevi->offset);
5310 prev_vi->next = usevi;
5311 prev_vi = usevi;
5312 }
5313
5314 /* And one for the static chain. */
5315 if (fn->static_chain_decl != NULL_TREE)
5316 {
5317 varinfo_t chainvi;
5318 const char *newname;
5319 char *tempname;
5320
5321 asprintf (&tempname, "%s.chain", name);
5322 newname = ggc_strdup (tempname);
5323 free (tempname);
5324
5325 chainvi = new_var_info (fn->static_chain_decl, newname);
5326 chainvi->offset = fi_static_chain;
5327 chainvi->size = 1;
5328 chainvi->fullsize = vi->fullsize;
5329 chainvi->is_full_var = true;
5330 chainvi->is_global_var = false;
5331 gcc_assert (prev_vi->offset < chainvi->offset);
5332 prev_vi->next = chainvi;
5333 prev_vi = chainvi;
5334 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5335 }
5336
5337 /* Create a variable for the return var. */
5338 if (DECL_RESULT (decl) != NULL
5339 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5340 {
5341 varinfo_t resultvi;
5342 const char *newname;
5343 char *tempname;
5344 tree resultdecl = decl;
5345
5346 if (DECL_RESULT (decl))
5347 resultdecl = DECL_RESULT (decl);
5348
5349 asprintf (&tempname, "%s.result", name);
5350 newname = ggc_strdup (tempname);
5351 free (tempname);
5352
5353 resultvi = new_var_info (resultdecl, newname);
5354 resultvi->offset = fi_result;
5355 resultvi->size = 1;
5356 resultvi->fullsize = vi->fullsize;
5357 resultvi->is_full_var = true;
5358 if (DECL_RESULT (decl))
5359 resultvi->may_have_pointers = true;
5360 gcc_assert (prev_vi->offset < resultvi->offset);
5361 prev_vi->next = resultvi;
5362 prev_vi = resultvi;
5363 if (DECL_RESULT (decl))
5364 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5365 }
5366
5367 /* Set up variables for each argument. */
5368 arg = DECL_ARGUMENTS (decl);
5369 for (i = 0; i < num_args; i++)
5370 {
5371 varinfo_t argvi;
5372 const char *newname;
5373 char *tempname;
5374 tree argdecl = decl;
5375
5376 if (arg)
5377 argdecl = arg;
5378
5379 asprintf (&tempname, "%s.arg%d", name, i);
5380 newname = ggc_strdup (tempname);
5381 free (tempname);
5382
5383 argvi = new_var_info (argdecl, newname);
5384 argvi->offset = fi_parm_base + i;
5385 argvi->size = 1;
5386 argvi->is_full_var = true;
5387 argvi->fullsize = vi->fullsize;
5388 if (arg)
5389 argvi->may_have_pointers = true;
5390 gcc_assert (prev_vi->offset < argvi->offset);
5391 prev_vi->next = argvi;
5392 prev_vi = argvi;
5393 if (arg)
5394 {
5395 insert_vi_for_tree (arg, argvi);
5396 arg = DECL_CHAIN (arg);
5397 }
5398 }
5399
5400 /* Add one representative for all further args. */
5401 if (is_varargs)
5402 {
5403 varinfo_t argvi;
5404 const char *newname;
5405 char *tempname;
5406 tree decl;
5407
5408 asprintf (&tempname, "%s.varargs", name);
5409 newname = ggc_strdup (tempname);
5410 free (tempname);
5411
5412 /* We need something that can be pointed to for va_start. */
5413 decl = build_fake_var_decl (ptr_type_node);
5414
5415 argvi = new_var_info (decl, newname);
5416 argvi->offset = fi_parm_base + num_args;
5417 argvi->size = ~0;
5418 argvi->is_full_var = true;
5419 argvi->is_heap_var = true;
5420 argvi->fullsize = vi->fullsize;
5421 gcc_assert (prev_vi->offset < argvi->offset);
5422 prev_vi->next = argvi;
5423 prev_vi = argvi;
5424 }
5425
5426 return vi;
5427 }
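/* Illustrative sketch (editorial addition): for

     int *f (int *a, int *b);

   the code above builds the chain f, f.clobber, f.use, f.result,
   f.arg0 and f.arg1 (plus f.chain for nested functions and
   f.varargs for variadic ones), each at its fi_* offset so that
   get_function_part_constraint can address the individual parts.  */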
5428
5429
5430 /* Return true if FIELDSTACK contains fields that overlap.
5431 FIELDSTACK is assumed to be sorted by offset. */
5432
5433 static bool
5434 check_for_overlaps (vec<fieldoff_s> fieldstack)
5435 {
5436 fieldoff_s *fo = NULL;
5437 unsigned int i;
5438 HOST_WIDE_INT lastoffset = -1;
5439
5440 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5441 {
5442 if (fo->offset == lastoffset)
5443 return true;
5444 lastoffset = fo->offset;
5445 }
5446 return false;
5447 }
5448
5449 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5450 This will also create any varinfo structures necessary for fields
5451 of DECL. */
5452
5453 static varinfo_t
5454 create_variable_info_for_1 (tree decl, const char *name)
5455 {
5456 varinfo_t vi, newvi;
5457 tree decl_type = TREE_TYPE (decl);
5458 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5459 vec<fieldoff_s> fieldstack = vNULL;
5460 fieldoff_s *fo;
5461 unsigned int i;
5462
5463 if (!declsize
5464 || !host_integerp (declsize, 1))
5465 {
5466 vi = new_var_info (decl, name);
5467 vi->offset = 0;
5468 vi->size = ~0;
5469 vi->fullsize = ~0;
5470 vi->is_unknown_size_var = true;
5471 vi->is_full_var = true;
5472 vi->may_have_pointers = true;
5473 return vi;
5474 }
5475
5476 /* Collect field information. */
5477 if (use_field_sensitive
5478 && var_can_have_subvars (decl)
5479 /* ??? Force us to not use subfields for global initializers
5480 in IPA mode. Else we'd have to parse arbitrary initializers. */
5481 && !(in_ipa_mode
5482 && is_global_var (decl)
5483 && DECL_INITIAL (decl)))
5484 {
5485 fieldoff_s *fo = NULL;
5486 bool notokay = false;
5487 unsigned int i;
5488
5489 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5490
5491 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5492 if (fo->has_unknown_size
5493 || fo->offset < 0)
5494 {
5495 notokay = true;
5496 break;
5497 }
5498
5499 /* We can't sort them if we have a field with a variable-sized type,
5500 which will make notokay = true. In that case, we are going to return
5501 without creating varinfos for the fields anyway, so sorting them is a
5502 waste to boot. */
5503 if (!notokay)
5504 {
5505 sort_fieldstack (fieldstack);
5506 /* Due to some C++ FE issues, like PR 22488, we might end up
5507 with what appear to be overlapping fields even though they,
5508 in reality, do not overlap. Until the C++ FE is fixed,
5509 we will simply disable field-sensitivity for these cases. */
5510 notokay = check_for_overlaps (fieldstack);
5511 }
5512
5513 if (notokay)
5514 fieldstack.release ();
5515 }
5516
5517 /* If we didn't end up collecting sub-variables create a full
5518 variable for the decl. */
5519 if (fieldstack.length () <= 1
5520 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5521 {
5522 vi = new_var_info (decl, name);
5523 vi->offset = 0;
5524 vi->may_have_pointers = true;
5525 vi->fullsize = TREE_INT_CST_LOW (declsize);
5526 vi->size = vi->fullsize;
5527 vi->is_full_var = true;
5528 fieldstack.release ();
5529 return vi;
5530 }
5531
5532 vi = new_var_info (decl, name);
5533 vi->fullsize = TREE_INT_CST_LOW (declsize);
5534 for (i = 0, newvi = vi;
5535 fieldstack.iterate (i, &fo);
5536 ++i, newvi = newvi->next)
5537 {
5538 const char *newname = "NULL";
5539 char *tempname;
5540
5541 if (dump_file)
5542 {
5543 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5544 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5545 newname = ggc_strdup (tempname);
5546 free (tempname);
5547 }
5548 newvi->name = newname;
5549 newvi->offset = fo->offset;
5550 newvi->size = fo->size;
5551 newvi->fullsize = vi->fullsize;
5552 newvi->may_have_pointers = fo->may_have_pointers;
5553 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5554 if (i + 1 < fieldstack.length ())
5555 newvi->next = new_var_info (decl, name);
5556 }
5557
5558 fieldstack.release ();
5559
5560 return vi;
5561 }
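/* Illustrative sketch (editorial addition): for a declaration

     struct { int *p; float f; } x;

   the code above creates one sub-variable per field, chained via
   ->next and (when dumping) named "x.<offset>+<size>" with offset
   and size in bits, instead of a single blob covering all of x.  */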
5562
5563 static unsigned int
5564 create_variable_info_for (tree decl, const char *name)
5565 {
5566 varinfo_t vi = create_variable_info_for_1 (decl, name);
5567 unsigned int id = vi->id;
5568
5569 insert_vi_for_tree (decl, vi);
5570
5571 if (TREE_CODE (decl) != VAR_DECL)
5572 return id;
5573
5574 /* Create initial constraints for globals. */
5575 for (; vi; vi = vi->next)
5576 {
5577 if (!vi->may_have_pointers
5578 || !vi->is_global_var)
5579 continue;
5580
5581 /* Mark global restrict qualified pointers. */
5582 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5583 && TYPE_RESTRICT (TREE_TYPE (decl)))
5584 || vi->only_restrict_pointers)
5585 {
5586 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5587 continue;
5588 }
5589
5590 /* In non-IPA mode (or for hard registers) the initializer from nonlocal is all we need. */
5591 if (!in_ipa_mode
5592 || DECL_HARD_REGISTER (decl))
5593 make_copy_constraint (vi, nonlocal_id);
5594
5595 /* In IPA mode parse the initializer and generate proper constraints
5596 for it. */
5597 else
5598 {
5599 struct varpool_node *vnode = varpool_get_node (decl);
5600
5601 /* For escaped variables initialize them from nonlocal. */
5602 if (!varpool_all_refs_explicit_p (vnode))
5603 make_copy_constraint (vi, nonlocal_id);
5604
5605 /* If this is a global variable with an initializer and we are in
5606 IPA mode generate constraints for it. */
5607 if (DECL_INITIAL (decl)
5608 && vnode->analyzed)
5609 {
5610 vec<ce_s> rhsc = vNULL;
5611 struct constraint_expr lhs, *rhsp;
5612 unsigned i;
5613 get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5614 lhs.var = vi->id;
5615 lhs.offset = 0;
5616 lhs.type = SCALAR;
5617 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5618 process_constraint (new_constraint (lhs, *rhsp));
5619 /* If this is a variable that escapes from the unit
5620 the initializer escapes as well. */
5621 if (!varpool_all_refs_explicit_p (vnode))
5622 {
5623 lhs.var = escaped_id;
5624 lhs.offset = 0;
5625 lhs.type = SCALAR;
5626 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5627 process_constraint (new_constraint (lhs, *rhsp));
5628 }
5629 rhsc.release ();
5630 }
5631 }
5632 }
5633
5634 return id;
5635 }
5636
5637 /* Print out the points-to solution for VAR to FILE. */
5638
5639 static void
5640 dump_solution_for_var (FILE *file, unsigned int var)
5641 {
5642 varinfo_t vi = get_varinfo (var);
5643 unsigned int i;
5644 bitmap_iterator bi;
5645
5646 /* Dump the solution for unified vars anyway; this avoids difficulties
5647 in scanning dumps in the testsuite. */
5648 fprintf (file, "%s = { ", vi->name);
5649 vi = get_varinfo (find (var));
5650 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5651 fprintf (file, "%s ", get_varinfo (i)->name);
5652 fprintf (file, "}");
5653
5654 /* But note when the variable was unified. */
5655 if (vi->id != var)
5656 fprintf (file, " same as %s", vi->name);
5657
5658 fprintf (file, "\n");
5659 }
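/* Illustrative sketch (editorial addition) of the dump format
   produced above:

     p = { NONLOCAL ESCAPED a }

   with a trailing " same as <name>" note when the variable was
   unified with another one during solving.  Names are made up.  */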
5660
5661 /* Print the points-to solution for VAR to stdout. */
5662
5663 DEBUG_FUNCTION void
5664 debug_solution_for_var (unsigned int var)
5665 {
5666 dump_solution_for_var (stdout, var);
5667 }
5668
5669 /* Create varinfo structures for all of the variables in the
5670 function for intraprocedural mode. */
5671
5672 static void
5673 intra_create_variable_infos (void)
5674 {
5675 tree t;
5676
5677 /* For each incoming pointer argument arg, create the constraint ARG
5678 = NONLOCAL, or a dummy variable if it is a restrict-qualified
5679 passed-by-reference argument. */
5680 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5681 {
5682 varinfo_t p = get_vi_for_tree (t);
5683
5684 /* For restrict qualified pointers to objects passed by
5685 reference build a real representative for the pointed-to object.
5686 Treat restrict qualified references the same. */
5687 if (TYPE_RESTRICT (TREE_TYPE (t))
5688 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5689 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5690 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5691 {
5692 struct constraint_expr lhsc, rhsc;
5693 varinfo_t vi;
5694 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5695 DECL_EXTERNAL (heapvar) = 1;
5696 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5697 insert_vi_for_tree (heapvar, vi);
5698 lhsc.var = p->id;
5699 lhsc.type = SCALAR;
5700 lhsc.offset = 0;
5701 rhsc.var = vi->id;
5702 rhsc.type = ADDRESSOF;
5703 rhsc.offset = 0;
5704 process_constraint (new_constraint (lhsc, rhsc));
5705 for (; vi; vi = vi->next)
5706 if (vi->may_have_pointers)
5707 {
5708 if (vi->only_restrict_pointers)
5709 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5710 else
5711 make_copy_constraint (vi, nonlocal_id);
5712 }
5713 continue;
5714 }
5715
5716 if (POINTER_TYPE_P (TREE_TYPE (t))
5717 && TYPE_RESTRICT (TREE_TYPE (t)))
5718 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5719 else
5720 {
5721 for (; p; p = p->next)
5722 {
5723 if (p->only_restrict_pointers)
5724 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5725 else if (p->may_have_pointers)
5726 make_constraint_from (p, nonlocal_id);
5727 }
5728 }
5729 }
5730
5731 /* Add a constraint for a result decl that is passed by reference. */
5732 if (DECL_RESULT (cfun->decl)
5733 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5734 {
5735 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5736
5737 for (p = result_vi; p; p = p->next)
5738 make_constraint_from (p, nonlocal_id);
5739 }
5740
5741 /* Add a constraint for the incoming static chain parameter. */
5742 if (cfun->static_chain_decl != NULL_TREE)
5743 {
5744 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5745
5746 for (p = chain_vi; p; p = p->next)
5747 make_constraint_from (p, nonlocal_id);
5748 }
5749 }
5750
5751 /* Structure used to put solution bitmaps in a hashtable so they can
5752 be shared among variables with the same points-to set. */
5753
5754 typedef struct shared_bitmap_info
5755 {
5756 bitmap pt_vars;
5757 hashval_t hashcode;
5758 } *shared_bitmap_info_t;
5759 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5760
5761 static htab_t shared_bitmap_table;
5762
5763 /* Hash function for a shared_bitmap_info_t. */
5764
5765 static hashval_t
5766 shared_bitmap_hash (const void *p)
5767 {
5768 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5769 return bi->hashcode;
5770 }
5771
5772 /* Equality function for two shared_bitmap_info_t's. */
5773
5774 static int
5775 shared_bitmap_eq (const void *p1, const void *p2)
5776 {
5777 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5778 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5779 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5780 }
5781
5782 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5783 existing instance if there is one, NULL otherwise. */
5784
5785 static bitmap
5786 shared_bitmap_lookup (bitmap pt_vars)
5787 {
5788 void **slot;
5789 struct shared_bitmap_info sbi;
5790
5791 sbi.pt_vars = pt_vars;
5792 sbi.hashcode = bitmap_hash (pt_vars);
5793
5794 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5795 sbi.hashcode, NO_INSERT);
5796 if (!slot)
5797 return NULL;
5798 else
5799 return ((shared_bitmap_info_t) *slot)->pt_vars;
5800 }
5801
5802
5803 /* Add a bitmap to the shared bitmap hashtable. */
5804
5805 static void
5806 shared_bitmap_add (bitmap pt_vars)
5807 {
5808 void **slot;
5809 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5810
5811 sbi->pt_vars = pt_vars;
5812 sbi->hashcode = bitmap_hash (pt_vars);
5813
5814 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5815 sbi->hashcode, INSERT);
5816 gcc_assert (!*slot);
5817 *slot = (void *) sbi;
5818 }
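/* The intended usage of this pair, as in find_what_var_points_to below:
   build a candidate set, then either register it or reuse an existing
   equal bitmap:

     result = shared_bitmap_lookup (candidate);
     if (!result)
       shared_bitmap_add (candidate);   (candidate becomes the shared copy)
     else
       (discard candidate and use result)

   so that identical points-to sets are stored only once.  */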
5819
5820
5821 /* Set bits in INTO corresponding to the variable uids in solution set FROM, noting in *PT whether that includes a global variable. */
5822
5823 static void
5824 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5825 {
5826 unsigned int i;
5827 bitmap_iterator bi;
5828
5829 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5830 {
5831 varinfo_t vi = get_varinfo (i);
5832
5833 /* The only artificial variables that are allowed in a may-alias
5834 set are heap variables. */
5835 if (vi->is_artificial_var && !vi->is_heap_var)
5836 continue;
5837
5838 if (TREE_CODE (vi->decl) == VAR_DECL
5839 || TREE_CODE (vi->decl) == PARM_DECL
5840 || TREE_CODE (vi->decl) == RESULT_DECL)
5841 {
5842 /* If we are in IPA mode we will not recompute points-to
5843 sets after inlining so make sure they stay valid. */
5844 if (in_ipa_mode
5845 && !DECL_PT_UID_SET_P (vi->decl))
5846 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5847
5848 /* Add the decl to the points-to set. Note that the points-to
5849 set contains global variables. */
5850 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5851 if (vi->is_global_var)
5852 pt->vars_contains_global = true;
5853 }
5854 }
5855 }
5856
5857
5858 /* Compute the points-to solution *PT for the variable ORIG_VI. */
5859
5860 static void
5861 find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5862 {
5863 unsigned int i;
5864 bitmap_iterator bi;
5865 bitmap finished_solution;
5866 bitmap result;
5867 varinfo_t vi;
5868
5869 memset (pt, 0, sizeof (struct pt_solution));
5870
5871 /* This variable may have been collapsed; let's get the real
5872 variable. */
5873 vi = get_varinfo (find (orig_vi->id));
5874
5875 /* Translate artificial variables into SSA_NAME_PTR_INFO
5876 attributes. */
5877 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5878 {
5879 varinfo_t vi = get_varinfo (i);
5880
5881 if (vi->is_artificial_var)
5882 {
5883 if (vi->id == nothing_id)
5884 pt->null = 1;
5885 else if (vi->id == escaped_id)
5886 {
5887 if (in_ipa_mode)
5888 pt->ipa_escaped = 1;
5889 else
5890 pt->escaped = 1;
5891 }
5892 else if (vi->id == nonlocal_id)
5893 pt->nonlocal = 1;
5894 else if (vi->is_heap_var)
5895 /* We represent heapvars in the points-to set properly. */
5896 ;
5897 else if (vi->id == readonly_id)
5898 /* Nobody cares. */
5899 ;
5900 else if (vi->id == anything_id
5901 || vi->id == integer_id)
5902 pt->anything = 1;
5903 }
5904 }
5905
5906 /* Instead of doing extra work, simply do not create
5907 elaborate points-to information for pt_anything pointers. */
5908 if (pt->anything)
5909 return;
5910
5911 /* Share the final set of variables when possible. */
5912 finished_solution = BITMAP_GGC_ALLOC ();
5913 stats.points_to_sets_created++;
5914
5915 set_uids_in_ptset (finished_solution, vi->solution, pt);
5916 result = shared_bitmap_lookup (finished_solution);
5917 if (!result)
5918 {
5919 shared_bitmap_add (finished_solution);
5920 pt->vars = finished_solution;
5921 }
5922 else
5923 {
5924 pt->vars = result;
5925 bitmap_clear (finished_solution);
5926 }
5927 }
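/* A worked example with invented names: if the solution bitmap of VI's
   representative contains { NULL, NONLOCAL, ESCAPED, x, HEAP.8 }, then
   in non-IPA mode the resulting *PT gets pt->null = 1, pt->nonlocal = 1
   and pt->escaped = 1, while pt->vars becomes { x, HEAP.8 }, possibly
   a bitmap shared with other pointers that computed the same set.  */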
5928
5929 /* Given a pointer variable P, fill in its points-to set. */
5930
5931 static void
5932 find_what_p_points_to (tree p)
5933 {
5934 struct ptr_info_def *pi;
5935 tree lookup_p = p;
5936 varinfo_t vi;
5937
5938 /* For parameters, get at the points-to set for the actual parm
5939 decl. */
5940 if (TREE_CODE (p) == SSA_NAME
5941 && SSA_NAME_IS_DEFAULT_DEF (p)
5942 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5943 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
5944 lookup_p = SSA_NAME_VAR (p);
5945
5946 vi = lookup_vi_for_tree (lookup_p);
5947 if (!vi)
5948 return;
5949
5950 pi = get_ptr_info (p);
5951 find_what_var_points_to (vi, &pi->pt);
5952 }
5953
5954
5955 /* Query statistics for points-to solutions. */
5956
5957 static struct {
5958 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5959 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5960 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5961 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5962 } pta_stats;
5963
5964 void
5965 dump_pta_stats (FILE *s)
5966 {
5967 fprintf (s, "\nPTA query stats:\n");
5968 fprintf (s, " pt_solution_includes: "
5969 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5970 HOST_WIDE_INT_PRINT_DEC" queries\n",
5971 pta_stats.pt_solution_includes_no_alias,
5972 pta_stats.pt_solution_includes_no_alias
5973 + pta_stats.pt_solution_includes_may_alias);
5974 fprintf (s, " pt_solutions_intersect: "
5975 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5976 HOST_WIDE_INT_PRINT_DEC" queries\n",
5977 pta_stats.pt_solutions_intersect_no_alias,
5978 pta_stats.pt_solutions_intersect_no_alias
5979 + pta_stats.pt_solutions_intersect_may_alias);
5980 }
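/* Sample of the output format (numbers invented):

     PTA query stats:
       pt_solution_includes: 120 disambiguations, 340 queries
       pt_solutions_intersect: 88 disambiguations, 210 queries

   Disambiguations count the no-alias answers; queries are the no-alias
   plus may-alias totals.  */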
5981
5982
5983 /* Reset the points-to solution *PT to a conservative default
5984 (point to anything). */
5985
5986 void
5987 pt_solution_reset (struct pt_solution *pt)
5988 {
5989 memset (pt, 0, sizeof (struct pt_solution));
5990 pt->anything = true;
5991 }
5992
5993 /* Set the points-to solution *PT to point only to the variables
5994 in VARS. VARS_CONTAINS_GLOBAL specifies whether that set
5995 contains global variables. */
5997
5998 void
5999 pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
6000 {
6001 memset (pt, 0, sizeof (struct pt_solution));
6002 pt->vars = vars;
6003 pt->vars_contains_global = vars_contains_global;
6004 }
6005
6006 /* Set the points-to solution *PT to point only to the variable VAR. */
6007
6008 void
6009 pt_solution_set_var (struct pt_solution *pt, tree var)
6010 {
6011 memset (pt, 0, sizeof (struct pt_solution));
6012 pt->vars = BITMAP_GGC_ALLOC ();
6013 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6014 pt->vars_contains_global = is_global_var (var);
6015 }
6016
6017 /* Computes the union of the points-to solutions *DEST and *SRC and
6018 stores the result in *DEST. This changes the points-to bitmap
6019 of *DEST and thus must not be used if that bitmap might be shared.
6020 The points-to bitmaps of *SRC and *DEST will not be shared after
6021 this function if they were not shared before. */
6022
6023 static void
6024 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6025 {
6026 dest->anything |= src->anything;
6027 if (dest->anything)
6028 {
6029 pt_solution_reset (dest);
6030 return;
6031 }
6032
6033 dest->nonlocal |= src->nonlocal;
6034 dest->escaped |= src->escaped;
6035 dest->ipa_escaped |= src->ipa_escaped;
6036 dest->null |= src->null;
6037 dest->vars_contains_global |= src->vars_contains_global;
6038 if (!src->vars)
6039 return;
6040
6041 if (!dest->vars)
6042 dest->vars = BITMAP_GGC_ALLOC ();
6043 bitmap_ior_into (dest->vars, src->vars);
6044 }
6045
6046 /* Return true if the points-to solution *PT is empty. */
6047
6048 bool
6049 pt_solution_empty_p (struct pt_solution *pt)
6050 {
6051 if (pt->anything
6052 || pt->nonlocal)
6053 return false;
6054
6055 if (pt->vars
6056 && !bitmap_empty_p (pt->vars))
6057 return false;
6058
6059 /* If the solution includes ESCAPED, check if that is empty. */
6060 if (pt->escaped
6061 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6062 return false;
6063
6064 /* If the solution includes the IPA ESCAPED solution, check if that is empty. */
6065 if (pt->ipa_escaped
6066 && !pt_solution_empty_p (&ipa_escaped_pt))
6067 return false;
6068
6069 return true;
6070 }
6071
6072 /* Return true if the points-to solution *PT points to only a single var, and
6073 return that var's uid in *UID. */
6074
6075 bool
6076 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6077 {
6078 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6079 || pt->null || pt->vars == NULL
6080 || !bitmap_single_bit_set_p (pt->vars))
6081 return false;
6082
6083 *uid = bitmap_first_set_bit (pt->vars);
6084 return true;
6085 }
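/* For example, a solution built by pt_solution_set_var above qualifies:
   none of the flags tested here are set and exactly one bit is, so this
   returns true with *UID equal to the variable's DECL_PT_UID.  */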
6086
6087 /* Return true if the points-to solution *PT includes global memory. */
6088
6089 bool
6090 pt_solution_includes_global (struct pt_solution *pt)
6091 {
6092 if (pt->anything
6093 || pt->nonlocal
6094 || pt->vars_contains_global)
6095 return true;
6096
6097 if (pt->escaped)
6098 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6099
6100 if (pt->ipa_escaped)
6101 return pt_solution_includes_global (&ipa_escaped_pt);
6102
6103 /* ??? This predicate is not correct for the IPA-PTA solution
6104 as we do not properly distinguish between unit escape points
6105 and global variables. */
6106 if (cfun->gimple_df->ipa_pta)
6107 return true;
6108
6109 return false;
6110 }
6111
6112 /* Return true if the points-to solution *PT includes the variable
6113 declaration DECL. */
6114
6115 static bool
6116 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6117 {
6118 if (pt->anything)
6119 return true;
6120
6121 if (pt->nonlocal
6122 && is_global_var (decl))
6123 return true;
6124
6125 if (pt->vars
6126 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6127 return true;
6128
6129 /* If the solution includes ESCAPED, check it. */
6130 if (pt->escaped
6131 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6132 return true;
6133
6134 /* If the solution includes the IPA ESCAPED solution, check it. */
6135 if (pt->ipa_escaped
6136 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6137 return true;
6138
6139 return false;
6140 }
6141
6142 bool
6143 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6144 {
6145 bool res = pt_solution_includes_1 (pt, decl);
6146 if (res)
6147 ++pta_stats.pt_solution_includes_may_alias;
6148 else
6149 ++pta_stats.pt_solution_includes_no_alias;
6150 return res;
6151 }
6152
6153 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6154 intersection. */
6155
6156 static bool
6157 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6158 {
6159 if (pt1->anything || pt2->anything)
6160 return true;
6161
6162 /* If either points to unknown global memory and the other points to
6163 any global memory, they alias. */
6164 if ((pt1->nonlocal
6165 && (pt2->nonlocal
6166 || pt2->vars_contains_global))
6167 || (pt2->nonlocal
6168 && pt1->vars_contains_global))
6169 return true;
6170
6171 /* Check the escaped solution if required. */
6172 if ((pt1->escaped || pt2->escaped)
6173 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6174 {
6175 /* If both point to escaped memory and that solution
6176 is not empty they alias. */
6177 if (pt1->escaped && pt2->escaped)
6178 return true;
6179
6180 /* If either points to escaped memory see if the escaped solution
6181 intersects with the other. */
6182 if ((pt1->escaped
6183 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
6184 || (pt2->escaped
6185 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
6186 return true;
6187 }
6188
6189 /* Check the escaped solution if required.
6190 ??? Do we need to check the local against the IPA escaped sets? */
6191 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6192 && !pt_solution_empty_p (&ipa_escaped_pt))
6193 {
6194 /* If both point to escaped memory and that solution
6195 is not empty they alias. */
6196 if (pt1->ipa_escaped && pt2->ipa_escaped)
6197 return true;
6198
6199 /* If either points to escaped memory see if the escaped solution
6200 intersects with the other. */
6201 if ((pt1->ipa_escaped
6202 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6203 || (pt2->ipa_escaped
6204 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6205 return true;
6206 }
6207
6208 /* Now both pointers alias if their points-to solutions intersect. */
6209 return (pt1->vars
6210 && pt2->vars
6211 && bitmap_intersect_p (pt1->vars, pt2->vars));
6212 }
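/* For example, vars sets { x } and { x, y } intersect via the final
   bitmap test, and a solution with nonlocal set intersects any solution
   whose vars include a global, per the check at the top.  ESCAPED
   members are expanded through the recursive calls above.  */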
6213
6214 bool
6215 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6216 {
6217 bool res = pt_solutions_intersect_1 (pt1, pt2);
6218 if (res)
6219 ++pta_stats.pt_solutions_intersect_may_alias;
6220 else
6221 ++pta_stats.pt_solutions_intersect_no_alias;
6222 return res;
6223 }
6224
6225
6226 /* Dump points-to information to OUTFILE. */
6227
6228 static void
6229 dump_sa_points_to_info (FILE *outfile)
6230 {
6231 unsigned int i;
6232
6233 fprintf (outfile, "\nPoints-to sets\n\n");
6234
6235 if (dump_flags & TDF_STATS)
6236 {
6237 fprintf (outfile, "Stats:\n");
6238 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6239 fprintf (outfile, "Non-pointer vars: %d\n",
6240 stats.nonpointer_vars);
6241 fprintf (outfile, "Statically unified vars: %d\n",
6242 stats.unified_vars_static);
6243 fprintf (outfile, "Dynamically unified vars: %d\n",
6244 stats.unified_vars_dynamic);
6245 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6246 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6247 fprintf (outfile, "Number of implicit edges: %d\n",
6248 stats.num_implicit_edges);
6249 }
6250
6251 for (i = 0; i < varmap.length (); i++)
6252 {
6253 varinfo_t vi = get_varinfo (i);
6254 if (!vi->may_have_pointers)
6255 continue;
6256 dump_solution_for_var (outfile, i);
6257 }
6258 }
6259
6260
6261 /* Debug points-to information to stderr. */
6262
6263 DEBUG_FUNCTION void
6264 debug_sa_points_to_info (void)
6265 {
6266 dump_sa_points_to_info (stderr);
6267 }
6268
6269
6270 /* Initialize the always-existing constraint variables NULL,
6271 ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
6272
6273 static void
6274 init_base_vars (void)
6275 {
6276 struct constraint_expr lhs, rhs;
6277 varinfo_t var_anything;
6278 varinfo_t var_nothing;
6279 varinfo_t var_readonly;
6280 varinfo_t var_escaped;
6281 varinfo_t var_nonlocal;
6282 varinfo_t var_storedanything;
6283 varinfo_t var_integer;
6284
6285 /* Create the NULL variable, used to represent that a variable points
6286 to NULL. */
6287 var_nothing = new_var_info (NULL_TREE, "NULL");
6288 gcc_assert (var_nothing->id == nothing_id);
6289 var_nothing->is_artificial_var = 1;
6290 var_nothing->offset = 0;
6291 var_nothing->size = ~0;
6292 var_nothing->fullsize = ~0;
6293 var_nothing->is_special_var = 1;
6294 var_nothing->may_have_pointers = 0;
6295 var_nothing->is_global_var = 0;
6296
6297 /* Create the ANYTHING variable, used to represent that a variable
6298 points to some unknown piece of memory. */
6299 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6300 gcc_assert (var_anything->id == anything_id);
6301 var_anything->is_artificial_var = 1;
6302 var_anything->size = ~0;
6303 var_anything->offset = 0;
6304 var_anything->next = NULL;
6305 var_anything->fullsize = ~0;
6306 var_anything->is_special_var = 1;
6307
6308 /* Anything points to anything. This makes deref constraints just
6309 work in the presence of linked lists and other p = *p style loops,
6310 by saying that *ANYTHING = ANYTHING. */
6311 lhs.type = SCALAR;
6312 lhs.var = anything_id;
6313 lhs.offset = 0;
6314 rhs.type = ADDRESSOF;
6315 rhs.var = anything_id;
6316 rhs.offset = 0;
6317
6318 /* This specifically does not use process_constraint because
6319 process_constraint ignores all anything = anything constraints, since all
6320 but this one are redundant. */
6321 constraints.safe_push (new_constraint (lhs, rhs));
6322
6323 /* Create the READONLY variable, used to represent that a variable
6324 points to readonly memory. */
6325 var_readonly = new_var_info (NULL_TREE, "READONLY");
6326 gcc_assert (var_readonly->id == readonly_id);
6327 var_readonly->is_artificial_var = 1;
6328 var_readonly->offset = 0;
6329 var_readonly->size = ~0;
6330 var_readonly->fullsize = ~0;
6331 var_readonly->next = NULL;
6332 var_readonly->is_special_var = 1;
6333
6334 /* Readonly memory points to anything, in order to make deref
6335 easier. In reality, it points to anything the particular
6336 readonly variable can point to, but we don't track this
6337 separately. */
6338 lhs.type = SCALAR;
6339 lhs.var = readonly_id;
6340 lhs.offset = 0;
6341 rhs.type = ADDRESSOF;
6342 rhs.var = readonly_id; /* FIXME */
6343 rhs.offset = 0;
6344 process_constraint (new_constraint (lhs, rhs));
6345
6346 /* Create the ESCAPED variable, used to represent the set of escaped
6347 memory. */
6348 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6349 gcc_assert (var_escaped->id == escaped_id);
6350 var_escaped->is_artificial_var = 1;
6351 var_escaped->offset = 0;
6352 var_escaped->size = ~0;
6353 var_escaped->fullsize = ~0;
6354 var_escaped->is_special_var = 0;
6355
6356 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6357 memory. */
6358 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6359 gcc_assert (var_nonlocal->id == nonlocal_id);
6360 var_nonlocal->is_artificial_var = 1;
6361 var_nonlocal->offset = 0;
6362 var_nonlocal->size = ~0;
6363 var_nonlocal->fullsize = ~0;
6364 var_nonlocal->is_special_var = 1;
6365
6366 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6367 lhs.type = SCALAR;
6368 lhs.var = escaped_id;
6369 lhs.offset = 0;
6370 rhs.type = DEREF;
6371 rhs.var = escaped_id;
6372 rhs.offset = 0;
6373 process_constraint (new_constraint (lhs, rhs));
6374
6375 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6376 whole variable escapes. */
6377 lhs.type = SCALAR;
6378 lhs.var = escaped_id;
6379 lhs.offset = 0;
6380 rhs.type = SCALAR;
6381 rhs.var = escaped_id;
6382 rhs.offset = UNKNOWN_OFFSET;
6383 process_constraint (new_constraint (lhs, rhs));
6384
6385 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6386 everything pointed to by escaped points to what global memory can
6387 point to. */
6388 lhs.type = DEREF;
6389 lhs.var = escaped_id;
6390 lhs.offset = 0;
6391 rhs.type = SCALAR;
6392 rhs.var = nonlocal_id;
6393 rhs.offset = 0;
6394 process_constraint (new_constraint (lhs, rhs));
6395
6396 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6397 global memory may point to global memory and escaped memory. */
6398 lhs.type = SCALAR;
6399 lhs.var = nonlocal_id;
6400 lhs.offset = 0;
6401 rhs.type = ADDRESSOF;
6402 rhs.var = nonlocal_id;
6403 rhs.offset = 0;
6404 process_constraint (new_constraint (lhs, rhs));
6405 rhs.type = ADDRESSOF;
6406 rhs.var = escaped_id;
6407 rhs.offset = 0;
6408 process_constraint (new_constraint (lhs, rhs));
6409
6410 /* Create the STOREDANYTHING variable, used to represent the set of
6411 variables stored to *ANYTHING. */
6412 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6413 gcc_assert (var_storedanything->id == storedanything_id);
6414 var_storedanything->is_artificial_var = 1;
6415 var_storedanything->offset = 0;
6416 var_storedanything->size = ~0;
6417 var_storedanything->fullsize = ~0;
6418 var_storedanything->is_special_var = 0;
6419
6420 /* Create the INTEGER variable, used to represent that a variable points
6421 to what an INTEGER "points to". */
6422 var_integer = new_var_info (NULL_TREE, "INTEGER");
6423 gcc_assert (var_integer->id == integer_id);
6424 var_integer->is_artificial_var = 1;
6425 var_integer->size = ~0;
6426 var_integer->fullsize = ~0;
6427 var_integer->offset = 0;
6428 var_integer->next = NULL;
6429 var_integer->is_special_var = 1;
6430
6431 /* INTEGER = ANYTHING, because we don't know where a dereference of
6432 a random integer will point to. */
6433 lhs.type = SCALAR;
6434 lhs.var = integer_id;
6435 lhs.offset = 0;
6436 rhs.type = ADDRESSOF;
6437 rhs.var = anything_id;
6438 rhs.offset = 0;
6439 process_constraint (new_constraint (lhs, rhs));
6440 }
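/* In the syntax of the constraint dumps, the base constraints installed
   above are, in order:

     ANYTHING = &ANYTHING
     READONLY = &READONLY
     ESCAPED = *ESCAPED
     ESCAPED = ESCAPED + UNKNOWN
     *ESCAPED = NONLOCAL
     NONLOCAL = &NONLOCAL
     NONLOCAL = &ESCAPED
     INTEGER = &ANYTHING

   (the exact dump spelling of the UNKNOWN_OFFSET term may differ).  */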
6441
6442 /* Initialize things necessary to perform PTA. */
6443
6444 static void
6445 init_alias_vars (void)
6446 {
6447 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6448
6449 bitmap_obstack_initialize (&pta_obstack);
6450 bitmap_obstack_initialize (&oldpta_obstack);
6451 bitmap_obstack_initialize (&predbitmap_obstack);
6452
6453 constraint_pool = create_alloc_pool ("Constraint pool",
6454 sizeof (struct constraint), 30);
6455 variable_info_pool = create_alloc_pool ("Variable info pool",
6456 sizeof (struct variable_info), 30);
6457 constraints.create (8);
6458 varmap.create (8);
6459 vi_for_tree = pointer_map_create ();
6460 call_stmt_vars = pointer_map_create ();
6461
6462 memset (&stats, 0, sizeof (stats));
6463 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6464 shared_bitmap_eq, free);
6465 init_base_vars ();
6466
6467 gcc_obstack_init (&fake_var_decl_obstack);
6468 }
6469
6470 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6471 predecessor edges. */
6472
6473 static void
6474 remove_preds_and_fake_succs (constraint_graph_t graph)
6475 {
6476 unsigned int i;
6477
6478 /* Clear the implicit ref and address nodes from the successor
6479 lists. */
6480 for (i = 0; i < FIRST_REF_NODE; i++)
6481 {
6482 if (graph->succs[i])
6483 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6484 FIRST_REF_NODE * 2);
6485 }
6486
6487 /* Free the successor list for the non-ref nodes. */
6488 for (i = FIRST_REF_NODE; i < graph->size; i++)
6489 {
6490 if (graph->succs[i])
6491 BITMAP_FREE (graph->succs[i]);
6492 }
6493
6494 /* Now shrink the successor list back to the number of real
6495 variables, and blow away the predecessor bitmaps. */
6496 graph->size = varmap.length ();
6497 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6498
6499 free (graph->implicit_preds);
6500 graph->implicit_preds = NULL;
6501 free (graph->preds);
6502 graph->preds = NULL;
6503 bitmap_obstack_release (&predbitmap_obstack);
6504 }
6505
6506 /* Solve the constraint set. */
6507
6508 static void
6509 solve_constraints (void)
6510 {
6511 struct scc_info *si;
6512
6513 if (dump_file)
6514 fprintf (dump_file,
6515 "\nCollapsing static cycles and doing variable "
6516 "substitution\n");
6517
6518 init_graph (varmap.length () * 2);
6519
6520 if (dump_file)
6521 fprintf (dump_file, "Building predecessor graph\n");
6522 build_pred_graph ();
6523
6524 if (dump_file)
6525 fprintf (dump_file, "Detecting pointer and location "
6526 "equivalences\n");
6527 si = perform_var_substitution (graph);
6528
6529 if (dump_file)
6530 fprintf (dump_file, "Rewriting constraints and unifying "
6531 "variables\n");
6532 rewrite_constraints (graph, si);
6533
6534 build_succ_graph ();
6535
6536 free_var_substitution_info (si);
6537
6538 /* Attach complex constraints to graph nodes. */
6539 move_complex_constraints (graph);
6540
6541 if (dump_file)
6542 fprintf (dump_file, "Uniting pointer but not location equivalent "
6543 "variables\n");
6544 unite_pointer_equivalences (graph);
6545
6546 if (dump_file)
6547 fprintf (dump_file, "Finding indirect cycles\n");
6548 find_indirect_cycles (graph);
6549
6550 /* Implicit nodes and predecessors are no longer necessary at this
6551 point. */
6552 remove_preds_and_fake_succs (graph);
6553
6554 if (dump_file && (dump_flags & TDF_GRAPH))
6555 {
6556 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6557 "in dot format:\n");
6558 dump_constraint_graph (dump_file);
6559 fprintf (dump_file, "\n\n");
6560 }
6561
6562 if (dump_file)
6563 fprintf (dump_file, "Solving graph\n");
6564
6565 solve_graph (graph);
6566
6567 if (dump_file && (dump_flags & TDF_GRAPH))
6568 {
6569 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6570 "in dot format:\n");
6571 dump_constraint_graph (dump_file);
6572 fprintf (dump_file, "\n\n");
6573 }
6574
6575 if (dump_file)
6576 dump_sa_points_to_info (dump_file);
6577 }
6578
6579 /* Create points-to sets for the current function. See the comments
6580 at the start of the file for an algorithmic overview. */
6581
6582 static void
6583 compute_points_to_sets (void)
6584 {
6585 basic_block bb;
6586 unsigned i;
6587 varinfo_t vi;
6588
6589 timevar_push (TV_TREE_PTA);
6590
6591 init_alias_vars ();
6592
6593 intra_create_variable_infos ();
6594
6595 /* Now walk all statements and build the constraint set. */
6596 FOR_EACH_BB (bb)
6597 {
6598 gimple_stmt_iterator gsi;
6599
6600 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6601 {
6602 gimple phi = gsi_stmt (gsi);
6603
6604 if (! virtual_operand_p (gimple_phi_result (phi)))
6605 find_func_aliases (phi);
6606 }
6607
6608 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6609 {
6610 gimple stmt = gsi_stmt (gsi);
6611
6612 find_func_aliases (stmt);
6613 }
6614 }
6615
6616 if (dump_file)
6617 {
6618 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6619 dump_constraints (dump_file, 0);
6620 }
6621
6622 /* From the constraints compute the points-to sets. */
6623 solve_constraints ();
6624
6625 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6626 find_what_var_points_to (get_varinfo (escaped_id),
6627 &cfun->gimple_df->escaped);
6628
6629 /* Make sure the ESCAPED solution (which is used as placeholder in
6630 other solutions) does not reference itself. This simplifies
6631 points-to solution queries. */
6632 cfun->gimple_df->escaped.escaped = 0;
6633
6634 /* Mark escaped HEAP variables as global. */
6635 FOR_EACH_VEC_ELT (varmap, i, vi)
6636 if (vi->is_heap_var
6637 && !vi->is_global_var)
6638 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6639 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6640
6641 /* Compute the points-to sets for pointer SSA_NAMEs. */
6642 for (i = 0; i < num_ssa_names; ++i)
6643 {
6644 tree ptr = ssa_name (i);
6645 if (ptr
6646 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6647 find_what_p_points_to (ptr);
6648 }
6649
6650 /* Compute the call-used/clobbered sets. */
6651 FOR_EACH_BB (bb)
6652 {
6653 gimple_stmt_iterator gsi;
6654
6655 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6656 {
6657 gimple stmt = gsi_stmt (gsi);
6658 struct pt_solution *pt;
6659 if (!is_gimple_call (stmt))
6660 continue;
6661
6662 pt = gimple_call_use_set (stmt);
6663 if (gimple_call_flags (stmt) & ECF_CONST)
6664 memset (pt, 0, sizeof (struct pt_solution));
6665 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6666 {
6667 find_what_var_points_to (vi, pt);
6668 /* Escaped (and thus nonlocal) variables are always
6669 implicitly used by calls. */
6670 /* ??? ESCAPED can be empty even though NONLOCAL
6671 always escapes. */
6672 pt->nonlocal = 1;
6673 pt->escaped = 1;
6674 }
6675 else
6676 {
6677 /* If there is nothing special about this call, anything it
6678 uses has been made to escape; use the ESCAPED solution. */
6679 *pt = cfun->gimple_df->escaped;
6680 pt->nonlocal = 1;
6681 }
6682
6683 pt = gimple_call_clobber_set (stmt);
6684 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6685 memset (pt, 0, sizeof (struct pt_solution));
6686 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6687 {
6688 find_what_var_points_to (vi, pt);
6689 /* Escaped (and thus nonlocal) variables are always
6690 implicitly clobbered by calls. */
6691 /* ??? ESCAPED can be empty even though NONLOCAL
6692 always escapes. */
6693 pt->nonlocal = 1;
6694 pt->escaped = 1;
6695 }
6696 else
6697 {
6698 /* If there is nothing special about this call, anything it
6699 uses has been made to escape; use the ESCAPED solution. */
6700 *pt = cfun->gimple_df->escaped;
6701 pt->nonlocal = 1;
6702 }
6703 }
6704 }
6705
6706 timevar_pop (TV_TREE_PTA);
6707 }
6708
6709
6710 /* Delete created points-to sets. */
6711
6712 static void
6713 delete_points_to_sets (void)
6714 {
6715 unsigned int i;
6716
6717 htab_delete (shared_bitmap_table);
6718 if (dump_file && (dump_flags & TDF_STATS))
6719 fprintf (dump_file, "Points to sets created:%d\n",
6720 stats.points_to_sets_created);
6721
6722 pointer_map_destroy (vi_for_tree);
6723 pointer_map_destroy (call_stmt_vars);
6724 bitmap_obstack_release (&pta_obstack);
6725 constraints.release ();
6726
6727 for (i = 0; i < graph->size; i++)
6728 graph->complex[i].release ();
6729 free (graph->complex);
6730
6731 free (graph->rep);
6732 free (graph->succs);
6733 free (graph->pe);
6734 free (graph->pe_rep);
6735 free (graph->indirect_cycles);
6736 free (graph);
6737
6738 varmap.release ();
6739 free_alloc_pool (variable_info_pool);
6740 free_alloc_pool (constraint_pool);
6741
6742 obstack_free (&fake_var_decl_obstack, NULL);
6743 }
6744
6745
6746 /* Compute points-to information for every SSA_NAME pointer in the
6747 current function and compute the transitive closure of escaped
6748 variables to re-initialize the call-clobber states of local variables. */
6749
6750 unsigned int
6751 compute_may_aliases (void)
6752 {
6753 if (cfun->gimple_df->ipa_pta)
6754 {
6755 if (dump_file)
6756 {
6757 fprintf (dump_file, "\nNot re-computing points-to information "
6758 "because IPA points-to information is available.\n\n");
6759
6760 /* But still dump the information we do have. */
6761 dump_alias_info (dump_file);
6762 }
6763
6764 return 0;
6765 }
6766
6767 /* For each pointer P_i, determine the sets of variables that P_i may
6768 point-to. Compute the reachability set of escaped and call-used
6769 variables. */
6770 compute_points_to_sets ();
6771
6772 /* Debugging dumps. */
6773 if (dump_file)
6774 dump_alias_info (dump_file);
6775
6776 /* Deallocate memory used by aliasing data structures and the internal
6777 points-to solution. */
6778 delete_points_to_sets ();
6779
6780 gcc_assert (!need_ssa_update_p (cfun));
6781
6782 return 0;
6783 }
6784
6785 static bool
6786 gate_tree_pta (void)
6787 {
6788 return flag_tree_pta;
6789 }
6790
6791 /* A dummy pass to cause points-to information to be computed via
6792 TODO_rebuild_alias. */
6793
6794 struct gimple_opt_pass pass_build_alias =
6795 {
6796 {
6797 GIMPLE_PASS,
6798 "alias", /* name */
6799 OPTGROUP_NONE, /* optinfo_flags */
6800 gate_tree_pta, /* gate */
6801 NULL, /* execute */
6802 NULL, /* sub */
6803 NULL, /* next */
6804 0, /* static_pass_number */
6805 TV_NONE, /* tv_id */
6806 PROP_cfg | PROP_ssa, /* properties_required */
6807 0, /* properties_provided */
6808 0, /* properties_destroyed */
6809 0, /* todo_flags_start */
6810 TODO_rebuild_alias /* todo_flags_finish */
6811 }
6812 };
6813
6814 /* A dummy pass to cause points-to information to be computed via
6815 TODO_rebuild_alias. */
6816
6817 struct gimple_opt_pass pass_build_ealias =
6818 {
6819 {
6820 GIMPLE_PASS,
6821 "ealias", /* name */
6822 OPTGROUP_NONE, /* optinfo_flags */
6823 gate_tree_pta, /* gate */
6824 NULL, /* execute */
6825 NULL, /* sub */
6826 NULL, /* next */
6827 0, /* static_pass_number */
6828 TV_NONE, /* tv_id */
6829 PROP_cfg | PROP_ssa, /* properties_required */
6830 0, /* properties_provided */
6831 0, /* properties_destroyed */
6832 0, /* todo_flags_start */
6833 TODO_rebuild_alias /* todo_flags_finish */
6834 }
6835 };
6836
6837
6838 /* Return true if we should execute IPA PTA. */
6839 static bool
6840 gate_ipa_pta (void)
6841 {
6842 return (optimize
6843 && flag_ipa_pta
6844 /* Don't bother doing anything if the program has errors. */
6845 && !seen_error ());
6846 }
6847
6848 /* IPA PTA solution for ESCAPED; conservatively initialized to point to anything. */
6849 struct pt_solution ipa_escaped_pt
6850 = { true, false, false, false, false, false, NULL };
6851
6852 /* Associate NODE with varinfo DATA. Worker for
6853 cgraph_for_node_and_aliases. */
6854 static bool
6855 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6856 {
6857 if (node->alias || node->thunk.thunk_p)
6858 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
6859 return false;
6860 }
6861
6862 /* Execute the driver for IPA PTA. */
6863 static unsigned int
6864 ipa_pta_execute (void)
6865 {
6866 struct cgraph_node *node;
6867 struct varpool_node *var;
6868 int from;
6869
6870 in_ipa_mode = 1;
6871
6872 init_alias_vars ();
6873
6874 if (dump_file && (dump_flags & TDF_DETAILS))
6875 {
6876 dump_symtab (dump_file);
6877 fprintf (dump_file, "\n");
6878 }
6879
6880 /* Build the constraints. */
6881 FOR_EACH_DEFINED_FUNCTION (node)
6882 {
6883 varinfo_t vi;
6884 /* Nodes without a body are not interesting. Especially do not
6885 visit clones at this point for now - we get duplicate decls
6886 there for inline clones at least. */
6887 if (!cgraph_function_with_gimple_body_p (node))
6888 continue;
6889
6890 gcc_assert (!node->clone_of);
6891
6892 vi = create_function_info_for (node->symbol.decl,
6893 alias_get_name (node->symbol.decl));
6894 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
6895 }
6896
6897 /* Create constraints for global variables and their initializers. */
6898 FOR_EACH_VARIABLE (var)
6899 {
6900 if (var->alias)
6901 continue;
6902
6903 get_vi_for_tree (var->symbol.decl);
6904 }
6905
6906 if (dump_file)
6907 {
6908 fprintf (dump_file,
6909 "Generating constraints for global initializers\n\n");
6910 dump_constraints (dump_file, 0);
6911 fprintf (dump_file, "\n");
6912 }
6913 from = constraints.length ();
6914
6915 FOR_EACH_DEFINED_FUNCTION (node)
6916 {
6917 struct function *func;
6918 basic_block bb;
6919
6920 /* Nodes without a body are not interesting. */
6921 if (!cgraph_function_with_gimple_body_p (node))
6922 continue;
6923
6924 if (dump_file)
6925 {
6926 fprintf (dump_file,
6927 "Generating constraints for %s", cgraph_node_name (node));
6928 if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
6929 fprintf (dump_file, " (%s)",
6930 IDENTIFIER_POINTER
6931 (DECL_ASSEMBLER_NAME (node->symbol.decl)));
6932 fprintf (dump_file, "\n");
6933 }
6934
6935 func = DECL_STRUCT_FUNCTION (node->symbol.decl);
6936 push_cfun (func);
6937
6938 /* For externally visible functions, and functions annotated with
6939 the used attribute, use local constraints for their arguments.
6940 For local functions we see all callers and thus do not need initial
6941 constraints for parameters. */
6942 if (node->symbol.used_from_other_partition
6943 || node->symbol.externally_visible
6944 || node->symbol.force_output)
6945 {
6946 intra_create_variable_infos ();
6947
6948 /* We also need to make function return values escape. Nothing
6949 escapes by returning from main though. */
6950 if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
6951 {
6952 varinfo_t fi, rvi;
6953 fi = lookup_vi_for_tree (node->symbol.decl);
6954 rvi = first_vi_for_offset (fi, fi_result);
6955 if (rvi && rvi->offset == fi_result)
6956 {
6957 struct constraint_expr includes;
6958 struct constraint_expr var;
6959 includes.var = escaped_id;
6960 includes.offset = 0;
6961 includes.type = SCALAR;
6962 var.var = rvi->id;
6963 var.offset = 0;
6964 var.type = SCALAR;
6965 process_constraint (new_constraint (includes, var));
6966 }
6967 }
6968 }
6969
6970 /* Build constraints for the function body. */
6971 FOR_EACH_BB_FN (bb, func)
6972 {
6973 gimple_stmt_iterator gsi;
6974
6975 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6976 gsi_next (&gsi))
6977 {
6978 gimple phi = gsi_stmt (gsi);
6979
6980 if (! virtual_operand_p (gimple_phi_result (phi)))
6981 find_func_aliases (phi);
6982 }
6983
6984 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6985 {
6986 gimple stmt = gsi_stmt (gsi);
6987
6988 find_func_aliases (stmt);
6989 find_func_clobbers (stmt);
6990 }
6991 }
6992
6993 pop_cfun ();
6994
6995 if (dump_file)
6996 {
6997 fprintf (dump_file, "\n");
6998 dump_constraints (dump_file, from);
6999 fprintf (dump_file, "\n");
7000 }
7001 from = constraints.length ();
7002 }
7003
7004 /* From the constraints compute the points-to sets. */
7005 solve_constraints ();
7006
7007 /* Compute the global points-to sets for ESCAPED.
7008 ??? Note that the computed escape set is not correct
7009 for the whole unit as we fail to consider graph edges to
7010 externally visible functions. */
7011 find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
7012
7013 /* Make sure the ESCAPED solution (which is used as placeholder in
7014 other solutions) does not reference itself. This simplifies
7015 points-to solution queries. */
7016 ipa_escaped_pt.ipa_escaped = 0;
7017
7018 /* Assign the points-to sets to the SSA names in the unit. */
7019 FOR_EACH_DEFINED_FUNCTION (node)
7020 {
7021 tree ptr;
7022 struct function *fn;
7023 unsigned i;
7024 varinfo_t fi;
7025 basic_block bb;
7026 struct pt_solution uses, clobbers;
7027 struct cgraph_edge *e;
7028
7029 /* Nodes without a body are not interesting. */
7030 if (!cgraph_function_with_gimple_body_p (node))
7031 continue;
7032
7033 fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
7034
7035 /* Compute the points-to sets for pointer SSA_NAMEs. */
7036 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7037 {
7038 if (ptr
7039 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7040 find_what_p_points_to (ptr);
7041 }
7042
7043 /* Compute the call-use and call-clobber sets for all direct calls. */
7044 fi = lookup_vi_for_tree (node->symbol.decl);
7045 gcc_assert (fi->is_fn_info);
7046 find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
7047 &clobbers);
7048 find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
7049 for (e = node->callers; e; e = e->next_caller)
7050 {
7051 if (!e->call_stmt)
7052 continue;
7053
7054 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7055 *gimple_call_use_set (e->call_stmt) = uses;
7056 }
7057
7058 /* Compute the call-use and call-clobber sets for indirect calls
7059 and calls to external functions. */
7060 FOR_EACH_BB_FN (bb, fn)
7061 {
7062 gimple_stmt_iterator gsi;
7063
7064 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7065 {
7066 gimple stmt = gsi_stmt (gsi);
7067 struct pt_solution *pt;
7068 varinfo_t vi;
7069 tree decl;
7070
7071 if (!is_gimple_call (stmt))
7072 continue;
7073
7074 /* Handle direct calls to external functions. */
7075 decl = gimple_call_fndecl (stmt);
7076 if (decl
7077 && (!(fi = lookup_vi_for_tree (decl))
7078 || !fi->is_fn_info))
7079 {
7080 pt = gimple_call_use_set (stmt);
7081 if (gimple_call_flags (stmt) & ECF_CONST)
7082 memset (pt, 0, sizeof (struct pt_solution));
7083 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7084 {
7085 find_what_var_points_to (vi, pt);
7086 /* Escaped (and thus nonlocal) variables are always
7087 implicitly used by calls. */
7088 /* ??? ESCAPED can be empty even though NONLOCAL
7089 always escapes. */
7090 pt->nonlocal = 1;
7091 pt->ipa_escaped = 1;
7092 }
7093 else
7094 {
7095 /* If there is nothing special about this call, anything it
7096 uses has been made to escape; use the IPA ESCAPED solution. */
7097 *pt = ipa_escaped_pt;
7098 pt->nonlocal = 1;
7099 }
7100
7101 pt = gimple_call_clobber_set (stmt);
7102 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7103 memset (pt, 0, sizeof (struct pt_solution));
7104 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7105 {
7106 find_what_var_points_to (vi, pt);
7107 /* Escaped (and thus nonlocal) variables are always
7108 implicitly clobbered by calls. */
7109 /* ??? ESCAPED can be empty even though NONLOCAL
7110 always escapes. */
7111 pt->nonlocal = 1;
7112 pt->ipa_escaped = 1;
7113 }
7114 else
7115 {
7116 /* If there is nothing special about this call, anything it
7117 uses has been made to escape; use the IPA ESCAPED solution. */
7118 *pt = ipa_escaped_pt;
7119 pt->nonlocal = 1;
7120 }
7121 }
7122
7123 /* Handle indirect calls. */
7124 if (!decl
7125 && (fi = get_fi_for_callee (stmt)))
7126 {
7127 /* We need to accumulate all clobbers/uses of all possible
7128 callees. */
7129 fi = get_varinfo (find (fi->id));
7130 /* If we cannot constrain the set of functions we'll end up
7131 calling, we have to assume everything is used and clobbered. */
7132 if (bitmap_bit_p (fi->solution, anything_id)
7133 || bitmap_bit_p (fi->solution, nonlocal_id)
7134 || bitmap_bit_p (fi->solution, escaped_id))
7135 {
7136 pt_solution_reset (gimple_call_clobber_set (stmt));
7137 pt_solution_reset (gimple_call_use_set (stmt));
7138 }
7139 else
7140 {
7141 bitmap_iterator bi;
7142 unsigned i;
7143 struct pt_solution *uses, *clobbers;
7144
7145 uses = gimple_call_use_set (stmt);
7146 clobbers = gimple_call_clobber_set (stmt);
7147 memset (uses, 0, sizeof (struct pt_solution));
7148 memset (clobbers, 0, sizeof (struct pt_solution));
7149 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7150 {
7151 struct pt_solution sol;
7152
7153 vi = get_varinfo (i);
7154 if (!vi->is_fn_info)
7155 {
7156 /* ??? We could be more precise here? */
7157 uses->nonlocal = 1;
7158 uses->ipa_escaped = 1;
7159 clobbers->nonlocal = 1;
7160 clobbers->ipa_escaped = 1;
7161 continue;
7162 }
7163
7164 if (!uses->anything)
7165 {
7166 find_what_var_points_to
7167 (first_vi_for_offset (vi, fi_uses), &sol);
7168 pt_solution_ior_into (uses, &sol);
7169 }
7170 if (!clobbers->anything)
7171 {
7172 find_what_var_points_to
7173 (first_vi_for_offset (vi, fi_clobbers), &sol);
7174 pt_solution_ior_into (clobbers, &sol);
7175 }
7176 }
7177 }
7178 }
7179 }
7180 }
7181
7182 fn->gimple_df->ipa_pta = true;
7183 }
7184
7185 delete_points_to_sets ();
7186
7187 in_ipa_mode = 0;
7188
7189 return 0;
7190 }
7191
7192 struct simple_ipa_opt_pass pass_ipa_pta =
7193 {
7194 {
7195 SIMPLE_IPA_PASS,
7196 "pta", /* name */
7197 OPTGROUP_NONE, /* optinfo_flags */
7198 gate_ipa_pta, /* gate */
7199 ipa_pta_execute, /* execute */
7200 NULL, /* sub */
7201 NULL, /* next */
7202 0, /* static_pass_number */
7203 TV_IPA_PTA, /* tv_id */
7204 0, /* properties_required */
7205 0, /* properties_provided */
7206 0, /* properties_destroyed */
7207 0, /* todo_flags_start */
7208 TODO_update_ssa /* todo_flags_finish */
7209 }
7210 };