c588e876b2bb2cca836c96927bef13bc8a835dfa
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hashtab.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
45
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
48 points-to sets.
49
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
56 as a consequence.
57
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
61
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
65
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
69
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
76
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
79
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
82 order.
83 Each variable for a structure field has
84
85 1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
89
90 Thus,
91 struct f
92 {
93 int a;
94 int b;
95 } foo;
96 int *bar;
97
98 looks like
99
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
103
104
105 In order to solve the system of set constraints, the following is
106 done:
107
108 1. Each constraint variable x has a solution set associated with it,
109 Sol(x).
110
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
117
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
120
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
123
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
127
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
131
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
135
136 8. The process of walking the graph is iterated until no solution
137 sets change.
138
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
142
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
146
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
149
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
152
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
155
156 /* IPA-PTA optimizations possible.
157
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
162
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode. This is
166 also needed to fix the pt_solution_includes_global predicate
167 (and thus ptr_deref_may_alias_global_p).
168
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
176
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
187
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
   solution can go alongside the non-IPA escaped solution and be
191 used to query which vars escape the unit through a function.
192
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
195
196 And probably more. */
197
/* Whether to break structures into per-field constraint variables
   (field-sensitive analysis).  */
static bool use_field_sensitive = true;

/* Nonzero when performing interprocedural (whole-unit) analysis.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps.  */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables.  */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Like EXECUTE_IF_SET_IN_BITMAP but tolerates a NULL bitmap A by
   simply iterating zero times.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

/* Statistics gathered during constraint building and solving, dumped
   for diagnostics.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
236
/* A constraint variable.  Scalars get a single variable_info; a
   structure is broken up into one variable_info per field, chained
   together via HEAD/NEXT in offset order.  */

struct variable_info
{
  /* ID of this variable, which is also its index into VARMAP.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable (for dumps and debugging).  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

/* Map varinfo to final pt_solution.  */
static pointer_map_t *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
317
318 /* Return the varmap element N */
319
320 static inline varinfo_t
321 get_varinfo (unsigned int n)
322 {
323 return varmap[n];
324 }
325
326 /* Return the next variable in the list of sub-variables of VI
327 or NULL if VI is the last sub-variable. */
328
329 static inline varinfo_t
330 vi_next (varinfo_t vi)
331 {
332 return get_varinfo (vi->next);
333 }
334
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain, so the first
   real entry in VARMAP has id 1.  */
enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
340
/* Return a new variable info structure for a variable named NAME
   associated with tree T (NULL_TREE for artificial variables).
   Append it to the vector of variable info structures; its id is its
   index in that vector.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  unsigned index = varmap.length ();
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  /* Artificial variables are conservatively treated as global; for
     real DECLs this is recomputed below.  */
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  /* The oldsolution bitmap is allocated lazily elsewhere.  */
  ret->oldsolution = NULL;
  /* Start out as a singleton field chain; callers link sub-fields.  */
  ret->next = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}
381
382
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  The stored value is the use
   variable; the clobber variable is chained directly after it.  */
struct pointer_map_t *call_stmt_vars;
386
/* Lookup or create the variable for the call statement CALL.
   Returns the use variable; the clobber variable is reachable from it
   via vi_next.  */

static varinfo_t
get_call_vi (gimple call)
{
  void **slot_p;
  varinfo_t vi, vi2;

  slot_p = pointer_map_insert (call_stmt_vars, call);
  if (*slot_p)
    return (varinfo_t) *slot_p;

  /* Model the pair as two adjacent one-"bit" fields of a fake
     variable of fullsize two, so vi_next links them.  */
  vi = new_var_info (NULL_TREE, "CALLUSED");
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;

  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;

  vi->next = vi2->id;

  /* Cache only the use variable.  */
  *slot_p = (void *) vi;
  return vi;
}
416
417 /* Lookup the variable for the call statement CALL representing
418 the uses. Returns NULL if there is nothing special about this call. */
419
420 static varinfo_t
421 lookup_call_use_vi (gimple call)
422 {
423 void **slot_p;
424
425 slot_p = pointer_map_contains (call_stmt_vars, call);
426 if (slot_p)
427 return (varinfo_t) *slot_p;
428
429 return NULL;
430 }
431
432 /* Lookup the variable for the call statement CALL representing
433 the clobbers. Returns NULL if there is nothing special about this call. */
434
435 static varinfo_t
436 lookup_call_clobber_vi (gimple call)
437 {
438 varinfo_t uses = lookup_call_use_vi (call);
439 if (!uses)
440 return NULL;
441
442 return vi_next (uses);
443 }
444
/* Lookup or create the variable for the call statement CALL representing
   the uses.  This is the variable get_call_vi returns directly.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  return get_call_vi (call);
}
453
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  It is chained directly after the use variable created
   by get_call_vi.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  return vi_next (get_call_vi (call));
}
462
463
/* The three kinds of constraint expression: x, *x and &x.  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.
   NOTE(review): left-shifting a negative value is undefined in ISO C;
   this relies on the behavior of the compilers GCC is built with.  */
#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);

/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
/* Pool the constraint records themselves are allocated from.  */
static alloc_pool constraint_pool;
509
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph the solver works on.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  The REF node for variable X lives at FIRST_REF_NODE + id(X)
   (see the dumping code below).  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
592
593 /* Return the representative node for NODE, if NODE has been unioned
594 with another NODE.
595 This function performs path compression along the way to finding
596 the representative. */
597
598 static unsigned int
599 find (unsigned int node)
600 {
601 gcc_checking_assert (node < graph->size);
602 if (graph->rep[node] != node)
603 return graph->rep[node] = find (graph->rep[node]);
604 return node;
605 }
606
607 /* Union the TO and FROM nodes to the TO nodes.
608 Note that at some point in the future, we may want to do
609 union-by-rank, in which case we are going to have to return the
610 node we unified to. */
611
612 static bool
613 unite (unsigned int to, unsigned int from)
614 {
615 gcc_checking_assert (to < graph->size && from < graph->size);
616 if (to != from && graph->rep[from] != to)
617 {
618 graph->rep[from] = to;
619 return true;
620 }
621 return false;
622 }
623
/* Create a new constraint consisting of LHS and RHS expressions.
   The record is allocated from CONSTRAINT_POOL; the caller is
   responsible for registering it (e.g. in CONSTRAINTS).  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}
635
636 /* Print out constraint C to FILE. */
637
638 static void
639 dump_constraint (FILE *file, constraint_t c)
640 {
641 if (c->lhs.type == ADDRESSOF)
642 fprintf (file, "&");
643 else if (c->lhs.type == DEREF)
644 fprintf (file, "*");
645 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
646 if (c->lhs.offset == UNKNOWN_OFFSET)
647 fprintf (file, " + UNKNOWN");
648 else if (c->lhs.offset != 0)
649 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
650 fprintf (file, " = ");
651 if (c->rhs.type == ADDRESSOF)
652 fprintf (file, "&");
653 else if (c->rhs.type == DEREF)
654 fprintf (file, "*");
655 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
656 if (c->rhs.offset == UNKNOWN_OFFSET)
657 fprintf (file, " + UNKNOWN");
658 else if (c->rhs.offset != 0)
659 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
660 }
661
662
/* Extern declarations of the debug entry points so they remain
   callable from a debugger even without in-file uses.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);

/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}
677
678 /* Print out all constraints to FILE */
679
680 static void
681 dump_constraints (FILE *file, int from)
682 {
683 int i;
684 constraint_t c;
685 for (i = from; constraints.iterate (i, &c); i++)
686 if (c)
687 {
688 dump_constraint (file, c);
689 fprintf (file, "\n");
690 }
691 }
692
/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  /* Dump the whole vector, starting at index 0.  */
  dump_constraints (stderr, 0);
}
700
/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Ids at and above
     FIRST_REF_NODE denote the "*x" dereference nodes.  */
  for (i = 1; i < graph->size; i++)
    {
      /* Id zero is unused, so its REF node is skipped too.  */
      if (i == FIRST_REF_NODE)
	continue;
      /* Only representative (non-unified) nodes are printed.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the complex constraints as a multi-line label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after unification) are not interesting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
775
/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  /* Convenience wrapper for use from a debugger.  */
  dump_constraint_graph (stderr);
}
783
784 /* SOLVER FUNCTIONS
785
786 The solver is a simple worklist solver, that works on the following
787 algorithm:
788
789 sbitmap changed_nodes = all zeroes;
790 changed_count = 0;
791 For each node that is not already collapsed:
792 changed_count++;
793 set bit in changed nodes
794
795 while (changed_count > 0)
796 {
797 compute topological ordering for constraint graph
798
799 find and collapse cycles in the constraint graph (updating
800 changed if necessary)
801
802 for each node (n) in the graph in topological order:
803 changed_count--;
804
805 Process each complex constraint associated with the node,
806 updating changed if necessary.
807
808 For each outgoing edge from n, propagate the solution from n to
809 the destination of the edge, updating changed as necessary.
810
811 } */
812
813 /* Return true if two constraint expressions A and B are equal. */
814
815 static bool
816 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
817 {
818 return a.type == b.type && a.var == b.var && a.offset == b.offset;
819 }
820
821 /* Return true if constraint expression A is less than constraint expression
822 B. This is just arbitrary, but consistent, in order to give them an
823 ordering. */
824
825 static bool
826 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
827 {
828 if (a.type == b.type)
829 {
830 if (a.var == b.var)
831 return a.offset < b.offset;
832 else
833 return a.var < b.var;
834 }
835 else
836 return a.type < b.type;
837 }
838
839 /* Return true if constraint A is less than constraint B. This is just
840 arbitrary, but consistent, in order to give them an ordering. */
841
842 static bool
843 constraint_less (const constraint_t &a, const constraint_t &b)
844 {
845 if (constraint_expr_less (a->lhs, b->lhs))
846 return true;
847 else if (constraint_expr_less (b->lhs, a->lhs))
848 return false;
849 else
850 return constraint_expr_less (a->rhs, b->rhs);
851 }
852
853 /* Return true if two constraints A and B are equal. */
854
855 static bool
856 constraint_equal (struct constraint a, struct constraint b)
857 {
858 return constraint_expr_equal (a.lhs, b.lhs)
859 && constraint_expr_equal (a.rhs, b.rhs);
860 }
861
862
863 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
864
865 static constraint_t
866 constraint_vec_find (vec<constraint_t> vec,
867 struct constraint lookfor)
868 {
869 unsigned int place;
870 constraint_t found;
871
872 if (!vec.exists ())
873 return NULL;
874
875 place = vec.lower_bound (&lookfor, constraint_less);
876 if (place >= vec.length ())
877 return NULL;
878 found = vec[place];
879 if (!constraint_equal (*found, lookfor))
880 return NULL;
881 return found;
882 }
883
884 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
885
886 static void
887 constraint_set_union (vec<constraint_t> *to,
888 vec<constraint_t> *from)
889 {
890 int i;
891 constraint_t c;
892
893 FOR_EACH_VEC_ELT (*from, i, c)
894 {
895 if (constraint_vec_find (*to, *c) == NULL)
896 {
897 unsigned int place = to->lower_bound (c, constraint_less);
898 to->safe_insert (place, c);
899 }
900 }
901 }
902
/* Expands the solution in SET to all sub-fields of variables included.  */

static void
solution_set_expand (bitmap set)
{
  bitmap_iterator bi;
  unsigned j;

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      /* Mark the first field of J's base variable; presumably HEAD's
	 id is not larger than J, so this does not perturb the
	 forward bitmap walk — TODO confirm field id ordering.  */
      bitmap_set_bit (set, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Only process head variables (v->head == j); the fields they
	 chain to are added below.  */
      if (v->is_artificial_var
	  || v->is_full_var
	  || v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (set, v->id);
    }
}
934
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  Return true if TO changed.  */

static bool
set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of FROM contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (from, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* For zero offset simply union the solution into the destination.  */
  if (inc == 0)
    return bitmap_ior_into (to, from);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_expand (tmp);
      changed |= bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  /* The addition is done in unsigned arithmetic; a negative
	     INC that moves before the variable wraps around, which
	     the check below detects.  */
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (inc < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  changed |= bitmap_set_bit (to, vi->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (vi->offset != fieldoffset
	      && vi->next != 0)
	    changed |= bitmap_set_bit (to, vi->next);
	}
    }

  return changed;
}
1001
/* Insert constraint C into the list of complex constraints for graph
   node VAR, keeping that list sorted and duplicate-free.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  vec<constraint_t> complex = graph->complex[var];
  unsigned int place = complex.lower_bound (c, constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= complex.length ()
      || !constraint_equal (*c, *complex[place]))
    graph->complex[var].safe_insert (place, c);
}
1017
1018
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  FROM must already have TO as
   its representative.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  Retarget the
	 side that referred to FROM at TO.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  /* FROM's complex list is dead after the union; release it.  */
  graph->complex[from].release ();
}
1048
1049
1050 /* Remove edges involving NODE from GRAPH. */
1051
1052 static void
1053 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1054 {
1055 if (graph->succs[node])
1056 BITMAP_FREE (graph->succs[node]);
1057 }
1058
/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM's edges have been folded into TO; release its successor
     bitmap.  */
  clear_edges_for_node (graph, from);
}
1088
1089
1090 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1091 it doesn't exist in the graph already. */
1092
1093 static void
1094 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1095 unsigned int from)
1096 {
1097 if (to == from)
1098 return;
1099
1100 if (!graph->implicit_preds[to])
1101 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1102
1103 if (bitmap_set_bit (graph->implicit_preds[to], from))
1104 stats.num_implicit_edges++;
1105 }
1106
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Unlike add_graph_edge this
   does not report whether the edge was new.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1119
1120 /* Add a graph edge to GRAPH, going from FROM to TO if
1121 it doesn't exist in the graph already.
1122 Return false if the edge already existed, true otherwise. */
1123
1124 static bool
1125 add_graph_edge (constraint_graph_t graph, unsigned int to,
1126 unsigned int from)
1127 {
1128 if (to == from)
1129 {
1130 return false;
1131 }
1132 else
1133 {
1134 bool r = false;
1135
1136 if (!graph->succs[from])
1137 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1138 if (bitmap_set_bit (graph->succs[from], to))
1139 {
1140 r = true;
1141 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1142 stats.num_edges++;
1143 }
1144 return r;
1145 }
1146 }
1147
1148
/* Initialize the constraint graph structure to contain SIZE nodes.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  /* Every node starts out as its own representative; -1 marks "no
     pointer-equivalence representative / no indirect cycle" yet.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
1175
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the per-node data used by the offline variable
     substitution phase.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special nodes start out direct; the constraint scan below
     may demote them.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor (and implicit) edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* An offsetted or non-scalar load makes x indirect.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  For a
	     multi-field variable, demote every field of it.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the destination (resp. source)
	     node indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1278
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Use the union-find representatives; nodes may have been
	 unified by the earlier offline phases.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed the solution directly instead of adding
	     an edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1338
1339
1340 /* Changed variables on the last iteration. */
1341 static bitmap changed;
1342
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already visited by the DFS.  */
  sbitmap visited;
  /* Nodes whose component has been fully determined.  */
  sbitmap deleted;
  /* Per-node DFS number, lowered to the smallest reachable number
     during the walk.  */
  unsigned int *dfs;
  /* Node-collapsing map used by the condense phase; initialized to
     the identity by init_scc_info.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of nodes not yet assigned to a component.  */
  vec<unsigned> scc_stack;
};
1354
1355
1356 /* Recursive routine to find strongly connected components in GRAPH.
1357 SI is the SCC info to store the information in, and N is the id of current
1358 graph node we are processing.
1359
1360 This is Tarjan's strongly connected component finding algorithm, as
1361 modified by Nuutila to keep only non-root nodes on the stack.
1362 The algorithm can be found in "On finding the strongly connected
1363 connected components in a directed graph" by Esko Nuutila and Eljas
1364 Soisalon-Soininen, in Information Processing Letters volume 49,
1365 number 1, pages 9-14. */
1366
1367 static void
1368 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1369 {
1370 unsigned int i;
1371 bitmap_iterator bi;
1372 unsigned int my_dfs;
1373
1374 bitmap_set_bit (si->visited, n);
1375 si->dfs[n] = si->current_index ++;
1376 my_dfs = si->dfs[n];
1377
1378 /* Visit all the successors. */
1379 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1380 {
1381 unsigned int w;
1382
1383 if (i > LAST_REF_NODE)
1384 break;
1385
1386 w = find (i);
1387 if (bitmap_bit_p (si->deleted, w))
1388 continue;
1389
1390 if (!bitmap_bit_p (si->visited, w))
1391 scc_visit (graph, si, w);
1392
1393 unsigned int t = find (w);
1394 gcc_checking_assert (find (n) == n);
1395 if (si->dfs[t] < si->dfs[n])
1396 si->dfs[n] = si->dfs[t];
1397 }
1398
1399 /* See if any components have been identified. */
1400 if (si->dfs[n] == my_dfs)
1401 {
1402 if (si->scc_stack.length () > 0
1403 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1404 {
1405 bitmap scc = BITMAP_ALLOC (NULL);
1406 unsigned int lowest_node;
1407 bitmap_iterator bi;
1408
1409 bitmap_set_bit (scc, n);
1410
1411 while (si->scc_stack.length () != 0
1412 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1413 {
1414 unsigned int w = si->scc_stack.pop ();
1415
1416 bitmap_set_bit (scc, w);
1417 }
1418
1419 lowest_node = bitmap_first_set_bit (scc);
1420 gcc_assert (lowest_node < FIRST_REF_NODE);
1421
1422 /* Collapse the SCC nodes into a single node, and mark the
1423 indirect cycles. */
1424 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1425 {
1426 if (i < FIRST_REF_NODE)
1427 {
1428 if (unite (lowest_node, i))
1429 unify_nodes (graph, lowest_node, i, false);
1430 }
1431 else
1432 {
1433 unite (lowest_node, i);
1434 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1435 }
1436 }
1437 }
1438 bitmap_set_bit (si->deleted, n);
1439 }
1440 else
1441 si->scc_stack.safe_push (n);
1442 }
1443
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative of FROM.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution has been folded into TO; free its bitmaps.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove a possible self edge created by the merge.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1495
/* Information needed to compute the topological ordering of a graph.
   Allocated by init_topo_info and released by free_topo_info.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  Filled in by topo_visit.  */
  vec<unsigned> topo_order;
};
1506
1507
1508 /* Initialize and return a topological info structure. */
1509
1510 static struct topo_info *
1511 init_topo_info (void)
1512 {
1513 size_t size = graph->size;
1514 struct topo_info *ti = XNEW (struct topo_info);
1515 ti->visited = sbitmap_alloc (size);
1516 bitmap_clear (ti->visited);
1517 ti->topo_order.create (1);
1518 return ti;
1519 }
1520
1521
1522 /* Free the topological sort info pointed to by TI. */
1523
1524 static void
1525 free_topo_info (struct topo_info *ti)
1526 {
1527 sbitmap_free (ti->visited);
1528 ti->topo_order.release ();
1529 free (ti);
1530 }
1531
1532 /* Visit the graph in topological order, and store the order in the
1533 topo_info structure. */
1534
1535 static void
1536 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1537 unsigned int n)
1538 {
1539 bitmap_iterator bi;
1540 unsigned int j;
1541
1542 bitmap_set_bit (ti->visited, n);
1543
1544 if (graph->succs[n])
1545 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1546 {
1547 if (!bitmap_bit_p (ti->visited, j))
1548 topo_visit (graph, ti, j);
1549 }
1550
1551 ti->topo_order.safe_push (n);
1552 }
1553
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the fields of V starting at FIELDOFFSET.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1641
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the fields of V starting at FIELDOFFSET.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1739
1740 /* Handle a non-simple (simple meaning requires no iteration),
1741 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1742
1743 static void
1744 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1745 {
1746 if (c->lhs.type == DEREF)
1747 {
1748 if (c->rhs.type == ADDRESSOF)
1749 {
1750 gcc_unreachable();
1751 }
1752 else
1753 {
1754 /* *x = y */
1755 do_ds_constraint (c, delta);
1756 }
1757 }
1758 else if (c->rhs.type == DEREF)
1759 {
1760 /* x = *y */
1761 if (!(get_varinfo (c->lhs.var)->is_special_var))
1762 do_sd_constraint (graph, c, delta);
1763 }
1764 else
1765 {
1766 bitmap tmp;
1767 bitmap solution;
1768 bool flag = false;
1769
1770 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1771 solution = get_varinfo (c->rhs.var)->solution;
1772 tmp = get_varinfo (c->lhs.var)->solution;
1773
1774 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1775
1776 if (flag)
1777 bitmap_set_bit (changed, c->lhs.var);
1778 }
1779 }
1780
1781 /* Initialize and return a new SCC info structure. */
1782
1783 static struct scc_info *
1784 init_scc_info (size_t size)
1785 {
1786 struct scc_info *si = XNEW (struct scc_info);
1787 size_t i;
1788
1789 si->current_index = 0;
1790 si->visited = sbitmap_alloc (size);
1791 bitmap_clear (si->visited);
1792 si->deleted = sbitmap_alloc (size);
1793 bitmap_clear (si->deleted);
1794 si->node_mapping = XNEWVEC (unsigned int, size);
1795 si->dfs = XCNEWVEC (unsigned int, size);
1796
1797 for (i = 0; i < size; i++)
1798 si->node_mapping[i] = i;
1799
1800 si->scc_stack.create (1);
1801 return si;
1802 }
1803
1804 /* Free an SCC info structure pointed to by SI */
1805
1806 static void
1807 free_scc_info (struct scc_info *si)
1808 {
1809 sbitmap_free (si->visited);
1810 sbitmap_free (si->deleted);
1811 free (si->node_mapping);
1812 free (si->dfs);
1813 si->scc_stack.release ();
1814 free (si);
1815 }
1816
1817
1818 /* Find indirect cycles in GRAPH that occur, using strongly connected
1819 components, and note them in the indirect cycles map.
1820
1821 This technique comes from Ben Hardekopf and Calvin Lin,
1822 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1823 Lines of Code", submitted to PLDI 2007. */
1824
1825 static void
1826 find_indirect_cycles (constraint_graph_t graph)
1827 {
1828 unsigned int i;
1829 unsigned int size = graph->size;
1830 struct scc_info *si = init_scc_info (size);
1831
1832 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1833 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1834 scc_visit (graph, si, i);
1835
1836 free_scc_info (si);
1837 }
1838
1839 /* Compute a topological ordering for GRAPH, and store the result in the
1840 topo_info structure TI. */
1841
1842 static void
1843 compute_topo_order (constraint_graph_t graph,
1844 struct topo_info *ti)
1845 {
1846 unsigned int i;
1847 unsigned int size = graph->size;
1848
1849 for (i = 0; i != size; ++i)
1850 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1851 topo_visit (graph, ti, i);
1852 }
1853
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS; doubles as a cheap pre-filter in the
     equality test.  */
  hashval_t hashcode;
  /* The class id assigned to this label set; 0 means not yet
     assigned.  */
  unsigned int equivalence_class;
  /* The set of labels defining this equivalence class.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1864
1865 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1866 classes. */
1867 static htab_t pointer_equiv_class_table;
1868
1869 /* A hashtable for mapping a bitmap of labels->location equivalence
1870 classes. */
1871 static htab_t location_equiv_class_table;
1872
1873 /* Hash function for a equiv_class_label_t */
1874
1875 static hashval_t
1876 equiv_class_label_hash (const void *p)
1877 {
1878 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1879 return ecl->hashcode;
1880 }
1881
1882 /* Equality function for two equiv_class_label_t's. */
1883
1884 static int
1885 equiv_class_label_eq (const void *p1, const void *p2)
1886 {
1887 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1888 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1889 return (eql1->hashcode == eql2->hashcode
1890 && bitmap_equal_p (eql1->labels, eql2->labels));
1891 }
1892
/* Look up the equivalence class entry for the bitmap of LABELS in
   TABLE, inserting a fresh entry (with equivalence_class == 0) when
   none exists yet.  Returns the (possibly new) entry; the caller is
   responsible for assigning a class id to a fresh entry.  */

static equiv_class_label *
equiv_class_lookup_or_add (htab_t table, bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = (equiv_class_label **) htab_find_slot_with_hash (table, &ecl,
							  ecl.hashcode, INSERT);
  if (!*slot)
    {
      /* First time this label set is seen: allocate a permanent
	 entry that shares the LABELS bitmap with the caller.  */
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1917
1918 /* Perform offline variable substitution.
1919
1920 This is a worst case quadratic time way of identifying variables
1921 that must have equivalent points-to sets, including those caused by
1922 static cycles, and single entry subgraphs, in the constraint graph.
1923
   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis", in the 14th International
   Static Analysis Symposium (SAS), August 2007.  It is known as the
1927 "HU" algorithm, and is equivalent to value numbering the collapsed
1928 constraint graph including evaluating unions.
1929
1930 The general method of finding equivalence classes is as follows:
1931 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1932 Initialize all non-REF nodes to be direct nodes.
1933 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1934 variable}
1935 For each constraint containing the dereference, we also do the same
1936 thing.
1937
1938 We then compute SCC's in the graph and unify nodes in the same SCC,
1939 including pts sets.
1940
1941 For each non-collapsed node x:
1942 Visit all unvisited explicit incoming edges.
1943 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1944 where y->x.
1945 Lookup the equivalence class for pts(x).
1946 If we found one, equivalence_class(x) = found class.
1947 Otherwise, equivalence_class(x) = new class, and new_class is
1948 added to the lookup table.
1949
1950 All direct nodes with the same equivalence class can be replaced
1951 with a single representative node.
1952 All unlabeled nodes (label == 0) are not pointers and all edges
1953 involving them can be eliminated.
1954 We perform these optimizations during rewrite_constraints
1955
1956 In addition to pointer equivalence class finding, we also perform
1957 location equivalence class finding. This is the set of variables
1958 that always appear together in points-to sets. We use this to
1959 compress the size of the points-to sets. */
1960
1961 /* Current maximum pointer equivalence class id. */
1962 static int pointer_equiv_class;
1963
1964 /* Current maximum location equivalence class id. */
1965 static int location_equiv_class;
1966
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   graph (explicit and implicit predecessor edges) and collapses each
   component onto a single representative via SI->node_mapping.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all nodes reachable through explicit predecessor edges.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number upwards.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of a component: collapse every stack node
	 belonging to it onto N.  */
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* The representative is direct only if every member was.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2055
/* Label pointer equivalences.  Assigns a pointer-equivalence class to
   node N from the union of its predecessors' points-to sets; nodes
   with identical sets share a class (looked up via
   pointer_equiv_class_table).  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating a bitmap until a second non-empty
		 predecessor shows up; a single predecessor's set can
		 be shared or copied directly.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable is represented by the node's own REF bit.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* An identical set already has a class; share its bitmap
	     and drop ours.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2142
2143 /* Print the pred graph in dot format. */
2144
2145 static void
2146 dump_pred_graph (struct scc_info *si, FILE *file)
2147 {
2148 unsigned int i;
2149
2150 /* Only print the graph if it has already been initialized: */
2151 if (!graph)
2152 return;
2153
2154 /* Prints the header of the dot file: */
2155 fprintf (file, "strict digraph {\n");
2156 fprintf (file, " node [\n shape = box\n ]\n");
2157 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2158 fprintf (file, "\n // List of nodes and complex constraints in "
2159 "the constraint graph:\n");
2160
2161 /* The next lines print the nodes in the graph together with the
2162 complex constraints attached to them. */
2163 for (i = 1; i < graph->size; i++)
2164 {
2165 if (i == FIRST_REF_NODE)
2166 continue;
2167 if (si->node_mapping[i] != i)
2168 continue;
2169 if (i < FIRST_REF_NODE)
2170 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2171 else
2172 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2173 if (graph->points_to[i]
2174 && !bitmap_empty_p (graph->points_to[i]))
2175 {
2176 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2177 unsigned j;
2178 bitmap_iterator bi;
2179 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2180 fprintf (file, " %d", j);
2181 fprintf (file, " }\"]");
2182 }
2183 fprintf (file, ";\n");
2184 }
2185
2186 /* Go over the edges. */
2187 fprintf (file, "\n // Edges in the constraint graph:\n");
2188 for (i = 1; i < graph->size; i++)
2189 {
2190 unsigned j;
2191 bitmap_iterator bi;
2192 if (si->node_mapping[i] != i)
2193 continue;
2194 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2195 {
2196 unsigned from = si->node_mapping[j];
2197 if (from < FIRST_REF_NODE)
2198 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2199 else
2200 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2201 fprintf (file, " -> ");
2202 if (i < FIRST_REF_NODE)
2203 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2204 else
2205 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2206 fprintf (file, ";\n");
2207 }
2208 }
2209
2210 /* Prints the tail of the dot file. */
2211 fprintf (file, "}\n");
2212 }
2213
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Phases: (1) condense SCCs in the predecessor graph, (2) label
   pointer equivalences, (3) label location equivalences, (4) clear
   edges of variables proven to never hold pointers.  Returns the
   scc_info the caller must later pass to free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Both tables map an equivalence-defining bitmap to its class label;
     `free' is the deletion callback for the table entries.  */
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  /* Class 0 is reserved to mean "non-pointer".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The table already owns an equal bitmap; ours is redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  fprintf (dump_file, "%s node id %d (%s) mapped to SCC leader "
		   "node id %d (%s)\n",
		   bitmap_bit_p (graph->direct_nodes, i)
		   ? "Direct" : "Indirect", i, get_varinfo (i)->name,
		   j, get_varinfo (j)->name);
	else
	  fprintf (dump_file,
		   "Equivalence classes for %s node id %d (%s): pointer %d"
		   ", location %d\n",
		   bitmap_bit_p (graph->direct_nodes, i)
		   ? "direct" : "indirect", i, get_varinfo (i)->name,
		   graph->pointer_label[i], graph->loc_label[i]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means the variable can never contain a
	 pointer, so its edges cannot affect any solution.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2328
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  /* Per-node scratch arrays used only during substitution.  */
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  /* The tables free their entries via the `free' callback they were
     created with (see perform_var_substitution).  */
  htab_delete (pointer_equiv_class_table);
  htab_delete (location_equiv_class_table);
  /* Releasing the obstack reclaims every bitmap allocated on it in
     one shot.  */
  bitmap_obstack_release (&iteration_obstack);
}
2346
2347 /* Return an existing node that is equivalent to NODE, which has
2348 equivalence class LABEL, if one exists. Return NODE otherwise. */
2349
2350 static unsigned int
2351 find_equivalent_node (constraint_graph_t graph,
2352 unsigned int node, unsigned int label)
2353 {
2354 /* If the address version of this variable is unused, we can
2355 substitute it for anything else with the same label.
2356 Otherwise, we know the pointers are equivalent, but not the
2357 locations, and we can unite them later. */
2358
2359 if (!bitmap_bit_p (graph->address_taken, node))
2360 {
2361 gcc_checking_assert (label < graph->size);
2362
2363 if (graph->eq_rep[label] != -1)
2364 {
2365 /* Unify the two variables since we know they are equivalent. */
2366 if (unite (graph->eq_rep[label], node))
2367 unify_nodes (graph, graph->eq_rep[label], node, false);
2368 return graph->eq_rep[label];
2369 }
2370 else
2371 {
2372 graph->eq_rep[label] = node;
2373 graph->pe_rep[label] = node;
2374 }
2375 }
2376 else
2377 {
2378 gcc_checking_assert (label < graph->size);
2379 graph->pe[node] = label;
2380 if (graph->pe_rep[label] == -1)
2381 graph->pe_rep[label] = node;
2382 }
2383
2384 return node;
2385 }
2386
2387 /* Unite pointer equivalent but not location equivalent nodes in
2388 GRAPH. This may only be performed once variable substitution is
2389 finished. */
2390
2391 static void
2392 unite_pointer_equivalences (constraint_graph_t graph)
2393 {
2394 unsigned int i;
2395
2396 /* Go through the pointer equivalences and unite them to their
2397 representative, if they aren't already. */
2398 for (i = 1; i < FIRST_REF_NODE; i++)
2399 {
2400 unsigned int label = graph->pe[i];
2401 if (label)
2402 {
2403 int label_rep = graph->pe_rep[label];
2404
2405 if (label_rep == -1)
2406 continue;
2407
2408 label_rep = find (label_rep);
2409 if (label_rep >= 0 && unite (label_rep, find (i)))
2410 unify_nodes (graph, label_rep, i, false);
2411 }
2412 }
2413 }
2414
2415 /* Move complex constraints to the GRAPH nodes they belong to. */
2416
2417 static void
2418 move_complex_constraints (constraint_graph_t graph)
2419 {
2420 int i;
2421 constraint_t c;
2422
2423 FOR_EACH_VEC_ELT (constraints, i, c)
2424 {
2425 if (c)
2426 {
2427 struct constraint_expr lhs = c->lhs;
2428 struct constraint_expr rhs = c->rhs;
2429
2430 if (lhs.type == DEREF)
2431 {
2432 insert_into_complex (graph, lhs.var, c);
2433 }
2434 else if (rhs.type == DEREF)
2435 {
2436 if (!(get_varinfo (lhs.var)->is_special_var))
2437 insert_into_complex (graph, rhs.var, c);
2438 }
2439 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2440 && (lhs.offset != 0 || rhs.offset != 0))
2441 {
2442 insert_into_complex (graph, rhs.var, c);
2443 }
2444 }
2445 }
2446 }
2447
2448
2449 /* Optimize and rewrite complex constraints while performing
2450 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2451 result of perform_variable_substitution. */
2452
2453 static void
2454 rewrite_constraints (constraint_graph_t graph,
2455 struct scc_info *si)
2456 {
2457 int i;
2458 constraint_t c;
2459
2460 #ifdef ENABLE_CHECKING
2461 for (unsigned int j = 0; j < graph->size; j++)
2462 gcc_assert (find (j) == j);
2463 #endif
2464
2465 FOR_EACH_VEC_ELT (constraints, i, c)
2466 {
2467 struct constraint_expr lhs = c->lhs;
2468 struct constraint_expr rhs = c->rhs;
2469 unsigned int lhsvar = find (lhs.var);
2470 unsigned int rhsvar = find (rhs.var);
2471 unsigned int lhsnode, rhsnode;
2472 unsigned int lhslabel, rhslabel;
2473
2474 lhsnode = si->node_mapping[lhsvar];
2475 rhsnode = si->node_mapping[rhsvar];
2476 lhslabel = graph->pointer_label[lhsnode];
2477 rhslabel = graph->pointer_label[rhsnode];
2478
2479 /* See if it is really a non-pointer variable, and if so, ignore
2480 the constraint. */
2481 if (lhslabel == 0)
2482 {
2483 if (dump_file && (dump_flags & TDF_DETAILS))
2484 {
2485
2486 fprintf (dump_file, "%s is a non-pointer variable,"
2487 "ignoring constraint:",
2488 get_varinfo (lhs.var)->name);
2489 dump_constraint (dump_file, c);
2490 fprintf (dump_file, "\n");
2491 }
2492 constraints[i] = NULL;
2493 continue;
2494 }
2495
2496 if (rhslabel == 0)
2497 {
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2499 {
2500
2501 fprintf (dump_file, "%s is a non-pointer variable,"
2502 "ignoring constraint:",
2503 get_varinfo (rhs.var)->name);
2504 dump_constraint (dump_file, c);
2505 fprintf (dump_file, "\n");
2506 }
2507 constraints[i] = NULL;
2508 continue;
2509 }
2510
2511 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2512 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2513 c->lhs.var = lhsvar;
2514 c->rhs.var = rhsvar;
2515 }
2516 }
2517
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      /* Representatives united into TO, queued so unify_nodes can run
	 after the solution-set walk below has finished.  */
      vec<unsigned> queue = vNULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      /* Now it is safe to merge the solutions of the queued nodes.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      queue.release ();
      return true;
    }
  return false;
}
2557
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  /* Scratch bitmap holding the delta of a node's solution since its
     previous visit.  */
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only representatives with a non-empty solution and either
	 successors or complex constraints can contribute anything.  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Visit nodes in topological order so each iteration propagates
	 as far as possible.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't have ended up with "changed"
		     set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS = bits added since the last visit of this node.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember the processed bits so the next visit only
		 sees the fresh delta.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Re-queue the successor if its solution grew.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2714
2715 /* Map from trees to variable infos. */
2716 static struct pointer_map_t *vi_for_tree;
2717
2718
2719 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2720
2721 static void
2722 insert_vi_for_tree (tree t, varinfo_t vi)
2723 {
2724 void **slot = pointer_map_insert (vi_for_tree, t);
2725 gcc_assert (vi);
2726 gcc_assert (*slot == NULL);
2727 *slot = vi;
2728 }
2729
2730 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2731 exist in the map, return NULL, otherwise, return the varinfo we found. */
2732
2733 static varinfo_t
2734 lookup_vi_for_tree (tree t)
2735 {
2736 void **slot = pointer_map_contains (vi_for_tree, t);
2737 if (slot == NULL)
2738 return NULL;
2739
2740 return (varinfo_t) *slot;
2741 }
2742
2743 /* Return a printable name for DECL */
2744
2745 static const char *
2746 alias_get_name (tree decl)
2747 {
2748 const char *res = NULL;
2749 char *temp;
2750 int num_printed = 0;
2751
2752 if (!dump_file)
2753 return "NULL";
2754
2755 if (TREE_CODE (decl) == SSA_NAME)
2756 {
2757 res = get_name (decl);
2758 if (res)
2759 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2760 else
2761 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2762 if (num_printed > 0)
2763 {
2764 res = ggc_strdup (temp);
2765 free (temp);
2766 }
2767 }
2768 else if (DECL_P (decl))
2769 {
2770 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2771 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2772 else
2773 {
2774 res = get_name (decl);
2775 if (!res)
2776 {
2777 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2778 if (num_printed > 0)
2779 {
2780 res = ggc_strdup (temp);
2781 free (temp);
2782 }
2783 }
2784 }
2785 }
2786 if (res != NULL)
2787 return res;
2788
2789 return "NULL";
2790 }
2791
2792 /* Find the variable id for tree T in the map.
2793 If T doesn't exist in the map, create an entry for it and return it. */
2794
2795 static varinfo_t
2796 get_vi_for_tree (tree t)
2797 {
2798 void **slot = pointer_map_contains (vi_for_tree, t);
2799 if (slot == NULL)
2800 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2801
2802 return (varinfo_t) *slot;
2803 }
2804
2805 /* Get a scalar constraint expression for a new temporary variable. */
2806
2807 static struct constraint_expr
2808 new_scalar_tmp_constraint_exp (const char *name)
2809 {
2810 struct constraint_expr tmp;
2811 varinfo_t vi;
2812
2813 vi = new_var_info (NULL_TREE, name);
2814 vi->offset = 0;
2815 vi->size = -1;
2816 vi->fullsize = -1;
2817 vi->is_full_var = 1;
2818
2819 tmp.var = vi->id;
2820 tmp.type = SCALAR;
2821 tmp.offset = 0;
2822
2823 return tmp;
2824 }
2825
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias)
	{
	  node = varpool_variable_node (node, NULL);
	  t = node->symbol.decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.
     NOTE(review): the gcc_unreachable makes the two assignments below
     dead code -- this case is apparently believed impossible; the
     assignments look like leftovers kept for documentation.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2889
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.

   Constraints with a dereference on both sides, or an address-of rhs
   with a dereferenced lhs, are split through a fresh temporary; the
   recursion terminates because the split pieces are strictly simpler.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Base case: record the (possibly simplified) constraint.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
2945
2946
2947 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2948 structure. */
2949
2950 static HOST_WIDE_INT
2951 bitpos_of_field (const tree fdecl)
2952 {
2953 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2954 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2955 return -1;
2956
2957 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2958 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2959 }
2960
2961
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  /* Overflow check: converting bytes to bits must round-trip.  */
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the whole field chain of the variable, starting at
	     its head, and add every sub-field.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ??? If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != 0)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	/* SCALAR/DEREF: simply record the (possibly unknown) offset.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3080
3081
3082 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3083 If address_p is true the result will be taken its address of.
3084 If lhs_p is true then the constraint expression is assumed to be used
3085 as the lhs. */
3086
3087 static void
3088 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3089 bool address_p, bool lhs_p)
3090 {
3091 tree orig_t = t;
3092 HOST_WIDE_INT bitsize = -1;
3093 HOST_WIDE_INT bitmaxsize = -1;
3094 HOST_WIDE_INT bitpos;
3095 tree forzero;
3096
3097 /* Some people like to do cute things like take the address of
3098 &0->a.b */
3099 forzero = t;
3100 while (handled_component_p (forzero)
3101 || INDIRECT_REF_P (forzero)
3102 || TREE_CODE (forzero) == MEM_REF)
3103 forzero = TREE_OPERAND (forzero, 0);
3104
3105 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3106 {
3107 struct constraint_expr temp;
3108
3109 temp.offset = 0;
3110 temp.var = integer_id;
3111 temp.type = SCALAR;
3112 results->safe_push (temp);
3113 return;
3114 }
3115
3116 /* Handle type-punning through unions. If we are extracting a pointer
3117 from a union via a possibly type-punning access that pointer
3118 points to anything, similar to a conversion of an integer to
3119 a pointer. */
3120 if (!lhs_p)
3121 {
3122 tree u;
3123 for (u = t;
3124 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3125 u = TREE_OPERAND (u, 0))
3126 if (TREE_CODE (u) == COMPONENT_REF
3127 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3128 {
3129 struct constraint_expr temp;
3130
3131 temp.offset = 0;
3132 temp.var = anything_id;
3133 temp.type = ADDRESSOF;
3134 results->safe_push (temp);
3135 return;
3136 }
3137 }
3138
3139 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3140
3141 /* Pretend to take the address of the base, we'll take care of
3142 adding the required subset of sub-fields below. */
3143 get_constraint_for_1 (t, results, true, lhs_p);
3144 gcc_assert (results->length () == 1);
3145 struct constraint_expr &result = results->last ();
3146
3147 if (result.type == SCALAR
3148 && get_varinfo (result.var)->is_full_var)
3149 /* For single-field vars do not bother about the offset. */
3150 result.offset = 0;
3151 else if (result.type == SCALAR)
3152 {
3153 /* In languages like C, you can access one past the end of an
3154 array. You aren't allowed to dereference it, so we can
3155 ignore this constraint. When we handle pointer subtraction,
3156 we may have to do something cute here. */
3157
3158 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3159 && bitmaxsize != 0)
3160 {
3161 /* It's also not true that the constraint will actually start at the
3162 right offset, it may start in some padding. We only care about
3163 setting the constraint to the first actual field it touches, so
3164 walk to find it. */
3165 struct constraint_expr cexpr = result;
3166 varinfo_t curr;
3167 results->pop ();
3168 cexpr.offset = 0;
3169 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3170 {
3171 if (ranges_overlap_p (curr->offset, curr->size,
3172 bitpos, bitmaxsize))
3173 {
3174 cexpr.var = curr->id;
3175 results->safe_push (cexpr);
3176 if (address_p)
3177 break;
3178 }
3179 }
3180 /* If we are going to take the address of this field then
3181 to be able to compute reachability correctly add at least
3182 the last field of the variable. */
3183 if (address_p && results->length () == 0)
3184 {
3185 curr = get_varinfo (cexpr.var);
3186 while (curr->next != 0)
3187 curr = vi_next (curr);
3188 cexpr.var = curr->id;
3189 results->safe_push (cexpr);
3190 }
3191 else if (results->length () == 0)
3192 /* Assert that we found *some* field there. The user couldn't be
3193 accessing *only* padding. */
3194 /* Still the user could access one past the end of an array
3195 embedded in a struct resulting in accessing *only* padding. */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
3198 {
3199 cexpr.type = SCALAR;
3200 cexpr.var = anything_id;
3201 cexpr.offset = 0;
3202 results->safe_push (cexpr);
3203 }
3204 }
3205 else if (bitmaxsize == 0)
3206 {
3207 if (dump_file && (dump_flags & TDF_DETAILS))
3208 fprintf (dump_file, "Access to zero-sized part of variable,"
3209 "ignoring\n");
3210 }
3211 else
3212 if (dump_file && (dump_flags & TDF_DETAILS))
3213 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3214 }
3215 else if (result.type == DEREF)
3216 {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses do we know that we access
	 at most one subfield of any variable.  */
3220 if (bitpos == -1
3221 || bitsize != bitmaxsize
3222 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3223 || result.offset == UNKNOWN_OFFSET)
3224 result.offset = UNKNOWN_OFFSET;
3225 else
3226 result.offset += bitpos;
3227 }
3228 else if (result.type == ADDRESSOF)
3229 {
3230 /* We can end up here for component references on a
3231 VIEW_CONVERT_EXPR <>(&foobar). */
3232 result.type = SCALAR;
3233 result.var = anything_id;
3234 result.offset = 0;
3235 }
3236 else
3237 gcc_unreachable ();
3238 }
3239
3240
3241 /* Dereference the constraint expression CONS, and return the result.
3242 DEREF (ADDRESSOF) = SCALAR
3243 DEREF (SCALAR) = DEREF
3244 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3245 This is needed so that we can handle dereferencing DEREF constraints. */
3246
3247 static void
3248 do_deref (vec<ce_s> *constraints)
3249 {
3250 struct constraint_expr *c;
3251 unsigned int i = 0;
3252
3253 FOR_EACH_VEC_ELT (*constraints, i, c)
3254 {
3255 if (c->type == SCALAR)
3256 c->type = DEREF;
3257 else if (c->type == ADDRESSOF)
3258 c->type = SCALAR;
3259 else if (c->type == DEREF)
3260 {
3261 struct constraint_expr tmplhs;
3262 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3263 process_constraint (new_constraint (tmplhs, *c));
3264 c->var = tmplhs.var;
3265 }
3266 else
3267 gcc_unreachable ();
3268 }
3269 }
3270
3271 /* Given a tree T, return the constraint expression for taking the
3272 address of it. */
3273
3274 static void
3275 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3276 {
3277 struct constraint_expr *c;
3278 unsigned int i;
3279
3280 get_constraint_for_1 (t, results, true, true);
3281
3282 FOR_EACH_VEC_ELT (*results, i, c)
3283 {
3284 if (c->type == DEREF)
3285 c->type = SCALAR;
3286 else
3287 c->type = ADDRESSOF;
3288 }
3289 }
3290
/* Given a tree T, fill *RESULTS with the constraint expressions for it.
   ADDRESS_P is true if the address of T is being taken; LHS_P is true
   if T appears on the left-hand side of an assignment.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the class of tree code; each handled case returns,
     unhandled codes break out to the ANYTHING fallback below.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* A SCALAR access into a multi-field variable: also push
		 constraints for every sub-field the access may touch,
		 bounded by the access size (or all of them when the
		 size is not a host integer).  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      /* Accumulate the constraints of all constructor elements.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3475
3476 /* Given a gimple tree T, return the constraint expression vector for it. */
3477
3478 static void
3479 get_constraint_for (tree t, vec<ce_s> *results)
3480 {
3481 gcc_assert (results->length () == 0);
3482
3483 get_constraint_for_1 (t, results, false, true);
3484 }
3485
3486 /* Given a gimple tree T, return the constraint expression vector for it
3487 to be used as the rhs of a constraint. */
3488
3489 static void
3490 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3491 {
3492 gcc_assert (results->length () == 0);
3493
3494 get_constraint_for_1 (t, results, false, false);
3495 }
3496
3497
3498 /* Efficiently generates constraints from all entries in *RHSC to all
3499 entries in *LHSC. */
3500
3501 static void
3502 process_all_all_constraints (vec<ce_s> lhsc,
3503 vec<ce_s> rhsc)
3504 {
3505 struct constraint_expr *lhsp, *rhsp;
3506 unsigned i, j;
3507
3508 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3509 {
3510 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3511 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3512 process_constraint (new_constraint (*lhsp, *rhsp));
3513 }
3514 else
3515 {
3516 struct constraint_expr tmp;
3517 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3518 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3519 process_constraint (new_constraint (tmp, *rhsp));
3520 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3521 process_constraint (new_constraint (*lhsp, tmp));
3522 }
3523 }
3524
/* Handle aggregate copies LHSOP = RHSOP by expanding into copies of
   the respective fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is accessed indirectly, or the destination is
     completely unknown, we cannot match up individual fields; emit
     all-to-all copies with unknown offsets instead.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      /* Walk the lhs field constraints (index J) and the rhs field
	 constraints (index K) in lock-step, emitting a copy whenever
	 the respective field ranges overlap.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance K when the current rhs field ends at or before the
	     current lhs field (adjusted for the access offsets),
	     otherwise advance J.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3596
3597 /* Create constraints ID = { rhsc }. */
3598
3599 static void
3600 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3601 {
3602 struct constraint_expr *c;
3603 struct constraint_expr includes;
3604 unsigned int j;
3605
3606 includes.var = id;
3607 includes.offset = 0;
3608 includes.type = SCALAR;
3609
3610 FOR_EACH_VEC_ELT (rhsc, j, c)
3611 process_constraint (new_constraint (includes, *c));
3612 }
3613
3614 /* Create a constraint ID = OP. */
3615
3616 static void
3617 make_constraint_to (unsigned id, tree op)
3618 {
3619 vec<ce_s> rhsc = vNULL;
3620 get_constraint_for_rhs (op, &rhsc);
3621 make_constraints_to (id, rhsc);
3622 rhsc.release ();
3623 }
3624
3625 /* Create a constraint ID = &FROM. */
3626
3627 static void
3628 make_constraint_from (varinfo_t vi, int from)
3629 {
3630 struct constraint_expr lhs, rhs;
3631
3632 lhs.var = vi->id;
3633 lhs.offset = 0;
3634 lhs.type = SCALAR;
3635
3636 rhs.var = from;
3637 rhs.offset = 0;
3638 rhs.type = ADDRESSOF;
3639 process_constraint (new_constraint (lhs, rhs));
3640 }
3641
3642 /* Create a constraint ID = FROM. */
3643
3644 static void
3645 make_copy_constraint (varinfo_t vi, int from)
3646 {
3647 struct constraint_expr lhs, rhs;
3648
3649 lhs.var = vi->id;
3650 lhs.offset = 0;
3651 lhs.type = SCALAR;
3652
3653 rhs.var = from;
3654 rhs.offset = 0;
3655 rhs.type = SCALAR;
3656 process_constraint (new_constraint (lhs, rhs));
3657 }
3658
/* Make constraints necessary to make OP escape.  */

static void
make_escape_constraint (tree op)
{
  /* Everything OP may point to flows into the special ESCAPED
     variable.  */
  make_constraint_to (escaped_id, op);
}
3666
3667 /* Add constraints to that the solution of VI is transitively closed. */
3668
3669 static void
3670 make_transitive_closure_constraints (varinfo_t vi)
3671 {
3672 struct constraint_expr lhs, rhs;
3673
3674 /* VAR = *VAR; */
3675 lhs.type = SCALAR;
3676 lhs.var = vi->id;
3677 lhs.offset = 0;
3678 rhs.type = DEREF;
3679 rhs.var = vi->id;
3680 rhs.offset = 0;
3681 process_constraint (new_constraint (lhs, rhs));
3682
3683 /* VAR = VAR + UNKNOWN; */
3684 lhs.type = SCALAR;
3685 lhs.var = vi->id;
3686 lhs.offset = 0;
3687 rhs.type = SCALAR;
3688 rhs.var = vi->id;
3689 rhs.offset = UNKNOWN_OFFSET;
3690 process_constraint (new_constraint (lhs, rhs));
3691 }
3692
/* Temporary storage for fake var decls created by
   build_fake_var_decl.  */
struct obstack fake_var_decl_obstack;
3695
/* Build a fake VAR_DECL of TYPE acting as referrer to a DECL_UID.  */

static tree
build_fake_var_decl (tree type)
{
  /* Allocate the raw decl on the local obstack; these decls only
     live for the duration of the analysis.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): PT_UID is set to -1 here, presumably a sentinel
     for "no points-to UID assigned" — confirm against DECL_PT_UID
     users.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3710
3711 /* Create a new artificial heap variable with NAME.
3712 Return the created variable. */
3713
3714 static varinfo_t
3715 make_heapvar (const char *name)
3716 {
3717 varinfo_t vi;
3718 tree heapvar;
3719
3720 heapvar = build_fake_var_decl (ptr_type_node);
3721 DECL_EXTERNAL (heapvar) = 1;
3722
3723 vi = new_var_info (heapvar, name);
3724 vi->is_artificial_var = true;
3725 vi->is_heap_var = true;
3726 vi->is_unknown_size_var = true;
3727 vi->offset = 0;
3728 vi->fullsize = ~0;
3729 vi->size = ~0;
3730 vi->is_full_var = true;
3731 insert_vi_for_tree (heapvar, vi);
3732
3733 return vi;
3734 }
3735
3736 /* Create a new artificial heap variable with NAME and make a
3737 constraint from it to LHS. Set flags according to a tag used
3738 for tracking restrict pointers. */
3739
3740 static varinfo_t
3741 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3742 {
3743 varinfo_t vi = make_heapvar (name);
3744 vi->is_global_var = 1;
3745 vi->may_have_pointers = 1;
3746 make_constraint_from (lhs, vi->id);
3747 return vi;
3748 }
3749
3750 /* Create a new artificial heap variable with NAME and make a
3751 constraint from it to LHS. Set flags according to a tag used
3752 for tracking restrict pointers and make the artificial heap
3753 point to global memory. */
3754
3755 static varinfo_t
3756 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3757 {
3758 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3759 make_copy_constraint (vi, nonlocal_id);
3760 return vi;
3761 }
3762
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */
3768
3769 enum { fi_clobbers = 1, fi_uses = 2,
3770 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3771
3772 /* Get a constraint for the requested part of a function designator FI
3773 when operating in IPA mode. */
3774
3775 static struct constraint_expr
3776 get_function_part_constraint (varinfo_t fi, unsigned part)
3777 {
3778 struct constraint_expr c;
3779
3780 gcc_assert (in_ipa_mode);
3781
3782 if (fi->id == anything_id)
3783 {
3784 /* ??? We probably should have a ANYFN special variable. */
3785 c.var = anything_id;
3786 c.offset = 0;
3787 c.type = SCALAR;
3788 }
3789 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3790 {
3791 varinfo_t ai = first_vi_for_offset (fi, part);
3792 if (ai)
3793 c.var = ai->id;
3794 else
3795 c.var = anything_id;
3796 c.offset = 0;
3797 c.type = SCALAR;
3798 }
3799 else
3800 {
3801 c.var = fi->id;
3802 c.offset = part;
3803 c.type = DEREF;
3804 }
3805
3806 return c;
3807 }
3808
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS of STMT, pushing the rhs constraints of the call result onto
   *RESULTS.  */

static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Whether anything flowed into the call-use variable, in which case
     the call result may point into it.  */
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      /* Indirect use: everything transitively reachable from
		 the argument is call-used.  */
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  /* Non-escaping arguments may still be used and clobbered.  */
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* No helpful flags: the argument fully escapes.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3910
/* For non-IPA mode, generate constraints necessary for a call of
   STMT (with callee FNDECL, possibly NULL) that returns a pointer
   and assigns it to LHS.  RHSC are the rhs constraints of the call
   result.  This simply makes the LHS point to global and escaped
   variables.  */

static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  NOTE(review): the incoming FLAGS parameter is
     overwritten here before ever being read — confirm whether the
     parameter is intentionally unused.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      /* A noalias (malloc-like) result points to a fresh heap
	 variable.  */
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
	 it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
3977
3978 /* For non-IPA mode, generate constraints necessary for a call of a
3979 const function that returns a pointer in the statement STMT. */
3980
3981 static void
3982 handle_const_call (gimple stmt, vec<ce_s> *results)
3983 {
3984 struct constraint_expr rhsc;
3985 unsigned int k;
3986
3987 /* Treat nested const functions the same as pure functions as far
3988 as the static chain is concerned. */
3989 if (gimple_call_chain (stmt))
3990 {
3991 varinfo_t uses = get_call_use_vi (stmt);
3992 make_transitive_closure_constraints (uses);
3993 make_constraint_to (uses->id, gimple_call_chain (stmt));
3994 rhsc.var = uses->id;
3995 rhsc.offset = 0;
3996 rhsc.type = SCALAR;
3997 results->safe_push (rhsc);
3998 }
3999
4000 /* May return arguments. */
4001 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4002 {
4003 tree arg = gimple_call_arg (stmt, k);
4004 vec<ce_s> argc = vNULL;
4005 unsigned i;
4006 struct constraint_expr *argp;
4007 get_constraint_for_rhs (arg, &argc);
4008 FOR_EACH_VEC_ELT (argc, i, argp)
4009 results->safe_push (*argp);
4010 argc.release ();
4011 }
4012
4013 /* May return addresses of globals. */
4014 rhsc.var = nonlocal_id;
4015 rhsc.offset = 0;
4016 rhsc.type = ADDRESSOF;
4017 results->safe_push (rhsc);
4018 }
4019
4020 /* For non-IPA mode, generate constraints necessary for a call to a
4021 pure function in statement STMT. */
4022
4023 static void
4024 handle_pure_call (gimple stmt, vec<ce_s> *results)
4025 {
4026 struct constraint_expr rhsc;
4027 unsigned i;
4028 varinfo_t uses = NULL;
4029
4030 /* Memory reached from pointer arguments is call-used. */
4031 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4032 {
4033 tree arg = gimple_call_arg (stmt, i);
4034 if (!uses)
4035 {
4036 uses = get_call_use_vi (stmt);
4037 make_transitive_closure_constraints (uses);
4038 }
4039 make_constraint_to (uses->id, arg);
4040 }
4041
4042 /* The static chain is used as well. */
4043 if (gimple_call_chain (stmt))
4044 {
4045 if (!uses)
4046 {
4047 uses = get_call_use_vi (stmt);
4048 make_transitive_closure_constraints (uses);
4049 }
4050 make_constraint_to (uses->id, gimple_call_chain (stmt));
4051 }
4052
4053 /* Pure functions may return call-used and nonlocal memory. */
4054 if (uses)
4055 {
4056 rhsc.var = uses->id;
4057 rhsc.offset = 0;
4058 rhsc.type = SCALAR;
4059 results->safe_push (rhsc);
4060 }
4061 rhsc.var = nonlocal_id;
4062 rhsc.offset = 0;
4063 rhsc.type = SCALAR;
4064 results->safe_push (rhsc);
4065 }
4066
4067
4068 /* Return the varinfo for the callee of CALL. */
4069
4070 static varinfo_t
4071 get_fi_for_callee (gimple call)
4072 {
4073 tree decl, fn = gimple_call_fn (call);
4074
4075 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4076 fn = OBJ_TYPE_REF_EXPR (fn);
4077
4078 /* If we can directly resolve the function being called, do so.
4079 Otherwise, it must be some sort of indirect expression that
4080 we should still be able to handle. */
4081 decl = gimple_call_addr_fndecl (fn);
4082 if (decl)
4083 return get_vi_for_tree (decl);
4084
4085 /* If the function is anything other than a SSA name pointer we have no
4086 clue and should be getting ANYFN (well, ANYTHING for now). */
4087 if (!fn || TREE_CODE (fn) != SSA_NAME)
4088 return get_varinfo (anything_id);
4089
4090 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4091 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4092 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4093 fn = SSA_NAME_VAR (fn);
4094
4095 return get_vi_for_tree (fn);
4096 }
4097
4098 /* Create constraints for the builtin call T. Return true if the call
4099 was handled, otherwise false. */
4100
4101 static bool
4102 find_func_aliases_for_builtin_call (gimple t)
4103 {
4104 tree fndecl = gimple_call_fndecl (t);
4105 vec<ce_s> lhsc = vNULL;
4106 vec<ce_s> rhsc = vNULL;
4107 varinfo_t fi;
4108
4109 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4110 /* ??? All builtins that are handled here need to be handled
4111 in the alias-oracle query functions explicitly! */
4112 switch (DECL_FUNCTION_CODE (fndecl))
4113 {
4114 /* All the following functions return a pointer to the same object
4115 as their first argument points to. The functions do not add
4116 to the ESCAPED solution. The functions make the first argument
4117 pointed to memory point to what the second argument pointed to
4118 memory points to. */
4119 case BUILT_IN_STRCPY:
4120 case BUILT_IN_STRNCPY:
4121 case BUILT_IN_BCOPY:
4122 case BUILT_IN_MEMCPY:
4123 case BUILT_IN_MEMMOVE:
4124 case BUILT_IN_MEMPCPY:
4125 case BUILT_IN_STPCPY:
4126 case BUILT_IN_STPNCPY:
4127 case BUILT_IN_STRCAT:
4128 case BUILT_IN_STRNCAT:
4129 case BUILT_IN_STRCPY_CHK:
4130 case BUILT_IN_STRNCPY_CHK:
4131 case BUILT_IN_MEMCPY_CHK:
4132 case BUILT_IN_MEMMOVE_CHK:
4133 case BUILT_IN_MEMPCPY_CHK:
4134 case BUILT_IN_STPCPY_CHK:
4135 case BUILT_IN_STPNCPY_CHK:
4136 case BUILT_IN_STRCAT_CHK:
4137 case BUILT_IN_STRNCAT_CHK:
4138 case BUILT_IN_TM_MEMCPY:
4139 case BUILT_IN_TM_MEMMOVE:
4140 {
4141 tree res = gimple_call_lhs (t);
4142 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4143 == BUILT_IN_BCOPY ? 1 : 0));
4144 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4145 == BUILT_IN_BCOPY ? 0 : 1));
4146 if (res != NULL_TREE)
4147 {
4148 get_constraint_for (res, &lhsc);
4149 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4150 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4151 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4152 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4153 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4154 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4155 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4156 else
4157 get_constraint_for (dest, &rhsc);
4158 process_all_all_constraints (lhsc, rhsc);
4159 lhsc.release ();
4160 rhsc.release ();
4161 }
4162 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4163 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4164 do_deref (&lhsc);
4165 do_deref (&rhsc);
4166 process_all_all_constraints (lhsc, rhsc);
4167 lhsc.release ();
4168 rhsc.release ();
4169 return true;
4170 }
4171 case BUILT_IN_MEMSET:
4172 case BUILT_IN_MEMSET_CHK:
4173 case BUILT_IN_TM_MEMSET:
4174 {
4175 tree res = gimple_call_lhs (t);
4176 tree dest = gimple_call_arg (t, 0);
4177 unsigned i;
4178 ce_s *lhsp;
4179 struct constraint_expr ac;
4180 if (res != NULL_TREE)
4181 {
4182 get_constraint_for (res, &lhsc);
4183 get_constraint_for (dest, &rhsc);
4184 process_all_all_constraints (lhsc, rhsc);
4185 lhsc.release ();
4186 rhsc.release ();
4187 }
4188 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4189 do_deref (&lhsc);
4190 if (flag_delete_null_pointer_checks
4191 && integer_zerop (gimple_call_arg (t, 1)))
4192 {
4193 ac.type = ADDRESSOF;
4194 ac.var = nothing_id;
4195 }
4196 else
4197 {
4198 ac.type = SCALAR;
4199 ac.var = integer_id;
4200 }
4201 ac.offset = 0;
4202 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4203 process_constraint (new_constraint (*lhsp, ac));
4204 lhsc.release ();
4205 return true;
4206 }
4207 case BUILT_IN_ASSUME_ALIGNED:
4208 {
4209 tree res = gimple_call_lhs (t);
4210 tree dest = gimple_call_arg (t, 0);
4211 if (res != NULL_TREE)
4212 {
4213 get_constraint_for (res, &lhsc);
4214 get_constraint_for (dest, &rhsc);
4215 process_all_all_constraints (lhsc, rhsc);
4216 lhsc.release ();
4217 rhsc.release ();
4218 }
4219 return true;
4220 }
4221 /* All the following functions do not return pointers, do not
4222 modify the points-to sets of memory reachable from their
4223 arguments and do not add to the ESCAPED solution. */
4224 case BUILT_IN_SINCOS:
4225 case BUILT_IN_SINCOSF:
4226 case BUILT_IN_SINCOSL:
4227 case BUILT_IN_FREXP:
4228 case BUILT_IN_FREXPF:
4229 case BUILT_IN_FREXPL:
4230 case BUILT_IN_GAMMA_R:
4231 case BUILT_IN_GAMMAF_R:
4232 case BUILT_IN_GAMMAL_R:
4233 case BUILT_IN_LGAMMA_R:
4234 case BUILT_IN_LGAMMAF_R:
4235 case BUILT_IN_LGAMMAL_R:
4236 case BUILT_IN_MODF:
4237 case BUILT_IN_MODFF:
4238 case BUILT_IN_MODFL:
4239 case BUILT_IN_REMQUO:
4240 case BUILT_IN_REMQUOF:
4241 case BUILT_IN_REMQUOL:
4242 case BUILT_IN_FREE:
4243 return true;
4244 case BUILT_IN_STRDUP:
4245 case BUILT_IN_STRNDUP:
4246 if (gimple_call_lhs (t))
4247 {
4248 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4249 vNULL, fndecl);
4250 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4251 NULL_TREE, &lhsc);
4252 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4253 NULL_TREE, &rhsc);
4254 do_deref (&lhsc);
4255 do_deref (&rhsc);
4256 process_all_all_constraints (lhsc, rhsc);
4257 lhsc.release ();
4258 rhsc.release ();
4259 return true;
4260 }
4261 break;
4262 /* String / character search functions return a pointer into the
4263 source string or NULL. */
4264 case BUILT_IN_INDEX:
4265 case BUILT_IN_STRCHR:
4266 case BUILT_IN_STRRCHR:
4267 case BUILT_IN_MEMCHR:
4268 case BUILT_IN_STRSTR:
4269 case BUILT_IN_STRPBRK:
4270 if (gimple_call_lhs (t))
4271 {
4272 tree src = gimple_call_arg (t, 0);
4273 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4274 constraint_expr nul;
4275 nul.var = nothing_id;
4276 nul.offset = 0;
4277 nul.type = ADDRESSOF;
4278 rhsc.safe_push (nul);
4279 get_constraint_for (gimple_call_lhs (t), &lhsc);
4280 process_all_all_constraints (lhsc, rhsc);
4281 lhsc.release();
4282 rhsc.release();
4283 }
4284 return true;
4285 /* Trampolines are special - they set up passing the static
4286 frame. */
4287 case BUILT_IN_INIT_TRAMPOLINE:
4288 {
4289 tree tramp = gimple_call_arg (t, 0);
4290 tree nfunc = gimple_call_arg (t, 1);
4291 tree frame = gimple_call_arg (t, 2);
4292 unsigned i;
4293 struct constraint_expr lhs, *rhsp;
4294 if (in_ipa_mode)
4295 {
4296 varinfo_t nfi = NULL;
4297 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4298 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4299 if (nfi)
4300 {
4301 lhs = get_function_part_constraint (nfi, fi_static_chain);
4302 get_constraint_for (frame, &rhsc);
4303 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4304 process_constraint (new_constraint (lhs, *rhsp));
4305 rhsc.release ();
4306
4307 /* Make the frame point to the function for
4308 the trampoline adjustment call. */
4309 get_constraint_for (tramp, &lhsc);
4310 do_deref (&lhsc);
4311 get_constraint_for (nfunc, &rhsc);
4312 process_all_all_constraints (lhsc, rhsc);
4313 rhsc.release ();
4314 lhsc.release ();
4315
4316 return true;
4317 }
4318 }
4319 /* Else fallthru to generic handling which will let
4320 the frame escape. */
4321 break;
4322 }
4323 case BUILT_IN_ADJUST_TRAMPOLINE:
4324 {
4325 tree tramp = gimple_call_arg (t, 0);
4326 tree res = gimple_call_lhs (t);
4327 if (in_ipa_mode && res)
4328 {
4329 get_constraint_for (res, &lhsc);
4330 get_constraint_for (tramp, &rhsc);
4331 do_deref (&rhsc);
4332 process_all_all_constraints (lhsc, rhsc);
4333 rhsc.release ();
4334 lhsc.release ();
4335 }
4336 return true;
4337 }
4338 CASE_BUILT_IN_TM_STORE (1):
4339 CASE_BUILT_IN_TM_STORE (2):
4340 CASE_BUILT_IN_TM_STORE (4):
4341 CASE_BUILT_IN_TM_STORE (8):
4342 CASE_BUILT_IN_TM_STORE (FLOAT):
4343 CASE_BUILT_IN_TM_STORE (DOUBLE):
4344 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4345 CASE_BUILT_IN_TM_STORE (M64):
4346 CASE_BUILT_IN_TM_STORE (M128):
4347 CASE_BUILT_IN_TM_STORE (M256):
4348 {
4349 tree addr = gimple_call_arg (t, 0);
4350 tree src = gimple_call_arg (t, 1);
4351
4352 get_constraint_for (addr, &lhsc);
4353 do_deref (&lhsc);
4354 get_constraint_for (src, &rhsc);
4355 process_all_all_constraints (lhsc, rhsc);
4356 lhsc.release ();
4357 rhsc.release ();
4358 return true;
4359 }
4360 CASE_BUILT_IN_TM_LOAD (1):
4361 CASE_BUILT_IN_TM_LOAD (2):
4362 CASE_BUILT_IN_TM_LOAD (4):
4363 CASE_BUILT_IN_TM_LOAD (8):
4364 CASE_BUILT_IN_TM_LOAD (FLOAT):
4365 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4366 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4367 CASE_BUILT_IN_TM_LOAD (M64):
4368 CASE_BUILT_IN_TM_LOAD (M128):
4369 CASE_BUILT_IN_TM_LOAD (M256):
4370 {
4371 tree dest = gimple_call_lhs (t);
4372 tree addr = gimple_call_arg (t, 0);
4373
4374 get_constraint_for (dest, &lhsc);
4375 get_constraint_for (addr, &rhsc);
4376 do_deref (&rhsc);
4377 process_all_all_constraints (lhsc, rhsc);
4378 lhsc.release ();
4379 rhsc.release ();
4380 return true;
4381 }
4382 /* Variadic argument handling needs to be handled in IPA
4383 mode as well. */
4384 case BUILT_IN_VA_START:
4385 {
4386 tree valist = gimple_call_arg (t, 0);
4387 struct constraint_expr rhs, *lhsp;
4388 unsigned i;
4389 get_constraint_for (valist, &lhsc);
4390 do_deref (&lhsc);
4391 /* The va_list gets access to pointers in variadic
4392 arguments. Which we know in the case of IPA analysis
4393 and otherwise are just all nonlocal variables. */
4394 if (in_ipa_mode)
4395 {
4396 fi = lookup_vi_for_tree (cfun->decl);
4397 rhs = get_function_part_constraint (fi, ~0);
4398 rhs.type = ADDRESSOF;
4399 }
4400 else
4401 {
4402 rhs.var = nonlocal_id;
4403 rhs.type = ADDRESSOF;
4404 rhs.offset = 0;
4405 }
4406 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4407 process_constraint (new_constraint (*lhsp, rhs));
4408 lhsc.release ();
4409 /* va_list is clobbered. */
4410 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4411 return true;
4412 }
4413 /* va_end doesn't have any effect that matters. */
4414 case BUILT_IN_VA_END:
4415 return true;
4416 /* Alternate return. Simply give up for now. */
4417 case BUILT_IN_RETURN:
4418 {
4419 fi = NULL;
4420 if (!in_ipa_mode
4421 || !(fi = get_vi_for_tree (cfun->decl)))
4422 make_constraint_from (get_varinfo (escaped_id), anything_id);
4423 else if (in_ipa_mode
4424 && fi != NULL)
4425 {
4426 struct constraint_expr lhs, rhs;
4427 lhs = get_function_part_constraint (fi, fi_result);
4428 rhs.var = anything_id;
4429 rhs.offset = 0;
4430 rhs.type = SCALAR;
4431 process_constraint (new_constraint (lhs, rhs));
4432 }
4433 return true;
4434 }
4435 /* printf-style functions may have hooks to set pointers to
4436 point to somewhere into the generated string. Leave them
4437 for a later excercise... */
4438 default:
4439 /* Fallthru to general call handling. */;
4440 }
4441
4442 return false;
4443 }
4444
4445 /* Create constraints for the call T. */
4446
4447 static void
4448 find_func_aliases_for_call (gimple t)
4449 {
4450 tree fndecl = gimple_call_fndecl (t);
4451 vec<ce_s> lhsc = vNULL;
4452 vec<ce_s> rhsc = vNULL;
4453 varinfo_t fi;
4454
4455 if (fndecl != NULL_TREE
4456 && DECL_BUILT_IN (fndecl)
4457 && find_func_aliases_for_builtin_call (t))
4458 return;
4459
4460 fi = get_fi_for_callee (t);
4461 if (!in_ipa_mode
4462 || (fndecl && !fi->is_fn_info))
4463 {
4464 vec<ce_s> rhsc = vNULL;
4465 int flags = gimple_call_flags (t);
4466
4467 /* Const functions can return their arguments and addresses
4468 of global memory but not of escaped memory. */
4469 if (flags & (ECF_CONST|ECF_NOVOPS))
4470 {
4471 if (gimple_call_lhs (t))
4472 handle_const_call (t, &rhsc);
4473 }
4474 /* Pure functions can return addresses in and of memory
4475 reachable from their arguments, but they are not an escape
4476 point for reachable memory of their arguments. */
4477 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4478 handle_pure_call (t, &rhsc);
4479 else
4480 handle_rhs_call (t, &rhsc);
4481 if (gimple_call_lhs (t))
4482 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4483 rhsc.release ();
4484 }
4485 else
4486 {
4487 tree lhsop;
4488 unsigned j;
4489
4490 /* Assign all the passed arguments to the appropriate incoming
4491 parameters of the function. */
4492 for (j = 0; j < gimple_call_num_args (t); j++)
4493 {
4494 struct constraint_expr lhs ;
4495 struct constraint_expr *rhsp;
4496 tree arg = gimple_call_arg (t, j);
4497
4498 get_constraint_for_rhs (arg, &rhsc);
4499 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4500 while (rhsc.length () != 0)
4501 {
4502 rhsp = &rhsc.last ();
4503 process_constraint (new_constraint (lhs, *rhsp));
4504 rhsc.pop ();
4505 }
4506 }
4507
4508 /* If we are returning a value, assign it to the result. */
4509 lhsop = gimple_call_lhs (t);
4510 if (lhsop)
4511 {
4512 struct constraint_expr rhs;
4513 struct constraint_expr *lhsp;
4514
4515 get_constraint_for (lhsop, &lhsc);
4516 rhs = get_function_part_constraint (fi, fi_result);
4517 if (fndecl
4518 && DECL_RESULT (fndecl)
4519 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4520 {
4521 vec<ce_s> tem = vNULL;
4522 tem.safe_push (rhs);
4523 do_deref (&tem);
4524 rhs = tem[0];
4525 tem.release ();
4526 }
4527 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4528 process_constraint (new_constraint (*lhsp, rhs));
4529 }
4530
4531 /* If we pass the result decl by reference, honor that. */
4532 if (lhsop
4533 && fndecl
4534 && DECL_RESULT (fndecl)
4535 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4536 {
4537 struct constraint_expr lhs;
4538 struct constraint_expr *rhsp;
4539
4540 get_constraint_for_address_of (lhsop, &rhsc);
4541 lhs = get_function_part_constraint (fi, fi_result);
4542 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4543 process_constraint (new_constraint (lhs, *rhsp));
4544 rhsc.release ();
4545 }
4546
4547 /* If we use a static chain, pass it along. */
4548 if (gimple_call_chain (t))
4549 {
4550 struct constraint_expr lhs;
4551 struct constraint_expr *rhsp;
4552
4553 get_constraint_for (gimple_call_chain (t), &rhsc);
4554 lhs = get_function_part_constraint (fi, fi_static_chain);
4555 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4556 process_constraint (new_constraint (lhs, *rhsp));
4557 }
4558 }
4559 }
4560
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  */

static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  /* NOTE(review): STRIPPEDRHS is stripped but never used
	     afterwards; it looks like dead code.  */
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  /* The pop loop below consumes RHSC on the first LHSC element;
	     presumably LHSC has a single element here since the phi
	     result is an SSA name — verify if lhsc can have more.  */
	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only set for single-rhs assignments.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
	    /* If the operation produces a floating point result then
	       assume the value is not produced to transfer a pointer.  */
	    ;
	  else if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Copies and pointer-preserving conversions transfer the
	       rhs points-to set unchanged.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.
	 In IPA mode only truly externally visible globals count,
	 as local statics are tracked precisely there.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* In IPA mode flow the returned value into the function's
	     result part instead of letting it escape.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      /* This LHSC intentionally shadows the outer vector so
		 it can be released per output operand.  */
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4782
4783
4784 /* Create a constraint adding to the clobber set of FI the memory
4785 pointed to by PTR. */
4786
4787 static void
4788 process_ipa_clobber (varinfo_t fi, tree ptr)
4789 {
4790 vec<ce_s> ptrc = vNULL;
4791 struct constraint_expr *c, lhs;
4792 unsigned i;
4793 get_constraint_for_rhs (ptr, &ptrc);
4794 lhs = get_function_part_constraint (fi, fi_clobbers);
4795 FOR_EACH_VEC_ELT (ptrc, i, c)
4796 process_constraint (new_constraint (lhs, *c));
4797 ptrc.release ();
4798 }
4799
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  Only stores to
     memory not local to this function are of interest.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip component references down to the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  /* Note this LHSC (a constraint_expr) shadows the outer
	     vec<ce_s> LHSC.  */
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      /* Strip component references down to the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy has its source and destination arguments
		 swapped relative to memcpy.  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      /* The destination is clobbered...  */
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      /* ... and the source is used.  */
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants cannot refer to memory of
	     this function.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: it may clobber and use anything.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
5077
5078
5079 /* Find the first varinfo in the same variable as START that overlaps with
5080 OFFSET. Return NULL if we can't find one. */
5081
5082 static varinfo_t
5083 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5084 {
5085 /* If the offset is outside of the variable, bail out. */
5086 if (offset >= start->fullsize)
5087 return NULL;
5088
5089 /* If we cannot reach offset from start, lookup the first field
5090 and start from there. */
5091 if (start->offset > offset)
5092 start = get_varinfo (start->head);
5093
5094 while (start)
5095 {
5096 /* We may not find a variable in the field list with the actual
5097 offset when when we have glommed a structure to a variable.
5098 In that case, however, offset should still be within the size
5099 of the variable. */
5100 if (offset >= start->offset
5101 && (offset - start->offset) < start->size)
5102 return start;
5103
5104 start = vi_next (start);
5105 }
5106
5107 return NULL;
5108 }
5109
5110 /* Find the first varinfo in the same variable as START that overlaps with
5111 OFFSET. If there is no such varinfo the varinfo directly preceding
5112 OFFSET is returned. */
5113
5114 static varinfo_t
5115 first_or_preceding_vi_for_offset (varinfo_t start,
5116 unsigned HOST_WIDE_INT offset)
5117 {
5118 /* If we cannot reach offset from start, lookup the first field
5119 and start from there. */
5120 if (start->offset > offset)
5121 start = get_varinfo (start->head);
5122
5123 /* We may not find a variable in the field list with the actual
5124 offset when when we have glommed a structure to a variable.
5125 In that case, however, offset should still be within the size
5126 of the variable.
5127 If we got beyond the offset we look for return the field
5128 directly preceding offset which may be the last field. */
5129 while (start->next
5130 && offset >= start->offset
5131 && !((offset - start->offset) < start->size))
5132 start = vi_next (start);
5133
5134 return start;
5135 }
5136
5137
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True when the field's DECL_SIZE is absent or not a host integer;
     SIZE is then -1.  */
  unsigned has_unknown_size : 1;

  /* True when the field's type must contain pointers
     (see field_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True when the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True when the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5160
5161
5162 /* qsort comparison function for two fieldoff's PA and PB */
5163
5164 static int
5165 fieldoff_compare (const void *pa, const void *pb)
5166 {
5167 const fieldoff_s *foa = (const fieldoff_s *)pa;
5168 const fieldoff_s *fob = (const fieldoff_s *)pb;
5169 unsigned HOST_WIDE_INT foasize, fobsize;
5170
5171 if (foa->offset < fob->offset)
5172 return -1;
5173 else if (foa->offset > fob->offset)
5174 return 1;
5175
5176 foasize = foa->size;
5177 fobsize = fob->size;
5178 if (foasize < fobsize)
5179 return -1;
5180 else if (foasize > fobsize)
5181 return 1;
5182 return 0;
5183 }
5184
/* Sort a fieldstack according to the field offset and sizes.  Uses
   fieldoff_compare, so equal-offset fields end up ordered by size.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5191
5192 /* Return true if T is a type that can have subvars. */
5193
5194 static inline bool
5195 type_can_have_subvars (const_tree t)
5196 {
5197 /* Aggregates without overlapping fields can have subvars. */
5198 return TREE_CODE (t) == RECORD_TYPE;
5199 }
5200
5201 /* Return true if V is a tree that we can have subvars for.
5202 Normally, this is any aggregate type. Also complex
5203 types which are not gimple registers can have subvars. */
5204
5205 static inline bool
5206 var_can_have_subvars (const_tree v)
5207 {
5208 /* Volatile variables should never have subvars. */
5209 if (TREE_THIS_VOLATILE (v))
5210 return false;
5211
5212 /* Non decls or memory tags can never have subvars. */
5213 if (!DECL_P (v))
5214 return false;
5215
5216 return type_can_have_subvars (TREE_TYPE (v));
5217 }
5218
5219 /* Return true if T is a type that does contain pointers. */
5220
5221 static bool
5222 type_must_have_pointers (tree type)
5223 {
5224 if (POINTER_TYPE_P (type))
5225 return true;
5226
5227 if (TREE_CODE (type) == ARRAY_TYPE)
5228 return type_must_have_pointers (TREE_TYPE (type));
5229
5230 /* A function or method can have pointers as arguments, so track
5231 those separately. */
5232 if (TREE_CODE (type) == FUNCTION_TYPE
5233 || TREE_CODE (type) == METHOD_TYPE)
5234 return true;
5235
5236 return false;
5237 }
5238
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5244
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only plain records are decomposed into fields.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	/* Field position relative to its immediate container; add
	   OFFSET to get the position in the outermost object.  */
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields we cannot (or should not) decompose further are
	   pushed as a single entry.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The initializer makes a filler field at offset 0 whose
	       size covers the gap up to OFFSET + FOFF.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* The previous field abuts this one and neither can
		   contain pointers: just grow the previous entry.  */
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5345
5346 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5347 if it is a varargs function. */
5348
5349 static unsigned int
5350 count_num_arguments (tree decl, bool *is_varargs)
5351 {
5352 unsigned int num = 0;
5353 tree t;
5354
5355 /* Capture named arguments for K&R functions. They do not
5356 have a prototype and thus no TYPE_ARG_TYPES. */
5357 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5358 ++num;
5359
5360 /* Check if the function has variadic arguments. */
5361 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5362 if (TREE_VALUE (t) == void_type_node)
5363 break;
5364 if (!t)
5365 *is_varargs = true;
5366
5367 return num;
5368 }
5369
5370 /* Create the function information (a chain of sub-variables at the
5371 fi_* sub-offsets) for DECL, using NAME, and return the varinfo
for the function itself. */
5372
5373 static varinfo_t
5374 create_function_info_for (tree decl, const char *name)
5375 {
5376 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5377 varinfo_t vi, prev_vi;
5378 tree arg;
5379 unsigned int i;
5380 bool is_varargs = false;
5381 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5382
5383 /* Create the variable info. */
5384
5385 vi = new_var_info (decl, name);
5386 vi->offset = 0;
5387 vi->size = 1;
5388 vi->fullsize = fi_parm_base + num_args;
5389 vi->is_fn_info = 1;
5390 vi->may_have_pointers = false;
/* For varargs functions the number of argument "fields" is unbounded. */
5391 if (is_varargs)
5392 vi->fullsize = ~0;
5393 insert_vi_for_tree (vi->decl, vi);
5394
5395 prev_vi = vi;
5396
5397 /* Create a variable for things the function clobbers and one for
5398 things the function uses. */
5399 {
5400 varinfo_t clobbervi, usevi;
5401 const char *newname;
5402 char *tempname;
5403
5404 asprintf (&tempname, "%s.clobber", name);
5405 newname = ggc_strdup (tempname);
5406 free (tempname);
5407
5408 clobbervi = new_var_info (NULL, newname);
5409 clobbervi->offset = fi_clobbers;
5410 clobbervi->size = 1;
5411 clobbervi->fullsize = vi->fullsize;
5412 clobbervi->is_full_var = true;
5413 clobbervi->is_global_var = false;
/* Sub-variables must be chained in strictly increasing offset order. */
5414 gcc_assert (prev_vi->offset < clobbervi->offset);
5415 prev_vi->next = clobbervi->id;
5416 prev_vi = clobbervi;
5417
5418 asprintf (&tempname, "%s.use", name);
5419 newname = ggc_strdup (tempname);
5420 free (tempname);
5421
5422 usevi = new_var_info (NULL, newname);
5423 usevi->offset = fi_uses;
5424 usevi->size = 1;
5425 usevi->fullsize = vi->fullsize;
5426 usevi->is_full_var = true;
5427 usevi->is_global_var = false;
5428 gcc_assert (prev_vi->offset < usevi->offset);
5429 prev_vi->next = usevi->id;
5430 prev_vi = usevi;
5431 }
5432
5433 /* And one for the static chain. */
5434 if (fn->static_chain_decl != NULL_TREE)
5435 {
5436 varinfo_t chainvi;
5437 const char *newname;
5438 char *tempname;
5439
5440 asprintf (&tempname, "%s.chain", name);
5441 newname = ggc_strdup (tempname);
5442 free (tempname);
5443
5444 chainvi = new_var_info (fn->static_chain_decl, newname);
5445 chainvi->offset = fi_static_chain;
5446 chainvi->size = 1;
5447 chainvi->fullsize = vi->fullsize;
5448 chainvi->is_full_var = true;
5449 chainvi->is_global_var = false;
5450 gcc_assert (prev_vi->offset < chainvi->offset);
5451 prev_vi->next = chainvi->id;
5452 prev_vi = chainvi;
5453 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5454 }
5455
5456 /* Create a variable for the return var. */
5457 if (DECL_RESULT (decl) != NULL
5458 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5459 {
5460 varinfo_t resultvi;
5461 const char *newname;
5462 char *tempname;
5463 tree resultdecl = decl;
5464
5465 if (DECL_RESULT (decl))
5466 resultdecl = DECL_RESULT (decl);
5467
5468 asprintf (&tempname, "%s.result", name);
5469 newname = ggc_strdup (tempname);
5470 free (tempname);
5471
5472 resultvi = new_var_info (resultdecl, newname);
5473 resultvi->offset = fi_result;
5474 resultvi->size = 1;
5475 resultvi->fullsize = vi->fullsize;
5476 resultvi->is_full_var = true;
5477 if (DECL_RESULT (decl))
5478 resultvi->may_have_pointers = true;
5479 gcc_assert (prev_vi->offset < resultvi->offset);
5480 prev_vi->next = resultvi->id;
5481 prev_vi = resultvi;
5482 if (DECL_RESULT (decl))
5483 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5484 }
5485
5486 /* Set up variables for each argument. */
5487 arg = DECL_ARGUMENTS (decl);
5488 for (i = 0; i < num_args; i++)
5489 {
5490 varinfo_t argvi;
5491 const char *newname;
5492 char *tempname;
5493 tree argdecl = decl;
5494
/* ARG may run out before NUM_ARGS for unprototyped functions; fall
back to using DECL itself as the underlying decl then. */
5495 if (arg)
5496 argdecl = arg;
5497
5498 asprintf (&tempname, "%s.arg%d", name, i);
5499 newname = ggc_strdup (tempname);
5500 free (tempname);
5501
5502 argvi = new_var_info (argdecl, newname);
5503 argvi->offset = fi_parm_base + i;
5504 argvi->size = 1;
5505 argvi->is_full_var = true;
5506 argvi->fullsize = vi->fullsize;
5507 if (arg)
5508 argvi->may_have_pointers = true;
5509 gcc_assert (prev_vi->offset < argvi->offset);
5510 prev_vi->next = argvi->id;
5511 prev_vi = argvi;
5512 if (arg)
5513 {
5514 insert_vi_for_tree (arg, argvi);
5515 arg = DECL_CHAIN (arg);
5516 }
5517 }
5518
5519 /* Add one representative for all further args. */
5520 if (is_varargs)
5521 {
5522 varinfo_t argvi;
5523 const char *newname;
5524 char *tempname;
5525 tree decl;
5526
5527 asprintf (&tempname, "%s.varargs", name);
5528 newname = ggc_strdup (tempname);
5529 free (tempname);
5530
5531 /* We need sth that can be pointed to for va_start. */
5532 decl = build_fake_var_decl (ptr_type_node);
5533
5534 argvi = new_var_info (decl, newname);
5535 argvi->offset = fi_parm_base + num_args;
5536 argvi->size = ~0;
5537 argvi->is_full_var = true;
5538 argvi->is_heap_var = true;
5539 argvi->fullsize = vi->fullsize;
5540 gcc_assert (prev_vi->offset < argvi->offset);
5541 prev_vi->next = argvi->id;
5542 prev_vi = argvi;
5543 }
5544
5545 return vi;
5546 }
5547
5548
5549 /* Return true if FIELDSTACK contains fields that overlap.
5550 FIELDSTACK is assumed to be sorted by offset. */
5551
5552 static bool
5553 check_for_overlaps (vec<fieldoff_s> fieldstack)
5554 {
5555 fieldoff_s *fo = NULL;
5556 unsigned int i;
5557 HOST_WIDE_INT lastoffset = -1;
5558
5559 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5560 {
5561 if (fo->offset == lastoffset)
5562 return true;
5563 lastoffset = fo->offset;
5564 }
5565 return false;
5566 }
5567
5568 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5569 This will also create any varinfo structures necessary for fields
5570 of DECL. Note the caller is responsible for entering the result
into the vi-for-tree map (see create_variable_info_for). */
5571
5572 static varinfo_t
5573 create_variable_info_for_1 (tree decl, const char *name)
5574 {
5575 varinfo_t vi, newvi;
5576 tree decl_type = TREE_TYPE (decl);
5577 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5578 vec<fieldoff_s> fieldstack = vNULL;
5579 fieldoff_s *fo;
5580 unsigned int i;
5581
/* Variables whose size is not a known host integer get a single
unknown-size varinfo. */
5582 if (!declsize
5583 || !host_integerp (declsize, 1))
5584 {
5585 vi = new_var_info (decl, name);
5586 vi->offset = 0;
5587 vi->size = ~0;
5588 vi->fullsize = ~0;
5589 vi->is_unknown_size_var = true;
5590 vi->is_full_var = true;
5591 vi->may_have_pointers = true;
5592 return vi;
5593 }
5594
5595 /* Collect field information. */
5596 if (use_field_sensitive
5597 && var_can_have_subvars (decl)
5598 /* ??? Force us to not use subfields for global initializers
5599 in IPA mode. Else we'd have to parse arbitrary initializers. */
5600 && !(in_ipa_mode
5601 && is_global_var (decl)
5602 && DECL_INITIAL (decl)))
5603 {
5604 fieldoff_s *fo = NULL;
5605 bool notokay = false;
5606 unsigned int i;
5607
5608 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5609
/* Fields of unknown size or at negative offsets make the variable
unsuitable for field-sensitive treatment. */
5610 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5611 if (fo->has_unknown_size
5612 || fo->offset < 0)
5613 {
5614 notokay = true;
5615 break;
5616 }
5617
5618 /* We can't sort them if we have a field with a variable sized type,
5619 which will make notokay = true. In that case, we are going to return
5620 without creating varinfos for the fields anyway, so sorting them is a
5621 waste to boot. */
5622 if (!notokay)
5623 {
5624 sort_fieldstack (fieldstack);
5625 /* Due to some C++ FE issues, like PR 22488, we might end up
5626 what appear to be overlapping fields even though they,
5627 in reality, do not overlap. Until the C++ FE is fixed,
5628 we will simply disable field-sensitivity for these cases. */
5629 notokay = check_for_overlaps (fieldstack);
5630 }
5631
5632 if (notokay)
5633 fieldstack.release ();
5634 }
5635
5636 /* If we didn't end up collecting sub-variables create a full
5637 variable for the decl. */
5638 if (fieldstack.length () <= 1
5639 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5640 {
5641 vi = new_var_info (decl, name);
5642 vi->offset = 0;
5643 vi->may_have_pointers = true;
5644 vi->fullsize = TREE_INT_CST_LOW (declsize);
5645 vi->size = vi->fullsize;
5646 vi->is_full_var = true;
5647 fieldstack.release ();
5648 return vi;
5649 }
5650
/* Otherwise create one sub-variable per collected field, chained
via ->next in offset order. */
5651 vi = new_var_info (decl, name);
5652 vi->fullsize = TREE_INT_CST_LOW (declsize);
5653 for (i = 0, newvi = vi;
5654 fieldstack.iterate (i, &fo);
5655 ++i, newvi = vi_next (newvi))
5656 {
5657 const char *newname = "NULL";
5658 char *tempname;
5659
/* Only build the human-readable "name.offset+size" when dumping. */
5660 if (dump_file)
5661 {
5662 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5663 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5664 newname = ggc_strdup (tempname);
5665 free (tempname);
5666 }
5667 newvi->name = newname;
5668 newvi->offset = fo->offset;
5669 newvi->size = fo->size;
5670 newvi->fullsize = vi->fullsize;
5671 newvi->may_have_pointers = fo->may_have_pointers;
5672 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5673 if (i + 1 < fieldstack.length ())
5674 {
5675 varinfo_t tem = new_var_info (decl, name);
5676 newvi->next = tem->id;
5677 tem->head = vi->id;
5678 }
5679 }
5680
5681 fieldstack.release ();
5682
5683 return vi;
5684 }
5685
/* Create the varinfo(s) for DECL with name NAME, record the mapping
in the vi-for-tree table and, for global VAR_DECLs, generate the
initial points-to constraints. Returns the id of the main variable. */
5686 static unsigned int
5687 create_variable_info_for (tree decl, const char *name)
5688 {
5689 varinfo_t vi = create_variable_info_for_1 (decl, name);
5690 unsigned int id = vi->id;
5691
5692 insert_vi_for_tree (decl, vi);
5693
/* Only VAR_DECLs need initial constraints here. */
5694 if (TREE_CODE (decl) != VAR_DECL)
5695 return id;
5696
5697 /* Create initial constraints for globals. */
5698 for (; vi; vi = vi_next (vi))
5699 {
5700 if (!vi->may_have_pointers
5701 || !vi->is_global_var)
5702 continue;
5703
5704 /* Mark global restrict qualified pointers. */
5705 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5706 && TYPE_RESTRICT (TREE_TYPE (decl)))
5707 || vi->only_restrict_pointers)
5708 {
5709 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5710 continue;
5711 }
5712
5713 /* In non-IPA mode the initializer from nonlocal is all we need. */
5714 if (!in_ipa_mode
5715 || DECL_HARD_REGISTER (decl))
5716 make_copy_constraint (vi, nonlocal_id);
5717
5718 /* In IPA mode parse the initializer and generate proper constraints
5719 for it. */
5720 else
5721 {
5722 struct varpool_node *vnode = varpool_get_node (decl);
5723
5724 /* For escaped variables initialize them from nonlocal. */
5725 if (!varpool_all_refs_explicit_p (vnode))
5726 make_copy_constraint (vi, nonlocal_id);
5727
5728 /* If this is a global variable with an initializer and we are in
5729 IPA mode generate constraints for it. */
5730 if (DECL_INITIAL (decl)
5731 && vnode->analyzed)
5732 {
5733 vec<ce_s> rhsc = vNULL;
5734 struct constraint_expr lhs, *rhsp;
5735 unsigned i;
/* VI = <initializer constraints>. */
5736 get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5737 lhs.var = vi->id;
5738 lhs.offset = 0;
5739 lhs.type = SCALAR;
5740 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5741 process_constraint (new_constraint (lhs, *rhsp));
5742 /* If this is a variable that escapes from the unit
5743 the initializer escapes as well. */
5744 if (!varpool_all_refs_explicit_p (vnode))
5745 {
5746 lhs.var = escaped_id;
5747 lhs.offset = 0;
5748 lhs.type = SCALAR;
5749 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5750 process_constraint (new_constraint (lhs, *rhsp));
5751 }
5752 rhsc.release ();
5753 }
5754 }
5755 }
5756
5757 return id;
5758 }
5759
5760 /* Print out the points-to solution for VAR (a varinfo id) to FILE. */
5761
5762 static void
5763 dump_solution_for_var (FILE *file, unsigned int var)
5764 {
5765 varinfo_t vi = get_varinfo (var);
5766 unsigned int i;
5767 bitmap_iterator bi;
5768
5769 /* Dump the solution for unified vars anyway, this avoids difficulties
5770 in scanning dumps in the testsuite. */
5771 fprintf (file, "%s = { ", vi->name);
/* The solution is stored on the representative of the equivalence
class, so switch to that before walking the bitmap. */
5772 vi = get_varinfo (find (var));
5773 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5774 fprintf (file, "%s ", get_varinfo (i)->name);
5775 fprintf (file, "}");
5776
5777 /* But note when the variable was unified. */
5778 if (vi->id != var)
5779 fprintf (file, " same as %s", vi->name);
5780
5781 fprintf (file, "\n");
5782 }
5783
5784 /* Print the points-to solution for VAR to stdout. Convenience
wrapper intended to be called from the debugger. */
5785
5786 DEBUG_FUNCTION void
5787 debug_solution_for_var (unsigned int var)
5788 {
5789 dump_solution_for_var (stdout, var);
5790 }
5791
5792 /* Create varinfo structures for all of the variables in the
5793 function for intraprocedural mode. */
5794
5795 static void
5796 intra_create_variable_infos (void)
5797 {
5798 tree t;
5799
5800 /* For each incoming pointer argument arg, create the constraint ARG
5801 = NONLOCAL or a dummy variable if it is a restrict qualified
5802 passed-by-reference argument. */
5803 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5804 {
5805 varinfo_t p = get_vi_for_tree (t);
5806
5807 /* For restrict qualified pointers to objects passed by
5808 reference build a real representative for the pointed-to object.
5809 Treat restrict qualified references the same. */
5810 if (TYPE_RESTRICT (TREE_TYPE (t))
5811 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5812 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5813 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5814 {
5815 struct constraint_expr lhsc, rhsc;
5816 varinfo_t vi;
5817 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5818 DECL_EXTERNAL (heapvar) = 1;
5819 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5820 insert_vi_for_tree (heapvar, vi);
/* PARM = &PARM_NOALIAS. */
5821 lhsc.var = p->id;
5822 lhsc.type = SCALAR;
5823 lhsc.offset = 0;
5824 rhsc.var = vi->id;
5825 rhsc.type = ADDRESSOF;
5826 rhsc.offset = 0;
5827 process_constraint (new_constraint (lhsc, rhsc));
/* Initialize the pointed-to object conservatively. */
5828 for (; vi; vi = vi_next (vi))
5829 if (vi->may_have_pointers)
5830 {
5831 if (vi->only_restrict_pointers)
5832 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5833 else
5834 make_copy_constraint (vi, nonlocal_id);
5835 }
5836 continue;
5837 }
5838
5839 if (POINTER_TYPE_P (TREE_TYPE (t))
5840 && TYPE_RESTRICT (TREE_TYPE (t)))
5841 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5842 else
5843 {
5844 for (; p; p = vi_next (p))
5845 {
5846 if (p->only_restrict_pointers)
5847 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5848 else if (p->may_have_pointers)
5849 make_constraint_from (p, nonlocal_id);
5850 }
5851 }
5852 }
5853
5854 /* Add a constraint for a result decl that is passed by reference. */
5855 if (DECL_RESULT (cfun->decl)
5856 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5857 {
5858 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5859
5860 for (p = result_vi; p; p = vi_next (p))
5861 make_constraint_from (p, nonlocal_id);
5862 }
5863
5864 /* Add a constraint for the incoming static chain parameter. */
5865 if (cfun->static_chain_decl != NULL_TREE)
5866 {
5867 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5868 
5869 for (p = chain_vi; p; p = vi_next (p))
5870 make_constraint_from (p, nonlocal_id);
5871 }
5872 }
5873
5874 /* Structure used to put solution bitmaps in a hashtable so they can
5875 be shared among variables with the same points-to set. */
5876
5877 typedef struct shared_bitmap_info
5878 {
5879 bitmap pt_vars; /* The canonical shared points-to bitmap. */
5880 hashval_t hashcode; /* Cached bitmap_hash of PT_VARS. */
5881 } *shared_bitmap_info_t;
5882 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5883
/* Hash table of shared_bitmap_info entries; see shared_bitmap_lookup
and shared_bitmap_add. */
5884 static htab_t shared_bitmap_table;
5885
5886 /* Hash function for a shared_bitmap_info_t */
5887
5888 static hashval_t
5889 shared_bitmap_hash (const void *p)
5890 {
5891 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5892 return bi->hashcode;
5893 }
5894
5895 /* Equality function for two shared_bitmap_info_t's. */
5896
5897 static int
5898 shared_bitmap_eq (const void *p1, const void *p2)
5899 {
5900 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5901 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5902 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5903 }
5904
5905 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5906 existing instance if there is one, NULL otherwise. */
5907
5908 static bitmap
5909 shared_bitmap_lookup (bitmap pt_vars)
5910 {
5911 void **slot;
5912 struct shared_bitmap_info sbi;
5913
5914 sbi.pt_vars = pt_vars;
5915 sbi.hashcode = bitmap_hash (pt_vars);
5916
5917 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5918 sbi.hashcode, NO_INSERT);
5919 if (!slot)
5920 return NULL;
5921 else
5922 return ((shared_bitmap_info_t) *slot)->pt_vars;
5923 }
5924
5925
5926 /* Add a bitmap to the shared bitmap hashtable. */
5927
5928 static void
5929 shared_bitmap_add (bitmap pt_vars)
5930 {
5931 void **slot;
5932 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5933
5934 sbi->pt_vars = pt_vars;
5935 sbi->hashcode = bitmap_hash (pt_vars);
5936
5937 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5938 sbi->hashcode, INSERT);
5939 gcc_assert (!*slot);
5940 *slot = (void *) sbi;
5941 }
5942
5943
5944 /* Set bits in INTO corresponding to the variable uids in solution set
FROM. Also sets PT->vars_contains_global when a global variable is
part of the solution. */
5945
5946 static void
5947 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5948 {
5949 unsigned int i;
5950 bitmap_iterator bi;
5951
5952 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5953 {
5954 varinfo_t vi = get_varinfo (i);
5955
5956 /* The only artificial variables that are allowed in a may-alias
5957 set are heap variables. */
5958 if (vi->is_artificial_var && !vi->is_heap_var)
5959 continue;
5960
5961 if (TREE_CODE (vi->decl) == VAR_DECL
5962 || TREE_CODE (vi->decl) == PARM_DECL
5963 || TREE_CODE (vi->decl) == RESULT_DECL)
5964 {
5965 /* If we are in IPA mode we will not recompute points-to
5966 sets after inlining so make sure they stay valid. */
5967 if (in_ipa_mode
5968 && !DECL_PT_UID_SET_P (vi->decl))
5969 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5970
5971 /* Add the decl to the points-to set. Note that the points-to
5972 set contains global variables. */
5973 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5974 if (vi->is_global_var)
5975 pt->vars_contains_global = true;
5976 }
5977 }
5978 }
5979
5980
5981 /* Compute and return the points-to solution for ORIG_VI. Results
are cached in FINAL_SOLUTIONS, keyed on the representative varinfo. */
5982
5983 static struct pt_solution
5984 find_what_var_points_to (varinfo_t orig_vi)
5985 {
5986 unsigned int i;
5987 bitmap_iterator bi;
5988 bitmap finished_solution;
5989 bitmap result;
5990 varinfo_t vi;
5991 void **slot;
5992 struct pt_solution *pt;
5993
5994 /* This variable may have been collapsed, let's get the real
5995 variable. */
5996 vi = get_varinfo (find (orig_vi->id));
5997
5998 /* See if we have already computed the solution and return it. */
5999 slot = pointer_map_insert (final_solutions, vi);
6000 if (*slot != NULL)
6001 return *(struct pt_solution *)*slot;
6002
6003 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6004 memset (pt, 0, sizeof (struct pt_solution));
6005
6006 /* Translate artificial variables into SSA_NAME_PTR_INFO
6007 attributes. */
6008 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6009 {
6010 varinfo_t vi = get_varinfo (i);
6011
6012 if (vi->is_artificial_var)
6013 {
6014 if (vi->id == nothing_id)
6015 pt->null = 1;
6016 else if (vi->id == escaped_id)
6017 {
6018 if (in_ipa_mode)
6019 pt->ipa_escaped = 1;
6020 else
6021 pt->escaped = 1;
6022 }
6023 else if (vi->id == nonlocal_id)
6024 pt->nonlocal = 1;
6025 else if (vi->is_heap_var)
6026 /* We represent heapvars in the points-to set properly. */
6027 ;
6028 else if (vi->id == readonly_id)
6029 /* Nobody cares. */
6030 ;
6031 else if (vi->id == anything_id
6032 || vi->id == integer_id)
6033 pt->anything = 1;
6034 }
6035 }
6036
6037 /* Instead of doing extra work, simply do not create
6038 elaborate points-to information for pt_anything pointers. */
6039 if (pt->anything)
6040 return *pt;
6041
6042 /* Share the final set of variables when possible. */
6043 finished_solution = BITMAP_GGC_ALLOC ();
6044 stats.points_to_sets_created++;
6045
6046 set_uids_in_ptset (finished_solution, vi->solution, pt);
6047 result = shared_bitmap_lookup (finished_solution);
6048 if (!result)
6049 {
6050 shared_bitmap_add (finished_solution);
6051 pt->vars = finished_solution;
6052 }
6053 else
6054 {
/* An equal bitmap already exists; reuse it and discard ours. */
6055 pt->vars = result;
6056 bitmap_clear (finished_solution);
6057 }
6058
6059 return *pt;
6060 }
6061
6062 /* Given a pointer variable P, fill in its points-to set. */
6063
6064 static void
6065 find_what_p_points_to (tree p)
6066 {
6067 struct ptr_info_def *pi;
6068 tree lookup_p = p;
6069 varinfo_t vi;
6070
6071 /* For parameters, get at the points-to set for the actual parm
6072 decl. */
6073 if (TREE_CODE (p) == SSA_NAME
6074 && SSA_NAME_IS_DEFAULT_DEF (p)
6075 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6076 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6077 lookup_p = SSA_NAME_VAR (p);
6078
6079 vi = lookup_vi_for_tree (lookup_p);
6080 if (!vi)
6081 return;
6082
6083 pi = get_ptr_info (p);
6084 pi->pt = find_what_var_points_to (vi);
6085 }
6086
6087
6088 /* Query statistics for points-to solutions. */
6089
6090 static struct {
6091 unsigned HOST_WIDE_INT pt_solution_includes_may_alias; /* Queries answered "may include". */
6092 unsigned HOST_WIDE_INT pt_solution_includes_no_alias; /* Queries disambiguated. */
6093 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias; /* Queries answered "may intersect". */
6094 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias; /* Queries disambiguated. */
6095 } pta_stats;
6096
6097 void
6098 dump_pta_stats (FILE *s)
6099 {
6100 fprintf (s, "\nPTA query stats:\n");
6101 fprintf (s, " pt_solution_includes: "
6102 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6103 HOST_WIDE_INT_PRINT_DEC" queries\n",
6104 pta_stats.pt_solution_includes_no_alias,
6105 pta_stats.pt_solution_includes_no_alias
6106 + pta_stats.pt_solution_includes_may_alias);
6107 fprintf (s, " pt_solutions_intersect: "
6108 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6109 HOST_WIDE_INT_PRINT_DEC" queries\n",
6110 pta_stats.pt_solutions_intersect_no_alias,
6111 pta_stats.pt_solutions_intersect_no_alias
6112 + pta_stats.pt_solutions_intersect_may_alias);
6113 }
6114
6115
6116 /* Reset the points-to solution *PT to a conservative default
6117 (point to anything). */
6118
6119 void
6120 pt_solution_reset (struct pt_solution *pt)
6121 {
6122 memset (pt, 0, sizeof (struct pt_solution));
6123 pt->anything = true;
6124 }
6125
6126 /* Set the points-to solution *PT to point only to the variables
6127 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6128 global variables. */
6130
6131 void
6132 pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
6133 {
6134 memset (pt, 0, sizeof (struct pt_solution));
6135 pt->vars = vars;
6136 pt->vars_contains_global = vars_contains_global;
6137 }
6138
6139 /* Set the points-to solution *PT to point only to the variable VAR. */
6140
6141 void
6142 pt_solution_set_var (struct pt_solution *pt, tree var)
6143 {
6144 memset (pt, 0, sizeof (struct pt_solution));
6145 pt->vars = BITMAP_GGC_ALLOC ();
6146 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6147 pt->vars_contains_global = is_global_var (var);
6148 }
6149
6150 /* Computes the union of the points-to solutions *DEST and *SRC and
6151 stores the result in *DEST. This changes the points-to bitmap
6152 of *DEST and thus may not be used if that might be shared.
6153 The points-to bitmap of *SRC and *DEST will not be shared after
6154 this function if they were not before. */
6155
6156 static void
6157 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6158 {
6159 dest->anything |= src->anything;
/* "Anything" subsumes everything else; canonicalize via reset. */
6160 if (dest->anything)
6161 {
6162 pt_solution_reset (dest);
6163 return;
6164 }
6165
6166 dest->nonlocal |= src->nonlocal;
6167 dest->escaped |= src->escaped;
6168 dest->ipa_escaped |= src->ipa_escaped;
6169 dest->null |= src->null;
6170 dest->vars_contains_global |= src->vars_contains_global;
6171 if (!src->vars)
6172 return;
6173
6174 if (!dest->vars)
6175 dest->vars = BITMAP_GGC_ALLOC ();
6176 bitmap_ior_into (dest->vars, src->vars);
6177 }
6178
6179 /* Return true if the points-to solution *PT is empty. */
6180
6181 bool
6182 pt_solution_empty_p (struct pt_solution *pt)
6183 {
6184 if (pt->anything
6185 || pt->nonlocal)
6186 return false;
6187
6188 if (pt->vars
6189 && !bitmap_empty_p (pt->vars))
6190 return false;
6191
6192 /* If the solution includes ESCAPED, check if that is empty. */
6193 if (pt->escaped
6194 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6195 return false;
6196
6197 /* Likewise for the IPA ESCAPED solution. */
6198 if (pt->ipa_escaped
6199 && !pt_solution_empty_p (&ipa_escaped_pt))
6200 return false;
6201
6202 return true;
6203 }
6204
6205 /* Return true if the points-to solution *PT only point to a single var, and
6206 return the var uid in *UID. */
6207
6208 bool
6209 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6210 {
6211 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6212 || pt->null || pt->vars == NULL
6213 || !bitmap_single_bit_set_p (pt->vars))
6214 return false;
6215
6216 *uid = bitmap_first_set_bit (pt->vars);
6217 return true;
6218 }
6219
6220 /* Return true if the points-to solution *PT includes global memory. */
6221
6222 bool
6223 pt_solution_includes_global (struct pt_solution *pt)
6224 {
6225 if (pt->anything
6226 || pt->nonlocal
6227 || pt->vars_contains_global)
6228 return true;
6229
/* ESCAPED/IPA-ESCAPED include globals iff their own solutions do. */
6230 if (pt->escaped)
6231 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6232
6233 if (pt->ipa_escaped)
6234 return pt_solution_includes_global (&ipa_escaped_pt);
6235
6236 /* ??? This predicate is not correct for the IPA-PTA solution
6237 as we do not properly distinguish between unit escape points
6238 and global variables. */
6239 if (cfun->gimple_df->ipa_pta)
6240 return true;
6241
6242 return false;
6243 }
6244
6245 /* Return true if the points-to solution *PT includes the variable
6246 declaration DECL. */
6247
6248 static bool
6249 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6250 {
6251 if (pt->anything)
6252 return true;
6253
6254 if (pt->nonlocal
6255 && is_global_var (decl))
6256 return true;
6257
6258 if (pt->vars
6259 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6260 return true;
6261
6262 /* If the solution includes ESCAPED, check it. */
6263 if (pt->escaped
6264 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6265 return true;
6266
6267 /* Likewise for the IPA ESCAPED solution. */
6268 if (pt->ipa_escaped
6269 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6270 return true;
6271
6272 return false;
6273 }
6274
6275 bool
6276 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6277 {
6278 bool res = pt_solution_includes_1 (pt, decl);
6279 if (res)
6280 ++pta_stats.pt_solution_includes_may_alias;
6281 else
6282 ++pta_stats.pt_solution_includes_no_alias;
6283 return res;
6284 }
6285
6286 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6287 intersection. */
6288
6289 static bool
6290 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6291 {
6292 if (pt1->anything || pt2->anything)
6293 return true;
6294
6295 /* If either points to unknown global memory and the other points to
6296 any global memory they alias. */
6297 if ((pt1->nonlocal
6298 && (pt2->nonlocal
6299 || pt2->vars_contains_global))
6300 || (pt2->nonlocal
6301 && pt1->vars_contains_global))
6302 return true;
6303
6304 /* Check the escaped solution if required. */
6305 if ((pt1->escaped || pt2->escaped)
6306 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6307 {
6308 /* If both point to escaped memory and that solution
6309 is not empty they alias. */
6310 if (pt1->escaped && pt2->escaped)
6311 return true;
6312
6313 /* If either points to escaped memory see if the escaped solution
6314 intersects with the other. */
6315 if ((pt1->escaped
6316 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
6317 || (pt2->escaped
6318 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
6319 return true;
6320 }
6321
6322 /* Check the IPA escaped solution if required.
6323 ??? Do we need to check the local against the IPA escaped sets? */
6324 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6325 && !pt_solution_empty_p (&ipa_escaped_pt))
6326 {
6327 /* If both point to escaped memory and that solution
6328 is not empty they alias. */
6329 if (pt1->ipa_escaped && pt2->ipa_escaped)
6330 return true;
6331
6332 /* If either points to escaped memory see if the escaped solution
6333 intersects with the other. */
6334 if ((pt1->ipa_escaped
6335 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6336 || (pt2->ipa_escaped
6337 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6338 return true;
6339 }
6340
6341 /* Now both pointers alias if their points-to solution intersects. */
6342 return (pt1->vars
6343 && pt2->vars
6344 && bitmap_intersect_p (pt1->vars, pt2->vars));
6345 }
6346
6347 bool
6348 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6349 {
6350 bool res = pt_solutions_intersect_1 (pt1, pt2);
6351 if (res)
6352 ++pta_stats.pt_solutions_intersect_may_alias;
6353 else
6354 ++pta_stats.pt_solutions_intersect_no_alias;
6355 return res;
6356 }
6357
6358
6359 /* Dump points-to information to OUTFILE. */
6360
6361 static void
6362 dump_sa_points_to_info (FILE *outfile)
6363 {
6364 unsigned int i;
6365
6366 fprintf (outfile, "\nPoints-to sets\n\n");
6367
6368 if (dump_flags & TDF_STATS)
6369 {
6370 fprintf (outfile, "Stats:\n");
6371 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6372 fprintf (outfile, "Non-pointer vars: %d\n",
6373 stats.nonpointer_vars);
6374 fprintf (outfile, "Statically unified vars: %d\n",
6375 stats.unified_vars_static);
6376 fprintf (outfile, "Dynamically unified vars: %d\n",
6377 stats.unified_vars_dynamic);
6378 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6379 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6380 fprintf (outfile, "Number of implicit edges: %d\n",
6381 stats.num_implicit_edges);
6382 }
6383
/* Index zero is the reserved NULL slot (see init_base_vars). */
6384 for (i = 1; i < varmap.length (); i++)
6385 {
6386 varinfo_t vi = get_varinfo (i);
6387 if (!vi->may_have_pointers)
6388 continue;
6389 dump_solution_for_var (outfile, i);
6390 }
6391 }
6392
6393
6394 /* Debug points-to information to stderr. Convenience wrapper
intended to be called from the debugger. */
6395
6396 DEBUG_FUNCTION void
6397 debug_sa_points_to_info (void)
6398 {
6399 dump_sa_points_to_info (stderr);
6400 }
6401
6402
6403 /* Initialize the always-existing constraint variables for NULL
6404 ANYTHING, READONLY, and INTEGER */
6405
static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* NOTE: the creation order below is significant -- each gcc_assert
     checks that the freshly assigned variable id matches the
     corresponding hard-coded *_id constant.  Do not reorder.  */

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  /* ~0 size/fullsize: the artificial variable covers "everything".  */
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME: the comment above says "points to
			     anything" but this makes READONLY point to
			     itself rather than to anything_id.  */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  Note it is deliberately NOT marked is_special_var, unlike
     the other artificial variables here.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  /* lhs is unchanged; only the rhs variable differs for the second
     constraint.  */
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  Also not is_special_var.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6574
/* Initialize things necessary to perform PTA.  */
6576
static void
init_alias_vars (void)
{
  /* Field-sensitivity is pointless unless we can distinguish more than
     one field.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  /* Bitmap obstacks for the solver's points-to sets, the previous
     iteration's sets, and the predecessor graph.  */
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  /* Create the special variables (NULL, ANYTHING, ...) last:
     init_base_vars pushes into varmap and allocates variable infos, so
     the containers above must already exist.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
6605
6606 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6607 predecessor edges. */
6608
6609 static void
6610 remove_preds_and_fake_succs (constraint_graph_t graph)
6611 {
6612 unsigned int i;
6613
6614 /* Clear the implicit ref and address nodes from the successor
6615 lists. */
6616 for (i = 1; i < FIRST_REF_NODE; i++)
6617 {
6618 if (graph->succs[i])
6619 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6620 FIRST_REF_NODE * 2);
6621 }
6622
6623 /* Free the successor list for the non-ref nodes. */
6624 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6625 {
6626 if (graph->succs[i])
6627 BITMAP_FREE (graph->succs[i]);
6628 }
6629
6630 /* Now reallocate the size of the successor list as, and blow away
6631 the predecessor bitmaps. */
6632 graph->size = varmap.length ();
6633 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6634
6635 free (graph->implicit_preds);
6636 graph->implicit_preds = NULL;
6637 free (graph->preds);
6638 graph->preds = NULL;
6639 bitmap_obstack_release (&predbitmap_obstack);
6640 }
6641
6642 /* Solve the constraint set. */
6643
static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the number of variables: one slot per variable plus one
     per implicit "ref" node.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  /* The actual fixed-point iteration over the simplified graph.  */
  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6714
6715 /* Create points-to sets for the current function. See the comments
6716 at the start of the file for an algorithmic overview. */
6717
static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Create variable infos for the current function's parameters and
     locals.  */
  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* PHIs with virtual results carry no points-to information.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  The double assignment sets
     both DECL_EXTERNAL and is_global_var to the same escape result in
     one statement.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi
	&& vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  /* First the call-use set: const calls use nothing, calls
	     with a computed use solution get that, everything else
	     conservatively uses the escape solution.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Then the call-clobber set, analogously; pure and novops
	     calls additionally clobber nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6844
6845
6846 /* Delete created points-to sets. */
6847
6848 static void
6849 delete_points_to_sets (void)
6850 {
6851 unsigned int i;
6852
6853 htab_delete (shared_bitmap_table);
6854 if (dump_file && (dump_flags & TDF_STATS))
6855 fprintf (dump_file, "Points to sets created:%d\n",
6856 stats.points_to_sets_created);
6857
6858 pointer_map_destroy (vi_for_tree);
6859 pointer_map_destroy (call_stmt_vars);
6860 bitmap_obstack_release (&pta_obstack);
6861 constraints.release ();
6862
6863 for (i = 0; i < graph->size; i++)
6864 graph->complex[i].release ();
6865 free (graph->complex);
6866
6867 free (graph->rep);
6868 free (graph->succs);
6869 free (graph->pe);
6870 free (graph->pe_rep);
6871 free (graph->indirect_cycles);
6872 free (graph);
6873
6874 varmap.release ();
6875 free_alloc_pool (variable_info_pool);
6876 free_alloc_pool (constraint_pool);
6877
6878 obstack_free (&fake_var_decl_obstack, NULL);
6879
6880 pointer_map_destroy (final_solutions);
6881 obstack_free (&final_solutions_obstack, NULL);
6882 }
6883
6884
6885 /* Compute points-to information for every SSA_NAME pointer in the
6886 current function and compute the transitive closure of escaped
6887 variables to re-initialize the call-clobber states of local variables. */
6888
6889 unsigned int
6890 compute_may_aliases (void)
6891 {
6892 if (cfun->gimple_df->ipa_pta)
6893 {
6894 if (dump_file)
6895 {
6896 fprintf (dump_file, "\nNot re-computing points-to information "
6897 "because IPA points-to information is available.\n\n");
6898
6899 /* But still dump what we have remaining it. */
6900 dump_alias_info (dump_file);
6901 }
6902
6903 return 0;
6904 }
6905
6906 /* For each pointer P_i, determine the sets of variables that P_i may
6907 point-to. Compute the reachability set of escaped and call-used
6908 variables. */
6909 compute_points_to_sets ();
6910
6911 /* Debugging dumps. */
6912 if (dump_file)
6913 dump_alias_info (dump_file);
6914
6915 /* Deallocate memory used by aliasing data structures and the internal
6916 points-to solution. */
6917 delete_points_to_sets ();
6918
6919 gcc_assert (!need_ssa_update_p (cfun));
6920
6921 return 0;
6922 }
6923
6924 static bool
6925 gate_tree_pta (void)
6926 {
6927 return flag_tree_pta;
6928 }
6929
6930 /* A dummy pass to cause points-to information to be computed via
6931 TODO_rebuild_alias. */
6932
struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_tree_pta,		/* gate */
  NULL,				/* execute -- no body of its own; the
				   work happens via TODO_rebuild_alias
				   below */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_rebuild_alias		/* todo_flags_finish */
 }
};
6952
6953 /* A dummy pass to cause points-to information to be computed via
6954 TODO_rebuild_alias. */
6955
struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_tree_pta,		/* gate */
  NULL,				/* execute -- no body of its own; the
				   work happens via TODO_rebuild_alias
				   below */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_rebuild_alias		/* todo_flags_finish */
 }
};
6975
6976
6977 /* Return true if we should execute IPA PTA. */
6978 static bool
6979 gate_ipa_pta (void)
6980 {
6981 return (optimize
6982 && flag_ipa_pta
6983 /* Don't bother doing anything if the program has errors. */
6984 && !seen_error ());
6985 }
6986
/* IPA PTA solution for ESCAPED.  NOTE(review): positional initializer;
   keep the order in sync with the field declaration order of
   struct pt_solution (the leading `true' presumably sets the
   `anything' flag -- confirm against the struct definition).  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
6990
6991 /* Associate node with varinfo DATA. Worker for
6992 cgraph_for_node_and_aliases. */
6993 static bool
6994 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6995 {
6996 if (node->alias || node->thunk.thunk_p)
6997 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
6998 return false;
6999 }
7000
7001 /* Execute the driver for IPA PTA. */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Phase 1: create function infos for every defined function, and map
     its aliases and thunks to the same info.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->symbol.decl,
				     alias_get_name (node->symbol.decl));
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias)
	continue;

      get_vi_for_tree (var->symbol.decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  /* FROM tracks how many constraints existed before each function so
     the per-function dump below only shows the new ones.  */
  from = constraints.length ();

  /* Phase 2: generate constraints from each function body.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->symbol.decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->symbol.decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->symbol.used_from_other_partition
	  || node->symbol.externally_visible
	  || node->symbol.force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->symbol.decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  /* ESCAPED = <result>, i.e. the return value is part
		     of the escape solution.  */
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  /* PHIs with virtual results carry no points-to information.  */
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Phase 3: assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.  */
      fi = lookup_vi_for_tree (node->symbol.decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  A direct
		 call whose decl has no function info was not analyzed
		 here, so treat it like an external.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      /* Union the use/clobber solutions of every
			 possible callee into the call's sets.  */
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Mark the function as covered by IPA PTA so the local pass
	 does not recompute and discard this information.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7330
struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,		/* execute -- the whole-program
				   points-to analysis driver above */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_IPA_PTA,			/* tv_id */
  0,				/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_update_ssa		/* todo_flags_finish */
 }
};