re PR ipa/58492 (ICE: verify_flow_info failed)
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "sbitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "gimple-ssa.h"
34 #include "cgraph.h"
35 #include "tree-ssanames.h"
36 #include "tree-into-ssa.h"
37 #include "tree-dfa.h"
38 #include "tree-inline.h"
39 #include "diagnostic-core.h"
40 #include "hash-table.h"
41 #include "function.h"
42 #include "tree-pass.h"
43 #include "alloc-pool.h"
44 #include "splay-tree.h"
45 #include "params.h"
46 #include "alias.h"
47 #include "pointer-set.h"
48
49 /* The idea behind this analyzer is to generate set constraints from the
50 program, then solve the resulting constraints in order to generate the
51 points-to sets.
52
53 Set constraints are a way of modeling program analysis problems that
54 involve sets. They consist of an inclusion constraint language,
55 describing the variables (each variable is a set) and operations that
56 are involved on the variables, and a set of rules that derive facts
57 from these operations. To solve a system of set constraints, you derive
58 all possible facts under the rules, which gives you the correct sets
59 as a consequence.
60
61 See "Efficient Field-sensitive pointer analysis for C" by "David
62 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
63 http://citeseer.ist.psu.edu/pearce04efficient.html
64
65 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
66    of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
67 http://citeseer.ist.psu.edu/heintze01ultrafast.html
68
69 There are three types of real constraint expressions, DEREF,
70 ADDRESSOF, and SCALAR. Each constraint expression consists
71 of a constraint type, a variable, and an offset.
72
73 SCALAR is a constraint expression type used to represent x, whether
74 it appears on the LHS or the RHS of a statement.
75 DEREF is a constraint expression type used to represent *x, whether
76 it appears on the LHS or the RHS of a statement.
77 ADDRESSOF is a constraint expression used to represent &x, whether
78 it appears on the LHS or the RHS of a statement.
79
80 Each pointer variable in the program is assigned an integer id, and
81 each field of a structure variable is assigned an integer id as well.
82
83 Structure variables are linked to their list of fields through a "next
84 field" in each variable that points to the next field in offset
85 order.
86 Each variable for a structure field has
87
88 1. "size", that tells the size in bits of that field.
89    2. "fullsize", that tells the size in bits of the entire structure.
90 3. "offset", that tells the offset in bits from the beginning of the
91 structure to this field.
92
93 Thus,
94 struct f
95 {
96 int a;
97 int b;
98 } foo;
99 int *bar;
100
101 looks like
102
103 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
104 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
105 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106
107
108 In order to solve the system of set constraints, the following is
109 done:
110
111 1. Each constraint variable x has a solution set associated with it,
112 Sol(x).
113
114 2. Constraints are separated into direct, copy, and complex.
115 Direct constraints are ADDRESSOF constraints that require no extra
116 processing, such as P = &Q
117 Copy constraints are those of the form P = Q.
118 Complex constraints are all the constraints involving dereferences
119 and offsets (including offsetted copies).
120
121 3. All direct constraints of the form P = &Q are processed, such
122 that Q is added to Sol(P)
123
124 4. All complex constraints for a given constraint variable are stored in a
125 linked list attached to that variable's node.
126
127 5. A directed graph is built out of the copy constraints. Each
128 constraint variable is a node in the graph, and an edge from
129 Q to P is added for each copy constraint of the form P = Q
130
131 6. The graph is then walked, and solution sets are
132 propagated along the copy edges, such that an edge from Q to P
133 causes Sol(P) <- Sol(P) union Sol(Q).
134
135 7. As we visit each node, all complex constraints associated with
136 that node are processed by adding appropriate copy edges to the graph, or the
137 appropriate variables to the solution set.
138
139 8. The process of walking the graph is iterated until no solution
140 sets change.
141
142 Prior to walking the graph in steps 6 and 7, We perform static
143 cycle elimination on the constraint graph, as well
144 as off-line variable substitution.
145
146 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
147 on and turned into anything), but isn't. You can just see what offset
148 inside the pointed-to struct it's going to access.
149
150 TODO: Constant bounded arrays can be handled as if they were structs of the
151 same number of elements.
152
153 TODO: Modeling heap and incoming pointers becomes much better if we
154 add fields to them as we discover them, which we could do.
155
156 TODO: We could handle unions, but to be honest, it's probably not
157 worth the pain or slowdown. */
158
159 /* IPA-PTA optimizations possible.
160
161 When the indirect function called is ANYTHING we can add disambiguation
162 based on the function signatures (or simply the parameter count which
163 is the varinfo size). We also do not need to consider functions that
164 do not have their address taken.
165
166 The is_global_var bit which marks escape points is overly conservative
167 in IPA mode. Split it to is_escape_point and is_global_var - only
168 externally visible globals are escape points in IPA mode. This is
169 also needed to fix the pt_solution_includes_global predicate
170 (and thus ptr_deref_may_alias_global_p).
171
172 The way we introduce DECL_PT_UID to avoid fixing up all points-to
173 sets in the translation unit when we copy a DECL during inlining
174 pessimizes precision. The advantage is that the DECL_PT_UID keeps
175 compile-time and memory usage overhead low - the points-to sets
176 do not grow or get unshared as they would during a fixup phase.
177 An alternative solution is to delay IPA PTA until after all
178 inlining transformations have been applied.
179
180 The way we propagate clobber/use information isn't optimized.
181 It should use a new complex constraint that properly filters
182 out local variables of the callee (though that would make
183 the sets invalid after inlining). OTOH we might as well
184 admit defeat to WHOPR and simply do all the clobber/use analysis
185 and propagation after PTA finished but before we threw away
186 points-to information for memory variables. WHOPR and PTA
187 do not play along well anyway - the whole constraint solving
188 would need to be done in WPA phase and it will be very interesting
189 to apply the results to local SSA names during LTRANS phase.
190
191 We probably should compute a per-function unit-ESCAPE solution
192 propagating it simply like the clobber / uses solutions. The
193    solution can go alongside the non-IPA escaped solution and be
194 used to query which vars escape the unit through a function.
195
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
198
199 And probably more. */
200
/* Whether the analysis distinguishes fields of structures (one
   constraint variable per field).  When false every variable is a
   single blob.  */
static bool use_field_sensitive = true;

/* Nonzero while constraints are generated and solved across function
   boundaries (the IPA-PTA pass), zero for the per-function pass.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

/* Forward declarations.  */
static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Like EXECUTE_IF_SET_IN_BITMAP but a no-op when bitmap A is NULL.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
227
/* Statistics collected while building and solving the constraint
   system; dumped for diagnostics (counters are only ever
   incremented).  */
static struct constraint_stats
{
  /* Number of constraint variables created.  */
  unsigned int total_vars;
  /* Variables found not to be pointers.  */
  unsigned int nonpointer_vars;
  /* Variables unified before solving (offline).  */
  unsigned int unified_vars_static;
  /* Variables unified during solving (online cycle elimination).  */
  unsigned int unified_vars_dynamic;
  /* Solver worklist iterations.  */
  unsigned int iterations;
  /* Explicit graph edges added.  */
  unsigned int num_edges;
  /* Implicit (predecessor) edges added.  */
  unsigned int num_implicit_edges;
  /* Final points-to sets materialized.  */
  unsigned int points_to_sets_created;
} stats;
239
/* Per-constraint-variable information.  One of these exists for every
   constraint variable (a whole decl, a structure field, or an
   artificial variable created by the analysis).  Sub-fields of a
   structure are chained via NEXT/HEAD in offset order.  */
struct variable_info
{
  /* ID of this variable; also its index into the varmap vector.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up. */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed. */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable. */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable. */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable. */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers. */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers. */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable. */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info. */
  unsigned int is_fn_info : 1;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  Zero is usable as
     the terminator because variable ID zero is never allocated.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure. */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits. */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits. */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with. */
  tree decl;

  /* Points-to set for this variable. */
  bitmap solution;

  /* Old points-to set for this variable.  Lazily allocated by the
     solver (NULL until needed).  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
303
/* Forward declarations for field lookup helpers defined later.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures. */
static alloc_pool variable_info_pool;

/* Map varinfo to final pt_solution. */
static pointer_map_t *final_solutions;
/* Obstack backing the pt_solutions stored in FINAL_SOLUTIONS.  */
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id. */
static vec<varinfo_t> varmap;
320
321 /* Return the varmap element N */
322
323 static inline varinfo_t
324 get_varinfo (unsigned int n)
325 {
326 return varmap[n];
327 }
328
329 /* Return the next variable in the list of sub-variables of VI
330 or NULL if VI is the last sub-variable. */
331
332 static inline varinfo_t
333 vi_next (varinfo_t vi)
334 {
335 return get_varinfo (vi->next);
336 }
337
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  These are
   created, in this exact order, at initialization so the ids are
   fixed constants.  */
enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
343
344 /* Return a new variable info structure consisting for a variable
345 named NAME, and using constraint graph node NODE. Append it
346 to the vector of variable info structures. */
347
348 static varinfo_t
349 new_var_info (tree t, const char *name)
350 {
351 unsigned index = varmap.length ();
352 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
353
354 ret->id = index;
355 ret->name = name;
356 ret->decl = t;
357 /* Vars without decl are artificial and do not have sub-variables. */
358 ret->is_artificial_var = (t == NULL_TREE);
359 ret->is_special_var = false;
360 ret->is_unknown_size_var = false;
361 ret->is_full_var = (t == NULL_TREE);
362 ret->is_heap_var = false;
363 ret->may_have_pointers = true;
364 ret->only_restrict_pointers = false;
365 ret->is_global_var = (t == NULL_TREE);
366 ret->is_fn_info = false;
367 if (t && DECL_P (t))
368 ret->is_global_var = (is_global_var (t)
369 /* We have to treat even local register variables
370 as escape points. */
371 || (TREE_CODE (t) == VAR_DECL
372 && DECL_HARD_REGISTER (t)));
373 ret->solution = BITMAP_ALLOC (&pta_obstack);
374 ret->oldsolution = NULL;
375 ret->next = 0;
376 ret->head = ret->id;
377
378 stats.total_vars++;
379
380 varmap.safe_push (ret);
381
382 return ret;
383 }
384
385
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  The mapped value is the
   CALLUSED variable; the CALLCLOBBERED variable is chained right
   after it via the sub-variable chain.  */
static struct pointer_map_t *call_stmt_vars;
389
390 /* Lookup or create the variable for the call statement CALL. */
391
392 static varinfo_t
393 get_call_vi (gimple call)
394 {
395 void **slot_p;
396 varinfo_t vi, vi2;
397
398 slot_p = pointer_map_insert (call_stmt_vars, call);
399 if (*slot_p)
400 return (varinfo_t) *slot_p;
401
402 vi = new_var_info (NULL_TREE, "CALLUSED");
403 vi->offset = 0;
404 vi->size = 1;
405 vi->fullsize = 2;
406 vi->is_full_var = true;
407
408 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
409 vi2->offset = 1;
410 vi2->size = 1;
411 vi2->fullsize = 2;
412 vi2->is_full_var = true;
413
414 vi->next = vi2->id;
415
416 *slot_p = (void *) vi;
417 return vi;
418 }
419
420 /* Lookup the variable for the call statement CALL representing
421 the uses. Returns NULL if there is nothing special about this call. */
422
423 static varinfo_t
424 lookup_call_use_vi (gimple call)
425 {
426 void **slot_p;
427
428 slot_p = pointer_map_contains (call_stmt_vars, call);
429 if (slot_p)
430 return (varinfo_t) *slot_p;
431
432 return NULL;
433 }
434
435 /* Lookup the variable for the call statement CALL representing
436 the clobbers. Returns NULL if there is nothing special about this call. */
437
438 static varinfo_t
439 lookup_call_clobber_vi (gimple call)
440 {
441 varinfo_t uses = lookup_call_use_vi (call);
442 if (!uses)
443 return NULL;
444
445 return vi_next (uses);
446 }
447
448 /* Lookup or create the variable for the call statement CALL representing
449 the uses. */
450
451 static varinfo_t
452 get_call_use_vi (gimple call)
453 {
454 return get_call_vi (call);
455 }
456
457 /* Lookup or create the variable for the call statement CALL representing
458 the clobbers. */
459
460 static varinfo_t ATTRIBUTE_UNUSED
461 get_call_clobber_vi (gimple call)
462 {
463 return vi_next (get_call_vi (call));
464 }
465
466
/* The three kinds of constraint expression: SCALAR is plain x,
   DEREF is *x and ADDRESSOF is &x (see the file-header comment).  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint. */

struct constraint_expr
{
  /* Constraint type. */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint. */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member. */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset. */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
/* Forward declarations of the constraint-building workers.  */
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
495
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from. */

static vec<constraint_t> constraints;
/* Allocation pool the individual constraint structures come from.  */
static alloc_pool constraint_pool;
512
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  Node indices below FIRST_REF_NODE are
   constraint variables; the arrays below are indexed by node id.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map. */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution). */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles. */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution. */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built). */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets. */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution. */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding. */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph. */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes. */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution. */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution. */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0. */
  vec<constraint_t> *complex;
};

/* The single constraint graph instance being built/solved.  */
static constraint_graph_t graph;
588
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Node i + FIRST_REF_NODE stands for "*node i".  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
595
596 /* Return the representative node for NODE, if NODE has been unioned
597 with another NODE.
598 This function performs path compression along the way to finding
599 the representative. */
600
601 static unsigned int
602 find (unsigned int node)
603 {
604 gcc_checking_assert (node < graph->size);
605 if (graph->rep[node] != node)
606 return graph->rep[node] = find (graph->rep[node]);
607 return node;
608 }
609
610 /* Union the TO and FROM nodes to the TO nodes.
611 Note that at some point in the future, we may want to do
612 union-by-rank, in which case we are going to have to return the
613 node we unified to. */
614
615 static bool
616 unite (unsigned int to, unsigned int from)
617 {
618 gcc_checking_assert (to < graph->size && from < graph->size);
619 if (to != from && graph->rep[from] != to)
620 {
621 graph->rep[from] = to;
622 return true;
623 }
624 return false;
625 }
626
627 /* Create a new constraint consisting of LHS and RHS expressions. */
628
629 static constraint_t
630 new_constraint (const struct constraint_expr lhs,
631 const struct constraint_expr rhs)
632 {
633 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
634 ret->lhs = lhs;
635 ret->rhs = rhs;
636 return ret;
637 }
638
639 /* Print out constraint C to FILE. */
640
641 static void
642 dump_constraint (FILE *file, constraint_t c)
643 {
644 if (c->lhs.type == ADDRESSOF)
645 fprintf (file, "&");
646 else if (c->lhs.type == DEREF)
647 fprintf (file, "*");
648 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
649 if (c->lhs.offset == UNKNOWN_OFFSET)
650 fprintf (file, " + UNKNOWN");
651 else if (c->lhs.offset != 0)
652 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
653 fprintf (file, " = ");
654 if (c->rhs.type == ADDRESSOF)
655 fprintf (file, "&");
656 else if (c->rhs.type == DEREF)
657 fprintf (file, "*");
658 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
659 if (c->rhs.offset == UNKNOWN_OFFSET)
660 fprintf (file, " + UNKNOWN");
661 else if (c->rhs.offset != 0)
662 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
663 }
664
665
/* Externally-visible debug entry points, callable from a debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
671
672 /* Print out constraint C to stderr. */
673
674 DEBUG_FUNCTION void
675 debug_constraint (constraint_t c)
676 {
677 dump_constraint (stderr, c);
678 fprintf (stderr, "\n");
679 }
680
681 /* Print out all constraints to FILE */
682
683 static void
684 dump_constraints (FILE *file, int from)
685 {
686 int i;
687 constraint_t c;
688 for (i = from; constraints.iterate (i, &c); i++)
689 if (c)
690 {
691 dump_constraint (file, c);
692 fprintf (file, "\n");
693 }
694 }
695
696 /* Print out all constraints to stderr. */
697
698 DEBUG_FUNCTION void
699 debug_constraints (void)
700 {
701 dump_constraints (stderr, 0);
702 }
703
/* Print the constraint graph in dot format to FILE.  Emits one box
   per representative node (annotated with its complex constraints)
   followed by one edge per explicit successor.  Nodes at or above
   FIRST_REF_NODE stand for dereferences and print as "*name".  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized: */
  if (!graph)
    return;

  /* Prints the header of the dot file: */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Node 0 is skipped (it is
     the unused terminator id); unified nodes are printed only via
     their representative.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the complex constraints as a multi-line label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges. */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after unification) are not printed.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file. */
  fprintf (file, "}\n");
}
778
779 /* Print out the constraint graph to stderr. */
780
781 DEBUG_FUNCTION void
782 debug_constraint_graph (void)
783 {
784 dump_constraint_graph (stderr);
785 }
786
787 /* SOLVER FUNCTIONS
788
789 The solver is a simple worklist solver, that works on the following
790 algorithm:
791
792 sbitmap changed_nodes = all zeroes;
793 changed_count = 0;
794 For each node that is not already collapsed:
795 changed_count++;
796 set bit in changed nodes
797
798 while (changed_count > 0)
799 {
800 compute topological ordering for constraint graph
801
802 find and collapse cycles in the constraint graph (updating
803 changed if necessary)
804
805 for each node (n) in the graph in topological order:
806 changed_count--;
807
808 Process each complex constraint associated with the node,
809 updating changed if necessary.
810
811 For each outgoing edge from n, propagate the solution from n to
812 the destination of the edge, updating changed as necessary.
813
814 } */
815
816 /* Return true if two constraint expressions A and B are equal. */
817
818 static bool
819 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
820 {
821 return a.type == b.type && a.var == b.var && a.offset == b.offset;
822 }
823
824 /* Return true if constraint expression A is less than constraint expression
825 B. This is just arbitrary, but consistent, in order to give them an
826 ordering. */
827
828 static bool
829 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
830 {
831 if (a.type == b.type)
832 {
833 if (a.var == b.var)
834 return a.offset < b.offset;
835 else
836 return a.var < b.var;
837 }
838 else
839 return a.type < b.type;
840 }
841
842 /* Return true if constraint A is less than constraint B. This is just
843 arbitrary, but consistent, in order to give them an ordering. */
844
845 static bool
846 constraint_less (const constraint_t &a, const constraint_t &b)
847 {
848 if (constraint_expr_less (a->lhs, b->lhs))
849 return true;
850 else if (constraint_expr_less (b->lhs, a->lhs))
851 return false;
852 else
853 return constraint_expr_less (a->rhs, b->rhs);
854 }
855
856 /* Return true if two constraints A and B are equal. */
857
858 static bool
859 constraint_equal (struct constraint a, struct constraint b)
860 {
861 return constraint_expr_equal (a.lhs, b.lhs)
862 && constraint_expr_equal (a.rhs, b.rhs);
863 }
864
865
866 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
867
868 static constraint_t
869 constraint_vec_find (vec<constraint_t> vec,
870 struct constraint lookfor)
871 {
872 unsigned int place;
873 constraint_t found;
874
875 if (!vec.exists ())
876 return NULL;
877
878 place = vec.lower_bound (&lookfor, constraint_less);
879 if (place >= vec.length ())
880 return NULL;
881 found = vec[place];
882 if (!constraint_equal (*found, lookfor))
883 return NULL;
884 return found;
885 }
886
887 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
888
889 static void
890 constraint_set_union (vec<constraint_t> *to,
891 vec<constraint_t> *from)
892 {
893 int i;
894 constraint_t c;
895
896 FOR_EACH_VEC_ELT (*from, i, c)
897 {
898 if (constraint_vec_find (*to, *c) == NULL)
899 {
900 unsigned int place = to->lower_bound (c, constraint_less);
901 to->safe_insert (place, c);
902 }
903 }
904 }
905
/* Expands the solution in SET to all sub-fields of variables included.
   NOTE: both passes set bits in the very bitmap being iterated; the
   bitmap iterator tolerates this here because only head/sub-field ids
   of already-present variables are added.  */

static void
solution_set_expand (bitmap set)
{
  bitmap_iterator bi;
  unsigned j;

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (set, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Only process head variables (v->head == j) that can actually
	 have sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var
	  || v->head != j)
	continue;
      /* Walk the sub-variable chain and add every field.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (set, v->id);
    }
}
937
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  Returns true if TO changed.  INC may be UNKNOWN_OFFSET, in
   which case FROM is expanded to all sub-fields first.  */

static bool
set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of FROM contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (from, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* For zero offset simply union the solution into the destination.  */
  if (inc == 0)
    return bitmap_ior_into (to, from);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_expand (tmp);
      changed |= bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  /* Unsigned arithmetic: wraps deliberately when INC is
	     negative; the check below detects that wrap.  */
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (inc < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  changed |= bitmap_set_bit (to, vi->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (vi->offset != fieldoffset
	      && vi->next != 0)
	    changed |= bitmap_set_bit (to, vi->next);
	}
    }

  return changed;
}
1004
1005 /* Insert constraint C into the list of complex constraints for graph
1006 node VAR. */
1007
1008 static void
1009 insert_into_complex (constraint_graph_t graph,
1010 unsigned int var, constraint_t c)
1011 {
1012 vec<constraint_t> complex = graph->complex[var];
1013 unsigned int place = complex.lower_bound (c, constraint_less);
1014
1015 /* Only insert constraints that do not already exist. */
1016 if (place >= complex.length ()
1017 || !constraint_equal (*c, *complex[place]))
1018 graph->complex[var].safe_insert (place, c);
1019 }
1020
1021
1022 /* Condense two variable nodes into a single variable node, by moving
1023 all associated info from SRC to TO. */
1024
1025 static void
1026 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1027 unsigned int from)
1028 {
1029 unsigned int i;
1030 constraint_t c;
1031
1032 gcc_checking_assert (find (from) == to);
1033
1034 /* Move all complex constraints from src node into to node */
1035 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1036 {
1037 /* In complex constraints for node src, we may have either
1038 a = *src, and *src = a, or an offseted constraint which are
1039 always added to the rhs node's constraints. */
1040
1041 if (c->rhs.type == DEREF)
1042 c->rhs.var = to;
1043 else if (c->lhs.type == DEREF)
1044 c->lhs.var = to;
1045 else
1046 c->rhs.var = to;
1047 }
1048 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1049 graph->complex[from].release ();
1050 }
1051
1052
1053 /* Remove edges involving NODE from GRAPH. */
1054
1055 static void
1056 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1057 {
1058 if (graph->succs[node])
1059 BITMAP_FREE (graph->succs[node]);
1060 }
1061
1062 /* Merge GRAPH nodes FROM and TO into node TO. */
1063
1064 static void
1065 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1066 unsigned int from)
1067 {
1068 if (graph->indirect_cycles[from] != -1)
1069 {
1070 /* If we have indirect cycles with the from node, and we have
1071 none on the to node, the to node has indirect cycles from the
1072 from node now that they are unified.
1073 If indirect cycles exist on both, unify the nodes that they
1074 are in a cycle with, since we know they are in a cycle with
1075 each other. */
1076 if (graph->indirect_cycles[to] == -1)
1077 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1078 }
1079
1080 /* Merge all the successor edges. */
1081 if (graph->succs[from])
1082 {
1083 if (!graph->succs[to])
1084 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1085 bitmap_ior_into (graph->succs[to],
1086 graph->succs[from]);
1087 }
1088
1089 clear_edges_for_node (graph, from);
1090 }
1091
1092
1093 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1094 it doesn't exist in the graph already. */
1095
1096 static void
1097 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1098 unsigned int from)
1099 {
1100 if (to == from)
1101 return;
1102
1103 if (!graph->implicit_preds[to])
1104 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1105
1106 if (bitmap_set_bit (graph->implicit_preds[to], from))
1107 stats.num_implicit_edges++;
1108 }
1109
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Unlike add_graph_edge this
   does not report whether the edge was new.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Predecessor bitmaps are allocated lazily on the dedicated
     predecessor obstack.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1122
1123 /* Add a graph edge to GRAPH, going from FROM to TO if
1124 it doesn't exist in the graph already.
1125 Return false if the edge already existed, true otherwise. */
1126
1127 static bool
1128 add_graph_edge (constraint_graph_t graph, unsigned int to,
1129 unsigned int from)
1130 {
1131 if (to == from)
1132 {
1133 return false;
1134 }
1135 else
1136 {
1137 bool r = false;
1138
1139 if (!graph->succs[from])
1140 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1141 if (bitmap_set_bit (graph->succs[from], to))
1142 {
1143 r = true;
1144 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1145 stats.num_edges++;
1146 }
1147 return r;
1148 }
1149 }
1150
1151
/* Initialize the constraint graph structure to contain SIZE nodes.
   Allocates the global GRAPH and its per-node arrays; edge bitmaps
   are allocated lazily later.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  /* Every node starts as its own union-find representative, with no
     pointer-equivalence representative and no known indirect cycle.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
1178
/* Build the constraint graph, adding only predecessor edges right now.
   Also computes the set of direct nodes (nodes whose points-to set is
   fully determined by their predecessors) used by offline variable
   substitution.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* An offsetted or non-scalar dereference cannot be tracked
	       exactly, so x is no longer a direct node.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  For a
	     field of an aggregate this includes every sibling field.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side's counterpart
	     non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1281
/* Build the constraint graph, adding successor edges.  Operates on
   the union-find representatives computed by the offline phases.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been removed (set to NULL) by the
	 rewriting phases.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Always work on the current representatives.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  Seed y directly into x's initial solution.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1341
1342
1343 /* Changed variables on the last iteration. */
1344 static bitmap changed;
1345
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already visited by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  sbitmap deleted;
  /* DFS number assigned to each node.  */
  unsigned int *dfs;
  /* Representative mapping for nodes collapsed during condensation.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC member nodes (Nuutila keeps only non-root
     nodes here).  */
  vec<unsigned> scc_stack;
};
1357
1358
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number back to N.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root; pop all members of its SCC off the stack.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* Unify towards the member with the smallest id, which is
	     guaranteed to be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N is not a root; keep it on the stack for its SCC's root.  */
    si->scc_stack.safe_push (n);
}
1446
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative of FROM.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution bitmaps are dead after the merge.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's old solution is stale once a merge happened during
	 iteration; drop it so it gets recomputed lazily.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* A self edge on TO is useless after collapsing.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1498
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  Filled in by topo_visit.  */
  vec<unsigned> topo_order;
};
1509
1510
1511 /* Initialize and return a topological info structure. */
1512
1513 static struct topo_info *
1514 init_topo_info (void)
1515 {
1516 size_t size = graph->size;
1517 struct topo_info *ti = XNEW (struct topo_info);
1518 ti->visited = sbitmap_alloc (size);
1519 bitmap_clear (ti->visited);
1520 ti->topo_order.create (1);
1521 return ti;
1522 }
1523
1524
1525 /* Free the topological sort info pointed to by TI. */
1526
1527 static void
1528 free_topo_info (struct topo_info *ti)
1529 {
1530 sbitmap_free (ti->visited);
1531 ti->topo_order.release ();
1532 free (ti);
1533 }
1534
1535 /* Visit the graph in topological order, and store the order in the
1536 topo_info structure. */
1537
1538 static void
1539 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1540 unsigned int n)
1541 {
1542 bitmap_iterator bi;
1543 unsigned int j;
1544
1545 bitmap_set_bit (ti->visited, n);
1546
1547 if (graph->succs[n])
1548 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1549 {
1550 if (!bitmap_bit_p (ti->visited, j))
1551 topo_visit (graph, ti, j);
1552 }
1553
1554 ti->topo_order.safe_push (n);
1555 }
1556
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  Unions the solutions of everything DELTA
   may point to (at the given offset) into x's solution and adds the
   corresponding graph edges.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the fields from FIELDOFFSET onwards.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1644
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  Unions y's solution into everything
   DELTA may point to (at the given offset), recording escapes for
   stores into globals.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the fields from FIELDOFFSET onwards.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      /* Stores to special vars need no further processing.  */
	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1742
1743 /* Handle a non-simple (simple meaning requires no iteration),
1744 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1745
1746 static void
1747 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1748 {
1749 if (c->lhs.type == DEREF)
1750 {
1751 if (c->rhs.type == ADDRESSOF)
1752 {
1753 gcc_unreachable ();
1754 }
1755 else
1756 {
1757 /* *x = y */
1758 do_ds_constraint (c, delta);
1759 }
1760 }
1761 else if (c->rhs.type == DEREF)
1762 {
1763 /* x = *y */
1764 if (!(get_varinfo (c->lhs.var)->is_special_var))
1765 do_sd_constraint (graph, c, delta);
1766 }
1767 else
1768 {
1769 bitmap tmp;
1770 bitmap solution;
1771 bool flag = false;
1772
1773 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1774 solution = get_varinfo (c->rhs.var)->solution;
1775 tmp = get_varinfo (c->lhs.var)->solution;
1776
1777 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1778
1779 if (flag)
1780 bitmap_set_bit (changed, c->lhs.var);
1781 }
1782 }
1783
1784 /* Initialize and return a new SCC info structure. */
1785
1786 static struct scc_info *
1787 init_scc_info (size_t size)
1788 {
1789 struct scc_info *si = XNEW (struct scc_info);
1790 size_t i;
1791
1792 si->current_index = 0;
1793 si->visited = sbitmap_alloc (size);
1794 bitmap_clear (si->visited);
1795 si->deleted = sbitmap_alloc (size);
1796 bitmap_clear (si->deleted);
1797 si->node_mapping = XNEWVEC (unsigned int, size);
1798 si->dfs = XCNEWVEC (unsigned int, size);
1799
1800 for (i = 0; i < size; i++)
1801 si->node_mapping[i] = i;
1802
1803 si->scc_stack.create (1);
1804 return si;
1805 }
1806
1807 /* Free an SCC info structure pointed to by SI */
1808
1809 static void
1810 free_scc_info (struct scc_info *si)
1811 {
1812 sbitmap_free (si->visited);
1813 sbitmap_free (si->deleted);
1814 free (si->node_mapping);
1815 free (si->dfs);
1816 si->scc_stack.release ();
1817 free (si);
1818 }
1819
1820
1821 /* Find indirect cycles in GRAPH that occur, using strongly connected
1822 components, and note them in the indirect cycles map.
1823
1824 This technique comes from Ben Hardekopf and Calvin Lin,
1825 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1826 Lines of Code", submitted to PLDI 2007. */
1827
1828 static void
1829 find_indirect_cycles (constraint_graph_t graph)
1830 {
1831 unsigned int i;
1832 unsigned int size = graph->size;
1833 struct scc_info *si = init_scc_info (size);
1834
1835 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1836 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1837 scc_visit (graph, si, i);
1838
1839 free_scc_info (si);
1840 }
1841
1842 /* Compute a topological ordering for GRAPH, and store the result in the
1843 topo_info structure TI. */
1844
1845 static void
1846 compute_topo_order (constraint_graph_t graph,
1847 struct topo_info *ti)
1848 {
1849 unsigned int i;
1850 unsigned int size = graph->size;
1851
1852 for (i = 0; i != size; ++i)
1853 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1854 topo_visit (graph, ti, i);
1855 }
1856
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS.  */
  hashval_t hashcode;
  /* Equivalence class number assigned to LABELS; 0 means none yet.  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1867
/* Equiv_class_label hashtable helpers.  Entries are malloc'd, hence
   typed_free_remove.  */

struct equiv_class_hasher : typed_free_remove <equiv_class_label>
{
  typedef equiv_class_label value_type;
  typedef equiv_class_label compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
1877
/* Hash function for a equiv_class_label_t.  */

inline hashval_t
equiv_class_hasher::hash (const value_type *ecl)
{
  /* The hash is computed once when the entry is created.  */
  return ecl->hashcode;
}
1885
1886 /* Equality function for two equiv_class_label_t's. */
1887
1888 inline bool
1889 equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
1890 {
1891 return (eql1->hashcode == eql2->hashcode
1892 && bitmap_equal_p (eql1->labels, eql2->labels));
1893 }
1894
1895 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1896 classes. */
1897 static hash_table <equiv_class_hasher> pointer_equiv_class_table;
1898
1899 /* A hashtable for mapping a bitmap of labels->location equivalence
1900 classes. */
1901 static hash_table <equiv_class_hasher> location_equiv_class_table;
1902
/* Look up the equivalence class entry for the bitmap of LABELS in
   TABLE, inserting a fresh entry (with equivalence class 0, i.e. not
   yet assigned) if none exists.  Returns the found or new entry.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
  if (!*slot)
    {
      /* Not present yet; record LABELS with an unassigned class.  */
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1926
1927 /* Perform offline variable substitution.
1928
1929 This is a worst case quadratic time way of identifying variables
1930 that must have equivalent points-to sets, including those caused by
1931 static cycles, and single entry subgraphs, in the constraint graph.
1932
   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis", in the 14th International
   Static Analysis Symposium (SAS), August 2007.  It is known as the
1936 "HU" algorithm, and is equivalent to value numbering the collapsed
1937 constraint graph including evaluating unions.
1938
1939 The general method of finding equivalence classes is as follows:
1940 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1941 Initialize all non-REF nodes to be direct nodes.
1942 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1943 variable}
1944 For each constraint containing the dereference, we also do the same
1945 thing.
1946
1947 We then compute SCC's in the graph and unify nodes in the same SCC,
1948 including pts sets.
1949
1950 For each non-collapsed node x:
1951 Visit all unvisited explicit incoming edges.
1952 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1953 where y->x.
1954 Lookup the equivalence class for pts(x).
1955 If we found one, equivalence_class(x) = found class.
1956 Otherwise, equivalence_class(x) = new class, and new_class is
1957 added to the lookup table.
1958
1959 All direct nodes with the same equivalence class can be replaced
1960 with a single representative node.
1961 All unlabeled nodes (label == 0) are not pointers and all edges
1962 involving them can be eliminated.
1963 We perform these optimizations during rewrite_constraints
1964
1965 In addition to pointer equivalence class finding, we also perform
1966 location equivalence class finding. This is the set of variables
1967 that always appear together in points-to sets. We use this to
1968 compress the size of the points-to sets. */
1969
1970 /* Current maximum pointer equivalence class id. */
1971 static int pointer_equiv_class;
1972
1973 /* Current maximum location equivalence class id. */
1974 static int location_equiv_class;
1975
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the offline
   predecessor graph (explicit and implicit edges) and collapses each
   SCC onto its root via SI->node_mapping.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number back to N.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root; fold all other members onto it.  */
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* One non-direct member makes the whole SCC non-direct.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2064
/* Label pointer equivalences.

   Assigns a pointer-equivalence class label to node N after unioning
   the points-to sets of its already-labeled predecessors.  Nodes with
   identical points-to sets receive the same label and may share the
   same bitmap.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges.  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating the union until a second non-empty
		 predecessor shows up; a single predecessor's set can
		 be shared.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      /* Reuse an existing class for an identical points-to set,
	 sharing its bitmap; otherwise hand out a fresh class.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2151
2152 /* Print the pred graph in dot format. */
2153
2154 static void
2155 dump_pred_graph (struct scc_info *si, FILE *file)
2156 {
2157 unsigned int i;
2158
2159 /* Only print the graph if it has already been initialized: */
2160 if (!graph)
2161 return;
2162
2163 /* Prints the header of the dot file: */
2164 fprintf (file, "strict digraph {\n");
2165 fprintf (file, " node [\n shape = box\n ]\n");
2166 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2167 fprintf (file, "\n // List of nodes and complex constraints in "
2168 "the constraint graph:\n");
2169
2170 /* The next lines print the nodes in the graph together with the
2171 complex constraints attached to them. */
2172 for (i = 1; i < graph->size; i++)
2173 {
2174 if (i == FIRST_REF_NODE)
2175 continue;
2176 if (si->node_mapping[i] != i)
2177 continue;
2178 if (i < FIRST_REF_NODE)
2179 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2180 else
2181 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2182 if (graph->points_to[i]
2183 && !bitmap_empty_p (graph->points_to[i]))
2184 {
2185 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2186 unsigned j;
2187 bitmap_iterator bi;
2188 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2189 fprintf (file, " %d", j);
2190 fprintf (file, " }\"]");
2191 }
2192 fprintf (file, ";\n");
2193 }
2194
2195 /* Go over the edges. */
2196 fprintf (file, "\n // Edges in the constraint graph:\n");
2197 for (i = 1; i < graph->size; i++)
2198 {
2199 unsigned j;
2200 bitmap_iterator bi;
2201 if (si->node_mapping[i] != i)
2202 continue;
2203 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2204 {
2205 unsigned from = si->node_mapping[j];
2206 if (from < FIRST_REF_NODE)
2207 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2208 else
2209 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2210 fprintf (file, " -> ");
2211 if (i < FIRST_REF_NODE)
2212 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2213 else
2214 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2215 fprintf (file, ";\n");
2216 }
2217 }
2218
2219 /* Prints the tail of the dot file. */
2220 fprintf (file, "}\n");
2221 }
2222
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Driver for the offline phase: condense SCCs in the predecessor
   graph, compute pointer- and location-equivalence labels, and strip
   edges from variables proven to be non-pointers.  Returns the
   scc_info which the caller must later release with
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table.create (511);
  location_equiv_class_table.create (511);
  /* Class 0 is reserved for "not a pointer"; real classes start at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The table already owns an equal bitmap; ours is redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump the computed SCC mapping and equivalence classes.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means the labeling pass proved the variable
	 can never hold a pointer.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2354
/* Free information that was only necessary for variable
   substitution.

   SI is the scc_info returned by perform_var_substitution.  Also
   releases the per-node substitution arrays on the global GRAPH, the
   two equivalence-class hash tables and the iteration obstack (which
   owns the location-equivalence bitmaps).  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  pointer_equiv_class_table.dispose ();
  location_equiv_class_table.dispose ();
  bitmap_obstack_release (&iteration_obstack);
}
2372
2373 /* Return an existing node that is equivalent to NODE, which has
2374 equivalence class LABEL, if one exists. Return NODE otherwise. */
2375
2376 static unsigned int
2377 find_equivalent_node (constraint_graph_t graph,
2378 unsigned int node, unsigned int label)
2379 {
2380 /* If the address version of this variable is unused, we can
2381 substitute it for anything else with the same label.
2382 Otherwise, we know the pointers are equivalent, but not the
2383 locations, and we can unite them later. */
2384
2385 if (!bitmap_bit_p (graph->address_taken, node))
2386 {
2387 gcc_checking_assert (label < graph->size);
2388
2389 if (graph->eq_rep[label] != -1)
2390 {
2391 /* Unify the two variables since we know they are equivalent. */
2392 if (unite (graph->eq_rep[label], node))
2393 unify_nodes (graph, graph->eq_rep[label], node, false);
2394 return graph->eq_rep[label];
2395 }
2396 else
2397 {
2398 graph->eq_rep[label] = node;
2399 graph->pe_rep[label] = node;
2400 }
2401 }
2402 else
2403 {
2404 gcc_checking_assert (label < graph->size);
2405 graph->pe[node] = label;
2406 if (graph->pe_rep[label] == -1)
2407 graph->pe_rep[label] = node;
2408 }
2409
2410 return node;
2411 }
2412
2413 /* Unite pointer equivalent but not location equivalent nodes in
2414 GRAPH. This may only be performed once variable substitution is
2415 finished. */
2416
2417 static void
2418 unite_pointer_equivalences (constraint_graph_t graph)
2419 {
2420 unsigned int i;
2421
2422 /* Go through the pointer equivalences and unite them to their
2423 representative, if they aren't already. */
2424 for (i = 1; i < FIRST_REF_NODE; i++)
2425 {
2426 unsigned int label = graph->pe[i];
2427 if (label)
2428 {
2429 int label_rep = graph->pe_rep[label];
2430
2431 if (label_rep == -1)
2432 continue;
2433
2434 label_rep = find (label_rep);
2435 if (label_rep >= 0 && unite (label_rep, find (i)))
2436 unify_nodes (graph, label_rep, i, false);
2437 }
2438 }
2439 }
2440
2441 /* Move complex constraints to the GRAPH nodes they belong to. */
2442
2443 static void
2444 move_complex_constraints (constraint_graph_t graph)
2445 {
2446 int i;
2447 constraint_t c;
2448
2449 FOR_EACH_VEC_ELT (constraints, i, c)
2450 {
2451 if (c)
2452 {
2453 struct constraint_expr lhs = c->lhs;
2454 struct constraint_expr rhs = c->rhs;
2455
2456 if (lhs.type == DEREF)
2457 {
2458 insert_into_complex (graph, lhs.var, c);
2459 }
2460 else if (rhs.type == DEREF)
2461 {
2462 if (!(get_varinfo (lhs.var)->is_special_var))
2463 insert_into_complex (graph, rhs.var, c);
2464 }
2465 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2466 && (lhs.offset != 0 || rhs.offset != 0))
2467 {
2468 insert_into_complex (graph, rhs.var, c);
2469 }
2470 }
2471 }
2472 }
2473
2474
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.

   Constraints whose lhs or rhs was proven to be a non-pointer
   (pointer label 0) are dropped (slot set to NULL); the remaining
   constraints are rewritten onto equivalence-class representatives.  */

static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  constraint_t c;

#ifdef ENABLE_CHECKING
  /* At this point every node must be its own union-find
     representative.  */
  for (unsigned int j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);
#endif

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Collapse onto the class representatives; this may unify
	 nodes as a side effect.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2543
2544 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2545 part of an SCC, false otherwise. */
2546
2547 static bool
2548 eliminate_indirect_cycles (unsigned int node)
2549 {
2550 if (graph->indirect_cycles[node] != -1
2551 && !bitmap_empty_p (get_varinfo (node)->solution))
2552 {
2553 unsigned int i;
2554 vec<unsigned> queue = vNULL;
2555 int queuepos;
2556 unsigned int to = find (graph->indirect_cycles[node]);
2557 bitmap_iterator bi;
2558
2559 /* We can't touch the solution set and call unify_nodes
2560 at the same time, because unify_nodes is going to do
2561 bitmap unions into it. */
2562
2563 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2564 {
2565 if (find (i) == i && i != to)
2566 {
2567 if (unite (to, i))
2568 queue.safe_push (i);
2569 }
2570 }
2571
2572 for (queuepos = 0;
2573 queue.iterate (queuepos, &i);
2574 queuepos++)
2575 {
2576 unify_nodes (graph, to, i, true);
2577 }
2578 queue.release ();
2579 return true;
2580 }
2581 return false;
2582 }
2583
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  /* Global worklist of nodes whose solution changed.  */
  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate to a fixed point.  Each outer iteration recomputes a
     topological order and sweeps all pending nodes.  */
  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS = bits added since the last time we processed I.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* Nothing new to propagate.  */
	      if (bitmap_empty_p (pts))
		continue;

	      /* Record the delta into the old solution, allocating it
		 lazily on first use.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Successor's solution grew; queue it.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2740
2741 /* Map from trees to variable infos. */
2742 static struct pointer_map_t *vi_for_tree;
2743
2744
2745 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2746
2747 static void
2748 insert_vi_for_tree (tree t, varinfo_t vi)
2749 {
2750 void **slot = pointer_map_insert (vi_for_tree, t);
2751 gcc_assert (vi);
2752 gcc_assert (*slot == NULL);
2753 *slot = vi;
2754 }
2755
2756 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2757 exist in the map, return NULL, otherwise, return the varinfo we found. */
2758
2759 static varinfo_t
2760 lookup_vi_for_tree (tree t)
2761 {
2762 void **slot = pointer_map_contains (vi_for_tree, t);
2763 if (slot == NULL)
2764 return NULL;
2765
2766 return (varinfo_t) *slot;
2767 }
2768
/* Return a printable name for DECL.

   Only used for dump output: when no dump file is active, "NULL" is
   returned immediately to avoid the allocation work below.  SSA names
   get a "base_version" style name, decls use their assembler name,
   source name, or a "D.uid" fallback.  Generated strings are copied
   into GC memory (ggc_strdup) so the returned pointer stays valid
   after the asprintf buffer is freed.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = NULL;
  char *temp;
  int num_printed = 0;

  if (!dump_file)
    return "NULL";

  if (TREE_CODE (decl) == SSA_NAME)
    {
      res = get_name (decl);
      if (res)
	num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
      else
	num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
      /* asprintf returns a negative value on failure; only then do we
	 keep the (possibly NULL) name from get_name.  */
      if (num_printed > 0)
	{
	  res = ggc_strdup (temp);
	  free (temp);
	}
    }
  else if (DECL_P (decl))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      else
	{
	  res = get_name (decl);
	  if (!res)
	    {
	      /* Anonymous decl: synthesize a name from its UID.  */
	      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
	      if (num_printed > 0)
		{
		  res = ggc_strdup (temp);
		  free (temp);
		}
	    }
	}
    }
  if (res != NULL)
    return res;

  return "NULL";
}
2817
2818 /* Find the variable id for tree T in the map.
2819 If T doesn't exist in the map, create an entry for it and return it. */
2820
2821 static varinfo_t
2822 get_vi_for_tree (tree t)
2823 {
2824 void **slot = pointer_map_contains (vi_for_tree, t);
2825 if (slot == NULL)
2826 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2827
2828 return (varinfo_t) *slot;
2829 }
2830
2831 /* Get a scalar constraint expression for a new temporary variable. */
2832
2833 static struct constraint_expr
2834 new_scalar_tmp_constraint_exp (const char *name)
2835 {
2836 struct constraint_expr tmp;
2837 varinfo_t vi;
2838
2839 vi = new_var_info (NULL_TREE, name);
2840 vi->offset = 0;
2841 vi->size = -1;
2842 vi->fullsize = -1;
2843 vi->is_full_var = 1;
2844
2845 tmp.var = vi->id;
2846 tmp.type = SCALAR;
2847 tmp.offset = 0;
2848
2849 return tmp;
2850 }
2851
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.

   Pushes one or more SCALAR constraint expressions onto RESULTS; when
   ADDRESS_P is false and T has sub-fields, one expression per
   sub-field is pushed.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias && node->analyzed)
	{
	  /* Follow the alias chain to the ultimate variable.  */
	  node = varpool_variable_node (node, NULL);
	  t = node->decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* NOTE(review): this branch is asserted unreachable, so the
	 assignments below are dead; kept as written upstream.  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2915
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.

   Constraints involving a double dereference or an address stored
   through a pointer are split into two simpler constraints via a
   fresh temporary, recursing on each half.  Constraints that cannot
   affect points-to results (non-pointer lhs or rhs) are dropped.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Simple constraint: record it as-is.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
2971
2972
2973 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2974 structure. */
2975
2976 static HOST_WIDE_INT
2977 bitpos_of_field (const tree fdecl)
2978 {
2979 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2980 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2981 return -1;
2982
2983 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2984 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2985 }
2986
2987
2988 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2989 resulting constraint expressions in *RESULTS. */
2990
2991 static void
2992 get_constraint_for_ptr_offset (tree ptr, tree offset,
2993 vec<ce_s> *results)
2994 {
2995 struct constraint_expr c;
2996 unsigned int j, n;
2997 HOST_WIDE_INT rhsoffset;
2998
2999 /* If we do not do field-sensitive PTA adding offsets to pointers
3000 does not change the points-to solution. */
3001 if (!use_field_sensitive)
3002 {
3003 get_constraint_for_rhs (ptr, results);
3004 return;
3005 }
3006
3007 /* If the offset is not a non-negative integer constant that fits
3008 in a HOST_WIDE_INT, we have to fall back to a conservative
3009 solution which includes all sub-fields of all pointed-to
3010 variables of ptr. */
3011 if (offset == NULL_TREE
3012 || TREE_CODE (offset) != INTEGER_CST)
3013 rhsoffset = UNKNOWN_OFFSET;
3014 else
3015 {
3016 /* Sign-extend the offset. */
3017 double_int soffset = tree_to_double_int (offset)
3018 .sext (TYPE_PRECISION (TREE_TYPE (offset)));
3019 if (!soffset.fits_shwi ())
3020 rhsoffset = UNKNOWN_OFFSET;
3021 else
3022 {
3023 /* Make sure the bit-offset also fits. */
3024 HOST_WIDE_INT rhsunitoffset = soffset.low;
3025 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3026 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3027 rhsoffset = UNKNOWN_OFFSET;
3028 }
3029 }
3030
3031 get_constraint_for_rhs (ptr, results);
3032 if (rhsoffset == 0)
3033 return;
3034
3035 /* As we are eventually appending to the solution do not use
3036 vec::iterate here. */
3037 n = results->length ();
3038 for (j = 0; j < n; j++)
3039 {
3040 varinfo_t curr;
3041 c = (*results)[j];
3042 curr = get_varinfo (c.var);
3043
3044 if (c.type == ADDRESSOF
3045 /* If this varinfo represents a full variable just use it. */
3046 && curr->is_full_var)
3047 c.offset = 0;
3048 else if (c.type == ADDRESSOF
3049 /* If we do not know the offset add all subfields. */
3050 && rhsoffset == UNKNOWN_OFFSET)
3051 {
3052 varinfo_t temp = get_varinfo (curr->head);
3053 do
3054 {
3055 struct constraint_expr c2;
3056 c2.var = temp->id;
3057 c2.type = ADDRESSOF;
3058 c2.offset = 0;
3059 if (c2.var != c.var)
3060 results->safe_push (c2);
3061 temp = vi_next (temp);
3062 }
3063 while (temp);
3064 }
3065 else if (c.type == ADDRESSOF)
3066 {
3067 varinfo_t temp;
3068 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3069
3070 /* Search the sub-field which overlaps with the
3071 pointed-to offset. If the result is outside of the variable
3072 we have to provide a conservative result, as the variable is
3073 still reachable from the resulting pointer (even though it
3074 technically cannot point to anything). The last and first
3075 sub-fields are such conservative results.
3076 ??? If we always had a sub-field for &object + 1 then
3077 we could represent this in a more precise way. */
3078 if (rhsoffset < 0
3079 && curr->offset < offset)
3080 offset = 0;
3081 temp = first_or_preceding_vi_for_offset (curr, offset);
3082
3083 /* If the found variable is not exactly at the pointed to
3084 result, we have to include the next variable in the
3085 solution as well. Otherwise two increments by offset / 2
3086 do not result in the same or a conservative superset
3087 solution. */
3088 if (temp->offset != offset
3089 && temp->next != 0)
3090 {
3091 struct constraint_expr c2;
3092 c2.var = temp->next;
3093 c2.type = ADDRESSOF;
3094 c2.offset = 0;
3095 results->safe_push (c2);
3096 }
3097 c.var = temp->id;
3098 c.offset = 0;
3099 }
3100 else
3101 c.offset = rhsoffset;
3102
3103 (*results)[j] = c;
3104 }
3105 }
3106
3107
3108 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3109 If address_p is true the result will be taken its address of.
3110 If lhs_p is true then the constraint expression is assumed to be used
3111 as the lhs. */
3112
3113 static void
3114 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3115 bool address_p, bool lhs_p)
3116 {
3117 tree orig_t = t;
3118 HOST_WIDE_INT bitsize = -1;
3119 HOST_WIDE_INT bitmaxsize = -1;
3120 HOST_WIDE_INT bitpos;
3121 tree forzero;
3122
3123 /* Some people like to do cute things like take the address of
3124 &0->a.b */
3125 forzero = t;
3126 while (handled_component_p (forzero)
3127 || INDIRECT_REF_P (forzero)
3128 || TREE_CODE (forzero) == MEM_REF)
3129 forzero = TREE_OPERAND (forzero, 0);
3130
3131 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3132 {
3133 struct constraint_expr temp;
3134
3135 temp.offset = 0;
3136 temp.var = integer_id;
3137 temp.type = SCALAR;
3138 results->safe_push (temp);
3139 return;
3140 }
3141
3142 /* Handle type-punning through unions. If we are extracting a pointer
3143 from a union via a possibly type-punning access that pointer
3144 points to anything, similar to a conversion of an integer to
3145 a pointer. */
3146 if (!lhs_p)
3147 {
3148 tree u;
3149 for (u = t;
3150 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3151 u = TREE_OPERAND (u, 0))
3152 if (TREE_CODE (u) == COMPONENT_REF
3153 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3154 {
3155 struct constraint_expr temp;
3156
3157 temp.offset = 0;
3158 temp.var = anything_id;
3159 temp.type = ADDRESSOF;
3160 results->safe_push (temp);
3161 return;
3162 }
3163 }
3164
3165 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3166
3167 /* Pretend to take the address of the base, we'll take care of
3168 adding the required subset of sub-fields below. */
3169 get_constraint_for_1 (t, results, true, lhs_p);
3170 gcc_assert (results->length () == 1);
3171 struct constraint_expr &result = results->last ();
3172
3173 if (result.type == SCALAR
3174 && get_varinfo (result.var)->is_full_var)
3175 /* For single-field vars do not bother about the offset. */
3176 result.offset = 0;
3177 else if (result.type == SCALAR)
3178 {
3179 /* In languages like C, you can access one past the end of an
3180 array. You aren't allowed to dereference it, so we can
3181 ignore this constraint. When we handle pointer subtraction,
3182 we may have to do something cute here. */
3183
3184 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3185 && bitmaxsize != 0)
3186 {
3187 /* It's also not true that the constraint will actually start at the
3188 right offset, it may start in some padding. We only care about
3189 setting the constraint to the first actual field it touches, so
3190 walk to find it. */
3191 struct constraint_expr cexpr = result;
3192 varinfo_t curr;
3193 results->pop ();
3194 cexpr.offset = 0;
3195 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3196 {
3197 if (ranges_overlap_p (curr->offset, curr->size,
3198 bitpos, bitmaxsize))
3199 {
3200 cexpr.var = curr->id;
3201 results->safe_push (cexpr);
3202 if (address_p)
3203 break;
3204 }
3205 }
3206 /* If we are going to take the address of this field then
3207 to be able to compute reachability correctly add at least
3208 the last field of the variable. */
3209 if (address_p && results->length () == 0)
3210 {
3211 curr = get_varinfo (cexpr.var);
3212 while (curr->next != 0)
3213 curr = vi_next (curr);
3214 cexpr.var = curr->id;
3215 results->safe_push (cexpr);
3216 }
3217 else if (results->length () == 0)
3218 /* Assert that we found *some* field there. The user couldn't be
3219 accessing *only* padding. */
3220 /* Still the user could access one past the end of an array
3221 embedded in a struct resulting in accessing *only* padding. */
3222 /* Or accessing only padding via type-punning to a type
3223 	 that has a field just in padding space. */
3224 {
3225 cexpr.type = SCALAR;
3226 cexpr.var = anything_id;
3227 cexpr.offset = 0;
3228 results->safe_push (cexpr);
3229 }
3230 }
3231 else if (bitmaxsize == 0)
3232 {
3233 if (dump_file && (dump_flags & TDF_DETAILS))
3234 fprintf (dump_file, "Access to zero-sized part of variable,"
3235 "ignoring\n");
3236 }
3237 else
3238 if (dump_file && (dump_flags & TDF_DETAILS))
3239 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3240 }
3241 else if (result.type == DEREF)
3242 {
3243 /* If we do not know exactly where the access goes say so. Note
3244 that only for non-structure accesses we know that we access
3245 	 at most one subfield of any variable. */
3246 if (bitpos == -1
3247 || bitsize != bitmaxsize
3248 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3249 || result.offset == UNKNOWN_OFFSET)
3250 result.offset = UNKNOWN_OFFSET;
3251 else
3252 result.offset += bitpos;
3253 }
3254 else if (result.type == ADDRESSOF)
3255 {
3256 /* We can end up here for component references on a
3257 VIEW_CONVERT_EXPR <>(&foobar). */
3258 result.type = SCALAR;
3259 result.var = anything_id;
3260 result.offset = 0;
3261 }
3262 else
3263 gcc_unreachable ();
3264 }
3265
3266
3267 /* Dereference the constraint expression CONS, and return the result.
3268 DEREF (ADDRESSOF) = SCALAR
3269 DEREF (SCALAR) = DEREF
3270 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3271 This is needed so that we can handle dereferencing DEREF constraints. */
3272
3273 static void
3274 do_deref (vec<ce_s> *constraints)
3275 {
3276 struct constraint_expr *c;
3277 unsigned int i = 0;
3278
3279 FOR_EACH_VEC_ELT (*constraints, i, c)
3280 {
3281 if (c->type == SCALAR)
3282 c->type = DEREF;
3283 else if (c->type == ADDRESSOF)
3284 c->type = SCALAR;
3285 else if (c->type == DEREF)
3286 {
3287 struct constraint_expr tmplhs;
3288 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3289 process_constraint (new_constraint (tmplhs, *c));
3290 c->var = tmplhs.var;
3291 }
3292 else
3293 gcc_unreachable ();
3294 }
3295 }
3296
3297 /* Given a tree T, return the constraint expression for taking the
3298 address of it. */
3299
3300 static void
3301 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3302 {
3303 struct constraint_expr *c;
3304 unsigned int i;
3305
3306 get_constraint_for_1 (t, results, true, true);
3307
3308 FOR_EACH_VEC_ELT (*results, i, c)
3309 {
3310 if (c->type == DEREF)
3311 c->type = SCALAR;
3312 else
3313 c->type = ADDRESSOF;
3314 }
3315 }
3316
3317 /* Given a tree T, return the constraint expression for it. */
3318
/* Worker for get_constraint_for and get_constraint_for_rhs.  Pushes the
   constraint expressions for T onto RESULTS.  ADDRESS_P is true when the
   caller takes the address of T (sub-field expansion is then not
   required).  LHS_P is false when T appears on the right-hand side of an
   assignment; reads through union type-punning are then handled
   conservatively (see get_constraint_for_component_ref).  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class of T.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      /* Constrain against the pointer adjusted by the constant
		 offset operand, then dereference.  */
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a scalar access to a decl split into sub-fields
		 additionally push every sub-field the access may
		 touch, up to the accessed size.  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A VIEW_CONVERT_EXPR does not change the pointed-to set;
	       just look through it.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect the union of the constraints of all elements.  */
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3501
3502 /* Given a gimple tree T, return the constraint expression vector for it. */
3503
static void
get_constraint_for (tree t, vec<ce_s> *results)
{
  /* The caller is expected to pass in an empty vector.  */
  gcc_assert (results->length () == 0);

  /* lhs_p == true: T is treated as a left-hand side access.  */
  get_constraint_for_1 (t, results, false, true);
}
3511
3512 /* Given a gimple tree T, return the constraint expression vector for it
3513 to be used as the rhs of a constraint. */
3514
static void
get_constraint_for_rhs (tree t, vec<ce_s> *results)
{
  /* The caller is expected to pass in an empty vector.  */
  gcc_assert (results->length () == 0);

  /* lhs_p == false: reads through union type-punning are handled
     conservatively (see get_constraint_for_component_ref).  */
  get_constraint_for_1 (t, results, false, false);
}
3522
3523
3524 /* Efficiently generates constraints from all entries in *RHSC to all
3525 entries in *LHSC. */
3526
3527 static void
3528 process_all_all_constraints (vec<ce_s> lhsc,
3529 vec<ce_s> rhsc)
3530 {
3531 struct constraint_expr *lhsp, *rhsp;
3532 unsigned i, j;
3533
3534 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3535 {
3536 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3537 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3538 process_constraint (new_constraint (*lhsp, *rhsp));
3539 }
3540 else
3541 {
3542 struct constraint_expr tmp;
3543 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3544 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3545 process_constraint (new_constraint (tmp, *rhsp));
3546 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3547 process_constraint (new_constraint (*lhsp, tmp));
3548 }
3549 }
3550
3551 /* Handle aggregate copies by expanding into copies of the respective
3552 fields of the structures. */
3553
static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is accessed indirectly (or the destination is
     completely unknown) we cannot match up individual fields - fall
     back to an all-to-all copy with unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      /* Walk the LHS sub-field constraints (index J) and the RHS
	 sub-field constraints (index K) in lock-step, emitting a copy
	 for each pair of overlapping fields.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the RHS cursor when its current field ends before
	     the current LHS field does; otherwise advance the LHS.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3622
3623 /* Create constraints ID = { rhsc }. */
3624
3625 static void
3626 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3627 {
3628 struct constraint_expr *c;
3629 struct constraint_expr includes;
3630 unsigned int j;
3631
3632 includes.var = id;
3633 includes.offset = 0;
3634 includes.type = SCALAR;
3635
3636 FOR_EACH_VEC_ELT (rhsc, j, c)
3637 process_constraint (new_constraint (includes, *c));
3638 }
3639
3640 /* Create a constraint ID = OP. */
3641
3642 static void
3643 make_constraint_to (unsigned id, tree op)
3644 {
3645 vec<ce_s> rhsc = vNULL;
3646 get_constraint_for_rhs (op, &rhsc);
3647 make_constraints_to (id, rhsc);
3648 rhsc.release ();
3649 }
3650
3651 /* Create a constraint ID = &FROM. */
3652
3653 static void
3654 make_constraint_from (varinfo_t vi, int from)
3655 {
3656 struct constraint_expr lhs, rhs;
3657
3658 lhs.var = vi->id;
3659 lhs.offset = 0;
3660 lhs.type = SCALAR;
3661
3662 rhs.var = from;
3663 rhs.offset = 0;
3664 rhs.type = ADDRESSOF;
3665 process_constraint (new_constraint (lhs, rhs));
3666 }
3667
3668 /* Create a constraint ID = FROM. */
3669
3670 static void
3671 make_copy_constraint (varinfo_t vi, int from)
3672 {
3673 struct constraint_expr lhs, rhs;
3674
3675 lhs.var = vi->id;
3676 lhs.offset = 0;
3677 lhs.type = SCALAR;
3678
3679 rhs.var = from;
3680 rhs.offset = 0;
3681 rhs.type = SCALAR;
3682 process_constraint (new_constraint (lhs, rhs));
3683 }
3684
3685 /* Make constraints necessary to make OP escape. */
3686
static void
make_escape_constraint (tree op)
{
  /* Whatever OP refers to flows into the ESCAPED solution.  */
  make_constraint_to (escaped_id, op);
}
3692
3693 /* Add constraints to that the solution of VI is transitively closed. */
3694
3695 static void
3696 make_transitive_closure_constraints (varinfo_t vi)
3697 {
3698 struct constraint_expr lhs, rhs;
3699
3700 /* VAR = *VAR; */
3701 lhs.type = SCALAR;
3702 lhs.var = vi->id;
3703 lhs.offset = 0;
3704 rhs.type = DEREF;
3705 rhs.var = vi->id;
3706 rhs.offset = 0;
3707 process_constraint (new_constraint (lhs, rhs));
3708
3709 /* VAR = VAR + UNKNOWN; */
3710 lhs.type = SCALAR;
3711 lhs.var = vi->id;
3712 lhs.offset = 0;
3713 rhs.type = SCALAR;
3714 rhs.var = vi->id;
3715 rhs.offset = UNKNOWN_OFFSET;
3716 process_constraint (new_constraint (lhs, rhs));
3717 }
3718
/* Temporary storage for fake var decls.  Holds the decls produced by
   build_fake_var_decl below.  */
struct obstack fake_var_decl_obstack;
3721
3722 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3723
static tree
build_fake_var_decl (tree type)
{
  /* Allocate the decl on the local obstack rather than GC memory.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  /* Give the decl a fresh UID so it can serve as a map key.  */
  DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): PT UID of -1 presumably marks the decl as having no
     original points-to UID - confirm against DECL_PT_UID users.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3736
3737 /* Create a new artificial heap variable with NAME.
3738 Return the created variable. */
3739
3740 static varinfo_t
3741 make_heapvar (const char *name)
3742 {
3743 varinfo_t vi;
3744 tree heapvar;
3745
3746 heapvar = build_fake_var_decl (ptr_type_node);
3747 DECL_EXTERNAL (heapvar) = 1;
3748
3749 vi = new_var_info (heapvar, name);
3750 vi->is_artificial_var = true;
3751 vi->is_heap_var = true;
3752 vi->is_unknown_size_var = true;
3753 vi->offset = 0;
3754 vi->fullsize = ~0;
3755 vi->size = ~0;
3756 vi->is_full_var = true;
3757 insert_vi_for_tree (heapvar, vi);
3758
3759 return vi;
3760 }
3761
3762 /* Create a new artificial heap variable with NAME and make a
3763 constraint from it to LHS. Set flags according to a tag used
3764 for tracking restrict pointers. */
3765
3766 static varinfo_t
3767 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3768 {
3769 varinfo_t vi = make_heapvar (name);
3770 vi->is_global_var = 1;
3771 vi->may_have_pointers = 1;
3772 make_constraint_from (lhs, vi->id);
3773 return vi;
3774 }
3775
3776 /* Create a new artificial heap variable with NAME and make a
3777 constraint from it to LHS. Set flags according to a tag used
3778 for tracking restrict pointers and make the artificial heap
3779 point to global memory. */
3780
3781 static varinfo_t
3782 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3783 {
3784 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3785 make_copy_constraint (vi, nonlocal_id);
3786 return vi;
3787 }
3788
3789 /* In IPA mode there are varinfos for different aspects of each
3790 function designator. One for the points-to set of the return
3791 value, one for the variables that are clobbered by the function,
3792 one for its uses and one for each parameter (including a single
3793 glob for remaining variadic arguments). */
3794
/* Offsets naming the parts of a function designator varinfo; used with
   get_function_part_constraint and first_vi_for_offset.  Per the
   description above, parameters follow at fi_parm_base and up.  */
enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3797
3798 /* Get a constraint for the requested part of a function designator FI
3799 when operating in IPA mode. */
3800
3801 static struct constraint_expr
3802 get_function_part_constraint (varinfo_t fi, unsigned part)
3803 {
3804 struct constraint_expr c;
3805
3806 gcc_assert (in_ipa_mode);
3807
3808 if (fi->id == anything_id)
3809 {
3810 /* ??? We probably should have a ANYFN special variable. */
3811 c.var = anything_id;
3812 c.offset = 0;
3813 c.type = SCALAR;
3814 }
3815 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3816 {
3817 varinfo_t ai = first_vi_for_offset (fi, part);
3818 if (ai)
3819 c.var = ai->id;
3820 else
3821 c.var = anything_id;
3822 c.offset = 0;
3823 c.type = SCALAR;
3824 }
3825 else
3826 {
3827 c.var = fi->id;
3828 c.offset = part;
3829 c.type = DEREF;
3830 }
3831
3832 return c;
3833 }
3834
3835 /* For non-IPA mode, generate constraints necessary for a call on the
3836 RHS. */
3837
static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  /* Process each argument according to its escape/clobber flags.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  /* Read-only, non-escaping argument: only the call-use
	     solution is affected.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      /* Indirect use - close the argument's solution
		 transitively before copying it to the uses.  */
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Non-escaping but possibly clobbered argument: it reaches
	     both the call-use and the call-clobber solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* No flags at all - the argument fully escapes.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3936
3937 /* For non-IPA mode, generate constraints necessary for a call
3938 that returns a pointer and assigns it to LHS. This simply makes
3939 the LHS point to global and escaped variables. */
3940
static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  Note the incoming FLAGS parameter is unconditionally
     overwritten here.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias (malloc-like) call returns fresh heap memory -
	 model that with a new heap variable.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
	 it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    /* Otherwise the LHS simply receives the caller-provided RHS
       constraints (global and escaped memory).  */
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
4003
4004 /* For non-IPA mode, generate constraints necessary for a call of a
4005 const function that returns a pointer in the statement STMT. */
4006
static void
handle_const_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned int k;

  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
  if (gimple_call_chain (stmt))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_transitive_closure_constraints (uses);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      /* The return value may then also alias the call-use solution.  */
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* May return arguments.  */
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
      vec<ce_s> argc = vNULL;
      unsigned i;
      struct constraint_expr *argp;
      /* Each argument's constraints become possible results.  */
      get_constraint_for_rhs (arg, &argc);
      FOR_EACH_VEC_ELT (argc, i, argp)
	results->safe_push (*argp);
      argc.release ();
    }

  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  results->safe_push (rhsc);
}
4045
4046 /* For non-IPA mode, generate constraints necessary for a call to a
4047 pure function in statement STMT. */
4048
static void
handle_pure_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  varinfo_t uses = NULL;

  /* Memory reached from pointer arguments is call-used.  USES is
     created lazily so calls without arguments or a static chain do
     not get a call-use varinfo.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, arg);
    }

  /* The static chain is used as well.  */
  if (gimple_call_chain (stmt))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
    }

  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
    {
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4092
4093
4094 /* Return the varinfo for the callee of CALL. */
4095
static varinfo_t
get_fi_for_callee (gimple call)
{
  tree decl, fn = gimple_call_fn (call);

  /* Look through OBJ_TYPE_REF wrappers to the underlying expression.  */
  if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
    fn = OBJ_TYPE_REF_EXPR (fn);

  /* If we can directly resolve the function being called, do so.
     Otherwise, it must be some sort of indirect expression that
     we should still be able to handle.  */
  decl = gimple_call_addr_fndecl (fn);
  if (decl)
    return get_vi_for_tree (decl);

  /* If the function is anything other than a SSA name pointer we have no
     clue and should be getting ANYFN (well, ANYTHING for now).  */
  if (!fn || TREE_CODE (fn) != SSA_NAME)
    return get_varinfo (anything_id);

  /* For default definitions of PARM_DECLs and RESULT_DECLs use the
     underlying decl as the designator instead of the SSA name.  */
  if (SSA_NAME_IS_DEFAULT_DEF (fn)
      && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
    fn = SSA_NAME_VAR (fn);

  return get_vi_for_tree (fn);
}
4123
4124 /* Create constraints for the builtin call T. Return true if the call
4125 was handled, otherwise false. */
4126
4127 static bool
4128 find_func_aliases_for_builtin_call (gimple t)
4129 {
4130 tree fndecl = gimple_call_fndecl (t);
4131 vec<ce_s> lhsc = vNULL;
4132 vec<ce_s> rhsc = vNULL;
4133 varinfo_t fi;
4134
4135 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4136 /* ??? All builtins that are handled here need to be handled
4137 in the alias-oracle query functions explicitly! */
4138 switch (DECL_FUNCTION_CODE (fndecl))
4139 {
4140 /* All the following functions return a pointer to the same object
4141 as their first argument points to. The functions do not add
4142 to the ESCAPED solution. The functions make the first argument
4143 pointed to memory point to what the second argument pointed to
4144 memory points to. */
4145 case BUILT_IN_STRCPY:
4146 case BUILT_IN_STRNCPY:
4147 case BUILT_IN_BCOPY:
4148 case BUILT_IN_MEMCPY:
4149 case BUILT_IN_MEMMOVE:
4150 case BUILT_IN_MEMPCPY:
4151 case BUILT_IN_STPCPY:
4152 case BUILT_IN_STPNCPY:
4153 case BUILT_IN_STRCAT:
4154 case BUILT_IN_STRNCAT:
4155 case BUILT_IN_STRCPY_CHK:
4156 case BUILT_IN_STRNCPY_CHK:
4157 case BUILT_IN_MEMCPY_CHK:
4158 case BUILT_IN_MEMMOVE_CHK:
4159 case BUILT_IN_MEMPCPY_CHK:
4160 case BUILT_IN_STPCPY_CHK:
4161 case BUILT_IN_STPNCPY_CHK:
4162 case BUILT_IN_STRCAT_CHK:
4163 case BUILT_IN_STRNCAT_CHK:
4164 case BUILT_IN_TM_MEMCPY:
4165 case BUILT_IN_TM_MEMMOVE:
4166 {
4167 tree res = gimple_call_lhs (t);
4168 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4169 == BUILT_IN_BCOPY ? 1 : 0));
4170 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4171 == BUILT_IN_BCOPY ? 0 : 1));
4172 if (res != NULL_TREE)
4173 {
4174 get_constraint_for (res, &lhsc);
4175 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4176 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4177 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4178 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4179 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4180 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4181 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4182 else
4183 get_constraint_for (dest, &rhsc);
4184 process_all_all_constraints (lhsc, rhsc);
4185 lhsc.release ();
4186 rhsc.release ();
4187 }
4188 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4189 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4190 do_deref (&lhsc);
4191 do_deref (&rhsc);
4192 process_all_all_constraints (lhsc, rhsc);
4193 lhsc.release ();
4194 rhsc.release ();
4195 return true;
4196 }
4197 case BUILT_IN_MEMSET:
4198 case BUILT_IN_MEMSET_CHK:
4199 case BUILT_IN_TM_MEMSET:
4200 {
4201 tree res = gimple_call_lhs (t);
4202 tree dest = gimple_call_arg (t, 0);
4203 unsigned i;
4204 ce_s *lhsp;
4205 struct constraint_expr ac;
4206 if (res != NULL_TREE)
4207 {
4208 get_constraint_for (res, &lhsc);
4209 get_constraint_for (dest, &rhsc);
4210 process_all_all_constraints (lhsc, rhsc);
4211 lhsc.release ();
4212 rhsc.release ();
4213 }
4214 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4215 do_deref (&lhsc);
4216 if (flag_delete_null_pointer_checks
4217 && integer_zerop (gimple_call_arg (t, 1)))
4218 {
4219 ac.type = ADDRESSOF;
4220 ac.var = nothing_id;
4221 }
4222 else
4223 {
4224 ac.type = SCALAR;
4225 ac.var = integer_id;
4226 }
4227 ac.offset = 0;
4228 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4229 process_constraint (new_constraint (*lhsp, ac));
4230 lhsc.release ();
4231 return true;
4232 }
4233 case BUILT_IN_ASSUME_ALIGNED:
4234 {
4235 tree res = gimple_call_lhs (t);
4236 tree dest = gimple_call_arg (t, 0);
4237 if (res != NULL_TREE)
4238 {
4239 get_constraint_for (res, &lhsc);
4240 get_constraint_for (dest, &rhsc);
4241 process_all_all_constraints (lhsc, rhsc);
4242 lhsc.release ();
4243 rhsc.release ();
4244 }
4245 return true;
4246 }
4247 /* All the following functions do not return pointers, do not
4248 modify the points-to sets of memory reachable from their
4249 arguments and do not add to the ESCAPED solution. */
4250 case BUILT_IN_SINCOS:
4251 case BUILT_IN_SINCOSF:
4252 case BUILT_IN_SINCOSL:
4253 case BUILT_IN_FREXP:
4254 case BUILT_IN_FREXPF:
4255 case BUILT_IN_FREXPL:
4256 case BUILT_IN_GAMMA_R:
4257 case BUILT_IN_GAMMAF_R:
4258 case BUILT_IN_GAMMAL_R:
4259 case BUILT_IN_LGAMMA_R:
4260 case BUILT_IN_LGAMMAF_R:
4261 case BUILT_IN_LGAMMAL_R:
4262 case BUILT_IN_MODF:
4263 case BUILT_IN_MODFF:
4264 case BUILT_IN_MODFL:
4265 case BUILT_IN_REMQUO:
4266 case BUILT_IN_REMQUOF:
4267 case BUILT_IN_REMQUOL:
4268 case BUILT_IN_FREE:
4269 return true;
4270 case BUILT_IN_STRDUP:
4271 case BUILT_IN_STRNDUP:
4272 if (gimple_call_lhs (t))
4273 {
4274 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4275 vNULL, fndecl);
4276 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4277 NULL_TREE, &lhsc);
4278 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4279 NULL_TREE, &rhsc);
4280 do_deref (&lhsc);
4281 do_deref (&rhsc);
4282 process_all_all_constraints (lhsc, rhsc);
4283 lhsc.release ();
4284 rhsc.release ();
4285 return true;
4286 }
4287 break;
4288 /* String / character search functions return a pointer into the
4289 source string or NULL. */
4290 case BUILT_IN_INDEX:
4291 case BUILT_IN_STRCHR:
4292 case BUILT_IN_STRRCHR:
4293 case BUILT_IN_MEMCHR:
4294 case BUILT_IN_STRSTR:
4295 case BUILT_IN_STRPBRK:
4296 if (gimple_call_lhs (t))
4297 {
4298 tree src = gimple_call_arg (t, 0);
4299 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4300 constraint_expr nul;
4301 nul.var = nothing_id;
4302 nul.offset = 0;
4303 nul.type = ADDRESSOF;
4304 rhsc.safe_push (nul);
4305 get_constraint_for (gimple_call_lhs (t), &lhsc);
4306 process_all_all_constraints (lhsc, rhsc);
4307 lhsc.release ();
4308 rhsc.release ();
4309 }
4310 return true;
4311 /* Trampolines are special - they set up passing the static
4312 frame. */
4313 case BUILT_IN_INIT_TRAMPOLINE:
4314 {
4315 tree tramp = gimple_call_arg (t, 0);
4316 tree nfunc = gimple_call_arg (t, 1);
4317 tree frame = gimple_call_arg (t, 2);
4318 unsigned i;
4319 struct constraint_expr lhs, *rhsp;
4320 if (in_ipa_mode)
4321 {
4322 varinfo_t nfi = NULL;
4323 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4324 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4325 if (nfi)
4326 {
4327 lhs = get_function_part_constraint (nfi, fi_static_chain);
4328 get_constraint_for (frame, &rhsc);
4329 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4330 process_constraint (new_constraint (lhs, *rhsp));
4331 rhsc.release ();
4332
4333 /* Make the frame point to the function for
4334 the trampoline adjustment call. */
4335 get_constraint_for (tramp, &lhsc);
4336 do_deref (&lhsc);
4337 get_constraint_for (nfunc, &rhsc);
4338 process_all_all_constraints (lhsc, rhsc);
4339 rhsc.release ();
4340 lhsc.release ();
4341
4342 return true;
4343 }
4344 }
4345 /* Else fallthru to generic handling which will let
4346 the frame escape. */
4347 break;
4348 }
4349 case BUILT_IN_ADJUST_TRAMPOLINE:
4350 {
4351 tree tramp = gimple_call_arg (t, 0);
4352 tree res = gimple_call_lhs (t);
4353 if (in_ipa_mode && res)
4354 {
4355 get_constraint_for (res, &lhsc);
4356 get_constraint_for (tramp, &rhsc);
4357 do_deref (&rhsc);
4358 process_all_all_constraints (lhsc, rhsc);
4359 rhsc.release ();
4360 lhsc.release ();
4361 }
4362 return true;
4363 }
4364 CASE_BUILT_IN_TM_STORE (1):
4365 CASE_BUILT_IN_TM_STORE (2):
4366 CASE_BUILT_IN_TM_STORE (4):
4367 CASE_BUILT_IN_TM_STORE (8):
4368 CASE_BUILT_IN_TM_STORE (FLOAT):
4369 CASE_BUILT_IN_TM_STORE (DOUBLE):
4370 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4371 CASE_BUILT_IN_TM_STORE (M64):
4372 CASE_BUILT_IN_TM_STORE (M128):
4373 CASE_BUILT_IN_TM_STORE (M256):
4374 {
4375 tree addr = gimple_call_arg (t, 0);
4376 tree src = gimple_call_arg (t, 1);
4377
4378 get_constraint_for (addr, &lhsc);
4379 do_deref (&lhsc);
4380 get_constraint_for (src, &rhsc);
4381 process_all_all_constraints (lhsc, rhsc);
4382 lhsc.release ();
4383 rhsc.release ();
4384 return true;
4385 }
4386 CASE_BUILT_IN_TM_LOAD (1):
4387 CASE_BUILT_IN_TM_LOAD (2):
4388 CASE_BUILT_IN_TM_LOAD (4):
4389 CASE_BUILT_IN_TM_LOAD (8):
4390 CASE_BUILT_IN_TM_LOAD (FLOAT):
4391 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4392 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4393 CASE_BUILT_IN_TM_LOAD (M64):
4394 CASE_BUILT_IN_TM_LOAD (M128):
4395 CASE_BUILT_IN_TM_LOAD (M256):
4396 {
4397 tree dest = gimple_call_lhs (t);
4398 tree addr = gimple_call_arg (t, 0);
4399
4400 get_constraint_for (dest, &lhsc);
4401 get_constraint_for (addr, &rhsc);
4402 do_deref (&rhsc);
4403 process_all_all_constraints (lhsc, rhsc);
4404 lhsc.release ();
4405 rhsc.release ();
4406 return true;
4407 }
4408 /* Variadic argument handling needs to be handled in IPA
4409 mode as well. */
4410 case BUILT_IN_VA_START:
4411 {
4412 tree valist = gimple_call_arg (t, 0);
4413 struct constraint_expr rhs, *lhsp;
4414 unsigned i;
4415 get_constraint_for (valist, &lhsc);
4416 do_deref (&lhsc);
4417 /* The va_list gets access to pointers in variadic
4418 arguments. Which we know in the case of IPA analysis
4419 and otherwise are just all nonlocal variables. */
4420 if (in_ipa_mode)
4421 {
4422 fi = lookup_vi_for_tree (cfun->decl);
4423 rhs = get_function_part_constraint (fi, ~0);
4424 rhs.type = ADDRESSOF;
4425 }
4426 else
4427 {
4428 rhs.var = nonlocal_id;
4429 rhs.type = ADDRESSOF;
4430 rhs.offset = 0;
4431 }
4432 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4433 process_constraint (new_constraint (*lhsp, rhs));
4434 lhsc.release ();
4435 /* va_list is clobbered. */
4436 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4437 return true;
4438 }
4439 /* va_end doesn't have any effect that matters. */
4440 case BUILT_IN_VA_END:
4441 return true;
4442 /* Alternate return. Simply give up for now. */
4443 case BUILT_IN_RETURN:
4444 {
4445 fi = NULL;
4446 if (!in_ipa_mode
4447 || !(fi = get_vi_for_tree (cfun->decl)))
4448 make_constraint_from (get_varinfo (escaped_id), anything_id);
4449 else if (in_ipa_mode
4450 && fi != NULL)
4451 {
4452 struct constraint_expr lhs, rhs;
4453 lhs = get_function_part_constraint (fi, fi_result);
4454 rhs.var = anything_id;
4455 rhs.offset = 0;
4456 rhs.type = SCALAR;
4457 process_constraint (new_constraint (lhs, rhs));
4458 }
4459 return true;
4460 }
4461 /* printf-style functions may have hooks to set pointers to
4462 point to somewhere into the generated string. Leave them
4463 for a later exercise... */
4464 default:
4465 /* Fallthru to general call handling. */;
4466 }
4467
4468 return false;
4469 }
4470
4471 /* Create constraints for the call T. */
4472
4473 static void
4474 find_func_aliases_for_call (gimple t)
4475 {
4476 tree fndecl = gimple_call_fndecl (t);
4477 vec<ce_s> lhsc = vNULL;
4478 vec<ce_s> rhsc = vNULL;
4479 varinfo_t fi;
4480
4481 if (fndecl != NULL_TREE
4482 && DECL_BUILT_IN (fndecl)
4483 && find_func_aliases_for_builtin_call (t))
4484 return;
4485
4486 fi = get_fi_for_callee (t);
4487 if (!in_ipa_mode
4488 || (fndecl && !fi->is_fn_info))
4489 {
4490 vec<ce_s> rhsc = vNULL;
4491 int flags = gimple_call_flags (t);
4492
4493 /* Const functions can return their arguments and addresses
4494 of global memory but not of escaped memory. */
4495 if (flags & (ECF_CONST|ECF_NOVOPS))
4496 {
4497 if (gimple_call_lhs (t))
4498 handle_const_call (t, &rhsc);
4499 }
4500 /* Pure functions can return addresses in and of memory
4501 reachable from their arguments, but they are not an escape
4502 point for reachable memory of their arguments. */
4503 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4504 handle_pure_call (t, &rhsc);
4505 else
4506 handle_rhs_call (t, &rhsc);
4507 if (gimple_call_lhs (t))
4508 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4509 rhsc.release ();
4510 }
4511 else
4512 {
4513 tree lhsop;
4514 unsigned j;
4515
4516 /* Assign all the passed arguments to the appropriate incoming
4517 parameters of the function. */
4518 for (j = 0; j < gimple_call_num_args (t); j++)
4519 {
4520 struct constraint_expr lhs ;
4521 struct constraint_expr *rhsp;
4522 tree arg = gimple_call_arg (t, j);
4523
4524 get_constraint_for_rhs (arg, &rhsc);
4525 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4526 while (rhsc.length () != 0)
4527 {
4528 rhsp = &rhsc.last ();
4529 process_constraint (new_constraint (lhs, *rhsp));
4530 rhsc.pop ();
4531 }
4532 }
4533
4534 /* If we are returning a value, assign it to the result. */
4535 lhsop = gimple_call_lhs (t);
4536 if (lhsop)
4537 {
4538 struct constraint_expr rhs;
4539 struct constraint_expr *lhsp;
4540
4541 get_constraint_for (lhsop, &lhsc);
4542 rhs = get_function_part_constraint (fi, fi_result);
4543 if (fndecl
4544 && DECL_RESULT (fndecl)
4545 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4546 {
4547 vec<ce_s> tem = vNULL;
4548 tem.safe_push (rhs);
4549 do_deref (&tem);
4550 rhs = tem[0];
4551 tem.release ();
4552 }
4553 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4554 process_constraint (new_constraint (*lhsp, rhs));
4555 }
4556
4557 /* If we pass the result decl by reference, honor that. */
4558 if (lhsop
4559 && fndecl
4560 && DECL_RESULT (fndecl)
4561 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4562 {
4563 struct constraint_expr lhs;
4564 struct constraint_expr *rhsp;
4565
4566 get_constraint_for_address_of (lhsop, &rhsc);
4567 lhs = get_function_part_constraint (fi, fi_result);
4568 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4569 process_constraint (new_constraint (lhs, *rhsp));
4570 rhsc.release ();
4571 }
4572
4573 /* If we use a static chain, pass it along. */
4574 if (gimple_call_chain (t))
4575 {
4576 struct constraint_expr lhs;
4577 struct constraint_expr *rhsp;
4578
4579 get_constraint_for (gimple_call_chain (t), &rhsc);
4580 lhs = get_function_part_constraint (fi, fi_static_chain);
4581 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4582 process_constraint (new_constraint (lhs, *rhsp));
4583 }
4584 }
4585 }
4586
4587 /* Walk statement T setting up aliasing constraints according to the
4588 references found in T. This function is the main part of the
4589 constraint builder. AI points to auxiliary alias information used
4590 when building alias sets and computing alias grouping heuristics. */
4591
static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  Dispatch on the statement
     kind: PHI, call, assignment, return or asm.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  /* NOTE(review): STRIPPEDRHS is computed and stripped but
	     never used afterwards; the constraint below is built from
	     the unstripped argument.  Looks like dead code.  */
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      /* Pop RHSC empty so the next PHI argument starts from a
		 clean vector.  */
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* RHSOP is the single RHS operand, or NULL for a multi-operand
	 RHS (binary/ternary operations).  */
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
	    /* If the operation produces a floating point result then
	       assume the value is not produced to transfer a pointer.  */
	    ;
	  else if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Conversions that do not create a pointer from a
	       non-pointer, and plain copies, just transfer the
	       RHS solution.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* NOTE(review): the else-if condition is redundant — when
	     the first branch is not taken, in_ipa_mode and FI are
	     already known non-zero.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4808
4809
4810 /* Create a constraint adding to the clobber set of FI the memory
4811 pointed to by PTR. */
4812
4813 static void
4814 process_ipa_clobber (varinfo_t fi, tree ptr)
4815 {
4816 vec<ce_s> ptrc = vNULL;
4817 struct constraint_expr *c, lhs;
4818 unsigned i;
4819 get_constraint_for_rhs (ptr, &ptrc);
4820 lhs = get_function_part_constraint (fi, fi_clobbers);
4821 FOR_EACH_VEC_ELT (ptrc, i, c)
4822 process_constraint (new_constraint (lhs, *c));
4823 ptrc.release ();
4824 }
4825
4826 /* Walk statement T setting up clobber and use constraints according to the
4827 references found in T. This function is a main part of the
4828 IPA constraint builder. */
4829
static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip component references to reach the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Stores to function-local automatics are invisible to callers;
	 only record non-automatic decls and (possibly indirect)
	 stores through pointers.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  /* NOTE(review): this local LHSC shadows the function-scope
	     vec<ce_s> LHSC declared above.  */
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assignments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* As for stores above, uses of function-local automatics are
	 not relevant to callers.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* BCOPY takes (src, dest), the memcpy-style builtins
		 take (dest, src); pick operands accordingly.  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      /* What DEST points to is clobbered ...  */
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      /* ... and what SRC points to is used.  */
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants are not memory references.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: conservatively everything may be
	     clobbered and used.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ???  This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ???  Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
5103
5104
5105 /* Find the first varinfo in the same variable as START that overlaps with
5106 OFFSET. Return NULL if we can't find one. */
5107
5108 static varinfo_t
5109 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5110 {
5111 /* If the offset is outside of the variable, bail out. */
5112 if (offset >= start->fullsize)
5113 return NULL;
5114
5115 /* If we cannot reach offset from start, lookup the first field
5116 and start from there. */
5117 if (start->offset > offset)
5118 start = get_varinfo (start->head);
5119
5120 while (start)
5121 {
5122 /* We may not find a variable in the field list with the actual
5123 offset when when we have glommed a structure to a variable.
5124 In that case, however, offset should still be within the size
5125 of the variable. */
5126 if (offset >= start->offset
5127 && (offset - start->offset) < start->size)
5128 return start;
5129
5130 start = vi_next (start);
5131 }
5132
5133 return NULL;
5134 }
5135
5136 /* Find the first varinfo in the same variable as START that overlaps with
5137 OFFSET. If there is no such varinfo the varinfo directly preceding
5138 OFFSET is returned. */
5139
5140 static varinfo_t
5141 first_or_preceding_vi_for_offset (varinfo_t start,
5142 unsigned HOST_WIDE_INT offset)
5143 {
5144 /* If we cannot reach offset from start, lookup the first field
5145 and start from there. */
5146 if (start->offset > offset)
5147 start = get_varinfo (start->head);
5148
5149 /* We may not find a variable in the field list with the actual
5150 offset when when we have glommed a structure to a variable.
5151 In that case, however, offset should still be within the size
5152 of the variable.
5153 If we got beyond the offset we look for return the field
5154 directly preceding offset which may be the last field. */
5155 while (start->next
5156 && offset >= start->offset
5157 && !((offset - start->offset) < start->size))
5158 start = vi_next (start);
5159
5160 return start;
5161 }
5162
5163
5164 /* This structure is used during pushing fields onto the fieldstack
5165 to track the offset of the field, since bitpos_of_field gives it
5166 relative to its immediate containing type, and we want it relative
5167 to the ultimate containing object. */
5168
struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  -1 (all ones) if the size is not a
     compile-time constant.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field's size is not a known compile-time constant.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers.  */
  unsigned must_have_pointers : 1;

  /* True if the field possibly contains pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a pointer type with TYPE_RESTRICT set.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5186
5187
5188 /* qsort comparison function for two fieldoff's PA and PB */
5189
5190 static int
5191 fieldoff_compare (const void *pa, const void *pb)
5192 {
5193 const fieldoff_s *foa = (const fieldoff_s *)pa;
5194 const fieldoff_s *fob = (const fieldoff_s *)pb;
5195 unsigned HOST_WIDE_INT foasize, fobsize;
5196
5197 if (foa->offset < fob->offset)
5198 return -1;
5199 else if (foa->offset > fob->offset)
5200 return 1;
5201
5202 foasize = foa->size;
5203 fobsize = fob->size;
5204 if (foasize < fobsize)
5205 return -1;
5206 else if (foasize > fobsize)
5207 return 1;
5208 return 0;
5209 }
5210
5211 /* Sort a fieldstack according to the field offset and sizes. */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  /* Order by ascending offset, then ascending size, as implemented
     by fieldoff_compare.  */
  fieldstack.qsort (fieldoff_compare);
}
5217
5218 /* Return true if T is a type that can have subvars. */
5219
5220 static inline bool
5221 type_can_have_subvars (const_tree t)
5222 {
5223 /* Aggregates without overlapping fields can have subvars. */
5224 return TREE_CODE (t) == RECORD_TYPE;
5225 }
5226
5227 /* Return true if V is a tree that we can have subvars for.
5228 Normally, this is any aggregate type. Also complex
5229 types which are not gimple registers can have subvars. */
5230
5231 static inline bool
5232 var_can_have_subvars (const_tree v)
5233 {
5234 /* Volatile variables should never have subvars. */
5235 if (TREE_THIS_VOLATILE (v))
5236 return false;
5237
5238 /* Non decls or memory tags can never have subvars. */
5239 if (!DECL_P (v))
5240 return false;
5241
5242 return type_can_have_subvars (TREE_TYPE (v));
5243 }
5244
5245 /* Return true if T is a type that does contain pointers. */
5246
5247 static bool
5248 type_must_have_pointers (tree type)
5249 {
5250 if (POINTER_TYPE_P (type))
5251 return true;
5252
5253 if (TREE_CODE (type) == ARRAY_TYPE)
5254 return type_must_have_pointers (TREE_TYPE (type));
5255
5256 /* A function or method can have pointers as arguments, so track
5257 those separately. */
5258 if (TREE_CODE (type) == FUNCTION_TYPE
5259 || TREE_CODE (type) == METHOD_TYPE)
5260 return true;
5261
5262 return false;
5263 }
5264
static bool
field_must_have_pointers (tree t)
{
  /* A FIELD_DECL contains pointers iff its type does.  */
  return type_must_have_pointers (TREE_TYPE (t));
}
5270
5271 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5272 the fields of TYPE onto fieldstack, recording their offsets along
5273 the way.
5274
5275 OFFSET is used to keep track of the offset in this entire
5276 structure, rather than just the immediately containing structure.
5277 Returns false if the caller is supposed to handle the field we
5278 recursed for. */
5279
static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields that cannot have subvars (and unions, whose members
	   overlap) are pushed as a single entry; otherwise recurse
	   into the field's record type.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		/* Positional initializer order is { offset, size,
		   bitflags... }: this is a pointer-free pad entry at
		   offset 0 of size OFFSET + FOFF.  */
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Extend the previous entry over this field.  */
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5371
5372 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5373 if it is a varargs function. */
5374
5375 static unsigned int
5376 count_num_arguments (tree decl, bool *is_varargs)
5377 {
5378 unsigned int num = 0;
5379 tree t;
5380
5381 /* Capture named arguments for K&R functions. They do not
5382 have a prototype and thus no TYPE_ARG_TYPES. */
5383 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5384 ++num;
5385
5386 /* Check if the function has variadic arguments. */
5387 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5388 if (TREE_VALUE (t) == void_type_node)
5389 break;
5390 if (!t)
5391 *is_varargs = true;
5392
5393 return num;
5394 }
5395
/* Create variable info for the function DECL, using NAME, and return
   the varinfo created for it.  A function is modeled as a sequence of
   sub-variables chained by increasing offset: the function info
   itself, clobbers, uses, optionally a static chain, optionally a
   result, one variable per named parameter, and for varargs functions
   a single representative for all further arguments.  */

static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  /* Sub-variables are addressed as offsets from the function info,
     so fullsize must cover all parameter slots.  */
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* For varargs the number of slots is unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      /* The sub-variable chain must stay sorted by offset.  */
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      asprintf (&tempname, "%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      /* Prefer the RESULT_DECL if the body provides one; otherwise
	 fall back to DECL itself as an anchor.  */
      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG can run out before NUM_ARGS for functions without
	 argument decls; anchor those slots at DECL.  */
      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5573
5574
5575 /* Return true if FIELDSTACK contains fields that overlap.
5576 FIELDSTACK is assumed to be sorted by offset. */
5577
5578 static bool
5579 check_for_overlaps (vec<fieldoff_s> fieldstack)
5580 {
5581 fieldoff_s *fo = NULL;
5582 unsigned int i;
5583 HOST_WIDE_INT lastoffset = -1;
5584
5585 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5586 {
5587 if (fo->offset == lastoffset)
5588 return true;
5589 lastoffset = fo->offset;
5590 }
5591 return false;
5592 }
5593
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL, chained off the main varinfo when field-sensitivity
   applies.  Returns the head varinfo.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single
     conservative varinfo.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Fields with unknown size or negative offset defeat
	 field-sensitivity.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, all sharing the decl's
     full size and chained via next/head.  */
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = "NULL";
      char *tempname;

      /* Only build the "name.offset+size" string when it will be
	 dumped, the allocation is otherwise wasted.  */
      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* Pre-create the next field's varinfo so the chain link can be
	 recorded before the loop advances to it.  */
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  fieldstack.release ();

  return vi;
}
5711
/* Create varinfo(s) for DECL with NAME, register them in the
   tree-to-varinfo map and generate initial constraints for global
   variables.  Returns the id of the head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only variables get initial constraints below.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->definition)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5785
5786 /* Print out the points-to solution for VAR to FILE. */
5787
5788 static void
5789 dump_solution_for_var (FILE *file, unsigned int var)
5790 {
5791 varinfo_t vi = get_varinfo (var);
5792 unsigned int i;
5793 bitmap_iterator bi;
5794
5795 /* Dump the solution for unified vars anyway, this avoids difficulties
5796 in scanning dumps in the testsuite. */
5797 fprintf (file, "%s = { ", vi->name);
5798 vi = get_varinfo (find (var));
5799 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5800 fprintf (file, "%s ", get_varinfo (i)->name);
5801 fprintf (file, "}");
5802
5803 /* But note when the variable was unified. */
5804 if (vi->id != var)
5805 fprintf (file, " same as %s", vi->name);
5806
5807 fprintf (file, "\n");
5808 }
5809
/* Print the points-to solution for VAR to stdout.  Convenience
   wrapper around dump_solution_for_var for use from the debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
5817
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  /* The fake decl stands in for the object the restrict
	     reference points to; restrict guarantees it is not
	     aliased by anything else in this function.  */
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Constrain the parameter to point to the fake object:
	     P = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* The fake object's own pointer contents are unknown, so
	     initialize them from nonlocal (or the restrict tag).  */
	  for (; vi; vi = vi_next (vi))
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  /* Ordinary arguments may point to anything nonlocal.  */
	  for (; p; p = vi_next (p))
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
5899
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The canonical points-to bitmap shared by equal solutions.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS, computed once at insertion.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5909
/* Shared_bitmap hashtable helpers: hash on the cached hashcode,
   compare by bitmap equality.  Entries are freed with free ().  */

struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
{
  typedef shared_bitmap_info value_type;
  typedef shared_bitmap_info compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
5919
/* Hash function for a shared_bitmap_info_t.  Returns the hash value
   cached when the entry was created, avoiding a rehash of the bitmap.  */

inline hashval_t
shared_bitmap_hasher::hash (const value_type *bi)
{
  return bi->hashcode;
}
5927
/* Equality function for two shared_bitmap_info_t's.  The hash codes
   pre-filter most mismatches; the bitmaps themselves decide.  */

inline bool
shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
5935
/* Shared_bitmap hashtable, mapping points-to bitmaps to a canonical
   instance so equal solutions share a single bitmap.  */

static hash_table <shared_bitmap_hasher> shared_bitmap_table;
5939
5940 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5941 existing instance if there is one, NULL otherwise. */
5942
5943 static bitmap
5944 shared_bitmap_lookup (bitmap pt_vars)
5945 {
5946 shared_bitmap_info **slot;
5947 struct shared_bitmap_info sbi;
5948
5949 sbi.pt_vars = pt_vars;
5950 sbi.hashcode = bitmap_hash (pt_vars);
5951
5952 slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
5953 NO_INSERT);
5954 if (!slot)
5955 return NULL;
5956 else
5957 return (*slot)->pt_vars;
5958 }
5959
5960
5961 /* Add a bitmap to the shared bitmap hashtable. */
5962
5963 static void
5964 shared_bitmap_add (bitmap pt_vars)
5965 {
5966 shared_bitmap_info **slot;
5967 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5968
5969 sbi->pt_vars = pt_vars;
5970 sbi->hashcode = bitmap_hash (pt_vars);
5971
5972 slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
5973 gcc_assert (!*slot);
5974 *slot = sbi;
5975 }
5976
5977
5978 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5979
5980 static void
5981 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5982 {
5983 unsigned int i;
5984 bitmap_iterator bi;
5985
5986 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5987 {
5988 varinfo_t vi = get_varinfo (i);
5989
5990 /* The only artificial variables that are allowed in a may-alias
5991 set are heap variables. */
5992 if (vi->is_artificial_var && !vi->is_heap_var)
5993 continue;
5994
5995 if (TREE_CODE (vi->decl) == VAR_DECL
5996 || TREE_CODE (vi->decl) == PARM_DECL
5997 || TREE_CODE (vi->decl) == RESULT_DECL)
5998 {
5999 /* If we are in IPA mode we will not recompute points-to
6000 sets after inlining so make sure they stay valid. */
6001 if (in_ipa_mode
6002 && !DECL_PT_UID_SET_P (vi->decl))
6003 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6004
6005 /* Add the decl to the points-to set. Note that the points-to
6006 set contains global variables. */
6007 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6008 if (vi->is_global_var)
6009 pt->vars_contains_global = true;
6010 }
6011 }
6012 }
6013
6014
/* Compute and return the points-to solution for the variable ORIG_VI.
   Results are cached per representative variable in FINAL_SOLUTIONS.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      /* The ESCAPED solution differs between local and IPA
		 analysis; record the appropriate flag.  */
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First solution with this set of variables; make it the
	 canonical shared bitmap.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the existing shared bitmap and recycle ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6095
6096 /* Given a pointer variable P, fill in its points-to set. */
6097
6098 static void
6099 find_what_p_points_to (tree p)
6100 {
6101 struct ptr_info_def *pi;
6102 tree lookup_p = p;
6103 varinfo_t vi;
6104
6105 /* For parameters, get at the points-to set for the actual parm
6106 decl. */
6107 if (TREE_CODE (p) == SSA_NAME
6108 && SSA_NAME_IS_DEFAULT_DEF (p)
6109 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6110 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6111 lookup_p = SSA_NAME_VAR (p);
6112
6113 vi = lookup_vi_for_tree (lookup_p);
6114 if (!vi)
6115 return;
6116
6117 pi = get_ptr_info (p);
6118 pi->pt = find_what_var_points_to (vi);
6119 }
6120
6121
/* Query statistics for points-to solutions, maintained by the public
   query wrappers and reported by dump_pta_stats.  */

static struct {
  /* pt_solution_includes queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* pt_solution_includes queries answered "no alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* pt_solutions_intersect queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* pt_solutions_intersect queries answered "no alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6130
6131 void
6132 dump_pta_stats (FILE *s)
6133 {
6134 fprintf (s, "\nPTA query stats:\n");
6135 fprintf (s, " pt_solution_includes: "
6136 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6137 HOST_WIDE_INT_PRINT_DEC" queries\n",
6138 pta_stats.pt_solution_includes_no_alias,
6139 pta_stats.pt_solution_includes_no_alias
6140 + pta_stats.pt_solution_includes_may_alias);
6141 fprintf (s, " pt_solutions_intersect: "
6142 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6143 HOST_WIDE_INT_PRINT_DEC" queries\n",
6144 pta_stats.pt_solutions_intersect_no_alias,
6145 pta_stats.pt_solutions_intersect_no_alias
6146 + pta_stats.pt_solutions_intersect_may_alias);
6147 }
6148
6149
6150 /* Reset the points-to solution *PT to a conservative default
6151 (point to anything). */
6152
6153 void
6154 pt_solution_reset (struct pt_solution *pt)
6155 {
6156 memset (pt, 0, sizeof (struct pt_solution));
6157 pt->anything = true;
6158 }
6159
6160 /* Set the points-to solution *PT to point only to the variables
6161 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6162 global variables and VARS_CONTAINS_RESTRICT specifies whether
6163 it contains restrict tag variables. */
6164
6165 void
6166 pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
6167 {
6168 memset (pt, 0, sizeof (struct pt_solution));
6169 pt->vars = vars;
6170 pt->vars_contains_global = vars_contains_global;
6171 }
6172
6173 /* Set the points-to solution *PT to point only to the variable VAR. */
6174
6175 void
6176 pt_solution_set_var (struct pt_solution *pt, tree var)
6177 {
6178 memset (pt, 0, sizeof (struct pt_solution));
6179 pt->vars = BITMAP_GGC_ALLOC ();
6180 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6181 pt->vars_contains_global = is_global_var (var);
6182 }
6183
6184 /* Computes the union of the points-to solutions *DEST and *SRC and
6185 stores the result in *DEST. This changes the points-to bitmap
6186 of *DEST and thus may not be used if that might be shared.
6187 The points-to bitmap of *SRC and *DEST will not be shared after
6188 this function if they were not before. */
6189
6190 static void
6191 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6192 {
6193 dest->anything |= src->anything;
6194 if (dest->anything)
6195 {
6196 pt_solution_reset (dest);
6197 return;
6198 }
6199
6200 dest->nonlocal |= src->nonlocal;
6201 dest->escaped |= src->escaped;
6202 dest->ipa_escaped |= src->ipa_escaped;
6203 dest->null |= src->null;
6204 dest->vars_contains_global |= src->vars_contains_global;
6205 if (!src->vars)
6206 return;
6207
6208 if (!dest->vars)
6209 dest->vars = BITMAP_GGC_ALLOC ();
6210 bitmap_ior_into (dest->vars, src->vars);
6211 }
6212
6213 /* Return true if the points-to solution *PT is empty. */
6214
6215 bool
6216 pt_solution_empty_p (struct pt_solution *pt)
6217 {
6218 if (pt->anything
6219 || pt->nonlocal)
6220 return false;
6221
6222 if (pt->vars
6223 && !bitmap_empty_p (pt->vars))
6224 return false;
6225
6226 /* If the solution includes ESCAPED, check if that is empty. */
6227 if (pt->escaped
6228 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6229 return false;
6230
6231 /* If the solution includes ESCAPED, check if that is empty. */
6232 if (pt->ipa_escaped
6233 && !pt_solution_empty_p (&ipa_escaped_pt))
6234 return false;
6235
6236 return true;
6237 }
6238
6239 /* Return true if the points-to solution *PT only point to a single var, and
6240 return the var uid in *UID. */
6241
6242 bool
6243 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6244 {
6245 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6246 || pt->null || pt->vars == NULL
6247 || !bitmap_single_bit_set_p (pt->vars))
6248 return false;
6249
6250 *uid = bitmap_first_set_bit (pt->vars);
6251 return true;
6252 }
6253
6254 /* Return true if the points-to solution *PT includes global memory. */
6255
6256 bool
6257 pt_solution_includes_global (struct pt_solution *pt)
6258 {
6259 if (pt->anything
6260 || pt->nonlocal
6261 || pt->vars_contains_global)
6262 return true;
6263
6264 if (pt->escaped)
6265 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6266
6267 if (pt->ipa_escaped)
6268 return pt_solution_includes_global (&ipa_escaped_pt);
6269
6270 /* ??? This predicate is not correct for the IPA-PTA solution
6271 as we do not properly distinguish between unit escape points
6272 and global variables. */
6273 if (cfun->gimple_df->ipa_pta)
6274 return true;
6275
6276 return false;
6277 }
6278
6279 /* Return true if the points-to solution *PT includes the variable
6280 declaration DECL. */
6281
6282 static bool
6283 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6284 {
6285 if (pt->anything)
6286 return true;
6287
6288 if (pt->nonlocal
6289 && is_global_var (decl))
6290 return true;
6291
6292 if (pt->vars
6293 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6294 return true;
6295
6296 /* If the solution includes ESCAPED, check it. */
6297 if (pt->escaped
6298 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6299 return true;
6300
6301 /* If the solution includes ESCAPED, check it. */
6302 if (pt->ipa_escaped
6303 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6304 return true;
6305
6306 return false;
6307 }
6308
6309 bool
6310 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6311 {
6312 bool res = pt_solution_includes_1 (pt, decl);
6313 if (res)
6314 ++pta_stats.pt_solution_includes_may_alias;
6315 else
6316 ++pta_stats.pt_solution_includes_no_alias;
6317 return res;
6318 }
6319
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6380
6381 bool
6382 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6383 {
6384 bool res = pt_solutions_intersect_1 (pt1, pt2);
6385 if (res)
6386 ++pta_stats.pt_solutions_intersect_may_alias;
6387 else
6388 ++pta_stats.pt_solutions_intersect_no_alias;
6389 return res;
6390 }
6391
6392
6393 /* Dump points-to information to OUTFILE. */
6394
6395 static void
6396 dump_sa_points_to_info (FILE *outfile)
6397 {
6398 unsigned int i;
6399
6400 fprintf (outfile, "\nPoints-to sets\n\n");
6401
6402 if (dump_flags & TDF_STATS)
6403 {
6404 fprintf (outfile, "Stats:\n");
6405 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6406 fprintf (outfile, "Non-pointer vars: %d\n",
6407 stats.nonpointer_vars);
6408 fprintf (outfile, "Statically unified vars: %d\n",
6409 stats.unified_vars_static);
6410 fprintf (outfile, "Dynamically unified vars: %d\n",
6411 stats.unified_vars_dynamic);
6412 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6413 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6414 fprintf (outfile, "Number of implicit edges: %d\n",
6415 stats.num_implicit_edges);
6416 }
6417
6418 for (i = 1; i < varmap.length (); i++)
6419 {
6420 varinfo_t vi = get_varinfo (i);
6421 if (!vi->may_have_pointers)
6422 continue;
6423 dump_solution_for_var (outfile, i);
6424 }
6425 }
6426
6427
/* Debug points-to information to stderr.  Convenience entry point for
   use from a debugger; simply forwards to dump_sa_points_to_info.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6435
6436
/* Initialize the always-existing constraint variables for NULL
   ANYTHING, READONLY, and INTEGER, plus ESCAPED, NONLOCAL and
   STOREDANYTHING, together with the base constraints relating them.

   NOTE: the creation order here is significant — each new_var_info
   call must produce the id asserted against (nothing_id, anything_id,
   ...), which is what the gcc_asserts below verify.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  Note it is deliberately NOT a special var, unlike the
     others, so the solver keeps computing a real solution for it.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6608
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the constraint and variable vectors, the tree -> varinfo
   maps, and the always-present base variables.  Paired with the
   teardown in delete_points_to_sets.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity is pointless when at most one field is
     tracked per variable.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table.create (511);
  /* Must come after varmap/constraints exist — pushes the special
     variables (NULL, ANYTHING, ...) and their base constraints.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
6638
6639 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6640 predecessor edges. */
6641
6642 static void
6643 remove_preds_and_fake_succs (constraint_graph_t graph)
6644 {
6645 unsigned int i;
6646
6647 /* Clear the implicit ref and address nodes from the successor
6648 lists. */
6649 for (i = 1; i < FIRST_REF_NODE; i++)
6650 {
6651 if (graph->succs[i])
6652 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6653 FIRST_REF_NODE * 2);
6654 }
6655
6656 /* Free the successor list for the non-ref nodes. */
6657 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6658 {
6659 if (graph->succs[i])
6660 BITMAP_FREE (graph->succs[i]);
6661 }
6662
6663 /* Now reallocate the size of the successor list as, and blow away
6664 the predecessor bitmaps. */
6665 graph->size = varmap.length ();
6666 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6667
6668 free (graph->implicit_preds);
6669 graph->implicit_preds = NULL;
6670 free (graph->preds);
6671 graph->preds = NULL;
6672 bitmap_obstack_release (&predbitmap_obstack);
6673 }
6674
/* Solve the constraint set.  Runs the solver pipeline in order:
   build the predecessor graph, perform offline variable substitution,
   rewrite the constraints, build the successor graph, unite pointer
   equivalences, find indirect cycles, then iterate the solver.  The
   phase order is fixed — each step consumes the previous one's
   output.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: the upper half holds the implicit
     "ref" nodes used by the offline phases.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6747
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  Builds and
   solves the intraprocedural constraint system, then transfers the
   solutions onto SSA names and the per-call-site use/clobber sets.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Seed constraints for incoming parameters and locals.  */
  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry no pointer values of their own.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi
	&& vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      /* ssa_name may return NULL for released names.  */
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    /* Const calls use nothing.  */
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    /* Const/pure/novops calls clobber nothing.  */
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6877
6878
/* Delete created points-to sets.  Releases everything allocated by
   init_alias_vars and the solver; the per-statement pt_solutions
   handed out to the IL survive (their bitmaps live elsewhere).  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  shared_bitmap_table.dispose ();
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Each node owns a vec of complex constraints; release those before
     freeing the array itself.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);

  obstack_free (&fake_var_decl_obstack, NULL);

  pointer_map_destroy (final_solutions);
  obstack_free (&final_solutions_obstack, NULL);
}
6916
6917
6918 /* Compute points-to information for every SSA_NAME pointer in the
6919 current function and compute the transitive closure of escaped
6920 variables to re-initialize the call-clobber states of local variables. */
6921
6922 unsigned int
6923 compute_may_aliases (void)
6924 {
6925 if (cfun->gimple_df->ipa_pta)
6926 {
6927 if (dump_file)
6928 {
6929 fprintf (dump_file, "\nNot re-computing points-to information "
6930 "because IPA points-to information is available.\n\n");
6931
6932 /* But still dump what we have remaining it. */
6933 dump_alias_info (dump_file);
6934 }
6935
6936 return 0;
6937 }
6938
6939 /* For each pointer P_i, determine the sets of variables that P_i may
6940 point-to. Compute the reachability set of escaped and call-used
6941 variables. */
6942 compute_points_to_sets ();
6943
6944 /* Debugging dumps. */
6945 if (dump_file)
6946 dump_alias_info (dump_file);
6947
6948 /* Deallocate memory used by aliasing data structures and the internal
6949 points-to solution. */
6950 delete_points_to_sets ();
6951
6952 gcc_assert (!need_ssa_update_p (cfun));
6953
6954 return 0;
6955 }
6956
6957 static bool
6958 gate_tree_pta (void)
6959 {
6960 return flag_tree_pta;
6961 }
6962
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute hook; all the
   work happens through the todo flag.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_alias

} // anon namespace
6996
/* Factory for the "alias" dummy pass; called by the pass manager.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
7002
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Identical in structure to pass_build_alias but
   scheduled at a different (early) point in the pipeline.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_ealias

} // anon namespace
7036
/* Factory for the "ealias" dummy pass; called by the pass manager.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
7042
7043
7044 /* Return true if we should execute IPA PTA. */
7045 static bool
7046 gate_ipa_pta (void)
7047 {
7048 return (optimize
7049 && flag_ipa_pta
7050 /* Don't bother doing anything if the program has errors. */
7051 && !seen_error ());
7052 }
7053
/* IPA PTA solutions for ESCAPED.  Starts out as a conservative
   "points to anything" solution (the leading TRUE is presumably the
   ANYTHING flag of struct pt_solution — confirm against its
   definition) and is overwritten with the computed solution in
   ipa_pta_execute.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
7057
7058 /* Associate node with varinfo DATA. Worker for
7059 cgraph_for_node_and_aliases. */
7060 static bool
7061 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7062 {
7063 if ((node->alias || node->thunk.thunk_p)
7064 && node->analyzed)
7065 insert_vi_for_tree (node->decl, (varinfo_t)data);
7066 return false;
7067 }
7068
/* Execute the driver for IPA PTA.  Builds one constraint system for
   the whole translation unit, solves it, and writes the resulting
   points-to information back into each function's SSA names and
   call-site use/clobber sets.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  First pass: create a function varinfo for
     every defined function (and share it with its aliases/thunks) so
     later call constraints can refer to any callee.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;
      cgraph_get_body (node);

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl));
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      /* Analyzed aliases resolve to their target variable.  */
      if (var->alias && var->analyzed)
	continue;

      get_vi_for_tree (var->decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  /* FROM tracks where each function's constraints start, for dumping
     only the newly added ones.  */
  from = constraints.length ();

  /* Second pass: generate constraints from every function body.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->used_from_other_partition
	  || node->externally_visible
	  || node->force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  /* ESCAPED = result-of-fn.  */
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.
	 These come straight from this function's fi_clobbers/fi_uses
	 sub-variables.  */
      fi = lookup_vi_for_tree (node->decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  A callee
		 without fn_info was not analyzed in this unit.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  /* Once a set contains ANYTHING further
			     unions cannot add information.  */
			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Mark the function as having IPA points-to information so the
	 intraprocedural pass does not overwrite it.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7399
/* The IPA points-to analysis pass definition.  */

namespace {

const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_pta (); }
  unsigned int execute () { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace
7431
/* Factory for the IPA PTA pass; called by the pass manager.  */

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}