tree-ssa-structalias.c: Include header files.
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
42 #include "varasm.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
48 points-to sets.
49
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
56 as a consequence.
57
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
61
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
63 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
65
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
69
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
76
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
79
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
82 order.
83 Each variable for a structure field has
84
85 1. "size", that tells the size in bits of that field.
86 2. "fullsize", that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
89
90 Thus,
91 struct f
92 {
93 int a;
94 int b;
95 } foo;
96 int *bar;
97
98 looks like
99
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
103
104
105 In order to solve the system of set constraints, the following is
106 done:
107
108 1. Each constraint variable x has a solution set associated with it,
109 Sol(x).
110
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
117
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
120
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
123
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
127
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
131
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
135
136 8. The process of walking the graph is iterated until no solution
137 sets change.
138
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
142
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
146
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
149
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
152
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
155
156 /* IPA-PTA optimizations possible.
157
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
162
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode.
166 There is now is_ipa_escape_point but this is only used in a few
167 selected places.
168
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
176
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
187
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
190 solution can go alongside the non-IPA escaped solution and be
191 used to query which vars escape the unit through a function.
192 This is also required to make the escaped-HEAP trick work in IPA mode.
193
194 We never put function decls in points-to sets so we do not
195 keep the set of called functions for indirect calls.
196
197 And probably more. */
198
199 static bool use_field_sensitive = true;
200 static int in_ipa_mode = 0;
201
202 /* Used for predecessor bitmaps. */
203 static bitmap_obstack predbitmap_obstack;
204
205 /* Used for points-to sets. */
206 static bitmap_obstack pta_obstack;
207
208 /* Used for oldsolution members of variables. */
209 static bitmap_obstack oldpta_obstack;
210
211 /* Used for per-solver-iteration bitmaps. */
212 static bitmap_obstack iteration_obstack;
213
214 static unsigned int create_variable_info_for (tree, const char *, bool);
215 typedef struct constraint_graph *constraint_graph_t;
216 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
217
218 struct constraint;
219 typedef struct constraint *constraint_t;
220
221
222 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
223 if (a) \
224 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225
226 static struct constraint_stats
227 {
228 unsigned int total_vars;
229 unsigned int nonpointer_vars;
230 unsigned int unified_vars_static;
231 unsigned int unified_vars_dynamic;
232 unsigned int iterations;
233 unsigned int num_edges;
234 unsigned int num_implicit_edges;
235 unsigned int points_to_sets_created;
236 } stats;
237
238 struct variable_info
239 {
240 /* ID of this variable */
241 unsigned int id;
242
243 /* True if this is a variable created by the constraint analysis, such as
244 heap variables and constraints we had to break up. */
245 unsigned int is_artificial_var : 1;
246
247 /* True if this is a special variable whose solution set should not be
248 changed. */
249 unsigned int is_special_var : 1;
250
251 /* True for variables whose size is not known or variable. */
252 unsigned int is_unknown_size_var : 1;
253
254 /* True for (sub-)fields that represent a whole variable. */
255 unsigned int is_full_var : 1;
256
257 /* True if this is a heap variable. */
258 unsigned int is_heap_var : 1;
259
260 /* True if this field may contain pointers. */
261 unsigned int may_have_pointers : 1;
262
263 /* True if this field has only restrict qualified pointers. */
264 unsigned int only_restrict_pointers : 1;
265
266 /* True if this represents a heap var created for a restrict qualified
267 pointer. */
268 unsigned int is_restrict_var : 1;
269
270 /* True if this represents a global variable. */
271 unsigned int is_global_var : 1;
272
273 /* True if this represents a module escape point for IPA analysis. */
274 unsigned int is_ipa_escape_point : 1;
275
276 /* True if this represents a IPA function info. */
277 unsigned int is_fn_info : 1;
278
279 /* ??? Store somewhere better. */
280 unsigned short ruid;
281
282 /* The ID of the variable for the next field in this structure
283 or zero for the last field in this structure. */
284 unsigned next;
285
286 /* The ID of the variable for the first field in this structure. */
287 unsigned head;
288
289 /* Offset of this variable, in bits, from the base variable */
290 unsigned HOST_WIDE_INT offset;
291
292 /* Size of the variable, in bits. */
293 unsigned HOST_WIDE_INT size;
294
295 /* Full size of the base variable, in bits. */
296 unsigned HOST_WIDE_INT fullsize;
297
298 /* Name of this variable */
299 const char *name;
300
301 /* Tree that this variable is associated with. */
302 tree decl;
303
304 /* Points-to set for this variable. */
305 bitmap solution;
306
307 /* Old points-to set for this variable. */
308 bitmap oldsolution;
309 };
310 typedef struct variable_info *varinfo_t;
311
312 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
313 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
314 unsigned HOST_WIDE_INT);
315 static varinfo_t lookup_vi_for_tree (tree);
316 static inline bool type_can_have_subvars (const_tree);
317 static void make_param_constraints (varinfo_t);
318
319 /* Pool of variable info structures. */
320 static object_allocator<variable_info> variable_info_pool
321 ("Variable info pool");
322
323 /* Map varinfo to final pt_solution. */
324 static hash_map<varinfo_t, pt_solution *> *final_solutions;
325 struct obstack final_solutions_obstack;
326
327 /* Table of variable info structures for constraint variables.
328 Indexed directly by variable info id. */
329 static vec<varinfo_t> varmap;
330
331 /* Return the varmap element N */
332
333 static inline varinfo_t
334 get_varinfo (unsigned int n)
335 {
336 return varmap[n];
337 }
338
339 /* Return the next variable in the list of sub-variables of VI
340 or NULL if VI is the last sub-variable. */
341
342 static inline varinfo_t
343 vi_next (varinfo_t vi)
344 {
345 return get_varinfo (vi->next);
346 }
347
348 /* Static IDs for the special variables. Variable ID zero is unused
349 and used as terminator for the sub-variable chain. */
350 enum { nothing_id = 1, anything_id = 2, string_id = 3,
351 escaped_id = 4, nonlocal_id = 5,
352 storedanything_id = 6, integer_id = 7 };
353
354 /* Return a new variable info structure consisting for a variable
355 named NAME, and using constraint graph node NODE. Append it
356 to the vector of variable info structures. */
357
358 static varinfo_t
359 new_var_info (tree t, const char *name, bool add_id)
360 {
361 unsigned index = varmap.length ();
362 varinfo_t ret = variable_info_pool.allocate ();
363
364 if (dump_file && add_id)
365 {
366 char *tempname = xasprintf ("%s(%d)", name, index);
367 name = ggc_strdup (tempname);
368 free (tempname);
369 }
370
371 ret->id = index;
372 ret->name = name;
373 ret->decl = t;
374 /* Vars without decl are artificial and do not have sub-variables. */
375 ret->is_artificial_var = (t == NULL_TREE);
376 ret->is_special_var = false;
377 ret->is_unknown_size_var = false;
378 ret->is_full_var = (t == NULL_TREE);
379 ret->is_heap_var = false;
380 ret->may_have_pointers = true;
381 ret->only_restrict_pointers = false;
382 ret->is_restrict_var = false;
383 ret->ruid = 0;
384 ret->is_global_var = (t == NULL_TREE);
385 ret->is_ipa_escape_point = false;
386 ret->is_fn_info = false;
387 if (t && DECL_P (t))
388 ret->is_global_var = (is_global_var (t)
389 /* We have to treat even local register variables
390 as escape points. */
391 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
392 ret->solution = BITMAP_ALLOC (&pta_obstack);
393 ret->oldsolution = NULL;
394 ret->next = 0;
395 ret->head = ret->id;
396
397 stats.total_vars++;
398
399 varmap.safe_push (ret);
400
401 return ret;
402 }
403
404 /* A map mapping call statements to per-stmt variables for uses
405 and clobbers specific to the call. */
406 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
407
408 /* Lookup or create the variable for the call statement CALL. */
409
410 static varinfo_t
411 get_call_vi (gcall *call)
412 {
413 varinfo_t vi, vi2;
414
415 bool existed;
416 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
417 if (existed)
418 return *slot_p;
419
420 vi = new_var_info (NULL_TREE, "CALLUSED", true);
421 vi->offset = 0;
422 vi->size = 1;
423 vi->fullsize = 2;
424 vi->is_full_var = true;
425
426 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
427 vi2->offset = 1;
428 vi2->size = 1;
429 vi2->fullsize = 2;
430 vi2->is_full_var = true;
431
432 vi->next = vi2->id;
433
434 *slot_p = vi;
435 return vi;
436 }
437
438 /* Lookup the variable for the call statement CALL representing
439 the uses. Returns NULL if there is nothing special about this call. */
440
441 static varinfo_t
442 lookup_call_use_vi (gcall *call)
443 {
444 varinfo_t *slot_p = call_stmt_vars->get (call);
445 if (slot_p)
446 return *slot_p;
447
448 return NULL;
449 }
450
451 /* Lookup the variable for the call statement CALL representing
452 the clobbers. Returns NULL if there is nothing special about this call. */
453
454 static varinfo_t
455 lookup_call_clobber_vi (gcall *call)
456 {
457 varinfo_t uses = lookup_call_use_vi (call);
458 if (!uses)
459 return NULL;
460
461 return vi_next (uses);
462 }
463
464 /* Lookup or create the variable for the call statement CALL representing
465 the uses. */
466
467 static varinfo_t
468 get_call_use_vi (gcall *call)
469 {
470 return get_call_vi (call);
471 }
472
473 /* Lookup or create the variable for the call statement CALL representing
474 the clobbers. */
475
476 static varinfo_t ATTRIBUTE_UNUSED
477 get_call_clobber_vi (gcall *call)
478 {
479 return vi_next (get_call_vi (call));
480 }
481
482
483 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
484
485 /* An expression that appears in a constraint. */
486
487 struct constraint_expr
488 {
489 /* Constraint type. */
490 constraint_expr_type type;
491
492 /* Variable we are referring to in the constraint. */
493 unsigned int var;
494
495 /* Offset, in bits, of this constraint from the beginning of
496 variables it ends up referring to.
497
498 IOW, in a deref constraint, we would deref, get the result set,
499 then add OFFSET to each member. */
500 HOST_WIDE_INT offset;
501 };
502
503 /* Use 0x8000... as special unknown offset. */
504 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
505
506 typedef struct constraint_expr ce_s;
507 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
508 static void get_constraint_for (tree, vec<ce_s> *);
509 static void get_constraint_for_rhs (tree, vec<ce_s> *);
510 static void do_deref (vec<ce_s> *);
511
512 /* Our set constraints are made up of two constraint expressions, one
513 LHS, and one RHS.
514
515 As described in the introduction, our set constraints each represent an
516 operation between set valued variables.
517 */
518 struct constraint
519 {
520 struct constraint_expr lhs;
521 struct constraint_expr rhs;
522 };
523
524 /* List of constraints that we use to build the constraint graph from. */
525
526 static vec<constraint_t> constraints;
527 static object_allocator<constraint> constraint_pool ("Constraint pool");
528
529 /* The constraint graph is represented as an array of bitmaps
530 containing successor nodes. */
531
532 struct constraint_graph
533 {
534 /* Size of this graph, which may be different than the number of
535 nodes in the variable map. */
536 unsigned int size;
537
538 /* Explicit successors of each node. */
539 bitmap *succs;
540
541 /* Implicit predecessors of each node (Used for variable
542 substitution). */
543 bitmap *implicit_preds;
544
545 /* Explicit predecessors of each node (Used for variable substitution). */
546 bitmap *preds;
547
548 /* Indirect cycle representatives, or -1 if the node has no indirect
549 cycles. */
550 int *indirect_cycles;
551
552 /* Representative node for a node. rep[a] == a unless the node has
553 been unified. */
554 unsigned int *rep;
555
556 /* Equivalence class representative for a label. This is used for
557 variable substitution. */
558 int *eq_rep;
559
560 /* Pointer equivalence label for a node. All nodes with the same
561 pointer equivalence label can be unified together at some point
562 (either during constraint optimization or after the constraint
563 graph is built). */
564 unsigned int *pe;
565
566 /* Pointer equivalence representative for a label. This is used to
567 handle nodes that are pointer equivalent but not location
568 equivalent. We can unite these once the addressof constraints
569 are transformed into initial points-to sets. */
570 int *pe_rep;
571
572 /* Pointer equivalence label for each node, used during variable
573 substitution. */
574 unsigned int *pointer_label;
575
576 /* Location equivalence label for each node, used during location
577 equivalence finding. */
578 unsigned int *loc_label;
579
580 /* Pointed-by set for each node, used during location equivalence
581 finding. This is pointed-by rather than pointed-to, because it
582 is constructed using the predecessor graph. */
583 bitmap *pointed_by;
584
585 /* Points to sets for pointer equivalence. This is *not* the actual
586 points-to sets for nodes. */
587 bitmap *points_to;
588
589 /* Bitmap of nodes where the bit is set if the node is a direct
590 node. Used for variable substitution. */
591 sbitmap direct_nodes;
592
593 /* Bitmap of nodes where the bit is set if the node is address
594 taken. Used for variable substitution. */
595 bitmap address_taken;
596
597 /* Vector of complex constraints for each graph node. Complex
598 constraints are those involving dereferences or offsets that are
599 not 0. */
600 vec<constraint_t> *complex;
601 };
602
603 static constraint_graph_t graph;
604
605 /* During variable substitution and the offline version of indirect
606 cycle finding, we create nodes to represent dereferences and
607 address taken constraints. These represent where these start and
608 end. */
609 #define FIRST_REF_NODE (varmap).length ()
610 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
611
612 /* Return the representative node for NODE, if NODE has been unioned
613 with another NODE.
614 This function performs path compression along the way to finding
615 the representative. */
616
617 static unsigned int
618 find (unsigned int node)
619 {
620 gcc_checking_assert (node < graph->size);
621 if (graph->rep[node] != node)
622 return graph->rep[node] = find (graph->rep[node]);
623 return node;
624 }
625
626 /* Union the TO and FROM nodes to the TO nodes.
627 Note that at some point in the future, we may want to do
628 union-by-rank, in which case we are going to have to return the
629 node we unified to. */
630
631 static bool
632 unite (unsigned int to, unsigned int from)
633 {
634 gcc_checking_assert (to < graph->size && from < graph->size);
635 if (to != from && graph->rep[from] != to)
636 {
637 graph->rep[from] = to;
638 return true;
639 }
640 return false;
641 }
642
643 /* Create a new constraint consisting of LHS and RHS expressions. */
644
645 static constraint_t
646 new_constraint (const struct constraint_expr lhs,
647 const struct constraint_expr rhs)
648 {
649 constraint_t ret = constraint_pool.allocate ();
650 ret->lhs = lhs;
651 ret->rhs = rhs;
652 return ret;
653 }
654
655 /* Print out constraint C to FILE. */
656
657 static void
658 dump_constraint (FILE *file, constraint_t c)
659 {
660 if (c->lhs.type == ADDRESSOF)
661 fprintf (file, "&");
662 else if (c->lhs.type == DEREF)
663 fprintf (file, "*");
664 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
665 if (c->lhs.offset == UNKNOWN_OFFSET)
666 fprintf (file, " + UNKNOWN");
667 else if (c->lhs.offset != 0)
668 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
669 fprintf (file, " = ");
670 if (c->rhs.type == ADDRESSOF)
671 fprintf (file, "&");
672 else if (c->rhs.type == DEREF)
673 fprintf (file, "*");
674 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
675 if (c->rhs.offset == UNKNOWN_OFFSET)
676 fprintf (file, " + UNKNOWN");
677 else if (c->rhs.offset != 0)
678 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
679 }
680
681
682 void debug_constraint (constraint_t);
683 void debug_constraints (void);
684 void debug_constraint_graph (void);
685 void debug_solution_for_var (unsigned int);
686 void debug_sa_points_to_info (void);
687 void debug_varinfo (varinfo_t);
688 void debug_varmap (void);
689
690 /* Print out constraint C to stderr. */
691
692 DEBUG_FUNCTION void
693 debug_constraint (constraint_t c)
694 {
695 dump_constraint (stderr, c);
696 fprintf (stderr, "\n");
697 }
698
699 /* Print out all constraints to FILE */
700
701 static void
702 dump_constraints (FILE *file, int from)
703 {
704 int i;
705 constraint_t c;
706 for (i = from; constraints.iterate (i, &c); i++)
707 if (c)
708 {
709 dump_constraint (file, c);
710 fprintf (file, "\n");
711 }
712 }
713
714 /* Print out all constraints to stderr. */
715
716 DEBUG_FUNCTION void
717 debug_constraints (void)
718 {
719 dump_constraints (stderr, 0);
720 }
721
722 /* Print the constraint graph in dot format. */
723
724 static void
725 dump_constraint_graph (FILE *file)
726 {
727 unsigned int i;
728
729 /* Only print the graph if it has already been initialized: */
730 if (!graph)
731 return;
732
733 /* Prints the header of the dot file: */
734 fprintf (file, "strict digraph {\n");
735 fprintf (file, " node [\n shape = box\n ]\n");
736 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
737 fprintf (file, "\n // List of nodes and complex constraints in "
738 "the constraint graph:\n");
739
740 /* The next lines print the nodes in the graph together with the
741 complex constraints attached to them. */
742 for (i = 1; i < graph->size; i++)
743 {
744 if (i == FIRST_REF_NODE)
745 continue;
746 if (find (i) != i)
747 continue;
748 if (i < FIRST_REF_NODE)
749 fprintf (file, "\"%s\"", get_varinfo (i)->name);
750 else
751 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
752 if (graph->complex[i].exists ())
753 {
754 unsigned j;
755 constraint_t c;
756 fprintf (file, " [label=\"\\N\\n");
757 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
758 {
759 dump_constraint (file, c);
760 fprintf (file, "\\l");
761 }
762 fprintf (file, "\"]");
763 }
764 fprintf (file, ";\n");
765 }
766
767 /* Go over the edges. */
768 fprintf (file, "\n // Edges in the constraint graph:\n");
769 for (i = 1; i < graph->size; i++)
770 {
771 unsigned j;
772 bitmap_iterator bi;
773 if (find (i) != i)
774 continue;
775 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
776 {
777 unsigned to = find (j);
778 if (i == to)
779 continue;
780 if (i < FIRST_REF_NODE)
781 fprintf (file, "\"%s\"", get_varinfo (i)->name);
782 else
783 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
784 fprintf (file, " -> ");
785 if (to < FIRST_REF_NODE)
786 fprintf (file, "\"%s\"", get_varinfo (to)->name);
787 else
788 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
789 fprintf (file, ";\n");
790 }
791 }
792
793 /* Prints the tail of the dot file. */
794 fprintf (file, "}\n");
795 }
796
797 /* Print out the constraint graph to stderr. */
798
799 DEBUG_FUNCTION void
800 debug_constraint_graph (void)
801 {
802 dump_constraint_graph (stderr);
803 }
804
805 /* SOLVER FUNCTIONS
806
807 The solver is a simple worklist solver, that works on the following
808 algorithm:
809
810 sbitmap changed_nodes = all zeroes;
811 changed_count = 0;
812 For each node that is not already collapsed:
813 changed_count++;
814 set bit in changed nodes
815
816 while (changed_count > 0)
817 {
818 compute topological ordering for constraint graph
819
820 find and collapse cycles in the constraint graph (updating
821 changed if necessary)
822
823 for each node (n) in the graph in topological order:
824 changed_count--;
825
826 Process each complex constraint associated with the node,
827 updating changed if necessary.
828
829 For each outgoing edge from n, propagate the solution from n to
830 the destination of the edge, updating changed as necessary.
831
832 } */
833
834 /* Return true if two constraint expressions A and B are equal. */
835
836 static bool
837 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
838 {
839 return a.type == b.type && a.var == b.var && a.offset == b.offset;
840 }
841
842 /* Return true if constraint expression A is less than constraint expression
843 B. This is just arbitrary, but consistent, in order to give them an
844 ordering. */
845
846 static bool
847 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
848 {
849 if (a.type == b.type)
850 {
851 if (a.var == b.var)
852 return a.offset < b.offset;
853 else
854 return a.var < b.var;
855 }
856 else
857 return a.type < b.type;
858 }
859
860 /* Return true if constraint A is less than constraint B. This is just
861 arbitrary, but consistent, in order to give them an ordering. */
862
863 static bool
864 constraint_less (const constraint_t &a, const constraint_t &b)
865 {
866 if (constraint_expr_less (a->lhs, b->lhs))
867 return true;
868 else if (constraint_expr_less (b->lhs, a->lhs))
869 return false;
870 else
871 return constraint_expr_less (a->rhs, b->rhs);
872 }
873
874 /* Return true if two constraints A and B are equal. */
875
876 static bool
877 constraint_equal (struct constraint a, struct constraint b)
878 {
879 return constraint_expr_equal (a.lhs, b.lhs)
880 && constraint_expr_equal (a.rhs, b.rhs);
881 }
882
883
884 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
885
886 static constraint_t
887 constraint_vec_find (vec<constraint_t> vec,
888 struct constraint lookfor)
889 {
890 unsigned int place;
891 constraint_t found;
892
893 if (!vec.exists ())
894 return NULL;
895
896 place = vec.lower_bound (&lookfor, constraint_less);
897 if (place >= vec.length ())
898 return NULL;
899 found = vec[place];
900 if (!constraint_equal (*found, lookfor))
901 return NULL;
902 return found;
903 }
904
905 /* Union two constraint vectors, TO and FROM. Put the result in TO.
906 Returns true of TO set is changed. */
907
908 static bool
909 constraint_set_union (vec<constraint_t> *to,
910 vec<constraint_t> *from)
911 {
912 int i;
913 constraint_t c;
914 bool any_change = false;
915
916 FOR_EACH_VEC_ELT (*from, i, c)
917 {
918 if (constraint_vec_find (*to, *c) == NULL)
919 {
920 unsigned int place = to->lower_bound (c, constraint_less);
921 to->safe_insert (place, c);
922 any_change = true;
923 }
924 }
925 return any_change;
926 }
927
928 /* Expands the solution in SET to all sub-fields of variables included. */
929
930 static bitmap
931 solution_set_expand (bitmap set, bitmap *expanded)
932 {
933 bitmap_iterator bi;
934 unsigned j;
935
936 if (*expanded)
937 return *expanded;
938
939 *expanded = BITMAP_ALLOC (&iteration_obstack);
940
941 /* In a first pass expand to the head of the variables we need to
942 add all sub-fields off. This avoids quadratic behavior. */
943 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
944 {
945 varinfo_t v = get_varinfo (j);
946 if (v->is_artificial_var
947 || v->is_full_var)
948 continue;
949 bitmap_set_bit (*expanded, v->head);
950 }
951
952 /* In the second pass now expand all head variables with subfields. */
953 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
954 {
955 varinfo_t v = get_varinfo (j);
956 if (v->head != j)
957 continue;
958 for (v = vi_next (v); v != NULL; v = vi_next (v))
959 bitmap_set_bit (*expanded, v->id);
960 }
961
962 /* And finally set the rest of the bits from SET. */
963 bitmap_ior_into (*expanded, set);
964
965 return *expanded;
966 }
967
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  Returns true if TO changed.  EXPANDED_DELTA caches the
   field-expanded form of DELTA for reuse by the caller.  */

static bool
set_union_with_increment  (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			   bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1033
/* Insert constraint C into the list of complex constraints for graph
   node VAR.  The list is kept sorted by constraint_less so duplicates
   can be detected with a single binary search.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  vec<constraint_t> complex = graph->complex[var];
  /* Find the sorted insertion point for C.  */
  unsigned int place = complex.lower_bound (c, constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= complex.length ()
      || !constraint_equal (*c, *complex[place]))
    graph->complex[var].safe_insert (place, c);
}
1049
1050
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  /* FROM must already be represented by TO in the union-find.  */
  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  Retarget the side
	 that referred to FROM so the constraint now names TO.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  /* Union the (retargeted) constraints into TO's sorted list and
     discard FROM's storage.  */
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}
1085
1086
1087 /* Remove edges involving NODE from GRAPH. */
1088
1089 static void
1090 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1091 {
1092 if (graph->succs[node])
1093 BITMAP_FREE (graph->succs[node]);
1094 }
1095
1096 /* Merge GRAPH nodes FROM and TO into node TO. */
1097
1098 static void
1099 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1100 unsigned int from)
1101 {
1102 if (graph->indirect_cycles[from] != -1)
1103 {
1104 /* If we have indirect cycles with the from node, and we have
1105 none on the to node, the to node has indirect cycles from the
1106 from node now that they are unified.
1107 If indirect cycles exist on both, unify the nodes that they
1108 are in a cycle with, since we know they are in a cycle with
1109 each other. */
1110 if (graph->indirect_cycles[to] == -1)
1111 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1112 }
1113
1114 /* Merge all the successor edges. */
1115 if (graph->succs[from])
1116 {
1117 if (!graph->succs[to])
1118 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1119 bitmap_ior_into (graph->succs[to],
1120 graph->succs[from]);
1121 }
1122
1123 clear_edges_for_node (graph, from);
1124 }
1125
1126
1127 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1128 it doesn't exist in the graph already. */
1129
1130 static void
1131 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1132 unsigned int from)
1133 {
1134 if (to == from)
1135 return;
1136
1137 if (!graph->implicit_preds[to])
1138 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1139
1140 if (bitmap_set_bit (graph->implicit_preds[to], from))
1141 stats.num_implicit_edges++;
1142 }
1143
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Unlike add_graph_edge this
   does not report whether the edge was new.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Lazily allocate the predecessor bitmap for TO.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1156
1157 /* Add a graph edge to GRAPH, going from FROM to TO if
1158 it doesn't exist in the graph already.
1159 Return false if the edge already existed, true otherwise. */
1160
1161 static bool
1162 add_graph_edge (constraint_graph_t graph, unsigned int to,
1163 unsigned int from)
1164 {
1165 if (to == from)
1166 {
1167 return false;
1168 }
1169 else
1170 {
1171 bool r = false;
1172
1173 if (!graph->succs[from])
1174 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1175 if (bitmap_set_bit (graph->succs[from], to))
1176 {
1177 r = true;
1178 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1179 stats.num_edges++;
1180 }
1181 return r;
1182 }
1183 }
1184
1185
/* Initialize the constraint graph structure to contain SIZE nodes.
   Per-node arrays start out as: rep[j] = j (each node represents
   itself), pe_rep[j] = -1 and indirect_cycles[j] = -1 (none found
   yet); the succs, complex and pe arrays are zero-initialized.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
1212
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* Initially every non-special variable node is direct.  Constraints
     below knock nodes out of that set.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Classify each constraint and record the corresponding predecessor
     (and implicit-predecessor) edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  For a
	     field of a structure, every sibling field is affected.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offseted copies make the affected variable indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1315
/* Build the constraint graph, adding successor edges.  Unlike
   build_pred_graph this runs after variable substitution, so every
   variable is mapped through find () to its representative first.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by earlier rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y; seed the initial solution directly instead of
	     adding an edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1375
1376
1377 /* Changed variables on the last iteration. */
1378 static bitmap changed;
1379
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes whose DFS visit has started.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully identified.  */
  auto_sbitmap deleted;
  /* DFS number of each node.  */
  unsigned int *dfs;
  /* Per-node SCC representative mapping (used by condense_visit).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC members (non-root nodes only, per Nuutila).  */
  auto_vec<unsigned> scc_stack;
};
1394
1395
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Re-find W: the recursive visit may have collapsed it.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root iff
     its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop all members of this SCC off the stack.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* The member with the smallest id becomes the representative;
	     it must be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
1483
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be FROM's
   union-find representative.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Merge edges, then constraints; either may change TO's state.  */
  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solutions are dead now; free them eagerly.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Drop any self-edge the merge may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1539
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1550
1551
1552 /* Initialize and return a topological info structure. */
1553
1554 static struct topo_info *
1555 init_topo_info (void)
1556 {
1557 size_t size = graph->size;
1558 struct topo_info *ti = XNEW (struct topo_info);
1559 ti->visited = sbitmap_alloc (size);
1560 bitmap_clear (ti->visited);
1561 ti->topo_order.create (1);
1562 return ti;
1563 }
1564
1565
1566 /* Free the topological sort info pointed to by TI. */
1567
1568 static void
1569 free_topo_info (struct topo_info *ti)
1570 {
1571 sbitmap_free (ti->visited);
1572 ti->topo_order.release ();
1573 free (ti);
1574 }
1575
1576 /* Visit the graph in topological order, and store the order in the
1577 topo_info structure. */
1578
1579 static void
1580 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1581 unsigned int n)
1582 {
1583 bitmap_iterator bi;
1584 unsigned int j;
1585
1586 bitmap_set_bit (ti->visited, n);
1587
1588 if (graph->succs[n])
1589 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1590 {
1591 if (!bitmap_bit_p (ti->visited, j))
1592 topo_visit (graph, ti, j);
1593 }
1594
1595 ti->topo_order.safe_push (n);
1596 }
1597
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  Unions the solutions of everything DELTA
   may point at (at the given offset) into x's solution and adds the
   corresponding copy edges to GRAPH.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* If the offset makes the pointer point to before the
	     variable, use the first field for the lookup.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1687
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  Unions y's solution into everything
   DELTA may point at and records escapes through global variables.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* If the offset points before the variable, use the first
	     field for the lookup.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1787
1788 /* Handle a non-simple (simple meaning requires no iteration),
1789 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1790
1791 static void
1792 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1793 bitmap *expanded_delta)
1794 {
1795 if (c->lhs.type == DEREF)
1796 {
1797 if (c->rhs.type == ADDRESSOF)
1798 {
1799 gcc_unreachable ();
1800 }
1801 else
1802 {
1803 /* *x = y */
1804 do_ds_constraint (c, delta, expanded_delta);
1805 }
1806 }
1807 else if (c->rhs.type == DEREF)
1808 {
1809 /* x = *y */
1810 if (!(get_varinfo (c->lhs.var)->is_special_var))
1811 do_sd_constraint (graph, c, delta, expanded_delta);
1812 }
1813 else
1814 {
1815 bitmap tmp;
1816 bool flag = false;
1817
1818 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1819 && c->rhs.offset != 0 && c->lhs.offset == 0);
1820 tmp = get_varinfo (c->lhs.var)->solution;
1821
1822 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1823 expanded_delta);
1824
1825 if (flag)
1826 bitmap_set_bit (changed, c->lhs.var);
1827 }
1828 }
1829
/* Initialize a new SCC info structure for SIZE nodes.  Every node
   starts unvisited, undeleted, with DFS number 0 and mapping to
   itself.  */

scc_info::scc_info (size_t size) :
  visited (size), deleted (size), current_index (0), scc_stack (1)
{
  bitmap_clear (visited);
  bitmap_clear (deleted);
  node_mapping = XNEWVEC (unsigned int, size);
  dfs = XCNEWVEC (unsigned int, size);

  for (size_t i = 0; i < size; i++)
    node_mapping[i] = i;
}
1843
/* Free the heap-allocated arrays of an SCC info structure; the
   auto_sbitmap and auto_vec members release themselves.  */

scc_info::~scc_info ()
{
  free (node_mapping);
  free (dfs);
}
1851
1852
/* Find indirect cycles in GRAPH that occur, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */

static void
find_indirect_cycles (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info si (size);

  /* Only unvisited representative nodes start a new DFS.  */
  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
    if (!bitmap_bit_p (si.visited, i) && find (i) == i)
      scc_visit (graph, &si, i);
}
1871
1872 /* Compute a topological ordering for GRAPH, and store the result in the
1873 topo_info structure TI. */
1874
1875 static void
1876 compute_topo_order (constraint_graph_t graph,
1877 struct topo_info *ti)
1878 {
1879 unsigned int i;
1880 unsigned int size = graph->size;
1881
1882 for (i = 0; i != size; ++i)
1883 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1884 topo_visit (graph, ti, i);
1885 }
1886
/* Structure used to for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS.  */
  hashval_t hashcode;
  /* Assigned class id; 0 until one is allocated.  */
  unsigned int equivalence_class;
  /* The set of labels this class stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1897
/* Equiv_class_label hashtable helpers.  Entries are freed with free ()
   via free_ptr_hash.  */

struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1906
/* Hash function for a equiv_class_label_t; the hash is precomputed
   when the entry is created.  */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  return ecl->hashcode;
}
1914
/* Equality function for two equiv_class_label_t's.  The hash compare
   is a cheap pre-filter before the full bitmap comparison.  */

inline bool
equiv_class_hasher::equal (const equiv_class_label *eql1,
			   const equiv_class_label *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}
1924
1925 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1926 classes. */
1927 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1928
1929 /* A hashtable for mapping a bitmap of labels->location equivalence
1930 classes. */
1931 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1932
/* Look up the equivalence class entry for the bitmap of LABELS in
   TABLE, inserting a fresh entry if none exists yet.  A fresh entry
   has equivalence_class 0; callers assign a real class id to it.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (&ecl, INSERT);
  if (!*slot)
    {
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1957
1958 /* Perform offline variable substitution.
1959
1960 This is a worst case quadratic time way of identifying variables
1961 that must have equivalent points-to sets, including those caused by
1962 static cycles, and single entry subgraphs, in the constraint graph.
1963
1964 The technique is described in "Exploiting Pointer and Location
1965 Equivalence to Optimize Pointer Analysis. In the 14th International
1966 Static Analysis Symposium (SAS), August 2007." It is known as the
1967 "HU" algorithm, and is equivalent to value numbering the collapsed
1968 constraint graph including evaluating unions.
1969
1970 The general method of finding equivalence classes is as follows:
1971 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1972 Initialize all non-REF nodes to be direct nodes.
1973 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1974 variable}
1975 For each constraint containing the dereference, we also do the same
1976 thing.
1977
1978 We then compute SCC's in the graph and unify nodes in the same SCC,
1979 including pts sets.
1980
1981 For each non-collapsed node x:
1982 Visit all unvisited explicit incoming edges.
1983 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1984 where y->x.
1985 Lookup the equivalence class for pts(x).
1986 If we found one, equivalence_class(x) = found class.
1987 Otherwise, equivalence_class(x) = new class, and new_class is
1988 added to the lookup table.
1989
1990 All direct nodes with the same equivalence class can be replaced
1991 with a single representative node.
1992 All unlabeled nodes (label == 0) are not pointers and all edges
1993 involving them can be eliminated.
1994 We perform these optimizations during rewrite_constraints
1995
1996 In addition to pointer equivalence class finding, we also perform
1997 location equivalence class finding. This is the set of variables
1998 that always appear together in points-to sets. We use this to
1999 compress the size of the points-to sets. */
2000
2001 /* Current maximum pointer equivalence class id. */
2002 static int pointer_equiv_class;
2003
2004 /* Current maximum location equivalence class id. */
2005 static int location_equiv_class;
2006
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Operates on the predecessor
   graph (explicit and implicit edges) and records the collapse in
   SI->node_mapping rather than the main union-find.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Re-map W: the recursive visit may have collapsed it.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root iff
     its DFS number was not lowered by any predecessor.  */
  if (si->dfs[n] == my_dfs)
    {
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* An indirect member makes the whole SCC indirect.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2095
2096 /* Label pointer equivalences.
2097
2098 This performs a value numbering of the constraint graph to
2099 discover which variables will always have the same points-to sets
2100 under the current set of constraints.
2101
2102 The way it value numbers is to store the set of points-to bits
2103 generated by the constraints and graph edges. This is just used as a
2104 hash and equality comparison. The *actual set of points-to bits* is
2105 completely irrelevant, in that we don't care about being able to
2106 extract them later.
2107
2108 The equality values (currently bitmaps) just have to satisfy a few
2109 constraints, the main ones being:
2110 1. The combining operation must be order independent.
2111 2. The end result of a given set of operations must be unique iff the
2112 combination of input values is unique
2113 3. Hashable. */
2114
static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      /* Depth-first: label predecessors before combining their sets.  */
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges: self-edges and predecessors already known
	 to be non-pointers (label 0) contribute nothing.  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Defer allocating our own bitmap: remember the first
		 non-empty predecessor, and only allocate (seeding from
		 it) once a second non-empty predecessor shows up.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* Adding the unique bit FIRST_REF_NODE + n makes this set (and
	 hence the label) distinct from every other node's set.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  Note graph->points_to[n] may alias the
     predecessor's bitmap here; the sets are only used as hash keys.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      /* Otherwise the label stays 0, marking N as a non-pointer.  */
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      /* Look up (or create) the equivalence class for this points-to
	 set; equal sets share one label.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* An equal set already exists; free ours and share the
	     table's copy.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2199
/* Print the pred graph in dot format, restricted to SCC-representative
   nodes (per SI->node_mapping), into FILE.  */

static void
dump_pred_graph (struct scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Only SCC representatives are printed.  */
      if (si->node_mapping[i] != i)
	continue;
      /* Nodes >= FIRST_REF_NODE stand for the dereference *var.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  else
	    fprintf (file, "[label=\"*%s = {",
		     get_varinfo (i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      /* Edges run from each (representative of a) predecessor to I.  */
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
2274
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the
   scc_info holding the node mapping; the caller frees it with
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Class 0 is reserved to mean "non-pointer", so real classes start
     at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* Re-use the visited bitmap for the labeling walk.  */
  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* An equal pointed-by set already carries a label; ours is
	     no longer needed.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means the node can never point to anything.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2407
/* Free information that was only necessary for variable
   substitution: SI itself, the per-node substitution arrays in the
   global GRAPH, both equivalence-class hash tables, and the
   iteration obstack.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  delete si;
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  bitmap_obstack_release (&iteration_obstack);
}
2427
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.
   As a side effect, may unify NODE with the class representative,
   or record NODE as the representative for LABEL.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* First node seen with this label: it becomes the
	     representative for both full and pointer equivalence.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: only record the pointer equivalence; the
	 actual union is deferred to unite_pointer_equivalences.  */
      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2467
2468 /* Unite pointer equivalent but not location equivalent nodes in
2469 GRAPH. This may only be performed once variable substitution is
2470 finished. */
2471
2472 static void
2473 unite_pointer_equivalences (constraint_graph_t graph)
2474 {
2475 unsigned int i;
2476
2477 /* Go through the pointer equivalences and unite them to their
2478 representative, if they aren't already. */
2479 for (i = 1; i < FIRST_REF_NODE; i++)
2480 {
2481 unsigned int label = graph->pe[i];
2482 if (label)
2483 {
2484 int label_rep = graph->pe_rep[label];
2485
2486 if (label_rep == -1)
2487 continue;
2488
2489 label_rep = find (label_rep);
2490 if (label_rep >= 0 && unite (label_rep, find (i)))
2491 unify_nodes (graph, label_rep, i, false);
2492 }
2493 }
2494 }
2495
2496 /* Move complex constraints to the GRAPH nodes they belong to. */
2497
2498 static void
2499 move_complex_constraints (constraint_graph_t graph)
2500 {
2501 int i;
2502 constraint_t c;
2503
2504 FOR_EACH_VEC_ELT (constraints, i, c)
2505 {
2506 if (c)
2507 {
2508 struct constraint_expr lhs = c->lhs;
2509 struct constraint_expr rhs = c->rhs;
2510
2511 if (lhs.type == DEREF)
2512 {
2513 insert_into_complex (graph, lhs.var, c);
2514 }
2515 else if (rhs.type == DEREF)
2516 {
2517 if (!(get_varinfo (lhs.var)->is_special_var))
2518 insert_into_complex (graph, rhs.var, c);
2519 }
2520 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2521 && (lhs.offset != 0 || rhs.offset != 0))
2522 {
2523 insert_into_complex (graph, rhs.var, c);
2524 }
2525 }
2526 }
2527 }
2528
2529
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.  Constraints involving a
   proven non-pointer on either side are dropped (NULLed out).  */

static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  constraint_t c;

  if (flag_checking)
    {
      /* No unifications may have happened yet; every node must still
	 be its own representative.  */
      for (unsigned int j = 0; j < graph->size; j++)
	gcc_assert (find (j) == j);
    }

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Collapse both sides onto an equivalent node if one exists.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2599
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      auto_vec<unsigned> queue;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  */

      /* Phase 1: collect the representatives in NODE's solution that
	 get united into TO, without modifying any bitmaps.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	  }

      /* Phase 2: now it is safe to merge the queued nodes' data.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      return true;
    }
  return false;
}
2638
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only representatives with a non-empty solution and outgoing
	 work (successor edges or complex constraints) need visiting.  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in topological order so solutions flow forward
	 in as few iterations as possible.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't have ended up with "changed"
		     set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS is the delta: bits new since the last visit.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* Nothing new to propagate.  */
	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember what we have already propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Re-queue the successor if its solution grew.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2797
/* Map from trees to variable infos.  Maintained by insert_vi_for_tree
   and queried by lookup_vi_for_tree/get_vi_for_tree below.  */
static hash_map<tree, varinfo_t> *vi_for_tree;
2800
2801
/* Insert VI as the variable info for tree T in the vi_for_tree map.
   T must not already have an entry.  */

static void
insert_vi_for_tree (tree t, varinfo_t vi)
{
  gcc_assert (vi);
  /* put returns true iff T was already present -- that would be a
     caller bug.  */
  gcc_assert (!vi_for_tree->put (t, vi));
}
2810
2811 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2812 exist in the map, return NULL, otherwise, return the varinfo we found. */
2813
2814 static varinfo_t
2815 lookup_vi_for_tree (tree t)
2816 {
2817 varinfo_t *slot = vi_for_tree->get (t);
2818 if (slot == NULL)
2819 return NULL;
2820
2821 return *slot;
2822 }
2823
/* Return a printable name for DECL.  The names are only used for
   dumps, so when there is no dump file we avoid the allocation work
   and return "NULL".  */

static const char *
alias_get_name (tree decl)
{
  const char *res = NULL;
  char *temp;

  if (!dump_file)
    return "NULL";

  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* SSA names are printed as base-name_version, or _version when
	 the base has no name.  */
      res = get_name (decl);
      if (res)
	temp = xasprintf ("%s_%u", res, SSA_NAME_VERSION (decl));
      else
	temp = xasprintf ("_%u", SSA_NAME_VERSION (decl));
      /* Copy into GC memory so the name outlives the temporary.  */
      res = ggc_strdup (temp);
      free (temp);
    }
  else if (DECL_P (decl))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      else
	{
	  res = get_name (decl);
	  if (!res)
	    {
	      /* Anonymous decls are named by their UID.  */
	      temp = xasprintf ("D.%u", DECL_UID (decl));
	      res = ggc_strdup (temp);
	      free (temp);
	    }
	}
    }
  if (res != NULL)
    return res;

  return "NULL";
}
2865
2866 /* Find the variable id for tree T in the map.
2867 If T doesn't exist in the map, create an entry for it and return it. */
2868
2869 static varinfo_t
2870 get_vi_for_tree (tree t)
2871 {
2872 varinfo_t *slot = vi_for_tree->get (t);
2873 if (slot == NULL)
2874 {
2875 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2876 return get_varinfo (id);
2877 }
2878
2879 return *slot;
2880 }
2881
2882 /* Get a scalar constraint expression for a new temporary variable. */
2883
2884 static struct constraint_expr
2885 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2886 {
2887 struct constraint_expr tmp;
2888 varinfo_t vi;
2889
2890 vi = new_var_info (NULL_TREE, name, add_id);
2891 vi->offset = 0;
2892 vi->size = -1;
2893 vi->fullsize = -1;
2894 vi->is_full_var = 1;
2895
2896 tmp.var = vi->id;
2897 tmp.type = SCALAR;
2898 tmp.offset = 0;
2899
2900 return tmp;
2901 }
2902
/* Get a constraint expression vector from an SSA_VAR_P node T, pushing
   the expressions onto RESULTS.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this decl may bind to NULL note that.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Walk the field chain starting at VI's own field.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2975
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  Double-dereference
   and deref-with-offset forms are normalized by splitting through a
   fresh temporary, recursing on the pieces.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Only plain forms remain; address-of must have zero offset.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
3031
3032
3033 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3034 structure. */
3035
3036 static HOST_WIDE_INT
3037 bitpos_of_field (const tree fdecl)
3038 {
3039 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3040 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3041 return -1;
3042
3043 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3044 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3045 }
3046
3047
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET may be
   NULL_TREE or non-constant, in which case the offset is treated as
   unknown.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (offset, SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Conservatively add every field of the variable.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.
	     The addition is unsigned; wrap-around is detected by the
	     sum being greater than the original offset.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* Scalars carry the offset directly; it is resolved when the
	     constraint is solved.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3166
3167
3168 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3169 If address_p is true the result will be taken its address of.
3170 If lhs_p is true then the constraint expression is assumed to be used
3171 as the lhs. */
3172
static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  bool reverse;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  Walk down to the innermost base to detect a literal
     zero there.  */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  /* A constant-zero base means the whole access folds to the special
     INTEGER variable.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);

  /* We can end up here for component references on a
     VIEW_CONVERT_EXPR <>(&foobar) or things like a
     BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>.  So for
     symbolic constants simply give up.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      constraint_expr result;
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
      results->safe_push (result);
      return;
    }

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true, lhs_p);
  /* Strip off nothing_id.  */
  if (results->length () == 2)
    {
      gcc_assert ((*results)[0].var == nothing_id);
      results->unordered_remove (0);
    }
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  /* Push one constraint per subfield overlapping the accessed
	     range; when taking the address a single overlap suffices.  */
	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	    {
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable, "
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      if (bitpos == -1
	  || bitsize != bitmaxsize
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on constants like
	 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i].  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3323
3324
3325 /* Dereference the constraint expression CONS, and return the result.
3326 DEREF (ADDRESSOF) = SCALAR
3327 DEREF (SCALAR) = DEREF
3328 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3329 This is needed so that we can handle dereferencing DEREF constraints. */
3330
3331 static void
3332 do_deref (vec<ce_s> *constraints)
3333 {
3334 struct constraint_expr *c;
3335 unsigned int i = 0;
3336
3337 FOR_EACH_VEC_ELT (*constraints, i, c)
3338 {
3339 if (c->type == SCALAR)
3340 c->type = DEREF;
3341 else if (c->type == ADDRESSOF)
3342 c->type = SCALAR;
3343 else if (c->type == DEREF)
3344 {
3345 struct constraint_expr tmplhs;
3346 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3347 process_constraint (new_constraint (tmplhs, *c));
3348 c->var = tmplhs.var;
3349 }
3350 else
3351 gcc_unreachable ();
3352 }
3353 }
3354
3355 /* Given a tree T, return the constraint expression for taking the
3356 address of it. */
3357
3358 static void
3359 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3360 {
3361 struct constraint_expr *c;
3362 unsigned int i;
3363
3364 get_constraint_for_1 (t, results, true, true);
3365
3366 FOR_EACH_VEC_ELT (*results, i, c)
3367 {
3368 if (c->type == DEREF)
3369 c->type = SCALAR;
3370 else
3371 c->type = ADDRESSOF;
3372 }
3373 }
3374
3375 /* Given a tree T, return the constraint expression for it. */
3376
static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class; unhandled codes fall through to
     the conservative &ANYTHING fallback at the bottom.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a scalar result on a field-decomposed variable add
		 all subfields covered by the access size.  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      /* Collect the union of the constraints of all elements.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3562
3563 /* Given a gimple tree T, return the constraint expression vector for it. */
3564
3565 static void
3566 get_constraint_for (tree t, vec<ce_s> *results)
3567 {
3568 gcc_assert (results->length () == 0);
3569
3570 get_constraint_for_1 (t, results, false, true);
3571 }
3572
3573 /* Given a gimple tree T, return the constraint expression vector for it
3574 to be used as the rhs of a constraint. */
3575
3576 static void
3577 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3578 {
3579 gcc_assert (results->length () == 0);
3580
3581 get_constraint_for_1 (t, results, false, false);
3582 }
3583
3584
3585 /* Efficiently generates constraints from all entries in *RHSC to all
3586 entries in *LHSC. */
3587
3588 static void
3589 process_all_all_constraints (vec<ce_s> lhsc,
3590 vec<ce_s> rhsc)
3591 {
3592 struct constraint_expr *lhsp, *rhsp;
3593 unsigned i, j;
3594
3595 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3596 {
3597 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3598 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3599 process_constraint (new_constraint (*lhsp, *rhsp));
3600 }
3601 else
3602 {
3603 struct constraint_expr tmp;
3604 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3605 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3606 process_constraint (new_constraint (tmp, *rhsp));
3607 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3608 process_constraint (new_constraint (*lhsp, tmp));
3609 }
3610 }
3611
3612 /* Handle aggregate copies by expanding into copies of the respective
3613 fields of the structures. */
3614
static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is an indirect access (or the lhs is completely
     unknown) we cannot match up fields; emit all-to-all constraints
     with unknown offsets instead.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
			       &reverse);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
			       &reverse);
      /* Walk both constraint vectors in lockstep, advancing whichever
	 cursor's current field ends first, and emit a copy constraint
	 for each pair of overlapping fields.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the rhs cursor when its field ends before the lhs
	     field does, otherwise advance the lhs cursor.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3683
3684 /* Create constraints ID = { rhsc }. */
3685
3686 static void
3687 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3688 {
3689 struct constraint_expr *c;
3690 struct constraint_expr includes;
3691 unsigned int j;
3692
3693 includes.var = id;
3694 includes.offset = 0;
3695 includes.type = SCALAR;
3696
3697 FOR_EACH_VEC_ELT (rhsc, j, c)
3698 process_constraint (new_constraint (includes, *c));
3699 }
3700
3701 /* Create a constraint ID = OP. */
3702
3703 static void
3704 make_constraint_to (unsigned id, tree op)
3705 {
3706 auto_vec<ce_s> rhsc;
3707 get_constraint_for_rhs (op, &rhsc);
3708 make_constraints_to (id, rhsc);
3709 }
3710
3711 /* Create a constraint ID = &FROM. */
3712
3713 static void
3714 make_constraint_from (varinfo_t vi, int from)
3715 {
3716 struct constraint_expr lhs, rhs;
3717
3718 lhs.var = vi->id;
3719 lhs.offset = 0;
3720 lhs.type = SCALAR;
3721
3722 rhs.var = from;
3723 rhs.offset = 0;
3724 rhs.type = ADDRESSOF;
3725 process_constraint (new_constraint (lhs, rhs));
3726 }
3727
3728 /* Create a constraint ID = FROM. */
3729
3730 static void
3731 make_copy_constraint (varinfo_t vi, int from)
3732 {
3733 struct constraint_expr lhs, rhs;
3734
3735 lhs.var = vi->id;
3736 lhs.offset = 0;
3737 lhs.type = SCALAR;
3738
3739 rhs.var = from;
3740 rhs.offset = 0;
3741 rhs.type = SCALAR;
3742 process_constraint (new_constraint (lhs, rhs));
3743 }
3744
3745 /* Make constraints necessary to make OP escape. */
3746
3747 static void
3748 make_escape_constraint (tree op)
3749 {
3750 make_constraint_to (escaped_id, op);
3751 }
3752
/* Add constraints so that the solution of VI is transitively closed.  */
3754
3755 static void
3756 make_transitive_closure_constraints (varinfo_t vi)
3757 {
3758 struct constraint_expr lhs, rhs;
3759
3760 /* VAR = *(VAR + UNKNOWN); */
3761 lhs.type = SCALAR;
3762 lhs.var = vi->id;
3763 lhs.offset = 0;
3764 rhs.type = DEREF;
3765 rhs.var = vi->id;
3766 rhs.offset = UNKNOWN_OFFSET;
3767 process_constraint (new_constraint (lhs, rhs));
3768 }
3769
/* Add constraints so that the solution of VI includes all of its
   subvariables.  */
3771
3772 static void
3773 make_any_offset_constraints (varinfo_t vi)
3774 {
3775 struct constraint_expr lhs, rhs;
3776
3777 /* VAR = VAR + UNKNOWN; */
3778 lhs.type = SCALAR;
3779 lhs.var = vi->id;
3780 lhs.offset = 0;
3781 rhs.type = SCALAR;
3782 rhs.var = vi->id;
3783 rhs.offset = UNKNOWN_OFFSET;
3784 process_constraint (new_constraint (lhs, rhs));
3785 }
3786
/* Temporary storage for the fake VAR_DECLs created by
   build_fake_var_decl below.  */
struct obstack fake_var_decl_obstack;
3789
3790 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3791
static tree
build_fake_var_decl (tree type)
{
  /* Allocate from the local obstack rather than GC memory.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): -1 presumably marks "no points-to UID assigned yet"
     — confirm against DECL_PT_UID users.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3804
3805 /* Create a new artificial heap variable with NAME.
3806 Return the created variable. */
3807
3808 static varinfo_t
3809 make_heapvar (const char *name, bool add_id)
3810 {
3811 varinfo_t vi;
3812 tree heapvar;
3813
3814 heapvar = build_fake_var_decl (ptr_type_node);
3815 DECL_EXTERNAL (heapvar) = 1;
3816
3817 vi = new_var_info (heapvar, name, add_id);
3818 vi->is_artificial_var = true;
3819 vi->is_heap_var = true;
3820 vi->is_unknown_size_var = true;
3821 vi->offset = 0;
3822 vi->fullsize = ~0;
3823 vi->size = ~0;
3824 vi->is_full_var = true;
3825 insert_vi_for_tree (heapvar, vi);
3826
3827 return vi;
3828 }
3829
3830 /* Create a new artificial heap variable with NAME and make a
3831 constraint from it to LHS. Set flags according to a tag used
3832 for tracking restrict pointers. */
3833
3834 static varinfo_t
3835 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3836 {
3837 varinfo_t vi = make_heapvar (name, add_id);
3838 vi->is_restrict_var = 1;
3839 vi->is_global_var = 1;
3840 vi->may_have_pointers = 1;
3841 make_constraint_from (lhs, vi->id);
3842 return vi;
3843 }
3844
3845 /* Create a new artificial heap variable with NAME and make a
3846 constraint from it to LHS. Set flags according to a tag used
3847 for tracking restrict pointers and make the artificial heap
3848 point to global memory. */
3849
3850 static varinfo_t
3851 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3852 bool add_id)
3853 {
3854 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3855 make_copy_constraint (vi, nonlocal_id);
3856 return vi;
3857 }
3858
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3867
3868 /* Get a constraint for the requested part of a function designator FI
3869 when operating in IPA mode. */
3870
3871 static struct constraint_expr
3872 get_function_part_constraint (varinfo_t fi, unsigned part)
3873 {
3874 struct constraint_expr c;
3875
3876 gcc_assert (in_ipa_mode);
3877
3878 if (fi->id == anything_id)
3879 {
3880 /* ??? We probably should have a ANYFN special variable. */
3881 c.var = anything_id;
3882 c.offset = 0;
3883 c.type = SCALAR;
3884 }
3885 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3886 {
3887 varinfo_t ai = first_vi_for_offset (fi, part);
3888 if (ai)
3889 c.var = ai->id;
3890 else
3891 c.var = anything_id;
3892 c.offset = 0;
3893 c.type = SCALAR;
3894 }
3895 else
3896 {
3897 c.var = fi->id;
3898 c.offset = part;
3899 c.type = DEREF;
3900 }
3901
3902 return c;
3903 }
3904
3905 /* For non-IPA mode, generate constraints necessary for a call on the
3906 RHS. */
3907
static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  /* Process each argument according to its escape/clobber flags.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  /* Neither clobbered nor escaping: only reachable memory is
	     added to the call's uses.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Non-escaping but possibly clobbered: add to both the
	     uses and clobbers of the call.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* Without any flags the argument fully escapes.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = UNKNOWN_OFFSET;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      auto_vec<ce_s> tmpc;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4003
4004 /* For non-IPA mode, generate constraints necessary for a call
4005 that returns a pointer and assigns it to LHS. This simply makes
4006 the LHS point to global and escaped variables. */
4007
static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return gets a fresh heap variable to point to.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    /* Otherwise use the rhs constraints provided by the caller.  */
    process_all_all_constraints (lhsc, rhsc);
}
4067
4068 /* For non-IPA mode, generate constraints necessary for a call of a
4069 const function that returns a pointer in the statement STMT. */
4070
static void
handle_const_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned int k;
  bool need_uses = false;

  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
  if (gimple_call_chain (stmt))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      need_uses = true;
    }

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
      need_uses = true;
    }

  /* Close the uses solution and record it as a possible return value.  */
  if (need_uses)
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_any_offset_constraints (uses);
      make_transitive_closure_constraints (uses);
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* May return offsetted arguments.  */
  varinfo_t tem = NULL;
  if (gimple_call_num_args (stmt) != 0)
    tem = new_var_info (NULL_TREE, "callarg", true);
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
      auto_vec<ce_s> argc;
      get_constraint_for_rhs (arg, &argc);
      make_constraints_to (tem->id, argc);
    }
  if (tem)
    {
      ce_s ce;
      ce.type = SCALAR;
      ce.var = tem->id;
      ce.offset = UNKNOWN_OFFSET;
      results->safe_push (ce);
    }

  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  results->safe_push (rhsc);
}
4136
4137 /* For non-IPA mode, generate constraints necessary for a call to a
4138 pure function in statement STMT. */
4139
static void
handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Lazily created: only materialize the call-use varinfo once we see
     something that feeds into it.  */
  varinfo_t uses = NULL;

  /* Memory reached from pointer arguments is call-used.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, arg);
    }

  /* The static chain is used as well.  */
  if (gimple_call_chain (stmt))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
    }

  /* And if we applied NRV the address of the return slot.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
    }

  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
    {
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4201
4202
4203 /* Return the varinfo for the callee of CALL. */
4204
4205 static varinfo_t
4206 get_fi_for_callee (gcall *call)
4207 {
4208 tree decl, fn = gimple_call_fn (call);
4209
4210 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4211 fn = OBJ_TYPE_REF_EXPR (fn);
4212
4213 /* If we can directly resolve the function being called, do so.
4214 Otherwise, it must be some sort of indirect expression that
4215 we should still be able to handle. */
4216 decl = gimple_call_addr_fndecl (fn);
4217 if (decl)
4218 return get_vi_for_tree (decl);
4219
4220 /* If the function is anything other than a SSA name pointer we have no
4221 clue and should be getting ANYFN (well, ANYTHING for now). */
4222 if (!fn || TREE_CODE (fn) != SSA_NAME)
4223 return get_varinfo (anything_id);
4224
4225 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4226 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4227 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4228 fn = SSA_NAME_VAR (fn);
4229
4230 return get_vi_for_tree (fn);
4231 }
4232
4233 /* Create constraints for assigning call argument ARG to the incoming parameter
4234 INDEX of function FI. */
4235
4236 static void
4237 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4238 {
4239 struct constraint_expr lhs;
4240 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4241
4242 auto_vec<ce_s, 2> rhsc;
4243 get_constraint_for_rhs (arg, &rhsc);
4244
4245 unsigned j;
4246 struct constraint_expr *rhsp;
4247 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4248 process_constraint (new_constraint (lhs, *rhsp));
4249 }
4250
4251 /* Return true if FNDECL may be part of another lto partition. */
4252
4253 static bool
4254 fndecl_maybe_in_other_partition (tree fndecl)
4255 {
4256 cgraph_node *fn_node = cgraph_node::get (fndecl);
4257 if (fn_node == NULL)
4258 return true;
4259
4260 return fn_node->in_other_partition;
4261 }
4262
/* Create constraints for the builtin call T.  Return true if the call
   was handled, otherwise false (in which case the caller falls back to
   generic call handling).  FN is the function containing T.  */

static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ???  All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy (src, dest, n) has its two pointer arguments swapped
	     relative to memcpy and friends, hence the ?: selection.  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The *pcpy/stp* variants return a pointer into the
		 destination rather than the destination itself, so model
		 the result as DEST plus an unknown offset.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	      rhsc.truncate (0);
	    }
	  /* *DEST = *SRC for the copied memory itself.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  if (res != NULL_TREE)
	    {
	      /* The result aliases the destination.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset with zero makes the destination memory point to
	     nothing (when NULL dereference is undefined); otherwise
	     treat the stored bytes as an unknown non-pointer value.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	      process_constraint (new_constraint (*lhsp, ac));
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign (&ptr, align, size) stores a fresh heap
	     pointer through its first argument.  */
	  tree ptrptr = gimple_call_arg (t, 0);
	  get_constraint_for (ptrptr, &lhsc);
	  do_deref (&lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result is just a copy of the first argument.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (t))
	  {
	    /* The result is a fresh (noalias) allocation ...  */
	    handle_lhs_call (t, gimple_call_lhs (t),
			     gimple_call_return_flags (t) | ERF_NOALIAS,
			     vNULL, fndecl);
	    /* ... whose pointed-to memory receives what the argument's
	       pointed-to memory points to.  */
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (0);
	    rhsc.truncate (0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (gimple_call_lhs (t), &lhsc);
		get_constraint_for (gimple_call_arg (t, 0), &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (t))
	  {
	    tree src = gimple_call_arg (t, 0);
	    get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	    /* Additionally include NULL as a possible result.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (nul);
	    get_constraint_for (gimple_call_lhs (t), &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  make_constraint_to (uses->id, gimple_call_arg (t, 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function's static chain receives FRAME.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      /* The result points to what the trampoline memory,
		 set up by INIT_TRAMPOLINE above, points to.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A TM store is *ADDR = SRC.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A TM load is DEST = *ADDR.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (fn->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (fn->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      /* Conservatively make the function result point to
		 anything.  */
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      /* Model the outlined-body invocation by assigning the
		 data argument to its first formal parameter.  */
	      tree arg = gimple_call_arg (t, argpos);

	      varinfo_t fi = get_vi_for_tree (fndecl);
	      find_func_aliases_for_call_arg (fi, 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4694
/* Create constraints for the call T.  FN is the function containing T.
   Builtins with special semantics are delegated to
   find_func_aliases_for_builtin_call; otherwise calls are handled either
   generically via the call's flags or, in IPA mode with function info
   for the callee, by wiring arguments/result to the callee's
   function-info subvariables.  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;

  if (fndecl != NULL_TREE
      && DECL_BUILT_IN (fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  fi = get_fi_for_callee (t);
  /* Use generic handling when not in IPA mode or when the known callee
     has no function info (e.g. external functions).  */
  if (!in_ipa_mode
      || (fndecl && !fi->is_fn_info))
    {
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t),
			 gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with function info: connect this call site to the
	 callee's parameter/result subvariables directly.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  tree arg = gimple_call_arg (t, j);
	  find_func_aliases_for_call_arg (fi, j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  if (aggr_p)
	    {
	      /* For aggregate returns the callee's result slot holds a
		 pointer to the storage, so dereference it.  */
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (rhs);
	      do_deref (&tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4796
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.
   FN is the function containing T.  Handles PHIs, calls, assignments,
   returns and asms; all other statements generate no constraints.  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  /* NOTE(review): rhsc is drained while processing the first
	     lhsc element, so later lhsc elements would see it empty;
	     presumably a phi result yields a single constraint here —
	     verify if lhsc can ever have more than one element.  */
	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (fn, as_a <gcall *> (t));

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* rhsop is only set for single-rhs assignments.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  /* Conversions that do not turn a non-pointer into a pointer,
	     and plain single-rhs copies, just propagate constraints.  */
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (rhsop);
	}
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (t);
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (fn->decl)))
	make_escape_constraint (gimple_return_retval (return_stmt));
      else if (in_ipa_mode)
	{
	  /* In IPA mode assign the returned value to this function's
	     result subvariable instead of letting it escape.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5010
5011
5012 /* Create a constraint adding to the clobber set of FI the memory
5013 pointed to by PTR. */
5014
5015 static void
5016 process_ipa_clobber (varinfo_t fi, tree ptr)
5017 {
5018 vec<ce_s> ptrc = vNULL;
5019 struct constraint_expr *c, lhs;
5020 unsigned i;
5021 get_constraint_for_rhs (ptr, &ptrc);
5022 lhs = get_function_part_constraint (fi, fi_clobbers);
5023 FOR_EACH_VEC_ELT (ptrc, i, c)
5024 process_constraint (new_constraint (lhs, *c));
5025 ptrc.release ();
5026 }
5027
5028 /* Walk statement T setting up clobber and use constraints according to the
5029 references found in T. This function is a main part of the
5030 IPA constraint builder. */
5031
5032 static void
5033 find_func_clobbers (struct function *fn, gimple *origt)
5034 {
5035 gimple *t = origt;
5036 auto_vec<ce_s, 16> lhsc;
5037 auto_vec<ce_s, 16> rhsc;
5038 varinfo_t fi;
5039
5040 /* Add constraints for clobbered/used in IPA mode.
5041 We are not interested in what automatic variables are clobbered
5042 or used as we only use the information in the caller to which
5043 they do not escape. */
5044 gcc_assert (in_ipa_mode);
5045
5046 /* If the stmt refers to memory in any way it better had a VUSE. */
5047 if (gimple_vuse (t) == NULL_TREE)
5048 return;
5049
5050 /* We'd better have function information for the current function. */
5051 fi = lookup_vi_for_tree (fn->decl);
5052 gcc_assert (fi != NULL);
5053
5054 /* Account for stores in assignments and calls. */
5055 if (gimple_vdef (t) != NULL_TREE
5056 && gimple_has_lhs (t))
5057 {
5058 tree lhs = gimple_get_lhs (t);
5059 tree tem = lhs;
5060 while (handled_component_p (tem))
5061 tem = TREE_OPERAND (tem, 0);
5062 if ((DECL_P (tem)
5063 && !auto_var_in_fn_p (tem, fn->decl))
5064 || INDIRECT_REF_P (tem)
5065 || (TREE_CODE (tem) == MEM_REF
5066 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5067 && auto_var_in_fn_p
5068 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5069 {
5070 struct constraint_expr lhsc, *rhsp;
5071 unsigned i;
5072 lhsc = get_function_part_constraint (fi, fi_clobbers);
5073 get_constraint_for_address_of (lhs, &rhsc);
5074 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5075 process_constraint (new_constraint (lhsc, *rhsp));
5076 rhsc.truncate (0);
5077 }
5078 }
5079
5080 /* Account for uses in assigments and returns. */
5081 if (gimple_assign_single_p (t)
5082 || (gimple_code (t) == GIMPLE_RETURN
5083 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5084 {
5085 tree rhs = (gimple_assign_single_p (t)
5086 ? gimple_assign_rhs1 (t)
5087 : gimple_return_retval (as_a <greturn *> (t)));
5088 tree tem = rhs;
5089 while (handled_component_p (tem))
5090 tem = TREE_OPERAND (tem, 0);
5091 if ((DECL_P (tem)
5092 && !auto_var_in_fn_p (tem, fn->decl))
5093 || INDIRECT_REF_P (tem)
5094 || (TREE_CODE (tem) == MEM_REF
5095 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5096 && auto_var_in_fn_p
5097 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5098 {
5099 struct constraint_expr lhs, *rhsp;
5100 unsigned i;
5101 lhs = get_function_part_constraint (fi, fi_uses);
5102 get_constraint_for_address_of (rhs, &rhsc);
5103 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5104 process_constraint (new_constraint (lhs, *rhsp));
5105 rhsc.truncate (0);
5106 }
5107 }
5108
5109 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5110 {
5111 varinfo_t cfi = NULL;
5112 tree decl = gimple_call_fndecl (t);
5113 struct constraint_expr lhs, rhs;
5114 unsigned i, j;
5115
5116 /* For builtins we do not have separate function info. For those
5117 we do not generate escapes for we have to generate clobbers/uses. */
5118 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5119 switch (DECL_FUNCTION_CODE (decl))
5120 {
5121 /* The following functions use and clobber memory pointed to
5122 by their arguments. */
5123 case BUILT_IN_STRCPY:
5124 case BUILT_IN_STRNCPY:
5125 case BUILT_IN_BCOPY:
5126 case BUILT_IN_MEMCPY:
5127 case BUILT_IN_MEMMOVE:
5128 case BUILT_IN_MEMPCPY:
5129 case BUILT_IN_STPCPY:
5130 case BUILT_IN_STPNCPY:
5131 case BUILT_IN_STRCAT:
5132 case BUILT_IN_STRNCAT:
5133 case BUILT_IN_STRCPY_CHK:
5134 case BUILT_IN_STRNCPY_CHK:
5135 case BUILT_IN_MEMCPY_CHK:
5136 case BUILT_IN_MEMMOVE_CHK:
5137 case BUILT_IN_MEMPCPY_CHK:
5138 case BUILT_IN_STPCPY_CHK:
5139 case BUILT_IN_STPNCPY_CHK:
5140 case BUILT_IN_STRCAT_CHK:
5141 case BUILT_IN_STRNCAT_CHK:
5142 {
5143 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5144 == BUILT_IN_BCOPY ? 1 : 0));
5145 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5146 == BUILT_IN_BCOPY ? 0 : 1));
5147 unsigned i;
5148 struct constraint_expr *rhsp, *lhsp;
5149 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5150 lhs = get_function_part_constraint (fi, fi_clobbers);
5151 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5152 process_constraint (new_constraint (lhs, *lhsp));
5153 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5154 lhs = get_function_part_constraint (fi, fi_uses);
5155 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5156 process_constraint (new_constraint (lhs, *rhsp));
5157 return;
5158 }
5159 /* The following function clobbers memory pointed to by
5160 its argument. */
5161 case BUILT_IN_MEMSET:
5162 case BUILT_IN_MEMSET_CHK:
5163 case BUILT_IN_POSIX_MEMALIGN:
5164 {
5165 tree dest = gimple_call_arg (t, 0);
5166 unsigned i;
5167 ce_s *lhsp;
5168 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5169 lhs = get_function_part_constraint (fi, fi_clobbers);
5170 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5171 process_constraint (new_constraint (lhs, *lhsp));
5172 return;
5173 }
5174 /* The following functions clobber their second and third
5175 arguments. */
5176 case BUILT_IN_SINCOS:
5177 case BUILT_IN_SINCOSF:
5178 case BUILT_IN_SINCOSL:
5179 {
5180 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5181 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5182 return;
5183 }
5184 /* The following functions clobber their second argument. */
5185 case BUILT_IN_FREXP:
5186 case BUILT_IN_FREXPF:
5187 case BUILT_IN_FREXPL:
5188 case BUILT_IN_LGAMMA_R:
5189 case BUILT_IN_LGAMMAF_R:
5190 case BUILT_IN_LGAMMAL_R:
5191 case BUILT_IN_GAMMA_R:
5192 case BUILT_IN_GAMMAF_R:
5193 case BUILT_IN_GAMMAL_R:
5194 case BUILT_IN_MODF:
5195 case BUILT_IN_MODFF:
5196 case BUILT_IN_MODFL:
5197 {
5198 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5199 return;
5200 }
5201 /* The following functions clobber their third argument. */
5202 case BUILT_IN_REMQUO:
5203 case BUILT_IN_REMQUOF:
5204 case BUILT_IN_REMQUOL:
5205 {
5206 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5207 return;
5208 }
5209 /* The following functions neither read nor clobber memory. */
5210 case BUILT_IN_ASSUME_ALIGNED:
5211 case BUILT_IN_FREE:
5212 return;
5213 /* Trampolines are of no interest to us. */
5214 case BUILT_IN_INIT_TRAMPOLINE:
5215 case BUILT_IN_ADJUST_TRAMPOLINE:
5216 return;
5217 case BUILT_IN_VA_START:
5218 case BUILT_IN_VA_END:
5219 return;
5220 case BUILT_IN_GOMP_PARALLEL:
5221 case BUILT_IN_GOACC_PARALLEL:
5222 {
5223 unsigned int fnpos, argpos;
5224 unsigned int implicit_use_args[2];
5225 unsigned int num_implicit_use_args = 0;
5226 switch (DECL_FUNCTION_CODE (decl))
5227 {
5228 case BUILT_IN_GOMP_PARALLEL:
5229 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5230 fnpos = 0;
5231 argpos = 1;
5232 break;
5233 case BUILT_IN_GOACC_PARALLEL:
5234 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
5235 sizes, kinds, ...). */
5236 fnpos = 1;
5237 argpos = 3;
5238 implicit_use_args[num_implicit_use_args++] = 4;
5239 implicit_use_args[num_implicit_use_args++] = 5;
5240 break;
5241 default:
5242 gcc_unreachable ();
5243 }
5244
5245 tree fnarg = gimple_call_arg (t, fnpos);
5246 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5247 tree fndecl = TREE_OPERAND (fnarg, 0);
5248 if (fndecl_maybe_in_other_partition (fndecl))
5249 /* Fallthru to general call handling. */
5250 break;
5251
5252 varinfo_t cfi = get_vi_for_tree (fndecl);
5253
5254 tree arg = gimple_call_arg (t, argpos);
5255
5256 /* Parameter passed by value is used. */
5257 lhs = get_function_part_constraint (fi, fi_uses);
5258 struct constraint_expr *rhsp;
5259 get_constraint_for (arg, &rhsc);
5260 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5261 process_constraint (new_constraint (lhs, *rhsp));
5262 rhsc.truncate (0);
5263
5264 /* Handle parameters used by the call, but not used in cfi, as
5265 implicitly used by cfi. */
5266 lhs = get_function_part_constraint (cfi, fi_uses);
5267 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5268 {
5269 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5270 get_constraint_for (arg, &rhsc);
5271 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5272 process_constraint (new_constraint (lhs, *rhsp));
5273 rhsc.truncate (0);
5274 }
5275
5276 /* The caller clobbers what the callee does. */
5277 lhs = get_function_part_constraint (fi, fi_clobbers);
5278 rhs = get_function_part_constraint (cfi, fi_clobbers);
5279 process_constraint (new_constraint (lhs, rhs));
5280
5281 /* The caller uses what the callee does. */
5282 lhs = get_function_part_constraint (fi, fi_uses);
5283 rhs = get_function_part_constraint (cfi, fi_uses);
5284 process_constraint (new_constraint (lhs, rhs));
5285
5286 return;
5287 }
5288 /* printf-style functions may have hooks to set pointers to
5289 point to somewhere into the generated string. Leave them
5290 for a later exercise... */
5291 default:
5292 /* Fallthru to general call handling. */;
5293 }
5294
5295 /* Parameters passed by value are used. */
5296 lhs = get_function_part_constraint (fi, fi_uses);
5297 for (i = 0; i < gimple_call_num_args (t); i++)
5298 {
5299 struct constraint_expr *rhsp;
5300 tree arg = gimple_call_arg (t, i);
5301
5302 if (TREE_CODE (arg) == SSA_NAME
5303 || is_gimple_min_invariant (arg))
5304 continue;
5305
5306 get_constraint_for_address_of (arg, &rhsc);
5307 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5308 process_constraint (new_constraint (lhs, *rhsp));
5309 rhsc.truncate (0);
5310 }
5311
5312 /* Build constraints for propagating clobbers/uses along the
5313 callgraph edges. */
5314 cfi = get_fi_for_callee (call_stmt);
5315 if (cfi->id == anything_id)
5316 {
5317 if (gimple_vdef (t))
5318 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5319 anything_id);
5320 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5321 anything_id);
5322 return;
5323 }
5324
5325 /* For callees without function info (that's external functions),
5326 ESCAPED is clobbered and used. */
5327 if (gimple_call_fndecl (t)
5328 && !cfi->is_fn_info)
5329 {
5330 varinfo_t vi;
5331
5332 if (gimple_vdef (t))
5333 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5334 escaped_id);
5335 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5336
5337 /* Also honor the call statement use/clobber info. */
5338 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5339 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5340 vi->id);
5341 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5342 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5343 vi->id);
5344 return;
5345 }
5346
5347 /* Otherwise the caller clobbers and uses what the callee does.
5348 ??? This should use a new complex constraint that filters
5349 local variables of the callee. */
5350 if (gimple_vdef (t))
5351 {
5352 lhs = get_function_part_constraint (fi, fi_clobbers);
5353 rhs = get_function_part_constraint (cfi, fi_clobbers);
5354 process_constraint (new_constraint (lhs, rhs));
5355 }
5356 lhs = get_function_part_constraint (fi, fi_uses);
5357 rhs = get_function_part_constraint (cfi, fi_uses);
5358 process_constraint (new_constraint (lhs, rhs));
5359 }
5360 else if (gimple_code (t) == GIMPLE_ASM)
5361 {
5362 /* ??? Ick. We can do better. */
5363 if (gimple_vdef (t))
5364 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5365 anything_id);
5366 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5367 anything_id);
5368 }
5369 }
5370
5371
5372 /* Find the first varinfo in the same variable as START that overlaps with
5373 OFFSET. Return NULL if we can't find one. */
5374
5375 static varinfo_t
5376 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5377 {
5378 /* If the offset is outside of the variable, bail out. */
5379 if (offset >= start->fullsize)
5380 return NULL;
5381
5382 /* If we cannot reach offset from start, lookup the first field
5383 and start from there. */
5384 if (start->offset > offset)
5385 start = get_varinfo (start->head);
5386
5387 while (start)
5388 {
5389 /* We may not find a variable in the field list with the actual
5390 offset when we have glommed a structure to a variable.
5391 In that case, however, offset should still be within the size
5392 of the variable. */
5393 if (offset >= start->offset
5394 && (offset - start->offset) < start->size)
5395 return start;
5396
5397 start = vi_next (start);
5398 }
5399
5400 return NULL;
5401 }
5402
5403 /* Find the first varinfo in the same variable as START that overlaps with
5404 OFFSET. If there is no such varinfo the varinfo directly preceding
5405 OFFSET is returned. */
5406
5407 static varinfo_t
5408 first_or_preceding_vi_for_offset (varinfo_t start,
5409 unsigned HOST_WIDE_INT offset)
5410 {
5411 /* If we cannot reach offset from start, lookup the first field
5412 and start from there. */
5413 if (start->offset > offset)
5414 start = get_varinfo (start->head);
5415
5416 /* We may not find a variable in the field list with the actual
5417 offset when we have glommed a structure to a variable.
5418 In that case, however, offset should still be within the size
5419 of the variable.
5420 If we got beyond the offset we look for return the field
5421 directly preceding offset which may be the last field. */
5422 while (start->next
5423 && offset >= start->offset
5424 && !((offset - start->offset) < start->size))
5425 start = vi_next (start);
5426
5427 return start;
5428 }
5429
5430
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  Set to -1 (all ones) when the size
     is not known.  */
  unsigned HOST_WIDE_INT size;

  /* True if DECL_SIZE of the field was absent or did not fit an
     unsigned HOST_WIDE_INT; SIZE is then meaningless.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type must contain pointers
     (see type_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict qualified pointer of known size.  */
  unsigned only_restrict_pointers : 1;

  /* For restrict qualified pointers, the pointed-to type.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5455
5456
5457 /* qsort comparison function for two fieldoff's PA and PB */
5458
5459 static int
5460 fieldoff_compare (const void *pa, const void *pb)
5461 {
5462 const fieldoff_s *foa = (const fieldoff_s *)pa;
5463 const fieldoff_s *fob = (const fieldoff_s *)pb;
5464 unsigned HOST_WIDE_INT foasize, fobsize;
5465
5466 if (foa->offset < fob->offset)
5467 return -1;
5468 else if (foa->offset > fob->offset)
5469 return 1;
5470
5471 foasize = foa->size;
5472 fobsize = fob->size;
5473 if (foasize < fobsize)
5474 return -1;
5475 else if (foasize > fobsize)
5476 return 1;
5477 return 0;
5478 }
5479
/* Sort a fieldstack according to the field offset and sizes, using
   fieldoff_compare as the ordering.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5486
5487 /* Return true if T is a type that can have subvars. */
5488
5489 static inline bool
5490 type_can_have_subvars (const_tree t)
5491 {
5492 /* Aggregates without overlapping fields can have subvars. */
5493 return TREE_CODE (t) == RECORD_TYPE;
5494 }
5495
5496 /* Return true if V is a tree that we can have subvars for.
5497 Normally, this is any aggregate type. Also complex
5498 types which are not gimple registers can have subvars. */
5499
5500 static inline bool
5501 var_can_have_subvars (const_tree v)
5502 {
5503 /* Volatile variables should never have subvars. */
5504 if (TREE_THIS_VOLATILE (v))
5505 return false;
5506
5507 /* Non decls or memory tags can never have subvars. */
5508 if (!DECL_P (v))
5509 return false;
5510
5511 return type_can_have_subvars (TREE_TYPE (v));
5512 }
5513
5514 /* Return true if T is a type that does contain pointers. */
5515
5516 static bool
5517 type_must_have_pointers (tree type)
5518 {
5519 if (POINTER_TYPE_P (type))
5520 return true;
5521
5522 if (TREE_CODE (type) == ARRAY_TYPE)
5523 return type_must_have_pointers (TREE_TYPE (type));
5524
5525 /* A function or method can have pointers as arguments, so track
5526 those separately. */
5527 if (TREE_CODE (type) == FUNCTION_TYPE
5528 || TREE_CODE (type) == METHOD_TYPE)
5529 return true;
5530
5531 return false;
5532 }
5533
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5539
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs are decomposed into fields.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);
	tree field_type = TREE_TYPE (field);

	/* Fields we cannot or do not want to decompose further
	   (non-subvar-capable decls, unions) are pushed as a whole.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (field_type, fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e
		  = {0, offset + foff, false, false, true, false, NULL_TREE};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Grow the previous entry to cover this field, too.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		/* Otherwise push a fresh entry for this field.  */
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5644
5645 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5646 if it is a varargs function. */
5647
5648 static unsigned int
5649 count_num_arguments (tree decl, bool *is_varargs)
5650 {
5651 unsigned int num = 0;
5652 tree t;
5653
5654 /* Capture named arguments for K&R functions. They do not
5655 have a prototype and thus no TYPE_ARG_TYPES. */
5656 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5657 ++num;
5658
5659 /* Check if the function has variadic arguments. */
5660 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5661 if (TREE_VALUE (t) == void_type_node)
5662 break;
5663 if (!t)
5664 *is_varargs = true;
5665
5666 return num;
5667 }
5668
/* Create function info for DECL, using NAME, and return the varinfo
   we've created for the function.  ADD_ID is forwarded to
   new_var_info.  If NONLOCAL_P, create initial constraints from
   NONLOCAL for the parts of the function reachable from outside
   (static chain, by-reference result, incoming arguments).  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  The function is modeled as one variable
     whose sub-fields at the fi_* offsets represent the clobber set,
     the use set, the static chain, the result and the parameters.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    tempname = xasprintf ("%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    clobbervi = new_var_info (NULL, newname, false);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;

    /* Chain the sub-fields in increasing offset order.  */
    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi->id;
    prev_vi = clobbervi;

    tempname = xasprintf ("%s.use", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    usevi = new_var_info (NULL, newname, false);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;

    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi->id;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      /* An incoming static chain may point to anything nonlocal.  */
      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A result passed by reference is written to from outside.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      /* Incoming arguments may point to anything nonlocal.  */
      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5885
5886
5887 /* Return true if FIELDSTACK contains fields that overlap.
5888 FIELDSTACK is assumed to be sorted by offset. */
5889
5890 static bool
5891 check_for_overlaps (vec<fieldoff_s> fieldstack)
5892 {
5893 fieldoff_s *fo = NULL;
5894 unsigned int i;
5895 HOST_WIDE_INT lastoffset = -1;
5896
5897 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5898 {
5899 if (fo->offset == lastoffset)
5900 return true;
5901 lastoffset = fo->offset;
5902 }
5903 return false;
5904 }
5905
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
   HANDLED_STRUCT_TYPE is used to register struct types reached by following
   restrict pointers.  This is needed to prevent infinite recursion.
   ADD_ID is forwarded to new_var_info.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown size get a single conservative varinfo.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Reject the decomposition if any field has unknown size or a
	 negative offset.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && TYPE_RESTRICT (decl_type))
	vi->only_restrict_pointers = 1;
      /* For a restrict qualified pointer parameter create a dummy
	 heap variable it is assumed to point to.  HANDLED_STRUCT_TYPE
	 guards against unbounded recursion through the pointed-to
	 type.  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained via ->next.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      if (dump_file)
	{
	  if (fieldstack.length () != 1)
	    {
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* As above, create dummy heap variables for restrict qualified
	 pointer fields of parameters.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      /* Chain up a varinfo for the next field.  */
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
6081
/* Create varinfo(s) for DECL with NAME, record them in the
   tree-to-varinfo map and generate initial constraints for global
   variables.  ADD_ID is forwarded to new_var_info.  Returns the id of
   the main varinfo created for DECL.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      /* The variable may point to each decl referenced by the
		 initializer.  */
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
6159
6160 /* Print out the points-to solution for VAR to FILE. */
6161
6162 static void
6163 dump_solution_for_var (FILE *file, unsigned int var)
6164 {
6165 varinfo_t vi = get_varinfo (var);
6166 unsigned int i;
6167 bitmap_iterator bi;
6168
6169 /* Dump the solution for unified vars anyway, this avoids difficulties
6170 in scanning dumps in the testsuite. */
6171 fprintf (file, "%s = { ", vi->name);
6172 vi = get_varinfo (find (var));
6173 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6174 fprintf (file, "%s ", get_varinfo (i)->name);
6175 fprintf (file, "}");
6176
6177 /* But note when the variable was unified. */
6178 if (vi->id != var)
6179 fprintf (file, " same as %s", vi->name);
6180
6181 fprintf (file, "\n");
6182 }
6183
/* Print the points-to solution for VAR to stderr.  Intended to be
   invoked from the debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
6191
6192 /* Register the constraints for function parameter related VI. */
6193
6194 static void
6195 make_param_constraints (varinfo_t vi)
6196 {
6197 for (; vi; vi = vi_next (vi))
6198 {
6199 if (vi->only_restrict_pointers)
6200 ;
6201 else if (vi->may_have_pointers)
6202 make_constraint_from (vi, nonlocal_id);
6203
6204 if (vi->is_full_var)
6205 break;
6206 }
6207 }
6208
6209 /* Create varinfo structures for all of the variables in the
6210 function for intraprocedural mode. */
6211
6212 static void
6213 intra_create_variable_infos (struct function *fn)
6214 {
6215 tree t;
6216 bitmap handled_struct_type = NULL;
6217
6218 /* For each incoming pointer argument arg, create the constraint ARG
6219 = NONLOCAL or a dummy variable if it is a restrict qualified
6220 passed-by-reference argument. */
6221 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6222 {
6223 if (handled_struct_type == NULL)
6224 handled_struct_type = BITMAP_ALLOC (NULL);
6225
6226 varinfo_t p
6227 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6228 handled_struct_type);
6229 insert_vi_for_tree (t, p);
6230
6231 make_param_constraints (p);
6232 }
6233
6234 if (handled_struct_type != NULL)
6235 BITMAP_FREE (handled_struct_type);
6236
6237 /* Add a constraint for a result decl that is passed by reference. */
6238 if (DECL_RESULT (fn->decl)
6239 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6240 {
6241 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6242
6243 for (p = result_vi; p; p = vi_next (p))
6244 make_constraint_from (p, nonlocal_id);
6245 }
6246
6247 /* Add a constraint for the incoming static chain parameter. */
6248 if (fn->static_chain_decl != NULL_TREE)
6249 {
6250 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6251
6252 for (p = chain_vi; p; p = vi_next (p))
6253 make_constraint_from (p, nonlocal_id);
6254 }
6255 }
6256
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to bitmap.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS, computed with bitmap_hash.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6266
/* Shared_bitmap hashtable helpers.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};

/* Hash function for a shared_bitmap_info_t.  Simply returns the hash
   value cached in the entry.  */

inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  Two entries are
   equal if their points-to bitmaps contain the same bits.  */

inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
6292
/* Shared_bitmap hashtable, mapping a points-to bitmap to the
   canonical instance shared among variables.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6296
6297 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6298 existing instance if there is one, NULL otherwise. */
6299
6300 static bitmap
6301 shared_bitmap_lookup (bitmap pt_vars)
6302 {
6303 shared_bitmap_info **slot;
6304 struct shared_bitmap_info sbi;
6305
6306 sbi.pt_vars = pt_vars;
6307 sbi.hashcode = bitmap_hash (pt_vars);
6308
6309 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6310 if (!slot)
6311 return NULL;
6312 else
6313 return (*slot)->pt_vars;
6314 }
6315
6316
6317 /* Add a bitmap to the shared bitmap hashtable. */
6318
6319 static void
6320 shared_bitmap_add (bitmap pt_vars)
6321 {
6322 shared_bitmap_info **slot;
6323 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6324
6325 sbi->pt_vars = pt_vars;
6326 sbi->hashcode = bitmap_hash (pt_vars);
6327
6328 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6329 gcc_assert (!*slot);
6330 *slot = sbi;
6331 }
6332
6333
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, updating the flags of PT to describe the contents of the set
   (escaped, restrict, nonlocal, interposable variables).  FNDECL, if
   non-NULL, is the function the set is computed for and is consulted
   in IPA mode to detect variables that are not automatic within it.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      /* Note whether the set contains variables that also escaped.  */
      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap = vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6413
6414
/* Compute and return the points-to solution for the variable ORIG_VI,
   caching the result in FINAL_SOLUTIONS.  FNDECL, if non-NULL, is the
   function the solution is computed for.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An equal bitmap already exists; share it and drop ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6498
/* Given a pointer variable P, fill in its SSA_NAME_PTR_INFO points-to
   set.  FNDECL is the function P belongs to.  */

static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  /* Remember the nonnull property computed by VRP before we clobber
     the pointer info below.  */
  bool nonnull = get_ptr_nonnull (p);

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set the points-to-NULL flag to true.  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull computed by VRP.  See get_ptr_nonnull
     in gcc/tree-ssaname.c for more information.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
6530
6531
/* Query statistics for points-to solutions.  Counts how often the
   pt_solution_includes and pt_solutions_intersect oracles could
   disambiguate a query versus answering "may alias".  */

static struct {
  /* pt_solution_includes queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* pt_solution_includes queries that were disambiguated.  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* pt_solutions_intersect queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* pt_solutions_intersect queries that were disambiguated.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6540
6541 void
6542 dump_pta_stats (FILE *s)
6543 {
6544 fprintf (s, "\nPTA query stats:\n");
6545 fprintf (s, " pt_solution_includes: "
6546 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6547 HOST_WIDE_INT_PRINT_DEC" queries\n",
6548 pta_stats.pt_solution_includes_no_alias,
6549 pta_stats.pt_solution_includes_no_alias
6550 + pta_stats.pt_solution_includes_may_alias);
6551 fprintf (s, " pt_solutions_intersect: "
6552 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6553 HOST_WIDE_INT_PRINT_DEC" queries\n",
6554 pta_stats.pt_solutions_intersect_no_alias,
6555 pta_stats.pt_solutions_intersect_no_alias
6556 + pta_stats.pt_solutions_intersect_may_alias);
6557 }
6558
6559
6560 /* Reset the points-to solution *PT to a conservative default
6561 (point to anything). */
6562
6563 void
6564 pt_solution_reset (struct pt_solution *pt)
6565 {
6566 memset (pt, 0, sizeof (struct pt_solution));
6567 pt->anything = true;
6568 pt->null = true;
6569 }
6570
/* Set the points-to solution *PT to point only to the variables in
   VARS.  VARS_CONTAINS_NONLOCAL specifies whether that set contains
   global variables.  Whether the set contains escaped variables is
   computed from the current function's ESCAPED solution.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  /* VARS contains escaped variables if everything escaped or if it
     has a member in common with the ESCAPED solution.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6587
6588 /* Set the points-to solution *PT to point only to the variable VAR. */
6589
6590 void
6591 pt_solution_set_var (struct pt_solution *pt, tree var)
6592 {
6593 memset (pt, 0, sizeof (struct pt_solution));
6594 pt->vars = BITMAP_GGC_ALLOC ();
6595 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6596 pt->vars_contains_nonlocal = is_global_var (var);
6597 pt->vars_contains_escaped
6598 = (cfun->gimple_df->escaped.anything
6599 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6600 }
6601
6602 /* Computes the union of the points-to solutions *DEST and *SRC and
6603 stores the result in *DEST. This changes the points-to bitmap
6604 of *DEST and thus may not be used if that might be shared.
6605 The points-to bitmap of *SRC and *DEST will not be shared after
6606 this function if they were not before. */
6607
6608 static void
6609 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6610 {
6611 dest->anything |= src->anything;
6612 if (dest->anything)
6613 {
6614 pt_solution_reset (dest);
6615 return;
6616 }
6617
6618 dest->nonlocal |= src->nonlocal;
6619 dest->escaped |= src->escaped;
6620 dest->ipa_escaped |= src->ipa_escaped;
6621 dest->null |= src->null;
6622 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6623 dest->vars_contains_escaped |= src->vars_contains_escaped;
6624 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6625 if (!src->vars)
6626 return;
6627
6628 if (!dest->vars)
6629 dest->vars = BITMAP_GGC_ALLOC ();
6630 bitmap_ior_into (dest->vars, src->vars);
6631 }
6632
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* Likewise for the IPA-level ESCAPED solution.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6658
6659 /* Return true if the points-to solution *PT only point to a single var, and
6660 return the var uid in *UID. */
6661
6662 bool
6663 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6664 {
6665 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6666 || pt->vars == NULL
6667 || !bitmap_single_bit_set_p (pt->vars))
6668 return false;
6669
6670 *uid = bitmap_first_set_bit (pt->vars);
6671 return true;
6672 }
6673
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  /* Likewise for the IPA-level ESCAPED solution.  */
  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  return false;
}
6697
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  Helper for pt_solution_includes, which also
   updates the query statistics.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* Likewise for the IPA-level ESCAPED solution.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6727
6728 bool
6729 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6730 {
6731 bool res = pt_solution_includes_1 (pt, decl);
6732 if (res)
6733 ++pta_stats.pt_solution_includes_may_alias;
6734 else
6735 ++pta_stats.pt_solution_includes_no_alias;
6736 return res;
6737 }
6738
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  Helper for pt_solutions_intersect, which also updates
   the query statistics.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6790
6791 bool
6792 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6793 {
6794 bool res = pt_solutions_intersect_1 (pt1, pt2);
6795 if (res)
6796 ++pta_stats.pt_solutions_intersect_may_alias;
6797 else
6798 ++pta_stats.pt_solutions_intersect_no_alias;
6799 return res;
6800 }
6801
6802
/* Dump points-to information to OUTFILE, preceded by constraint-solver
   statistics if TDF_STATS is set in dump_flags.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars: %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars: %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars: %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations: %d\n", stats.iterations);
      fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Variable id zero is reserved; dump solutions for all variables
     that may have pointers.  */
  for (i = 1; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
6836
6837
/* Debug points-to information to stderr; convenience wrapper for use
   from the debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6845
6846
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together
   with the constraints that always hold between them.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  /* NOTE(review): ~0 size/fullsize appears to mean "unknown/maximal"
     for these artificial vars — confirm against struct variable_info.  */
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
7008
/* Initialize things necessary to perform PTA.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity only pays off if more than one field per
     variable may be tracked.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  /* Create the special constraint variables (NULL, ANYTHING, ...)
     and their base constraints.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  /* Cache of computed pt_solutions; torn down again in
     delete_points_to_sets.  */
  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7034
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges, shrinking the graph back to varmap size.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  /* All predecessor bitmaps live on this obstack; release it
     wholesale.  */
  bitmap_obstack_release (&predbitmap_obstack);
}
7070
/* Solve the constraint set: build the constraint graph, simplify it
   via offline variable substitution, then propagate solutions with
   solve_graph.  Results are dumped to dump_file when enabled.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the number of variables — presumably room for the REF
     nodes (see FIRST_REF_NODE uses elsewhere); confirm against
     init_graph.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
7143
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Set up the initial variable infos for this function — see
     intra_create_variable_infos.  */
  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  /* Virtual operand PHIs are skipped.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  /* Only call statements have use/clobber sets.  */
	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    /* Const calls get an empty use set.  */
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    /* Const/pure/novops calls get an empty clobber set.  */
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
7268
7269
/* Delete created points-to sets and release all memory allocated for
   the points-to analysis.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Free the constraint graph and its per-node arrays.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  /* Release the cached final solutions and their backing obstack.  */
  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
7308
/* Data passed to the visit_loadstore walker callback.  */

struct vls_data
{
  /* The dependence clique to assign to "other" loads and stores.  */
  unsigned short clique;
  /* Solution members of the restrict tags we added bases for; used
     by visit_loadstore to skip pointers that may point to them.  */
  bitmap rvars;
};
7314
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for the walk_stmt_load_store ops walk; DATA
   is a struct vls_data.  Always returns false.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip handled components to get at the base decl of REF.  */
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      /* Build the replacement MEM_REF <&decl, 0> and tag it.  */
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
7377
/* If REF is a MEM_REF then assign a clique, base pair to it, updating
   CLIQUE, RESTRICT_VAR's ruid and LAST_RUID.  Return whether dependence
   info was assigned to REF.  */

static bool
maybe_set_dependence_info (tree ref, tree ptr,
			   unsigned short &clique, varinfo_t restrict_var,
			   unsigned short &last_ruid)
{
  /* Strip handled components to get at the base (TARGET_)MEM_REF.  */
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if ((TREE_CODE (ref) == MEM_REF
       || TREE_CODE (ref) == TARGET_MEM_REF)
      && TREE_OPERAND (ref, 0) == ptr)
    {
      /* Do not overwrite existing cliques.  This avoids overwriting dependence
	 info inlined from a function with restrict parameters inlined
	 into a function with restrict parameters.  This usually means we
	 prefer to be precise in innermost loops.  */
      if (MR_DEPENDENCE_CLIQUE (ref) == 0)
	{
	  /* Allocate the clique and the restrict var's ruid lazily,
	     on first use.  */
	  if (clique == 0)
	    clique = ++cfun->last_clique;
	  if (restrict_var->ruid == 0)
	    restrict_var->ruid = ++last_ruid;
	  MR_DEPENDENCE_CLIQUE (ref) = clique;
	  MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
	  return true;
	}
    }
  return false;
}
7410
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.  */
7413
static void
compute_dependence_clique (void)
{
  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  /* Set of restrict vars we assigned a restrict UID to, by varinfo id.  */
  bitmap rvars = BITMAP_ALLOC (NULL);
  /* Walk all pointer SSA names and try to prove each one points to
     exactly one restrict-tagged variable.  */
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      /* For default defs of parameters/results look up the points-to
	 info recorded for the underlying decl instead.  */
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* Scan the points-to solution.  RESTRICT_VAR survives the loop
	 only when the solution contains exactly one restrict var,
	 possibly alongside NULL.  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var)
		{
		  /* Second restrict var in the solution - not exclusive,
		     give up on this pointer.  */
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    {
	      /* ??? Calls and asms.  */
	      if (!gimple_assign_single_p (use_stmt))
		continue;
	      used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
						 ptr, clique, restrict_var,
						 last_ruid);
	      used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
						 ptr, clique, restrict_var,
						 last_ruid);
	    }
	  /* Record the restrict var only when some access actually got
	     dependence info assigned.  */
	  if (used)
	    bitmap_set_bit (rvars, restrict_var->id);
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
7516
7517 /* Compute points-to information for every SSA_NAME pointer in the
7518 current function and compute the transitive closure of escaped
7519 variables to re-initialize the call-clobber states of local variables. */
7520
7521 unsigned int
7522 compute_may_aliases (void)
7523 {
7524 if (cfun->gimple_df->ipa_pta)
7525 {
7526 if (dump_file)
7527 {
7528 fprintf (dump_file, "\nNot re-computing points-to information "
7529 "because IPA points-to information is available.\n\n");
7530
7531 /* But still dump what we have remaining it. */
7532 dump_alias_info (dump_file);
7533 }
7534
7535 return 0;
7536 }
7537
7538 /* For each pointer P_i, determine the sets of variables that P_i may
7539 point-to. Compute the reachability set of escaped and call-used
7540 variables. */
7541 compute_points_to_sets ();
7542
7543 /* Debugging dumps. */
7544 if (dump_file)
7545 dump_alias_info (dump_file);
7546
7547 /* Compute restrict-based memory disambiguations. */
7548 compute_dependence_clique ();
7549
7550 /* Deallocate memory used by aliasing data structures and the internal
7551 points-to solution. */
7552 delete_points_to_sets ();
7553
7554 gcc_assert (!need_ssa_update_p (cfun));
7555
7556 return 0;
7557 }
7558
7559 /* A dummy pass to cause points-to information to be computed via
7560 TODO_rebuild_alias. */
7561
namespace {

/* Pass descriptor for the "alias" dummy pass; the actual work is done
   by the TODO_rebuild_alias finish flag.  */
const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when tree points-to analysis is enabled (-ftree-pta).  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace
7590
/* Factory for the "alias" pass, called by the pass manager.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
7596
7597 /* A dummy pass to cause points-to information to be computed via
7598 TODO_rebuild_alias. */
7599
namespace {

/* Pass descriptor for the early "ealias" dummy pass; like "alias" the
   actual work is done via the TODO_rebuild_alias finish flag.  */
const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when tree points-to analysis is enabled (-ftree-pta).  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace
7628
/* Factory for the "ealias" pass, called by the pass manager.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
7634
7635
/* IPA PTA solutions for ESCAPED.  Recomputed by ipa_pta_execute; the
   initializer sets the first flag and clears the rest plus the vars
   bitmap.  NOTE(review): presumably the first field is 'anything' -
   confirm against struct pt_solution's field order.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };
7640
7641 /* Associate node with varinfo DATA. Worker for
7642 cgraph_for_symbol_thunks_and_aliases. */
7643 static bool
7644 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7645 {
7646 if ((node->alias
7647 || (node->thunk.thunk_p
7648 && ! node->global.inlined_to))
7649 && node->analyzed)
7650 insert_vi_for_tree (node->decl, (varinfo_t)data);
7651 return false;
7652 }
7653
7654 /* Dump varinfo VI to FILE. */
7655
static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  /* Header line: variable id and name.  */
  fprintf (file, "%u: %s\n", vi->id, vi->name);

  /* Print all set boolean flags, space-separated, on one line.  */
  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  /* Numeric fields are printed only when they deviate from their
     defaults (zero, resp. all-ones for the sizes).  */
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  /* Dump the current points-to solution, if non-empty.  */
  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  /* Also dump the previous solution when it differs from the current
     one - useful when debugging solver propagation.  */
  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
7724
7725 /* Dump varinfo VI to stderr. */
7726
DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  /* Convenience wrapper for calling from the debugger.  */
  dump_varinfo (stderr, vi);
}
7732
7733 /* Dump varmap to FILE. */
7734
7735 static void
7736 dump_varmap (FILE *file)
7737 {
7738 if (varmap.length () == 0)
7739 return;
7740
7741 fprintf (file, "variables:\n");
7742
7743 for (unsigned int i = 0; i < varmap.length (); ++i)
7744 {
7745 varinfo_t vi = get_varinfo (i);
7746 dump_varinfo (file, vi);
7747 }
7748
7749 fprintf (file, "\n");
7750 }
7751
7752 /* Dump varmap to stderr. */
7753
DEBUG_FUNCTION void
debug_varmap (void)
{
  /* Convenience wrapper for calling from the debugger.  */
  dump_varmap (stderr);
}
7759
/* Compute whether NODE is referred to non-locally.  Worker for
   cgraph_for_symbol_thunks_and_aliases.  */
7762 static bool
7763 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7764 {
7765 bool *nonlocal_p = (bool *)data;
7766 *nonlocal_p |= (node->used_from_other_partition
7767 || node->externally_visible
7768 || node->force_output
7769 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
7770 return false;
7771 }
7772
7773 /* Same for varpool nodes. */
7774 static bool
7775 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7776 {
7777 bool *nonlocal_p = (bool *)data;
7778 *nonlocal_p |= (node->used_from_other_partition
7779 || node->externally_visible
7780 || node->force_output);
7781 return false;
7782 }
7783
7784 /* Execute the driver for IPA PTA. */
7785 static unsigned int
7786 ipa_pta_execute (void)
7787 {
7788 struct cgraph_node *node;
7789 varpool_node *var;
7790 unsigned int from = 0;
7791
7792 in_ipa_mode = 1;
7793
7794 init_alias_vars ();
7795
7796 if (dump_file && (dump_flags & TDF_DETAILS))
7797 {
7798 symtab->dump (dump_file);
7799 fprintf (dump_file, "\n");
7800 }
7801
7802 if (dump_file)
7803 {
7804 fprintf (dump_file, "Generating generic constraints\n\n");
7805 dump_constraints (dump_file, from);
7806 fprintf (dump_file, "\n");
7807 from = constraints.length ();
7808 }
7809
7810 /* Build the constraints. */
7811 FOR_EACH_DEFINED_FUNCTION (node)
7812 {
7813 varinfo_t vi;
7814 /* Nodes without a body are not interesting. Especially do not
7815 visit clones at this point for now - we get duplicate decls
7816 there for inline clones at least. */
7817 if (!node->has_gimple_body_p () || node->global.inlined_to)
7818 continue;
7819 node->get_body ();
7820
7821 gcc_assert (!node->clone_of);
7822
7823 /* For externally visible or attribute used annotated functions use
7824 local constraints for their arguments.
7825 For local functions we see all callers and thus do not need initial
7826 constraints for parameters. */
7827 bool nonlocal_p = (node->used_from_other_partition
7828 || node->externally_visible
7829 || node->force_output
7830 || lookup_attribute ("noipa",
7831 DECL_ATTRIBUTES (node->decl)));
7832 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7833 &nonlocal_p, true);
7834
7835 vi = create_function_info_for (node->decl,
7836 alias_get_name (node->decl), false,
7837 nonlocal_p);
7838 if (dump_file
7839 && from != constraints.length ())
7840 {
7841 fprintf (dump_file,
7842 "Generating intial constraints for %s", node->name ());
7843 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7844 fprintf (dump_file, " (%s)",
7845 IDENTIFIER_POINTER
7846 (DECL_ASSEMBLER_NAME (node->decl)));
7847 fprintf (dump_file, "\n\n");
7848 dump_constraints (dump_file, from);
7849 fprintf (dump_file, "\n");
7850
7851 from = constraints.length ();
7852 }
7853
7854 node->call_for_symbol_thunks_and_aliases
7855 (associate_varinfo_to_alias, vi, true);
7856 }
7857
7858 /* Create constraints for global variables and their initializers. */
7859 FOR_EACH_VARIABLE (var)
7860 {
7861 if (var->alias && var->analyzed)
7862 continue;
7863
7864 varinfo_t vi = get_vi_for_tree (var->decl);
7865
7866 /* For the purpose of IPA PTA unit-local globals are not
7867 escape points. */
7868 bool nonlocal_p = (var->used_from_other_partition
7869 || var->externally_visible
7870 || var->force_output);
7871 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
7872 &nonlocal_p, true);
7873 if (nonlocal_p)
7874 vi->is_ipa_escape_point = true;
7875 }
7876
7877 if (dump_file
7878 && from != constraints.length ())
7879 {
7880 fprintf (dump_file,
7881 "Generating constraints for global initializers\n\n");
7882 dump_constraints (dump_file, from);
7883 fprintf (dump_file, "\n");
7884 from = constraints.length ();
7885 }
7886
7887 FOR_EACH_DEFINED_FUNCTION (node)
7888 {
7889 struct function *func;
7890 basic_block bb;
7891
7892 /* Nodes without a body are not interesting. */
7893 if (!node->has_gimple_body_p () || node->clone_of)
7894 continue;
7895
7896 if (dump_file)
7897 {
7898 fprintf (dump_file,
7899 "Generating constraints for %s", node->name ());
7900 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7901 fprintf (dump_file, " (%s)",
7902 IDENTIFIER_POINTER
7903 (DECL_ASSEMBLER_NAME (node->decl)));
7904 fprintf (dump_file, "\n");
7905 }
7906
7907 func = DECL_STRUCT_FUNCTION (node->decl);
7908 gcc_assert (cfun == NULL);
7909
7910 /* Build constriants for the function body. */
7911 FOR_EACH_BB_FN (bb, func)
7912 {
7913 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7914 gsi_next (&gsi))
7915 {
7916 gphi *phi = gsi.phi ();
7917
7918 if (! virtual_operand_p (gimple_phi_result (phi)))
7919 find_func_aliases (func, phi);
7920 }
7921
7922 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7923 gsi_next (&gsi))
7924 {
7925 gimple *stmt = gsi_stmt (gsi);
7926
7927 find_func_aliases (func, stmt);
7928 find_func_clobbers (func, stmt);
7929 }
7930 }
7931
7932 if (dump_file)
7933 {
7934 fprintf (dump_file, "\n");
7935 dump_constraints (dump_file, from);
7936 fprintf (dump_file, "\n");
7937 from = constraints.length ();
7938 }
7939 }
7940
7941 /* From the constraints compute the points-to sets. */
7942 solve_constraints ();
7943
7944 /* Compute the global points-to sets for ESCAPED.
7945 ??? Note that the computed escape set is not correct
7946 for the whole unit as we fail to consider graph edges to
7947 externally visible functions. */
7948 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
7949
7950 /* Make sure the ESCAPED solution (which is used as placeholder in
7951 other solutions) does not reference itself. This simplifies
7952 points-to solution queries. */
7953 ipa_escaped_pt.ipa_escaped = 0;
7954
7955 /* Assign the points-to sets to the SSA names in the unit. */
7956 FOR_EACH_DEFINED_FUNCTION (node)
7957 {
7958 tree ptr;
7959 struct function *fn;
7960 unsigned i;
7961 basic_block bb;
7962
7963 /* Nodes without a body are not interesting. */
7964 if (!node->has_gimple_body_p () || node->clone_of)
7965 continue;
7966
7967 fn = DECL_STRUCT_FUNCTION (node->decl);
7968
7969 /* Compute the points-to sets for pointer SSA_NAMEs. */
7970 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7971 {
7972 if (ptr
7973 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7974 find_what_p_points_to (node->decl, ptr);
7975 }
7976
7977 /* Compute the call-use and call-clobber sets for indirect calls
7978 and calls to external functions. */
7979 FOR_EACH_BB_FN (bb, fn)
7980 {
7981 gimple_stmt_iterator gsi;
7982
7983 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7984 {
7985 gcall *stmt;
7986 struct pt_solution *pt;
7987 varinfo_t vi, fi;
7988 tree decl;
7989
7990 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7991 if (!stmt)
7992 continue;
7993
7994 /* Handle direct calls to functions with body. */
7995 decl = gimple_call_fndecl (stmt);
7996
7997 {
7998 tree called_decl = NULL_TREE;
7999 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8000 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8001 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8002 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8003
8004 if (called_decl != NULL_TREE
8005 && !fndecl_maybe_in_other_partition (called_decl))
8006 decl = called_decl;
8007 }
8008
8009 if (decl
8010 && (fi = lookup_vi_for_tree (decl))
8011 && fi->is_fn_info)
8012 {
8013 *gimple_call_clobber_set (stmt)
8014 = find_what_var_points_to
8015 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8016 *gimple_call_use_set (stmt)
8017 = find_what_var_points_to
8018 (node->decl, first_vi_for_offset (fi, fi_uses));
8019 }
8020 /* Handle direct calls to external functions. */
8021 else if (decl)
8022 {
8023 pt = gimple_call_use_set (stmt);
8024 if (gimple_call_flags (stmt) & ECF_CONST)
8025 memset (pt, 0, sizeof (struct pt_solution));
8026 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8027 {
8028 *pt = find_what_var_points_to (node->decl, vi);
8029 /* Escaped (and thus nonlocal) variables are always
8030 implicitly used by calls. */
8031 /* ??? ESCAPED can be empty even though NONLOCAL
8032 always escaped. */
8033 pt->nonlocal = 1;
8034 pt->ipa_escaped = 1;
8035 }
8036 else
8037 {
8038 /* If there is nothing special about this call then
8039 we have made everything that is used also escape. */
8040 *pt = ipa_escaped_pt;
8041 pt->nonlocal = 1;
8042 }
8043
8044 pt = gimple_call_clobber_set (stmt);
8045 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8046 memset (pt, 0, sizeof (struct pt_solution));
8047 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8048 {
8049 *pt = find_what_var_points_to (node->decl, vi);
8050 /* Escaped (and thus nonlocal) variables are always
8051 implicitly clobbered by calls. */
8052 /* ??? ESCAPED can be empty even though NONLOCAL
8053 always escaped. */
8054 pt->nonlocal = 1;
8055 pt->ipa_escaped = 1;
8056 }
8057 else
8058 {
8059 /* If there is nothing special about this call then
8060 we have made everything that is used also escape. */
8061 *pt = ipa_escaped_pt;
8062 pt->nonlocal = 1;
8063 }
8064 }
8065 /* Handle indirect calls. */
8066 else if (!decl
8067 && (fi = get_fi_for_callee (stmt)))
8068 {
8069 /* We need to accumulate all clobbers/uses of all possible
8070 callees. */
8071 fi = get_varinfo (find (fi->id));
8072 /* If we cannot constrain the set of functions we'll end up
8073 calling we end up using/clobbering everything. */
8074 if (bitmap_bit_p (fi->solution, anything_id)
8075 || bitmap_bit_p (fi->solution, nonlocal_id)
8076 || bitmap_bit_p (fi->solution, escaped_id))
8077 {
8078 pt_solution_reset (gimple_call_clobber_set (stmt));
8079 pt_solution_reset (gimple_call_use_set (stmt));
8080 }
8081 else
8082 {
8083 bitmap_iterator bi;
8084 unsigned i;
8085 struct pt_solution *uses, *clobbers;
8086
8087 uses = gimple_call_use_set (stmt);
8088 clobbers = gimple_call_clobber_set (stmt);
8089 memset (uses, 0, sizeof (struct pt_solution));
8090 memset (clobbers, 0, sizeof (struct pt_solution));
8091 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8092 {
8093 struct pt_solution sol;
8094
8095 vi = get_varinfo (i);
8096 if (!vi->is_fn_info)
8097 {
8098 /* ??? We could be more precise here? */
8099 uses->nonlocal = 1;
8100 uses->ipa_escaped = 1;
8101 clobbers->nonlocal = 1;
8102 clobbers->ipa_escaped = 1;
8103 continue;
8104 }
8105
8106 if (!uses->anything)
8107 {
8108 sol = find_what_var_points_to
8109 (node->decl,
8110 first_vi_for_offset (vi, fi_uses));
8111 pt_solution_ior_into (uses, &sol);
8112 }
8113 if (!clobbers->anything)
8114 {
8115 sol = find_what_var_points_to
8116 (node->decl,
8117 first_vi_for_offset (vi, fi_clobbers));
8118 pt_solution_ior_into (clobbers, &sol);
8119 }
8120 }
8121 }
8122 }
8123 }
8124 }
8125
8126 fn->gimple_df->ipa_pta = true;
8127
8128 /* We have to re-set the final-solution cache after each function
8129 because what is a "global" is dependent on function context. */
8130 final_solutions->empty ();
8131 obstack_free (&final_solutions_obstack, NULL);
8132 gcc_obstack_init (&final_solutions_obstack);
8133 }
8134
8135 delete_points_to_sets ();
8136
8137 in_ipa_mode = 0;
8138
8139 return 0;
8140 }
8141
namespace {

/* Pass descriptor for the IPA points-to-analysis pass.  */
const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing with -fipa-pta and compilation is
     error-free so far.  */
  virtual bool gate (function *)
  {
    return (optimize
	    && flag_ipa_pta
	    /* Don't bother doing anything if the program has errors.  */
	    && !seen_error ());
  }

  opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }

  virtual unsigned int execute (function *) { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace
8180
/* Factory for the IPA "pta" pass, called by the pass manager.  */

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}