tree-ssa-alias.c (compute_flow_insensitive_aliasing): If type memory tag is call...
[gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-alias-common.h"
43 #include "tree-pass.h"
44 #include "convert.h"
45 #include "params.h"
46
47
/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  One of these
   is created for every pointer in AI->POINTERS and for every
   addressable variable in AI->ADDRESSABLE_VARS.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  sbitmap may_aliases;
};
69
70
/* Alias information used by compute_may_aliases and its helpers.
   Allocated by init_alias_info and released by delete_alias_info.  */
struct alias_info
{
  /* SSA names visited while collecting points-to information.  If bit I
     is set, it means that SSA variable with version I has already been
     visited.  */
  bitmap ssa_names_visited;

  /* Array of SSA_NAME pointers processed by the points-to collector.  */
  varray_type processed_ptrs;

  /* Variables whose address is still needed.  */
  bitmap addresses_needed;

  /* ADDRESSABLE_VARS contains all the global variables and locals that
     have had their address taken.  */
  struct alias_map_d **addressable_vars;
  size_t num_addressable_vars;

  /* POINTERS contains all the _DECL pointers with unique memory tags
     that have been referenced in the program.  */
  struct alias_map_d **pointers;
  size_t num_pointers;

  /* Number of function calls found in the program.  Used by
     maybe_create_global_var to decide whether .GLOBAL_VAR is needed.  */
  size_t num_calls_found;

  /* Array of counters to keep track of how many times each pointer has
     been dereferenced in the program.  This is used by the alias grouping
     heuristic in compute_flow_insensitive_aliasing.  Indexed by
     variable UID.  */
  varray_type num_references;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of all the pointers found in the program.  */
  long total_alias_vops;

  /* Variables that have been written to.  */
  bitmap written_vars;

  /* Pointers that have been used in an indirect store operation.  */
  bitmap dereferenced_ptrs_store;

  /* Pointers that have been used in an indirect load operation.  */
  bitmap dereferenced_ptrs_load;
};
116
117
/* Counters used to display statistics on alias analysis.  Dumped by
   dump_alias_stats when TDF_STATS is requested.  */
struct alias_stats_d
{
  /* Total number of aliasing queries, and how many of them were
     answered may-alias vs. no-alias.  */
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;

  /* Queries attempted/resolved by the simple (structural) checks.  */
  unsigned int simple_queries;
  unsigned int simple_resolved;

  /* Queries attempted/resolved by type-based alias analysis (TBAA).  */
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;

  /* Queries attempted/resolved by points-to analysis (PTA).  */
  unsigned int pta_queries;
  unsigned int pta_resolved;
};

/* Local variables.  */
static struct alias_stats_d alias_stats;

/* Local functions.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_points_to_and_addr_escape (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static bool collect_points_to_info_r (tree, tree, void *);
static bool is_escape_site (tree, size_t *);
static void add_pointed_to_var (struct alias_info *, tree, tree);
static void add_pointed_to_expr (tree, tree);
static void create_global_var (void);
static void collect_points_to_info_for (struct alias_info *, tree);
static bool ptr_is_dereferenced_by (tree, tree, bool *);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static struct ptr_info_def *get_ptr_info (tree t);
static void set_pt_anything (tree ptr);
static void set_pt_malloc (tree ptr);

/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.  */
tree global_var;

185
186 /* Compute may-alias information for every variable referenced in function
187 FNDECL.
188
189 Alias analysis proceeds in 3 main phases:
190
191 1- Points-to and escape analysis.
192
193 This phase walks the use-def chains in the SSA web looking for three
194 things:
195
196 * Assignments of the form P_i = &VAR
197 * Assignments of the form P_i = malloc()
198 * Pointers and ADDR_EXPR that escape the current function.
199
200 The concept of 'escaping' is the same one used in the Java world. When
201 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
202 outside of the current function. So, assignment to global variables,
203 function arguments and returning a pointer are all escape sites.
204
205 This is where we are currently limited. Since not everything is renamed
206 into SSA, we lose track of escape properties when a pointer is stashed
207 inside a field in a structure, for instance. In those cases, we are
208 assuming that the pointer does escape.
209
210 We use escape analysis to determine whether a variable is
211 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
212 is call-clobbered. If a pointer P_i escapes, then all the variables
213 pointed-to by P_i (and its memory tag) also escape.
214
215 2- Compute flow-sensitive aliases
216
217 We have two classes of memory tags. Memory tags associated with the
218 pointed-to data type of the pointers in the program. These tags are
219 called "type memory tag" (TMT). The other class are those associated
220 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
221 when adding operands for an INDIRECT_REF *P_i, we will first check
222 whether P_i has a name tag, if it does we use it, because that will have
223 more precise aliasing information. Otherwise, we use the standard type
224 tag.
225
226 In this phase, we go through all the pointers we found in points-to
227 analysis and create alias sets for the name memory tags associated with
228 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
229 it points to and its tag.
230
231
232 3- Compute flow-insensitive aliases
233
234 This pass will compare the alias set of every type memory tag and every
235 addressable variable found in the program. Given a type memory tag TMT
236 and an addressable variable V. If the alias sets of TMT and V conflict
237 (as computed by may_alias_p), then V is marked as an alias tag and added
238 to the alias set of TMT.
239
240 For instance, consider the following function:
241
242 foo (int i)
243 {
244 int *p, *q, a, b;
245
246 if (i > 10)
247 p = &a;
248 else
249 q = &b;
250
251 *p = 3;
252 *q = 5;
253 a = b + 2;
254 return *p;
255 }
256
257 After aliasing analysis has finished, the type memory tag for pointer
258 'p' will have two aliases, namely variables 'a' and 'b'. Every time
259 pointer 'p' is dereferenced, we want to mark the operation as a
260 potential reference to 'a' and 'b'.
261
262 foo (int i)
263 {
264 int *p, a, b;
265
266 if (i_2 > 10)
267 p_4 = &a;
268 else
269 p_6 = &b;
270 # p_1 = PHI <p_4(1), p_6(2)>;
271
272 # a_7 = V_MAY_DEF <a_3>;
273 # b_8 = V_MAY_DEF <b_5>;
274 *p_1 = 3;
275
276 # a_9 = V_MAY_DEF <a_7>
277 # VUSE <b_8>
278 a_9 = b_8 + 2;
279
280 # VUSE <a_9>;
281 # VUSE <b_8>;
282 return *p_1;
283 }
284
285 In certain cases, the list of may aliases for a pointer may grow too
286 large. This may cause an explosion in the number of virtual operands
287 inserted in the code. Resulting in increased memory consumption and
288 compilation time.
289
290 When the number of virtual operands needed to represent aliased
291 loads and stores grows too large (configurable with @option{--param
292 max-aliased-vops}), alias sets are grouped to avoid severe
293 compile-time slow downs and memory consumption. See group_aliases. */
294
/* Main entry point for the may-alias pass.  Runs the three analysis
   phases described in the overview comment above, then dumps and
   releases the intermediate data.  */

static void
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if its passed as a
     function call argument).  */
  compute_points_to_and_addr_escape (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered type tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);
}
347
/* Pass descriptor for the may-alias pass.  Requires points-to
   information (PROP_pta) and provides PROP_alias.  */
struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_pta,	/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};
365
366
/* Initialize the data structures used for alias analysis.  On the
   second and subsequent invocations in the same compilation (tracked
   by the function-static ALIASES_COMPUTED_P), also clear the alias
   information left over from the previous run so it can be
   re-discovered from scratch.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  static bool aliases_computed_p = false;

  ai = xcalloc (1, sizeof (struct alias_info));
  ai->ssa_names_visited = BITMAP_XMALLOC ();
  VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
  ai->addresses_needed = BITMAP_XMALLOC ();
  VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
  ai->written_vars = BITMAP_XMALLOC ();
  ai->dereferenced_ptrs_store = BITMAP_XMALLOC ();
  ai->dereferenced_ptrs_load = BITMAP_XMALLOC ();

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      size_t i;

      /* Clear the call-clobbered set.  We are going to re-discover
	 call-clobbered variables.  */
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	{
	  tree var = referenced_var (i);

	  /* Variables that are intrinsically call-clobbered (globals,
	     local statics, etc) will not be marked by the aliasing
	     code, so we can't remove them from CALL_CLOBBERED_VARS.  */
	  if (!is_call_clobbered (var))
	    bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
	});

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      for (i = 0; i < num_referenced_vars; i++)
	{
	  var_ann_t ann = var_ann (referenced_var (i));
	  ann->is_alias_tag = 0;
	  ann->may_aliases = NULL;
	}

      /* Clear flow-sensitive points-to information from each SSA name.
	 SSA name 0 is never used, hence the loop starts at 1.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_malloc = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}
447
448
/* Deallocate memory used by alias analysis.  Frees AI and everything
   it owns; the varrays (processed_ptrs, num_references) are only
   NULLed here — presumably they are garbage collected elsewhere
   (TODO confirm against varray management conventions).  */

static void
delete_alias_info (struct alias_info *ai)
{
  size_t i;

  BITMAP_XFREE (ai->ssa_names_visited);
  ai->processed_ptrs = NULL;
  BITMAP_XFREE (ai->addresses_needed);

  for (i = 0; i < ai->num_addressable_vars; i++)
    {
      sbitmap_free (ai->addressable_vars[i]->may_aliases);
      free (ai->addressable_vars[i]);
    }
  free (ai->addressable_vars);

  for (i = 0; i < ai->num_pointers; i++)
    {
      sbitmap_free (ai->pointers[i]->may_aliases);
      free (ai->pointers[i]);
    }
  free (ai->pointers);

  ai->num_references = NULL;
  BITMAP_XFREE (ai->written_vars);
  BITMAP_XFREE (ai->dereferenced_ptrs_store);
  BITMAP_XFREE (ai->dereferenced_ptrs_load);

  free (ai);
}
481
482
/* Walk use-def chains for pointer PTR to determine what variables is PTR
   pointing to.  Each SSA version is visited at most once (tracked in
   AI->SSA_NAMES_VISITED); visited pointers are queued on
   AI->PROCESSED_PTRS for the later flow-sensitive phase.  */

static void
collect_points_to_info_for (struct alias_info *ai, tree ptr)
{
#if defined ENABLE_CHECKING
  if (!POINTER_TYPE_P (TREE_TYPE (ptr)))
    abort ();
#endif

  if (!bitmap_bit_p (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
    {
      bitmap_set_bit (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
      walk_use_def_chains (ptr, collect_points_to_info_r, ai, true);
      VARRAY_PUSH_TREE (ai->processed_ptrs, ptr);
    }
}
501
502
503 /* Helper for ptr_is_dereferenced_by. Called by walk_tree to look for
504 INDIRECT_REF nodes for the pointer passed in DATA. */
505
506 static tree
507 find_ptr_dereference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
508 {
509 tree ptr = (tree) data;
510
511 if (TREE_CODE (*tp) == INDIRECT_REF
512 && TREE_OPERAND (*tp, 0) == ptr)
513 return *tp;
514
515 return NULL_TREE;
516 }
517
518
/* Return true if STMT contains INDIRECT_REF <PTR>.  *IS_STORE is set
   to 'true' if the dereference is on the LHS of an assignment or in
   an ASM output/clobber operand.  Only MODIFY_EXPR (possibly wrapped
   in a RETURN_EXPR) and ASM_EXPR statements are inspected; all other
   statement forms yield false.  */

static bool
ptr_is_dereferenced_by (tree ptr, tree stmt, bool *is_store)
{
  *is_store = false;

  if (TREE_CODE (stmt) == MODIFY_EXPR
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR))
    {
      tree e, lhs, rhs;

      /* Unwrap 'return (lhs = rhs)' down to the embedded assignment.  */
      e = (TREE_CODE (stmt) == RETURN_EXPR) ? TREE_OPERAND (stmt, 0) : stmt;
      lhs = TREE_OPERAND (e, 0);
      rhs = TREE_OPERAND (e, 1);

      /* Check the LHS first so a dereference appearing on both sides
	 is reported as a store.  */
      if (EXPR_P (lhs)
	  && walk_tree (&lhs, find_ptr_dereference, ptr, NULL))
	{
	  *is_store = true;
	  return true;
	}
      else if (EXPR_P (rhs)
	       && walk_tree (&rhs, find_ptr_dereference, ptr, NULL))
	{
	  return true;
	}
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    {
      /* Outputs and clobbers are treated as stores, inputs as loads.  */
      if (walk_tree (&ASM_OUTPUTS (stmt), find_ptr_dereference, ptr, NULL)
	  || walk_tree (&ASM_CLOBBERS (stmt), find_ptr_dereference, ptr, NULL))
	{
	  *is_store = true;
	  return true;
	}
      else if (walk_tree (&ASM_INPUTS (stmt), find_ptr_dereference, ptr, NULL))
	{
	  return true;
	}
    }

  return false;
}
565
566
/* Traverse use-def links for all the pointers in the program to collect
   address escape and points-to information.

   For every statement this also: records taken addresses, marks
   call-clobbered variables at escape sites, counts references to
   potentially-aliased variables for the grouping heuristic, records
   which pointers are dereferenced for loads/stores, and tracks
   written-to variables.

   This is loosely based on the same idea described in R. Hasti and S.
   Horwitz, ``Using static single assignment form to improve
   flow-insensitive pointer analysis,'' in SIGPLAN Conference on
   Programming Language Design and Implementation, pp. 97-105, 1998.  */

static void
compute_points_to_and_addr_escape (struct alias_info *ai)
{
  basic_block bb;
  size_t i;
  tree op;
  ssa_op_iter iter;

  timevar_push (TV_TREE_PTA);

  FOR_EACH_BB (bb)
    {
      bb_ann_t block_ann = bb_ann (bb);
      block_stmt_iterator si;

      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	{
	  bitmap addr_taken;
	  tree stmt = bsi_stmt (si);
	  bool stmt_escapes_p = is_escape_site (stmt, &ai->num_calls_found);

	  /* Mark all the variables whose address are taken by the
	     statement.  Note that this will miss all the addresses taken
	     in PHI nodes (those are discovered while following the use-def
	     chains).  */
	  get_stmt_operands (stmt);
	  addr_taken = addresses_taken (stmt);
	  if (addr_taken)
	    EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
		{
		  tree var = referenced_var (i);
		  bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
		  if (stmt_escapes_p)
		    mark_call_clobbered (var);
		});

	  if (stmt_escapes_p)
	    block_ann->has_escape_site = 1;

	  /* Special case for silly ADDR_EXPR tricks
	     (gcc.c-torture/unsorted/pass.c).  If this statement is an
	     assignment to a non-pointer variable and the RHS takes the
	     address of a variable, assume that the variable on the RHS is
	     call-clobbered.  We could add the LHS to the list of
	     "pointers" and follow it to see if it really escapes, but it's
	     not worth the pain.  */
	  if (addr_taken
	      && TREE_CODE (stmt) == MODIFY_EXPR
	      && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0))))
	    EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
		{
		  tree var = referenced_var (i);
		  mark_call_clobbered (var);
		});

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    {
	      var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
	      struct ptr_info_def *pi;
	      bool is_store;

	      /* If the operand's variable may be aliased, keep track
		 of how many times we've referenced it.  This is used
		 for alias grouping in compute_flow_sensitive_aliasing.
		 Note that we don't need to grow AI->NUM_REFERENCES
		 because we are processing regular variables, not
		 memory tags (the array's initial size is set to
		 NUM_REFERENCED_VARS).  */
	      if (may_be_aliased (SSA_NAME_VAR (op)))
		(VARRAY_UINT (ai->num_references, v_ann->uid))++;

	      if (!POINTER_TYPE_P (TREE_TYPE (op)))
		continue;

	      collect_points_to_info_for (ai, op);

	      /* NOTE(review): PI is used without a NULL check below;
		 presumably collect_points_to_info_for guarantees that
		 SSA_NAME_PTR_INFO is created for OP — confirm.  */
	      pi = SSA_NAME_PTR_INFO (op);
	      if (ptr_is_dereferenced_by (op, stmt, &is_store))
		{
		  /* Mark OP as dereferenced.  In a subsequent pass,
		     dereferenced pointers that point to a set of
		     variables will be assigned a name tag to alias
		     all the variables OP points to.  */
		  pi->is_dereferenced = 1;

		  /* Keep track of how many time we've dereferenced each
		     pointer.  Again, we don't need to grow
		     AI->NUM_REFERENCES because we're processing
		     existing program variables.  */
		  (VARRAY_UINT (ai->num_references, v_ann->uid))++;

		  /* If this is a store operation, mark OP as being
		     dereferenced to store, otherwise mark it as being
		     dereferenced to load.  */
		  if (is_store)
		    bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
		  else
		    bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
		}
	      else if (stmt_escapes_p)
		{
		  /* Note that even if STMT is an escape point, pointer OP
		     will not escape if it is being dereferenced.  That's
		     why we only check for escape points if OP is not
		     dereferenced by STMT.  */
		  pi->value_escapes_p = 1;

		  /* If the statement makes a function call, assume
		     that pointer OP will be dereferenced in a store
		     operation inside the called function.  */
		  if (get_call_expr_in (stmt))
		    {
		      bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
		      pi->is_dereferenced = 1;
		    }
		}
	    }

	  /* Update reference counter for definitions to any
	     potentially aliased variable.  This is used in the alias
	     grouping heuristics.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      tree var = SSA_NAME_VAR (op);
	      var_ann_t ann = var_ann (var);
	      bitmap_set_bit (ai->written_vars, ann->uid);
	      if (may_be_aliased (var))
		(VARRAY_UINT (ai->num_references, ann->uid))++;
	    }

	  /* Mark variables in V_MAY_DEF operands as being written to.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
	    {
	      tree var = SSA_NAME_VAR (op);
	      var_ann_t ann = var_ann (var);
	      bitmap_set_bit (ai->written_vars, ann->uid);
	    }

	  /* After promoting variables and computing aliasing we will
	     need to re-scan most statements.  FIXME: Try to minimize the
	     number of statements re-scanned.  It's not really necessary to
	     re-scan *all* statements.  */
	  modify_stmt (stmt);
	}
    }

  timevar_pop (TV_TREE_PTA);
}
723
724
/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.  Every tag created or reused here is
   marked for SSA renaming in VARS_TO_RENAME.  */

static void
create_name_tags (struct alias_info *ai)
{
  size_t i;

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      if (pi->pt_vars
	  && bitmap_first_set_bit (pi->pt_vars) >= 0)
	{
	  size_t j;
	  tree old_name_tag = pi->name_mem_tag;

	  /* If PTR points to a set of variables, check if we don't
	     have another pointer Q with the same points-to set before
	     creating a tag.  If so, use Q's tag instead of creating a
	     new one.

	     This is important for not creating unnecessary symbols
	     and also for copy propagation.  If we ever need to
	     propagate PTR into Q or vice-versa, we would run into
	     problems if they both had different name tags because
	     they would have different SSA version numbers (which
	     would force us to take the name tags in and out of SSA).  */
	  for (j = 0; j < i; j++)
	    {
	      tree q = VARRAY_TREE (ai->processed_ptrs, j);
	      struct ptr_info_def *qi = SSA_NAME_PTR_INFO (q);

	      if (qi
		  && qi->pt_vars
		  && qi->name_mem_tag
		  && bitmap_equal_p (pi->pt_vars, qi->pt_vars))
		{
		  pi->name_mem_tag = qi->name_mem_tag;
		  break;
		}
	    }

	  /* If we didn't find a pointer with the same points-to set
	     as PTR, create a new name tag if needed.  */
	  if (pi->name_mem_tag == NULL_TREE)
	    pi->name_mem_tag = get_nmt_for (ptr);

	  /* If the new name tag computed for PTR is different than
	     the old name tag that it used to have, then the old tag
	     needs to be removed from the IL, so we mark it for
	     renaming.  */
	  if (old_name_tag && old_name_tag != pi->name_mem_tag)
	    bitmap_set_bit (vars_to_rename, var_ann (old_name_tag)->uid);
	}
      else if (pi->pt_malloc)
	{
	  /* Otherwise, create a unique name tag for this pointer.  */
	  pi->name_mem_tag = get_nmt_for (ptr);
	}
      else
	{
	  /* Only pointers that may point to malloc or other variables
	     may receive a name tag.  If the pointer does not point to
	     a known spot, we should use type tags.  */
	  set_pt_anything (ptr);
	  continue;
	}

      /* Mark the new name tag for renaming.  */
      bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
    }
}
814
815
816
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  create_name_tags (ai);

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      size_t j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));

      if (pi->value_escapes_p || pi->pt_anything)
	{
	  /* If PTR escapes or may point to anything, then its associated
	     memory tags and pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag);

	  if (v_ann->type_mem_tag)
	    mark_call_clobbered (v_ann->type_mem_tag);

	  if (pi->pt_vars)
	    EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
		mark_call_clobbered (referenced_var (j)));
	}

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
	    add_may_alias (pi->name_mem_tag, referenced_var (j)));

      /* If the name tag is call clobbered, so is the type tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && v_ann->type_mem_tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (v_ann->type_mem_tag);
    }
}
869
870
/* Compute type-based alias sets.  Traverse all the pointers and
   addressable variables found in setup_pointers_and_addressables.

   For every pointer P in AI->POINTERS and addressable variable V in
   AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's type
   memory tag (TMT) if their alias sets conflict.  V is then marked as
   an alias tag so that the operand scanner knows that statements
   containing V have aliased operands.

   Also tallies the number of virtual operands aliasing will add; when
   the total exceeds MAX_ALIASED_VOPS, group_aliases is invoked to cap
   the growth at the expense of precision.  */

static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --params max-alias-vops, we enable alias
     grouping.  */
  ai->total_alias_vops = 0;

  /* For every pointer P, determine which addressable variables may alias
     with P's type memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = var_ann (p_map->var)->type_mem_tag;
      var_ann_t tag_ann = var_ann (tag);

      p_map->total_alias_vops = 0;
      p_map->may_aliases = sbitmap_alloc (num_referenced_vars);
      sbitmap_zero (p_map->may_aliases);

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  tree var;
	  bool tag_stored_p, var_stored_p;

	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.  */
	  tag_stored_p = bitmap_bit_p (ai->written_vars, tag_ann->uid)
			 || is_call_clobbered (tag);
	  var_stored_p = bitmap_bit_p (ai->written_vars, v_ann->uid)
			 || is_call_clobbered (var);
	  if (!tag_stored_p && !var_stored_p)
	    continue;

	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
	    {
	      size_t num_tag_refs, num_var_refs;

	      num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
	      num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid);

	      /* If TAG is call clobbered, so is VAR.  */
	      if (is_call_clobbered (tag))
		mark_call_clobbered (var);

	      /* Add VAR to TAG's may-aliases set.  */
	      add_may_alias (tag, var);

	      /* Update the total number of virtual operands due to
		 aliasing.  Since we are adding one more alias to TAG's
		 may-aliases set, the total number of virtual operands due
		 to aliasing will be increased by the number of references
		 made to VAR and TAG (every reference to TAG will also
		 count as a reference to VAR).  */
	      ai->total_alias_vops += (num_var_refs + num_tag_refs);
	      p_map->total_alias_vops += (num_var_refs + num_tag_refs);

	      /* Update the bitmap used to represent TAG's alias set
		 in case we need to group aliases.  */
	      SET_BIT (p_map->may_aliases, var_ann (var)->uid);
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file, "%s: Total number of aliased vops: %ld\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);
}
965
966
967 /* Comparison function for qsort used in group_aliases. */
968
969 static int
970 total_alias_vops_cmp (const void *p, const void *q)
971 {
972 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
973 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
974 long n1 = (*p1)->total_alias_vops;
975 long n2 = (*p2)->total_alias_vops;
976
977 /* We want to sort in descending order. */
978 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
979 }
980
981 /* Group all the aliases for TAG to make TAG represent all the
982 variables in its alias set. Update the total number
983 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
984 function will make TAG be the unique alias tag for all the
985 variables in its may-aliases. So, given:
986
987 may-aliases(TAG) = { V1, V2, V3 }
988
989 This function will group the variables into:
990
991 may-aliases(V1) = { TAG }
992 may-aliases(V2) = { TAG }
   may-aliases(V3) = { TAG }  */
994
/* Reverse the roles of TAG and the variables in TAG_ALIASES: each
   variable in TAG_ALIASES is given TAG as its unique alias, and the
   running virtual-operand total in AI is adjusted accordingly.  */

static void
group_aliases_into (tree tag, sbitmap tag_aliases, struct alias_info *ai)
{
  size_t i;
  var_ann_t tag_ann = var_ann (tag);
  /* Number of references made to TAG; this is what each grouped
     variable stops contributing on TAG's behalf.  */
  size_t num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);

  EXECUTE_IF_SET_IN_SBITMAP (tag_aliases, 0, i,
    {
      tree var = referenced_var (i);
      var_ann_t ann = var_ann (var);

      /* Make TAG the unique alias of VAR.  Any previous alias
	 information on VAR is discarded.  */
      ann->is_alias_tag = 0;
      ann->may_aliases = NULL;

      /* Note that VAR and TAG may be the same if the function has no
	 addressable variables (see the discussion at the end of
	 setup_pointers_and_addressables).  */
      if (var != tag)
	add_may_alias (var, tag);

      /* Reduce total number of virtual operands contributed
	 by TAG on behalf of VAR.  Notice that the references to VAR
	 itself won't be removed.  We will merely replace them with
	 references to TAG.  */
      ai->total_alias_vops -= num_tag_refs;
    });

  /* We have reduced the number of virtual operands that TAG makes on
     behalf of all the variables formerly aliased with it.  However,
     we have also "removed" all the virtual operands for TAG itself,
     so we add them back.  */
  ai->total_alias_vops += num_tag_refs;

  /* TAG no longer has any aliases.  */
  tag_ann->may_aliases = NULL;
}
1033
1034
1035 /* Group may-aliases sets to reduce the number of virtual operands due
1036 to aliasing.
1037
1038 1- Sort the list of pointers in decreasing number of contributed
1039 virtual operands.
1040
1041 2- Take the first entry in AI->POINTERS and revert the role of
1042 the memory tag and its aliases. Usually, whenever an aliased
1043 variable Vi is found to alias with a memory tag T, we add Vi
1044 to the may-aliases set for T. Meaning that after alias
1045 analysis, we will have:
1046
1047 may-aliases(T) = { V1, V2, V3, ..., Vn }
1048
1049 This means that every statement that references T, will get 'n'
1050 virtual operands for each of the Vi tags. But, when alias
1051 grouping is enabled, we make T an alias tag and add it to the
1052 alias set of all the Vi variables:
1053
1054 may-aliases(V1) = { T }
1055 may-aliases(V2) = { T }
1056 ...
1057 may-aliases(Vn) = { T }
1058
1059 This has two effects: (a) statements referencing T will only get
1060 a single virtual operand, and, (b) all the variables Vi will now
1061 appear to alias each other. So, we lose alias precision to
1062 improve compile time. But, in theory, a program with such a high
1063 level of aliasing should not be very optimizable in the first
1064 place.
1065
1066 3- Since variables may be in the alias set of more than one
1067 memory tag, the grouping done in step (2) needs to be extended
1068 to all the memory tags that have a non-empty intersection with
1069 the may-aliases set of tag T. For instance, if we originally
1070 had these may-aliases sets:
1071
1072 may-aliases(T) = { V1, V2, V3 }
1073 may-aliases(R) = { V2, V4 }
1074
1075 In step (2) we would have reverted the aliases for T as:
1076
1077 may-aliases(V1) = { T }
1078 may-aliases(V2) = { T }
1079 may-aliases(V3) = { T }
1080
1081 But note that now V2 is no longer aliased with R. We could
1082 add R to may-aliases(V2), but we are in the process of
1083 grouping aliases to reduce virtual operands so what we do is
1084 add V4 to the grouping to obtain:
1085
1086 may-aliases(V1) = { T }
1087 may-aliases(V2) = { T }
1088 may-aliases(V3) = { T }
1089 may-aliases(V4) = { T }
1090
1091 4- If the total number of virtual operands due to aliasing is
1092 still above the threshold set by max-alias-vops, go back to (2). */
1093
static void
group_aliases (struct alias_info *ai)
{
  size_t i;
  sbitmap res;

  /* Sort the POINTERS array in descending order of contributed
     virtual operands, so the most expensive tags are processed
     (and grouped) first.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
	 total_alias_vops_cmp);

  /* Scratch bitmap used to hold alias-set intersections below.  */
  res = sbitmap_alloc (num_referenced_vars);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
      sbitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)
	continue;

      /* See if TAG1 had any aliases in common with other type tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  sbitmap tag2_aliases = ai->pointers[j]->may_aliases;

	  sbitmap_a_and_b (res, tag1_aliases, tag2_aliases);
	  if (sbitmap_first_set_bit (res) >= 0)
	    {
	      size_t k;

	      tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;

	      /* Call-clobbering must be kept consistent across the
		 merged group: if one tag is call-clobbered, the other
		 tag and every variable in its alias set must be
		 marked call-clobbered as well.  */
	      if (!is_call_clobbered (tag1) && is_call_clobbered (tag2))
		{
		  mark_call_clobbered (tag1);
		  EXECUTE_IF_SET_IN_SBITMAP (tag1_aliases, 0, k,
		    {
		      tree var = referenced_var (k);
		      mark_call_clobbered (var);
		    });
		}
	      else if (is_call_clobbered (tag1) && !is_call_clobbered (tag2))
		{
		  mark_call_clobbered (tag2);
		  EXECUTE_IF_SET_IN_SBITMAP (tag2_aliases, 0, k,
		    {
		      tree var = referenced_var (k);
		      mark_call_clobbered (var);
		    });
		}

	      /* Merge TAG2's alias set into TAG1's.  */
	      sbitmap_a_or_b (tag1_aliases, tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      sbitmap_zero (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;
	    }
	}

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)
	break;
    }

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = TMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to TMT.20 ... 
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'TMT.20' in between.  To prevent this we have to
     replace 'a' with 'TMT.20' in the name tag of p_5.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      size_t j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      varray_type aliases;

      if (name_tag == NULL_TREE)
	continue;

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; aliases && j < VARRAY_ACTIVE_SIZE (aliases); j++)
	{
	  tree alias = VARRAY_TREE (aliases, j);
	  var_ann_t ann = var_ann (alias);

	  /* A grouped variable is a non-tag with exactly one alias
	     left (the group's tag); substitute that tag for the
	     variable in the name tag's may-alias set.  */
	  if (ann->mem_tag_kind == NOT_A_TAG && ann->may_aliases)
	    {
	      tree new_alias;

#if defined ENABLE_CHECKING
	      if (VARRAY_ACTIVE_SIZE (ann->may_aliases) != 1)
		abort ();
#endif
	      new_alias = VARRAY_TREE (ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);
	    }
	}
    }

  sbitmap_free (res);

  if (dump_file)
    fprintf (dump_file,
	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}
1229
1230
1231 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1232
1233 static void
1234 create_alias_map_for (tree var, struct alias_info *ai)
1235 {
1236 struct alias_map_d *alias_map;
1237 alias_map = xcalloc (1, sizeof (*alias_map));
1238 alias_map->var = var;
1239 alias_map->set = get_alias_set (var);
1240 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1241 }
1242
1243
1244 /* Create memory tags for all the dereferenced pointers and build the
1245 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1246 sets. Based on the address escape and points-to information collected
1247 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1248 variables whose address is not needed anymore. */
1249
static void
setup_pointers_and_addressables (struct alias_info *ai)
{
  size_t i, n_vars, num_addressable_vars, num_pointers;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;
  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);

      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    bitmap_set_bit (ai->dereferenced_ptrs_store, var_ann (var)->uid);

	  num_pointers++;
	}
    }

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique type tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = xcalloc (num_addressable_vars,
				  sizeof (struct alias_map_d *));
  ai->pointers = xcalloc (num_pointers, sizeof (struct alias_map_d *));
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Since we will be creating type memory tags within this loop, cache the
     value of NUM_REFERENCED_VARS to avoid processing the additional tags
     unnecessarily.  */
  n_vars = num_referenced_vars;

  for (i = 0; i < n_vars; i++)
    {
      tree var = referenced_var (i);
      var_ann_t v_ann = var_ann (var);

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_may_aliases.  Similarly, type memory tags are already
	 accounted for when we process their associated pointer.  */
      if (v_ann->mem_tag_kind != NOT_A_TAG)
	continue;

      /* Remove the ADDRESSABLE flag from every addressable variable whose
         address is not needed anymore.  This is caused by the propagation
         of ADDR_EXPR constants into INDIRECT_REF expressions and the
         removal of dead pointer assignments done by the early scalar
         cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	{
	  if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
	      && v_ann->mem_tag_kind == NOT_A_TAG
	      && !is_global_var (var))
	    {
	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
	      mark_non_addressable (var);

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      bitmap_set_bit (vars_to_rename, v_ann->uid);
	    }
	  else
	    {
	      /* Add the variable to the set of addressables.  Mostly
		 used when scanning operands for ASM_EXPRs that
		 clobber memory.  In those cases, we need to clobber
		 all call-clobbered variables and all addressables.  */
	      bitmap_set_bit (addressable_vars, v_ann->uid);
	    }
	}

      /* Global variables and addressable locals may be aliased.  Create an
         entry in ADDRESSABLE_VARS for VAR.  */
      if (may_be_aliased (var))
	{
	  create_alias_map_for (var, ai);
	  bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
	}

      /* Add pointer variables that have been dereferenced to the POINTERS
         array and create a type memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* A pointer counts only if it has actually been dereferenced
	     (for a load or a store) somewhere in the function.  */
	  if ((bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid)
	       || bitmap_bit_p (ai->dereferenced_ptrs_load, v_ann->uid)))
	    {
	      tree tag;
	      var_ann_t t_ann;

	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_tmt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The type tag will need to be renamed into SSA
		 afterwards. Note that we cannot do this inside
		 get_tmt_for because aliasing may run multiple times
		 and we only create type tags the first time.  */
	      bitmap_set_bit (vars_to_rename, t_ann->uid);

	      /* Associate the tag with pointer VAR.  */
	      v_ann->type_mem_tag = tag;

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid))
		bitmap_set_bit (ai->written_vars, t_ann->uid);

	      /* If pointer VAR is a global variable or a PARM_DECL,
		 then its memory tag should be considered a global
		 variable.  */
	      if (TREE_CODE (var) == PARM_DECL || is_global_var (var))
		mark_call_clobbered (tag);

	      /* All the dereferences of pointer VAR count as
		 references of TAG.  Since TAG can be associated with
		 several pointers, add the dereferences of VAR to the
		 TAG.  We may need to grow AI->NUM_REFERENCES because
		 we have been adding name and type tags.  */
	      if (t_ann->uid >= VARRAY_SIZE (ai->num_references))
		VARRAY_GROW (ai->num_references, t_ann->uid + 10);

	      VARRAY_UINT (ai->num_references, t_ann->uid)
		+= VARRAY_UINT (ai->num_references, v_ann->uid);
	    }
	  else
	    {
	      /* The pointer has not been dereferenced.  If it had a
		 type memory tag, remove it and mark the old tag for
		 renaming to remove it out of the IL.  */
	      var_ann_t ann = var_ann (var);
	      tree tag = ann->type_mem_tag;
	      if (tag)
		{
		  bitmap_set_bit (vars_to_rename, var_ann (tag)->uid);
		  ann->type_mem_tag = NULL_TREE;
		}
	    }
	}
    }

  /* If we found no addressable variables, but we have more than one
     pointer, we will need to check for conflicts between the
     pointers.  Otherwise, we would miss alias relations as in
     testsuite/gcc.dg/tree-ssa/20040319-1.c:

		struct bar { int count;  int *arr;};

		void foo (struct bar *b)
		{
		  b->count = 0;
		  *(b->arr) = 2;
		  if (b->count == 0)
		    abort ();
		}

     b->count and *(b->arr) could be aliased if b->arr == &b->count.
     To do this, we add all the memory tags for the pointers in
     AI->POINTERS to AI->ADDRESSABLE_VARS, so that
     compute_flow_insensitive_aliasing will naturally compare every
     pointer to every type tag.  */
  if (ai->num_addressable_vars == 0
      && ai->num_pointers > 1)
    {
      free (ai->addressable_vars);
      ai->addressable_vars = xcalloc (ai->num_pointers,
				      sizeof (struct alias_map_d *));
      ai->num_addressable_vars = 0;
      for (i = 0; i < ai->num_pointers; i++)
	{
	  struct alias_map_d *p = ai->pointers[i];
	  tree tag = var_ann (p->var)->type_mem_tag;
	  create_alias_map_for (tag, ai);
	}
    }
}
1439
1440
1441 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1442 every call site, we need to emit V_MAY_DEF expressions to represent the
1443 clobbering effects of the call for variables whose address escapes the
1444 current function.
1445
1446 One approach is to group all call-clobbered variables into a single
1447 representative that is used as an alias of every call-clobbered variable
1448 (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
1449 references to any call clobbered variable is a reference to .GLOBAL_VAR.
1450
1451 The second approach is to emit a clobbering V_MAY_DEF for every
1452 call-clobbered variable at call sites. This is the preferred way in terms
1453 of optimization opportunities but it may create too many V_MAY_DEF operands
1454 if there are many call clobbered variables and function calls in the
1455 function.
1456
1457 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1458 function calls found by the number of call-clobbered variables. If that
1459 product is beyond a certain threshold, as determined by the parameterized
1460 values shown below, we use .GLOBAL_VAR.
1461
1462 FIXME. This heuristic should be improved. One idea is to use several
1463 .GLOBAL_VARs of different types instead of a single one. The thresholds
1464 have been derived from a typical bootstrap cycle, including all target
1465 libraries. Compile times were found increase by ~1% compared to using
1466 .GLOBAL_VAR. */
1467
static void
maybe_create_global_var (struct alias_info *ai)
{
  size_t i, n_clobbered;

  /* No need to create it, if we have one already.  */
  if (global_var == NULL_TREE)
    {
      /* Count all the call-clobbered variables.  */
      n_clobbered = 0;
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, n_clobbered++);

      /* Create .GLOBAL_VAR if we have too many call-clobbered
	 variables.  We also create .GLOBAL_VAR when there are no
	 call-clobbered variables to prevent code motion
	 transformations from re-arranging function calls that may
	 have side effects.  For instance,

		foo ()
		{
		  int a = f ();
		  g ();
		  h (a);
		}

	 There are no call-clobbered variables in foo(), so it would
	 be entirely possible for a pass to want to move the call to
	 f() after the call to g().  If f() has side effects, that
	 would be wrong.  Creating .GLOBAL_VAR in this case will
	 insert VDEFs for it and prevent such transformations.  */
      if (n_clobbered == 0
	  || ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
	create_global_var ();
    }

  /* If the function has calls to clobbering functions and .GLOBAL_VAR has
     been created, make it an alias for all call-clobbered variables.
     Each such variable also needs to be renamed into SSA form.  */
  if (global_var)
    EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
      {
	tree var = referenced_var (i);
	if (var != global_var)
	  {
	     add_may_alias (var, global_var);
	     bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
	  }
      });
}
1516
1517
1518 /* Return TRUE if pointer PTR may point to variable VAR.
1519
1520 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
1521 This is needed because when checking for type conflicts we are
1522 interested in the alias set of the memory location pointed-to by
1523 PTR. The alias set of PTR itself is irrelevant.
1524
1525 VAR_ALIAS_SET is the alias set for VAR. */
1526
static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set)
{
  tree mem;
  var_ann_t v_ann, m_ann;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  MEM is PTR's type
     memory tag, i.e. the memory location '*PTR'.  */
  mem = var_ann (ptr)->type_mem_tag;
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  v_ann = var_ann (var);
  m_ann = var_ann (mem);

#if defined ENABLE_CHECKING
  if (m_ann->mem_tag_kind != TYPE_TAG)
    abort ();
#endif

  alias_stats.tbaa_queries++;

  /* If VAR is a pointer with the same alias set as PTR, then dereferencing
     PTR can't possibly affect VAR.  Note, that we are specifically testing
     for PTR's alias set here, not its pointed-to type.  We also can't
     do this check with relaxed aliasing enabled.  */
  if (POINTER_TYPE_P (TREE_TYPE (var))
      && var_alias_set != 0)
    {
      HOST_WIDE_INT ptr_alias_set = get_alias_set (ptr);
      if (ptr_alias_set == var_alias_set)
	{
	  alias_stats.alias_noalias++;
	  alias_stats.tbaa_resolved++;
	  return false;
	}
    }

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
    {
      /* Handle aliases to structure fields.  If either VAR or MEM are
	 aggregate types, they may not have conflicting types, but one of
	 the structures could contain a pointer to the other one.

	 For instance, given

		MEM -> struct P *p;
		VAR -> struct Q *q;

	 It may happen that '*p' and '*q' can't alias because 'struct P'
	 and 'struct Q' have non-conflicting alias sets.  However, it could
	 happen that one of the fields in 'struct P' is a 'struct Q *' or
	 vice-versa.

	 Therefore, we also need to check if 'struct P' aliases 'struct Q *'
	 or 'struct Q' aliases 'struct P *'.  Notice, that since GIMPLE
	 does not have more than one-level pointers, we don't need to
	 recurse into the structures.  */
      if (AGGREGATE_TYPE_P (TREE_TYPE (mem))
	  || AGGREGATE_TYPE_P (TREE_TYPE (var)))
	{
	  tree ptr_to_var;

	  /* For arrays, the relevant pointer type is a pointer to the
	     element type, not to the array itself.  */
	  if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	    ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (var)));
	  else
	    ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (var));

	  /* If no pointer-to VAR exists, then MEM can't alias VAR.  */
	  if (ptr_to_var == NULL_TREE)
	    {
	      alias_stats.alias_noalias++;
	      alias_stats.tbaa_resolved++;
	      return false;
	    }

	  /* If MEM doesn't alias a pointer to VAR and VAR doesn't alias
	     PTR, then PTR can't alias VAR.  */
	  if (!alias_sets_conflict_p (mem_alias_set, get_alias_set (ptr_to_var))
	      && !alias_sets_conflict_p (var_alias_set, get_alias_set (ptr)))
	    {
	      alias_stats.alias_noalias++;
	      alias_stats.tbaa_resolved++;
	      return false;
	    }
	}
      else
	{
	  /* Neither side is an aggregate: the non-conflicting alias
	     sets are decisive.  */
	  alias_stats.alias_noalias++;
	  alias_stats.tbaa_resolved++;
	  return false;
	}
    }

  if (flag_tree_points_to != PTA_NONE)
      alias_stats.pta_queries++;

  /* If -ftree-points-to is given, check if PTR may point to VAR.  */
  if (flag_tree_points_to == PTA_ANDERSEN
      && !ptr_may_alias_var (ptr, var))
    {
      alias_stats.alias_noalias++;
      alias_stats.pta_resolved++;
      return false;
    }

  /* All disambiguation attempts failed; assume they may alias.  */
  alias_stats.alias_mayalias++;
  return true;
}
1644
1645
1646 /* Add ALIAS to the set of variables that may alias VAR. */
1647
1648 static void
1649 add_may_alias (tree var, tree alias)
1650 {
1651 size_t i;
1652 var_ann_t v_ann = get_var_ann (var);
1653 var_ann_t a_ann = get_var_ann (alias);
1654
1655 #if defined ENABLE_CHECKING
1656 if (var == alias)
1657 abort ();
1658 #endif
1659
1660 if (v_ann->may_aliases == NULL)
1661 VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
1662
1663 /* Avoid adding duplicates. */
1664 for (i = 0; i < VARRAY_ACTIVE_SIZE (v_ann->may_aliases); i++)
1665 if (alias == VARRAY_TREE (v_ann->may_aliases, i))
1666 return;
1667
1668 VARRAY_PUSH_TREE (v_ann->may_aliases, alias);
1669 a_ann->is_alias_tag = 1;
1670 }
1671
1672
1673 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1674
1675 static void
1676 replace_may_alias (tree var, size_t i, tree new_alias)
1677 {
1678 var_ann_t v_ann = var_ann (var);
1679 VARRAY_TREE (v_ann->may_aliases, i) = new_alias;
1680 }
1681
1682
1683 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1684
1685 static void
1686 set_pt_anything (tree ptr)
1687 {
1688 struct ptr_info_def *pi = get_ptr_info (ptr);
1689
1690 pi->pt_anything = 1;
1691 pi->pt_malloc = 0;
1692
1693 /* The pointer used to have a name tag, but we now found it pointing
1694 to an arbitrary location. The name tag needs to be renamed and
1695 disassociated from PTR. */
1696 if (pi->name_mem_tag)
1697 {
1698 bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
1699 pi->name_mem_tag = NULL_TREE;
1700 }
1701 }
1702
1703
1704 /* Mark pointer PTR as pointing to a malloc'd memory area. */
1705
1706 static void
1707 set_pt_malloc (tree ptr)
1708 {
1709 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1710
1711 /* If the pointer has already been found to point to arbitrary
1712 memory locations, it is unsafe to mark it as pointing to malloc. */
1713 if (pi->pt_anything)
1714 return;
1715
1716 pi->pt_malloc = 1;
1717 }
1718
1719
1720 /* Given two pointers DEST and ORIG. Merge the points-to information in
1721 ORIG into DEST. AI is as in collect_points_to_info. */
1722
static void
merge_pointed_to_info (struct alias_info *ai, tree dest, tree orig)
{
  struct ptr_info_def *dest_pi, *orig_pi;

  /* Make sure we have points-to information for ORIG.  */
  collect_points_to_info_for (ai, orig);

  dest_pi = get_ptr_info (dest);
  orig_pi = SSA_NAME_PTR_INFO (orig);

  if (orig_pi)
    {
      /* DEST points to global memory if ORIG does.  */
      dest_pi->pt_global_mem |= orig_pi->pt_global_mem;

      /* Notice that we never merge PT_MALLOC.  This attribute is only
	 true if the pointer is the result of a malloc() call.
	 Otherwise, we can end up in this situation:

	 P_i = malloc ();
	 ...
	 P_j = P_i + X;

	 P_j would be marked as PT_MALLOC, which is wrong because
	 PT_MALLOC implies that the pointer may not point to another
	 variable.

	 FIXME 1: Subsequent analysis may determine that P_j
	 cannot alias anything else, but we are being conservative
	 here.

	 FIXME 2: If the merging comes from a copy assignment, we
	 ought to merge PT_MALLOC, but then both pointers would end up
	 getting different name tags because create_name_tags is not
	 smart enough to determine that the two come from the same
	 malloc call.  Copy propagation before aliasing should cure
	 this.  */
      dest_pi->pt_malloc = 0;

      /* If ORIG points to malloc'd or arbitrary memory, DEST must be
	 conservatively marked as pointing anywhere.  */
      if (orig_pi->pt_malloc || orig_pi->pt_anything)
	set_pt_anything (dest);

      /* Union ORIG's points-to set into DEST's, unless DEST already
	 points anywhere (in which case the set is meaningless).  */
      if (!dest_pi->pt_anything
	  && orig_pi->pt_vars
	  && bitmap_first_set_bit (orig_pi->pt_vars) >= 0)
	{
	  if (dest_pi->pt_vars == NULL)
	    {
	      dest_pi->pt_vars = BITMAP_GGC_ALLOC ();
	      bitmap_copy (dest_pi->pt_vars, orig_pi->pt_vars);
	    }
	  else
	    bitmap_a_or_b (dest_pi->pt_vars,
			   dest_pi->pt_vars,
			   orig_pi->pt_vars);
	}
    }
  else
    set_pt_anything (dest);
}
1783
1784
1785 /* Add VALUE to the list of expressions pointed-to by PTR. */
1786
1787 static void
1788 add_pointed_to_expr (tree ptr, tree value)
1789 {
1790 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1791 value = TREE_OPERAND (value, 0);
1792
1793 #if defined ENABLE_CHECKING
1794 /* Pointer variables should have been handled by merge_pointed_to_info. */
1795 if (TREE_CODE (value) == SSA_NAME
1796 && POINTER_TYPE_P (TREE_TYPE (value)))
1797 abort ();
1798 #endif
1799
1800 get_ptr_info (ptr);
1801
1802 /* If VALUE is the result of a malloc-like call, then the area pointed to
1803 PTR is guaranteed to not alias with anything else. */
1804 if (TREE_CODE (value) == CALL_EXPR
1805 && (call_expr_flags (value) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
1806 set_pt_malloc (ptr);
1807 else
1808 set_pt_anything (ptr);
1809
1810 if (dump_file)
1811 {
1812 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1813
1814 fprintf (dump_file, "Pointer ");
1815 print_generic_expr (dump_file, ptr, dump_flags);
1816 fprintf (dump_file, " points to ");
1817 if (pi->pt_malloc)
1818 fprintf (dump_file, "malloc space: ");
1819 else
1820 fprintf (dump_file, "an arbitrary address: ");
1821 print_generic_expr (dump_file, value, dump_flags);
1822 fprintf (dump_file, "\n");
1823 }
1824 }
1825
1826
1827 /* If VALUE is of the form &DECL, add DECL to the set of variables
1828 pointed-to by PTR. Otherwise, add VALUE as a pointed-to expression by
1829 PTR. AI is as in collect_points_to_info. */
1830
1831 static void
1832 add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
1833 {
1834 struct ptr_info_def *pi = get_ptr_info (ptr);
1835 tree pt_var;
1836 size_t uid;
1837
1838 #if defined ENABLE_CHECKING
1839 if (TREE_CODE (value) != ADDR_EXPR)
1840 abort ();
1841 #endif
1842
1843 pt_var = TREE_OPERAND (value, 0);
1844 if (TREE_CODE_CLASS (TREE_CODE (pt_var)) == 'r')
1845 pt_var = get_base_address (pt_var);
1846
1847 if (pt_var && SSA_VAR_P (pt_var))
1848 {
1849 uid = var_ann (pt_var)->uid;
1850 bitmap_set_bit (ai->addresses_needed, uid);
1851
1852 if (pi->pt_vars == NULL)
1853 pi->pt_vars = BITMAP_GGC_ALLOC ();
1854 bitmap_set_bit (pi->pt_vars, uid);
1855
1856 /* If the variable is a global, mark the pointer as pointing to
1857 global memory (which will make its tag a global variable). */
1858 if (is_global_var (pt_var))
1859 pi->pt_global_mem = 1;
1860 }
1861 }
1862
1863
1864 /* Callback for walk_use_def_chains to gather points-to information from the
1865 SSA web.
1866
1867 VAR is an SSA variable or a GIMPLE expression.
1868
1869 STMT is the statement that generates the SSA variable or, if STMT is a
1870 PHI_NODE, VAR is one of the PHI arguments.
1871
1872 DATA is a pointer to a structure of type ALIAS_INFO. */
1873
static bool
collect_points_to_info_r (tree var, tree stmt, void *data)
{
  struct alias_info *ai = (struct alias_info *) data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      tree rhs = TREE_OPERAND (stmt, 1);
      STRIP_NOPS (rhs);

      /* Found P_i = ADDR_EXPR */
      if (TREE_CODE (rhs) == ADDR_EXPR)
	add_pointed_to_var (ai, var, rhs);

      /* Found P_i = Q_j.  */
      else if (TREE_CODE (rhs) == SSA_NAME
	       && POINTER_TYPE_P (TREE_TYPE (rhs)))
	merge_pointed_to_info (ai, var, rhs);

      /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
      else if (TREE_CODE (rhs) == PLUS_EXPR
	       || TREE_CODE (rhs) == MINUS_EXPR)
	{
	  tree op0 = TREE_OPERAND (rhs, 0);
	  tree op1 = TREE_OPERAND (rhs, 1);

	  /* Both operands may be of pointer type.  FIXME: Shouldn't
	     we just expect PTR + OFFSET always?  */
	  if (POINTER_TYPE_P (TREE_TYPE (op0)))
	    {
	      if (TREE_CODE (op0) == SSA_NAME)
		merge_pointed_to_info (ai, var, op0);
	      else if (TREE_CODE (op0) == ADDR_EXPR)
		add_pointed_to_var (ai, var, op0);
	      else
		add_pointed_to_expr (var, op0);
	    }

	  if (POINTER_TYPE_P (TREE_TYPE (op1)))
	    {
	      if (TREE_CODE (op1) == SSA_NAME)
		merge_pointed_to_info (ai, var, op1);
	      else if (TREE_CODE (op1) == ADDR_EXPR)
		add_pointed_to_var (ai, var, op1);
	      else
		add_pointed_to_expr (var, op1);
	    }

	  /* Neither operand is a pointer?  VAR can be pointing
	     anywhere.  FIXME: Is this right?  If we get here, we
	     found PTR = INT_CST + INT_CST.  */
	  if (!POINTER_TYPE_P (TREE_TYPE (op0))
	      && !POINTER_TYPE_P (TREE_TYPE (op1)))
	    add_pointed_to_expr (var, rhs);
	}

      /* Something else.  */
      else
	add_pointed_to_expr (var, rhs);
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    {
      /* Pointers defined by __asm__ statements can point anywhere.  */
      set_pt_anything (var);
    }
  else if (IS_EMPTY_STMT (stmt))
    {
      /* VAR has no defining statement; it is an incoming value such
	 as a PARM_DECL or a variable with a static initializer.  */
      tree decl = SSA_NAME_VAR (var);

      if (TREE_CODE (decl) == PARM_DECL)
	add_pointed_to_expr (var, decl);
      else if (DECL_INITIAL (decl))
	add_pointed_to_var (ai, var, DECL_INITIAL (decl));
      else
	add_pointed_to_expr (var, decl);
    }
  else if (TREE_CODE (stmt) == PHI_NODE)
    {
      /* If STMT is a PHI node, then VAR is one of its arguments.  The
	 variable that we are analyzing is the LHS of the PHI node.  */
      tree lhs = PHI_RESULT (stmt);

      if (TREE_CODE (var) == ADDR_EXPR)
	add_pointed_to_var (ai, lhs, var);
      else if (TREE_CODE (var) == SSA_NAME)
	merge_pointed_to_info (ai, lhs, var);
      else if (is_gimple_min_invariant (var))
	add_pointed_to_expr (lhs, var);
      else
	abort ();
    }
  else
    abort ();

  /* Returning false keeps walk_use_def_chains walking.  */
  return false;
}
1977
1978
1979 /* Return true if STMT is an "escape" site from the current function. Escape
1980 sites those statements which might expose the address of a variable
1981 outside the current function. STMT is an escape site iff:
1982
1983 1- STMT is a function call, or
1984 2- STMT is an __asm__ expression, or
1985 3- STMT is an assignment to a non-local variable, or
1986 4- STMT is a return statement.
1987
1988 If NUM_CALLS_P is not NULL, the counter is incremented if STMT contains
1989 a function call. */
1990
1991 static bool
1992 is_escape_site (tree stmt, size_t *num_calls_p)
1993 {
1994 if (get_call_expr_in (stmt) != NULL_TREE)
1995 {
1996 if (num_calls_p)
1997 (*num_calls_p)++;
1998
1999 return true;
2000 }
2001 else if (TREE_CODE (stmt) == ASM_EXPR)
2002 return true;
2003 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2004 {
2005 tree lhs = TREE_OPERAND (stmt, 0);
2006
2007 /* Get to the base of _REF nodes. */
2008 if (TREE_CODE (lhs) != SSA_NAME)
2009 lhs = get_base_address (lhs);
2010
2011 /* If we couldn't recognize the LHS of the assignment, assume that it
2012 is a non-local store. */
2013 if (lhs == NULL_TREE)
2014 return true;
2015
2016 /* If the LHS is an SSA name, it can't possibly represent a non-local
2017 memory store. */
2018 if (TREE_CODE (lhs) == SSA_NAME)
2019 return false;
2020
2021 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2022 local variables we cannot be sure if it will escape, because we
2023 don't have information about objects not in SSA form. Need to
2024 implement something along the lines of
2025
2026 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2027 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2028 Conference on Object-Oriented Programming Systems, Languages, and
2029 Applications (OOPSLA), pp. 1-19, 1999. */
2030 return true;
2031 }
2032 else if (TREE_CODE (stmt) == RETURN_EXPR)
2033 return true;
2034
2035 return false;
2036 }
2037
2038
2039 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2040 is considered to represent all the pointers whose pointed-to types are
2041 in the same alias set class. Otherwise, the tag represents a single
2042 SSA_NAME pointer variable. */
2043
2044 static tree
2045 create_memory_tag (tree type, bool is_type_tag)
2046 {
2047 var_ann_t ann;
2048 tree tag = create_tmp_var_raw (type, (is_type_tag) ? "TMT" : "NMT");
2049
2050 /* By default, memory tags are local variables. Alias analysis will
2051 determine whether they should be considered globals. */
2052 DECL_CONTEXT (tag) = current_function_decl;
2053
2054 /* If the pointed-to type is volatile, so is the tag. */
2055 TREE_THIS_VOLATILE (tag) = TREE_THIS_VOLATILE (type);
2056
2057 /* Memory tags are by definition addressable. This also prevents
2058 is_gimple_ref frome confusing memory tags with optimizable
2059 variables. */
2060 TREE_ADDRESSABLE (tag) = 1;
2061
2062 ann = get_var_ann (tag);
2063 ann->mem_tag_kind = (is_type_tag) ? TYPE_TAG : NAME_TAG;
2064 ann->type_mem_tag = NULL_TREE;
2065
2066 /* Add the tag to the symbol table. */
2067 add_referenced_tmp_var (tag);
2068
2069 return tag;
2070 }
2071
2072
2073 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2074 This is used if P_i has been found to point to a specific set of
2075 variables or to a non-aliased memory location like the address returned
2076 by malloc functions. */
2077
2078 static tree
2079 get_nmt_for (tree ptr)
2080 {
2081 struct ptr_info_def *pi = get_ptr_info (ptr);
2082 tree tag = pi->name_mem_tag;
2083
2084 if (tag == NULL_TREE)
2085 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2086
2087 /* If PTR is a PARM_DECL, it points to a global variable or malloc,
2088 then its name tag should be considered a global variable. */
2089 if (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
2090 || pi->pt_malloc
2091 || pi->pt_global_mem)
2092 mark_call_clobbered (tag);
2093
2094 return tag;
2095 }
2096
2097
2098 /* Return the type memory tag associated to pointer PTR. A memory tag is an
2099 artificial variable that represents the memory location pointed-to by
2100 PTR. It is used to model the effects of pointer de-references on
2101 addressable variables.
2102
2103 AI points to the data gathered during alias analysis. This function
2104 populates the array AI->POINTERS. */
2105
2106 static tree
2107 get_tmt_for (tree ptr, struct alias_info *ai)
2108 {
2109 size_t i;
2110 tree tag;
2111 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2112 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2113
2114 /* To avoid creating unnecessary memory tags, only create one memory tag
2115 per alias set class. Note that it may be tempting to group
2116 memory tags based on conflicting alias sets instead of
2117 equivalence. That would be wrong because alias sets are not
2118 necessarily transitive (as demonstrated by the libstdc++ test
2119 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2120 such that conflicts (A, B) == true and conflicts (A, C) == true,
2121 it does not necessarily follow that conflicts (B, C) == true. */
2122 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2123 {
2124 struct alias_map_d *curr = ai->pointers[i];
2125 if (tag_set == curr->set
2126 && (flag_tree_points_to == PTA_NONE
2127 || same_points_to_set (curr->var, ptr)))
2128 {
2129 tag = var_ann (curr->var)->type_mem_tag;
2130 break;
2131 }
2132 }
2133
2134 /* If VAR cannot alias with any of the existing memory tags, create a new
2135 tag for PTR and add it to the POINTERS array. */
2136 if (tag == NULL_TREE)
2137 {
2138 struct alias_map_d *alias_map;
2139
2140 /* If PTR did not have a type tag already, create a new TMT.*
2141 artificial variable representing the memory location
2142 pointed-to by PTR. */
2143 if (var_ann (ptr)->type_mem_tag == NULL_TREE)
2144 tag = create_memory_tag (tag_type, true);
2145 else
2146 tag = var_ann (ptr)->type_mem_tag;
2147
2148 /* Add PTR to the POINTERS array. Note that we are not interested in
2149 PTR's alias set. Instead, we cache the alias set for the memory that
2150 PTR points to. */
2151 alias_map = xcalloc (1, sizeof (*alias_map));
2152 alias_map->var = ptr;
2153 alias_map->set = tag_set;
2154 ai->pointers[ai->num_pointers++] = alias_map;
2155 }
2156
2157 #if defined ENABLE_CHECKING
2158 /* Make sure that the type tag has the same alias set as the
2159 pointed-to type. */
2160 if (tag_set != get_alias_set (tag))
2161 abort ();
2162 #endif
2163
2164
2165 return tag;
2166 }
2167
2168
2169 /* Create GLOBAL_VAR, an artificial global variable to act as a
2170 representative of all the variables that may be clobbered by function
2171 calls. */
2172
2173 static void
2174 create_global_var (void)
2175 {
2176 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2177 size_type_node);
2178 DECL_ARTIFICIAL (global_var) = 1;
2179 TREE_READONLY (global_var) = 0;
2180 DECL_EXTERNAL (global_var) = 1;
2181 TREE_STATIC (global_var) = 1;
2182 TREE_USED (global_var) = 1;
2183 DECL_CONTEXT (global_var) = NULL_TREE;
2184 TREE_THIS_VOLATILE (global_var) = 0;
2185 TREE_ADDRESSABLE (global_var) = 0;
2186
2187 add_referenced_tmp_var (global_var);
2188 bitmap_set_bit (vars_to_rename, var_ann (global_var)->uid);
2189 }
2190
2191
2192 /* Dump alias statistics on FILE. */
2193
2194 static void
2195 dump_alias_stats (FILE *file)
2196 {
2197 const char *funcname
2198 = lang_hooks.decl_printable_name (current_function_decl, 2);
2199 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2200 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2201 fprintf (file, "Total alias mayalias results:\t%u\n",
2202 alias_stats.alias_mayalias);
2203 fprintf (file, "Total alias noalias results:\t%u\n",
2204 alias_stats.alias_noalias);
2205 fprintf (file, "Total simple queries:\t%u\n",
2206 alias_stats.simple_queries);
2207 fprintf (file, "Total simple resolved:\t%u\n",
2208 alias_stats.simple_resolved);
2209 fprintf (file, "Total TBAA queries:\t%u\n",
2210 alias_stats.tbaa_queries);
2211 fprintf (file, "Total TBAA resolved:\t%u\n",
2212 alias_stats.tbaa_resolved);
2213 fprintf (file, "Total PTA queries:\t%u\n",
2214 alias_stats.pta_queries);
2215 fprintf (file, "Total PTA resolved:\t%u\n",
2216 alias_stats.pta_resolved);
2217 }
2218
2219
2220 /* Dump alias information on FILE. */
2221
2222 void
2223 dump_alias_info (FILE *file)
2224 {
2225 size_t i;
2226 const char *funcname
2227 = lang_hooks.decl_printable_name (current_function_decl, 2);
2228
2229 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2230
2231 fprintf (file, "Aliased symbols\n\n");
2232 for (i = 0; i < num_referenced_vars; i++)
2233 {
2234 tree var = referenced_var (i);
2235 if (may_be_aliased (var))
2236 dump_variable (file, var);
2237 }
2238
2239 fprintf (file, "\nDereferenced pointers\n\n");
2240 for (i = 0; i < num_referenced_vars; i++)
2241 {
2242 tree var = referenced_var (i);
2243 var_ann_t ann = var_ann (var);
2244 if (ann->type_mem_tag)
2245 dump_variable (file, var);
2246 }
2247
2248 fprintf (file, "\nType memory tags\n\n");
2249 for (i = 0; i < num_referenced_vars; i++)
2250 {
2251 tree var = referenced_var (i);
2252 var_ann_t ann = var_ann (var);
2253 if (ann->mem_tag_kind == TYPE_TAG)
2254 dump_variable (file, var);
2255 }
2256
2257 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2258
2259 fprintf (file, "SSA_NAME pointers\n\n");
2260 for (i = 1; i < num_ssa_names; i++)
2261 {
2262 tree ptr = ssa_name (i);
2263 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2264 if (!SSA_NAME_IN_FREE_LIST (ptr)
2265 && pi
2266 && pi->name_mem_tag)
2267 dump_points_to_info_for (file, ptr);
2268 }
2269
2270 fprintf (file, "\nName memory tags\n\n");
2271 for (i = 0; i < num_referenced_vars; i++)
2272 {
2273 tree var = referenced_var (i);
2274 var_ann_t ann = var_ann (var);
2275 if (ann->mem_tag_kind == NAME_TAG)
2276 dump_variable (file, var);
2277 }
2278
2279 fprintf (file, "\n");
2280 }
2281
2282
/* Convenience wrapper: dump alias information on stderr.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
2290
2291
2292 /* Return the alias information associated with pointer T. It creates a
2293 new instance if none existed. */
2294
2295 static struct ptr_info_def *
2296 get_ptr_info (tree t)
2297 {
2298 struct ptr_info_def *pi;
2299
2300 #if defined ENABLE_CHECKING
2301 if (!POINTER_TYPE_P (TREE_TYPE (t)))
2302 abort ();
2303 #endif
2304
2305 pi = SSA_NAME_PTR_INFO (t);
2306 if (pi == NULL)
2307 {
2308 pi = ggc_alloc (sizeof (*pi));
2309 memset ((void *)pi, 0, sizeof (*pi));
2310 SSA_NAME_PTR_INFO (t) = pi;
2311 }
2312
2313 return pi;
2314 }
2315
2316
2317 /* Dump points-to information for SSA_NAME PTR into FILE. */
2318
2319 void
2320 dump_points_to_info_for (FILE *file, tree ptr)
2321 {
2322 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2323
2324 print_generic_expr (file, ptr, dump_flags);
2325
2326 if (pi)
2327 {
2328 if (pi->name_mem_tag)
2329 {
2330 fprintf (file, ", name memory tag: ");
2331 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2332 }
2333
2334 if (pi->is_dereferenced)
2335 fprintf (file, ", is dereferenced");
2336
2337 if (pi->value_escapes_p)
2338 fprintf (file, ", its value escapes");
2339
2340 if (pi->pt_anything)
2341 fprintf (file, ", points-to anything");
2342
2343 if (pi->pt_malloc)
2344 fprintf (file, ", points-to malloc");
2345
2346 if (pi->pt_vars)
2347 {
2348 unsigned ix;
2349
2350 fprintf (file, ", points-to vars: { ");
2351 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix,
2352 {
2353 print_generic_expr (file, referenced_var (ix), dump_flags);
2354 fprintf (file, " ");
2355 });
2356 fprintf (file, "}");
2357 }
2358 }
2359
2360 fprintf (file, "\n");
2361 }
2362
2363
2364 /* Dump points-to information for VAR into stderr. */
2365
2366 void
2367 debug_points_to_info_for (tree var)
2368 {
2369 dump_points_to_info_for (stderr, var);
2370 }
2371
2372
2373 /* Dump points-to information into FILE. NOTE: This function is slow, as
2374 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2375
2376 void
2377 dump_points_to_info (FILE *file)
2378 {
2379 basic_block bb;
2380 block_stmt_iterator si;
2381 size_t i;
2382 ssa_op_iter iter;
2383 const char *fname =
2384 lang_hooks.decl_printable_name (current_function_decl, 2);
2385
2386 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2387
2388 /* First dump points-to information for the default definitions of
2389 pointer variables. This is necessary because default definitions are
2390 not part of the code. */
2391 for (i = 0; i < num_referenced_vars; i++)
2392 {
2393 tree var = referenced_var (i);
2394 if (POINTER_TYPE_P (TREE_TYPE (var)))
2395 {
2396 var_ann_t ann = var_ann (var);
2397 if (ann->default_def)
2398 dump_points_to_info_for (file, ann->default_def);
2399 }
2400 }
2401
2402 /* Dump points-to information for every pointer defined in the program. */
2403 FOR_EACH_BB (bb)
2404 {
2405 tree phi;
2406
2407 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2408 {
2409 tree ptr = PHI_RESULT (phi);
2410 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2411 dump_points_to_info_for (file, ptr);
2412 }
2413
2414 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2415 {
2416 tree stmt = bsi_stmt (si);
2417 tree def;
2418 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2419 if (POINTER_TYPE_P (TREE_TYPE (def)))
2420 dump_points_to_info_for (file, def);
2421 }
2422 }
2423
2424 fprintf (file, "\n");
2425 }
2426
2427
/* Convenience wrapper: dump points-to information for the current
   function into stderr.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
2435
2436 /* Dump to FILE the list of variables that may be aliasing VAR. */
2437
2438 void
2439 dump_may_aliases_for (FILE *file, tree var)
2440 {
2441 varray_type aliases;
2442
2443 if (TREE_CODE (var) == SSA_NAME)
2444 var = SSA_NAME_VAR (var);
2445
2446 aliases = var_ann (var)->may_aliases;
2447 if (aliases)
2448 {
2449 size_t i;
2450 fprintf (file, "{ ");
2451 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2452 {
2453 print_generic_expr (file, VARRAY_TREE (aliases, i), dump_flags);
2454 fprintf (file, " ");
2455 }
2456 fprintf (file, "}");
2457 }
2458 }
2459
2460
2461 /* Dump to stderr the list of variables that may be aliasing VAR. */
2462
2463 void
2464 debug_may_aliases_for (tree var)
2465 {
2466 dump_may_aliases_for (stderr, var);
2467 }
2468
2469 /* Return true if VAR may be aliased. */
2470
2471 bool
2472 may_be_aliased (tree var)
2473 {
2474 /* Obviously. */
2475 if (TREE_ADDRESSABLE (var))
2476 return true;
2477
2478 /* Globally visible variables can have their addresses taken by other
2479 translation units. */
2480 if (DECL_EXTERNAL (var) || TREE_PUBLIC (var))
2481 return true;
2482
2483 /* Automatic variables can't have their addresses escape any other way.
2484 This must be after the check for global variables, as extern declarations
2485 do not have TREE_STATIC set. */
2486 if (!TREE_STATIC (var))
2487 return false;
2488
2489 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2490 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2491 we can only be sure the variable isn't addressable if it's local to the
2492 current function. */
2493 if (flag_unit_at_a_time)
2494 return false;
2495 if (decl_function_context (var) == current_function_decl)
2496 return false;
2497
2498 return true;
2499 }
2500