re PR tree-optimization/26804 (Alias Time explosion)
[gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49
50 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
51 aliasing.  */
52 static bitmap_obstack alias_obstack;
53
54 /* 'true' after aliases have been computed (see compute_may_aliases). */
55 bool aliases_computed_p;
56
57 /* Structure to map a variable to its alias set and keep track of the
58 virtual operands that will be needed to represent it. */
59 struct alias_map_d
60 {
61 /* Variable and its alias set. */
62 tree var;
63 HOST_WIDE_INT set;
64
65 /* Total number of virtual operands that will be needed to represent
66 all the aliases of VAR. */
67 long total_alias_vops;
68
69 /* Nonzero if the aliases for this memory tag have been grouped
70 already. Used in group_aliases. */
71 unsigned int grouped_p : 1;
72
73 /* Set of variables aliased with VAR. This is the exact same
74 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
75 bitmap form to speed up alias grouping. */
76 bitmap may_aliases;
77 };
78
79
80 /* Counters used to display statistics on alias analysis. */
81 struct alias_stats_d
82 {
83 unsigned int alias_queries;
84 unsigned int alias_mayalias;
85 unsigned int alias_noalias;
86 unsigned int simple_queries;
87 unsigned int simple_resolved;
88 unsigned int tbaa_queries;
89 unsigned int tbaa_resolved;
90 unsigned int structnoaddress_queries;
91 unsigned int structnoaddress_resolved;
92 };
93
94
95 /* Local variables. */
96 static struct alias_stats_d alias_stats;
97
98 /* Local functions. */
99 static void compute_flow_insensitive_aliasing (struct alias_info *);
100 static void dump_alias_stats (FILE *);
101 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
102 static tree create_memory_tag (tree type, bool is_type_tag);
103 static tree get_tmt_for (tree, struct alias_info *);
104 static tree get_nmt_for (tree);
105 static void add_may_alias (tree, tree);
106 static void replace_may_alias (tree, size_t, tree);
107 static struct alias_info *init_alias_info (void);
108 static void delete_alias_info (struct alias_info *);
109 static void compute_flow_sensitive_aliasing (struct alias_info *);
110 static void setup_pointers_and_addressables (struct alias_info *);
111 static void create_global_var (void);
112 static void maybe_create_global_var (struct alias_info *ai);
113 static void group_aliases (struct alias_info *);
114 static void set_pt_anything (tree ptr);
115
116 /* Global declarations. */
117
118 /* Call clobbered variables in the function. If bit I is set, then
119 REFERENCED_VARS (I) is call-clobbered. */
120 bitmap call_clobbered_vars;
121
122 /* Addressable variables in the function. If bit I is set, then
123 REFERENCED_VARS (I) has had its address taken. Note that
124 CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
125 addressable variable is not necessarily call-clobbered (e.g., a
126 local addressable whose address does not escape) and not all
127 call-clobbered variables are addressable (e.g., a local static
128 variable). */
129 bitmap addressable_vars;
130
131 /* When the program has too many call-clobbered variables and call-sites,
132 this variable is used to represent the clobbering effects of function
133 calls. In these cases, all the call clobbered variables in the program
134 are forced to alias this variable. This reduces compile times by not
135 having to keep track of too many V_MAY_DEF expressions at call sites. */
136 tree global_var;
137
138 DEF_VEC_I(int);
139 DEF_VEC_ALLOC_I(int,heap);
140
141 /* qsort comparison function to sort type/name tags by DECL_UID. */
142
143 static int
144 sort_tags_by_id (const void *pa, const void *pb)
145 {
146 tree a = *(tree *)pa;
147 tree b = *(tree *)pb;
148
149 return DECL_UID (a) - DECL_UID (b);
150 }
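
/* Illustrative sketch, not part of GCC: the comparator above follows the
   usual qsort convention of returning a negative, zero or positive value.
   Extracted into a self-contained example with a hypothetical 'struct tag'
   it would look like this:

       #include <stdlib.h>

       struct tag { unsigned uid; };

       static int
       cmp_tag_uid (const void *pa, const void *pb)
       {
         const struct tag *a = *(const struct tag *const *) pa;
         const struct tag *b = *(const struct tag *const *) pb;

         // An explicit three-way comparison avoids the overflow a plain
         // subtraction could suffer for widely separated ids.
         return a->uid < b->uid ? -1 : a->uid > b->uid ? 1 : 0;
       }

       // Usage, where 'tags' is an array of 'struct tag *':
       //   qsort (tags, num_tags, sizeof (struct tag *), cmp_tag_uid);
   */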
151
152 /* Initialize WORKLIST to contain those memory tags that are marked call
153 clobbered.  Initialize WORKLIST2 to contain the reasons these
154 memory tags escaped. */
155
156 static void
157 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
158 VEC (int, heap) **worklist2)
159 {
160 referenced_var_iterator rvi;
161 tree curr;
162
163 FOR_EACH_REFERENCED_VAR (curr, rvi)
164 {
165 if (MTAG_P (curr) && is_call_clobbered (curr))
166 {
167 VEC_safe_push (tree, heap, *worklist, curr);
168 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
169 }
170 }
171 }
172
173 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
174 ALIAS is not already marked call clobbered, and is a memory
175 tag. */
176
177 static void
178 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
179 VEC (int, heap) **worklist2,
180 int reason)
181 {
182 if (MTAG_P (alias) && !is_call_clobbered (alias))
183 {
184 VEC_safe_push (tree, heap, *worklist, alias);
185 VEC_safe_push (int, heap, *worklist2, reason);
186 }
187 }
188
189 /* Mark aliases of TAG as call clobbered, and place any tags on the
190 alias list that were not already call clobbered on WORKLIST. */
191
192 static void
193 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
194 VEC (int, heap) **worklist2)
195 {
196 unsigned int i;
197 VEC (tree, gc) *ma;
198 tree entry;
199 var_ann_t ta = var_ann (tag);
200
201 if (!MTAG_P (tag))
202 return;
203 ma = may_aliases (tag);
204 if (!ma)
205 return;
206
207 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
208 {
209 if (!unmodifiable_var_p (entry))
210 {
211 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
212 mark_call_clobbered (entry, ta->escape_mask);
213 }
214 }
215 }
216
217 /* Tags containing global vars need to be marked as global.
218 Tags containing call clobbered vars need to be marked as call
219 clobbered. */
220
221 static void
222 compute_tag_properties (void)
223 {
224 referenced_var_iterator rvi;
225 tree tag;
226 bool changed = true;
227 VEC (tree, heap) *taglist = NULL;
228
229 FOR_EACH_REFERENCED_VAR (tag, rvi)
230 {
231 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
232 continue;
233 VEC_safe_push (tree, heap, taglist, tag);
234 }
235
236 /* We sort the taglist by DECL_UID, for two reasons.
237 1. To get a sequential ordering to make the bitmap accesses
238 faster.
239 2. Because of the way we compute aliases, it's more likely that
240 an earlier tag is included in a later tag, and this will reduce
241 the number of iterations.
242
243 If we had a real tag graph, we would just topo-order it and be
244 done with it. */
245 qsort (VEC_address (tree, taglist),
246 VEC_length (tree, taglist),
247 sizeof (tree),
248 sort_tags_by_id);
249
250 /* Go through each tag not marked as global, and if it aliases
251 global vars, mark it global.
252
253 If the tag contains call clobbered vars, mark it call
254 clobbered.
255
256 This loop iterates because tags may appear in the may-aliases
257 list of other tags when we group. */
258
259 while (changed)
260 {
261 unsigned int k;
262
263 changed = false;
264 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
265 {
266 VEC (tree, gc) *ma;
267 unsigned int i;
268 tree entry;
269 bool tagcc = is_call_clobbered (tag);
270 bool tagglobal = MTAG_GLOBAL (tag);
271
272 if (tagcc && tagglobal)
273 continue;
274
275 ma = may_aliases (tag);
276 if (!ma)
277 continue;
278
279 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
280 {
281 /* Call clobbered entries cause the tag to be marked
282 call clobbered. */
283 if (!tagcc && is_call_clobbered (entry))
284 {
285 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
286 tagcc = true;
287 changed = true;
288 }
289
290 /* Global vars cause the tag to be marked global. */
291 if (!tagglobal && is_global_var (entry))
292 {
293 MTAG_GLOBAL (tag) = true;
294 changed = true;
295 tagglobal = true;
296 }
297
298 /* Early exit once both global and cc are set, since the
299 loop can't do any more than that. */
300 if (tagcc && tagglobal)
301 break;
302 }
303 }
304 }
305 VEC_free (tree, heap, taglist);
306 }
307
308 /* Set up the initial variable clobbers and globalness.
309 When this function completes, only tags whose aliases need to be
310 clobbered will be set clobbered. Tags clobbered because they
311 contain call clobbered vars are handled in compute_tag_properties. */
312
313 static void
314 set_initial_properties (struct alias_info *ai)
315 {
316 unsigned int i;
317 referenced_var_iterator rvi;
318 tree var;
319
320 FOR_EACH_REFERENCED_VAR (var, rvi)
321 {
322 if (is_global_var (var)
323 && (!var_can_have_subvars (var)
324 || get_subvars_for_var (var) == NULL))
325 {
326 if (!unmodifiable_var_p (var))
327 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
328 }
329 else if (TREE_CODE (var) == PARM_DECL
330 && default_def (var)
331 && POINTER_TYPE_P (TREE_TYPE (var)))
332 {
333 tree def = default_def (var);
334 get_ptr_info (def)->value_escapes_p = 1;
335 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
336 }
337 }
338
339 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
340 {
341 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
342 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
343 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
344
345 if (pi->value_escapes_p)
346 {
347 /* If PTR escapes then its associated memory tags and
348 pointed-to variables are call-clobbered. */
349 if (pi->name_mem_tag)
350 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
351
352 if (v_ann->symbol_mem_tag)
353 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
354
355 if (pi->pt_vars)
356 {
357 bitmap_iterator bi;
358 unsigned int j;
359 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
360 if (!unmodifiable_var_p (referenced_var (j)))
361 mark_call_clobbered (referenced_var (j), pi->escape_mask);
362 }
363 }
364
365 /* If the name tag is call clobbered, so is the symbol tag
366 associated with the base VAR_DECL. */
367 if (pi->name_mem_tag
368 && v_ann->symbol_mem_tag
369 && is_call_clobbered (pi->name_mem_tag))
370 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
371
372 /* Name tags and symbol tags whose pointed-to location is unknown
373 might point to global memory and thus are clobbered.
374
375 FIXME: This is not quite right. They should only be
376 clobbered if value_escapes_p is true, regardless of whether
377 they point to global memory or not.
378 So removing this code and fixing all the bugs would be nice.
379 It is the cause of a bunch of clobbering. */
380 if ((pi->pt_global_mem || pi->pt_anything)
381 && pi->is_dereferenced && pi->name_mem_tag)
382 {
383 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
384 MTAG_GLOBAL (pi->name_mem_tag) = true;
385 }
386
387 if ((pi->pt_global_mem || pi->pt_anything)
388 && pi->is_dereferenced
389 && v_ann->symbol_mem_tag)
390 {
391 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
392 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
393 }
394 }
395 }
396
397
398 /* This variable is set to true if we are updating the used alone
399 information for SMTs, or are in a pass that is going to break it
400 temporarily. */
401 bool updating_used_alone;
402
403 /* Compute which variables need to be marked call clobbered because
404 their tag is call clobbered, and which tags need to be marked
405 global because they contain global variables. */
406
407 static void
408 compute_call_clobbered (struct alias_info *ai)
409 {
410 VEC (tree, heap) *worklist = NULL;
411 VEC(int,heap) *worklist2 = NULL;
412
413 set_initial_properties (ai);
414 init_transitive_clobber_worklist (&worklist, &worklist2);
415 while (VEC_length (tree, worklist) != 0)
416 {
417 tree curr = VEC_pop (tree, worklist);
418 int reason = VEC_pop (int, worklist2);
419
420 mark_call_clobbered (curr, reason);
421 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
422 }
423 VEC_free (tree, heap, worklist);
424 VEC_free (int, heap, worklist2);
425 compute_tag_properties ();
426 }
427
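/* Illustrative sketch, not part of GCC: the transitive marking above,
   reduced to a generic graph.  Nodes reachable from the initially
   clobbered seeds through alias edges are marked exactly once, so the
   worklist never holds more than NUM_NODES entries.  Every name below
   (propagate_clobbers, aliases, clobbered) is hypothetical.

       #include <stdbool.h>

       #define NUM_NODES 8

       // aliases[i][j] is true when node i may alias node j.
       static void
       propagate_clobbers (bool aliases[NUM_NODES][NUM_NODES],
                           bool clobbered[NUM_NODES])
       {
         int worklist[NUM_NODES], top = 0, i, j;

         // Seed the worklist with the nodes that are already clobbered.
         for (i = 0; i < NUM_NODES; i++)
           if (clobbered[i])
             worklist[top++] = i;

         while (top > 0)
           {
             i = worklist[--top];
             for (j = 0; j < NUM_NODES; j++)
               if (aliases[i][j] && !clobbered[j])
                 {
                   clobbered[j] = true;   // mark once ...
                   worklist[top++] = j;   // ... then visit its aliases too.
                 }
           }
       }
   */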
428
429 /* Recalculate the used_alone information for SMTs.  */
430
431 void
432 recalculate_used_alone (void)
433 {
434 VEC (tree, heap) *calls = NULL;
435 block_stmt_iterator bsi;
436 basic_block bb;
437 tree stmt;
438 size_t i;
439 referenced_var_iterator rvi;
440 tree var;
441
442 /* First, reset all the SMT used alone bits to zero. */
443 updating_used_alone = true;
444 FOR_EACH_REFERENCED_VAR (var, rvi)
445 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
446 SMT_USED_ALONE (var) = 0;
447
448 /* Walk all the statements.
449 Calls get put into a list of statements to update, since we will
450 need to update operands on them if we make any changes.
451 If we see a bare use of a SMT anywhere in a real virtual use or virtual
452 def, mark the SMT as used alone, and for renaming. */
453 FOR_EACH_BB (bb)
454 {
455 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
456 {
457 stmt = bsi_stmt (bsi);
458 if (TREE_CODE (stmt) == CALL_EXPR
459 || (TREE_CODE (stmt) == MODIFY_EXPR
460 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
461 VEC_safe_push (tree, heap, calls, stmt);
462 else
463 {
464 ssa_op_iter iter;
465
466 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
467 SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
468 {
469 tree svar = var;
470
471 if (TREE_CODE (var) == SSA_NAME)
472 svar = SSA_NAME_VAR (var);
473
474 if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
475 {
476 if (!SMT_USED_ALONE (svar))
477 {
478 SMT_USED_ALONE (svar) = true;
479 mark_sym_for_renaming (svar);
480 }
481 }
482 }
483 }
484 }
485 }
486
487 /* Update the operands on all the calls we saw. */
488 if (calls)
489 {
490 for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
491 update_stmt (stmt);
492 }
493 VEC_free (tree, heap, calls);
494 updating_used_alone = false;
495 }
496
497 /* Compute may-alias information for every variable referenced in function
498 FNDECL.
499
500 Alias analysis proceeds in 3 main phases:
501
502 1- Points-to and escape analysis.
503
504 This phase walks the use-def chains in the SSA web looking for three
505 things:
506
507 * Assignments of the form P_i = &VAR
508 * Assignments of the form P_i = malloc()
509 * Pointers and ADDR_EXPR that escape the current function.
510
511 The concept of 'escaping' is the same one used in the Java world. When
512 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
513 outside of the current function. So, assignment to global variables,
514 function arguments and returning a pointer are all escape sites, as are
515 conversions between pointers and integers.
516
517 This is where we are currently limited. Since not everything is renamed
518 into SSA, we lose track of escape properties when a pointer is stashed
519 inside a field in a structure, for instance. In those cases, we are
520 assuming that the pointer does escape.
521
522 We use escape analysis to determine whether a variable is
523 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
524 is call-clobbered. If a pointer P_i escapes, then all the variables
525 pointed-to by P_i (and its memory tag) also escape.
526
527 2- Compute flow-sensitive aliases
528
529 We have two classes of memory tags. Memory tags associated with the
530 pointed-to data type of the pointers in the program. These tags are
531 called "symbol memory tag" (SMT). The other class are those associated
532 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
533 when adding operands for an INDIRECT_REF *P_i, we will first check
534 whether P_i has a name tag, if it does we use it, because that will have
535 more precise aliasing information. Otherwise, we use the standard symbol
536 tag.
537
538 In this phase, we go through all the pointers we found in points-to
539 analysis and create alias sets for the name memory tags associated with
540 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
541 it points to and its tag.
542
543
544 3- Compute flow-insensitive aliases
545
546 This pass will compare the alias set of every symbol memory tag and
547 every addressable variable found in the program. Given a symbol
548 memory tag SMT and an addressable variable V. If the alias sets of
549 SMT and V conflict (as computed by may_alias_p), then V is marked
550 as an alias tag and added to the alias set of SMT.
551
552 For instance, consider the following function:
553
554 foo (int i)
555 {
556 int *p, a, b;
557
558 if (i > 10)
559 p = &a;
560 else
561 p = &b;
562
563 *p = 3;
564 a = b + 2;
565 return *p;
566 }
567
568 After aliasing analysis has finished, the symbol memory tag for pointer
569 'p' will have two aliases, namely variables 'a' and 'b'. Every time
570 pointer 'p' is dereferenced, we want to mark the operation as a
571 potential reference to 'a' and 'b'.
572
573 foo (int i)
574 {
575 int *p, a, b;
576
577 if (i_2 > 10)
578 p_4 = &a;
579 else
580 p_6 = &b;
581 # p_1 = PHI <p_4(1), p_6(2)>;
582
583 # a_7 = V_MAY_DEF <a_3>;
584 # b_8 = V_MAY_DEF <b_5>;
585 *p_1 = 3;
586
587 # a_9 = V_MAY_DEF <a_7>
588 # VUSE <b_8>
589 a_9 = b_8 + 2;
590
591 # VUSE <a_9>;
592 # VUSE <b_8>;
593 return *p_1;
594 }
595
596 In certain cases, the list of may aliases for a pointer may grow too
597 large. This may cause an explosion in the number of virtual operands
598 inserted in the code. Resulting in increased memory consumption and
599 compilation time.
600
601 When the number of virtual operands needed to represent aliased
602 loads and stores grows too large (configurable with @option{--param
603 max-aliased-vops}), alias sets are grouped to avoid severe
604 compile-time slow downs and memory consumption. See group_aliases. */
605
606 static unsigned int
607 compute_may_aliases (void)
608 {
609 struct alias_info *ai;
610
611 memset (&alias_stats, 0, sizeof (alias_stats));
612
613 /* Initialize aliasing information. */
614 ai = init_alias_info ();
615
616 /* For each pointer P_i, determine the sets of variables that P_i may
617 point-to. For every addressable variable V, determine whether the
618 address of V escapes the current function, making V call-clobbered
619 (i.e., whether &V is stored in a global variable or if its passed as a
620 function call argument). */
621 compute_points_to_sets (ai);
622
623 /* Collect all pointers and addressable variables, compute alias sets,
624 create memory tags for pointers and promote variables whose address is
625 not needed anymore. */
626 setup_pointers_and_addressables (ai);
627
628 /* Compute flow-sensitive, points-to based aliasing for all the name
629 memory tags. Note that this pass needs to be done before flow
630 insensitive analysis because it uses the points-to information
631 gathered before to mark call-clobbered symbol tags. */
632 compute_flow_sensitive_aliasing (ai);
633
634 /* Compute type-based flow-insensitive aliasing for all the type
635 memory tags. */
636 compute_flow_insensitive_aliasing (ai);
637
638 /* Determine if we need to enable alias grouping. */
639 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
640 group_aliases (ai);
641
642 /* Compute call clobbering information. */
643 compute_call_clobbered (ai);
644
645 /* If the program has too many call-clobbered variables and/or function
646 calls, create .GLOBAL_VAR and use it to model call-clobbering
647 semantics at call sites. This reduces the number of virtual operands
648 considerably, improving compile times at the expense of lost
649 aliasing precision. */
650 maybe_create_global_var (ai);
651
652 /* Debugging dumps. */
653 if (dump_file)
654 {
655 dump_referenced_vars (dump_file);
656 if (dump_flags & TDF_STATS)
657 dump_alias_stats (dump_file);
658 dump_points_to_info (dump_file);
659 dump_alias_info (dump_file);
660 }
661
662 /* Deallocate memory used by aliasing data structures. */
663 delete_alias_info (ai);
664
665 updating_used_alone = true;
666 {
667 block_stmt_iterator bsi;
668 basic_block bb;
669 FOR_EACH_BB (bb)
670 {
671 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
672 {
673 update_stmt_if_modified (bsi_stmt (bsi));
674 }
675 }
676 }
677 recalculate_used_alone ();
678 updating_used_alone = false;
679 return 0;
680 }
681
682
683 struct tree_opt_pass pass_may_alias =
684 {
685 "alias", /* name */
686 NULL, /* gate */
687 compute_may_aliases, /* execute */
688 NULL, /* sub */
689 NULL, /* next */
690 0, /* static_pass_number */
691 TV_TREE_MAY_ALIAS, /* tv_id */
692 PROP_cfg | PROP_ssa, /* properties_required */
693 PROP_alias, /* properties_provided */
694 0, /* properties_destroyed */
695 0, /* todo_flags_start */
696 TODO_dump_func | TODO_update_ssa
697 | TODO_ggc_collect | TODO_verify_ssa
698 | TODO_verify_stmts, /* todo_flags_finish */
699 0 /* letter */
700 };
701
702
703 /* Data structure used to count the number of dereferences to PTR
704 inside an expression. */
705 struct count_ptr_d
706 {
707 tree ptr;
708 unsigned count;
709 };
710
711
712 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
713 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
714
715 static tree
716 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
717 {
718 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
719
720 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
721 pointer 'ptr' is *not* dereferenced, it is simply used to compute
722 the address of 'fld' as 'ptr + offsetof(fld)'. */
723 if (TREE_CODE (*tp) == ADDR_EXPR)
724 {
725 *walk_subtrees = 0;
726 return NULL_TREE;
727 }
728
729 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
730 count_p->count++;
731
732 return NULL_TREE;
733 }
734
735
736 /* Count the number of direct and indirect uses for pointer PTR in
737 statement STMT. The two counts are stored in *NUM_USES_P and
738 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
739 least one of those dereferences is a store operation. */
740
741 void
742 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
743 unsigned *num_derefs_p, bool *is_store)
744 {
745 ssa_op_iter i;
746 tree use;
747
748 *num_uses_p = 0;
749 *num_derefs_p = 0;
750 *is_store = false;
751
752 /* Find out the total number of uses of PTR in STMT. */
753 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
754 if (use == ptr)
755 (*num_uses_p)++;
756
757 /* Now count the number of indirect references to PTR. This is
758 truly awful, but we don't have much choice. There are no parent
759 pointers inside INDIRECT_REFs, so an expression like
760 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
761 find all the indirect and direct uses of x_1 inside. The only
762 shortcut we can take is the fact that GIMPLE only allows
763 INDIRECT_REFs inside the expressions below. */
764 if (TREE_CODE (stmt) == MODIFY_EXPR
765 || (TREE_CODE (stmt) == RETURN_EXPR
766 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
767 || TREE_CODE (stmt) == ASM_EXPR
768 || TREE_CODE (stmt) == CALL_EXPR)
769 {
770 tree lhs, rhs;
771
772 if (TREE_CODE (stmt) == MODIFY_EXPR)
773 {
774 lhs = TREE_OPERAND (stmt, 0);
775 rhs = TREE_OPERAND (stmt, 1);
776 }
777 else if (TREE_CODE (stmt) == RETURN_EXPR)
778 {
779 tree e = TREE_OPERAND (stmt, 0);
780 lhs = TREE_OPERAND (e, 0);
781 rhs = TREE_OPERAND (e, 1);
782 }
783 else if (TREE_CODE (stmt) == ASM_EXPR)
784 {
785 lhs = ASM_OUTPUTS (stmt);
786 rhs = ASM_INPUTS (stmt);
787 }
788 else
789 {
790 lhs = NULL_TREE;
791 rhs = stmt;
792 }
793
794 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
795 {
796 struct count_ptr_d count;
797 count.ptr = ptr;
798 count.count = 0;
799 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
800 *is_store = true;
801 *num_derefs_p = count.count;
802 }
803
804 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
805 {
806 struct count_ptr_d count;
807 count.ptr = ptr;
808 count.count = 0;
809 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
810 *num_derefs_p += count.count;
811 }
812 }
813
814 gcc_assert (*num_uses_p >= *num_derefs_p);
815 }
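
/* Illustrative sketch, not part of GCC: the same counting idea applied to
   a hypothetical expression tree.  As in count_ptr_derefs, the walk does
   not descend into address-taking nodes, because &p->fld does not
   dereference p.  The node codes and struct layout are invented for the
   example.

       #include <string.h>

       enum code { DEREF, TAKE_ADDR, PLUS, VAR };

       struct node
       {
         enum code code;
         struct node *op[2];       // operands, NULL when unused
         const char *name;         // only meaningful for VAR nodes
       };

       static unsigned
       count_derefs (const struct node *n, const char *ptr_name)
       {
         unsigned count = 0;

         if (n == NULL || n->code == TAKE_ADDR)
           return 0;

         if (n->code == DEREF
             && n->op[0]->code == VAR
             && strcmp (n->op[0]->name, ptr_name) == 0)
           count++;

         return count
                + count_derefs (n->op[0], ptr_name)
                + count_derefs (n->op[1], ptr_name);
       }
   */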
816
817 /* Initialize the data structures used for alias analysis. */
818
819 static struct alias_info *
820 init_alias_info (void)
821 {
822 struct alias_info *ai;
823 referenced_var_iterator rvi;
824 tree var;
825
826 bitmap_obstack_initialize (&alias_obstack);
827 ai = XCNEW (struct alias_info);
828 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
829 sbitmap_zero (ai->ssa_names_visited);
830 VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
831 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
832 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
833 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
834
835 /* If aliases have been computed before, clear existing information. */
836 if (aliases_computed_p)
837 {
838 unsigned i;
839
840 /* Clear the set of addressable variables.  In this
841 case, we can just clear the set because addressability is
842 only computed here. */
843 bitmap_clear (addressable_vars);
844
845 /* Clear flow-insensitive alias information from each symbol. */
846 FOR_EACH_REFERENCED_VAR (var, rvi)
847 {
848 var_ann_t ann = var_ann (var);
849
850 ann->is_aliased = 0;
851 ann->may_aliases = NULL;
852 NUM_REFERENCES_CLEAR (ann);
853
854 /* Since we are about to re-discover call-clobbered
855 variables, clear the call-clobbered flag. Variables that
856 are intrinsically call-clobbered (globals, local statics,
857 etc) will not be marked by the aliasing code, so we can't
858 remove them from CALL_CLOBBERED_VARS.
859
860 NB: STRUCT_FIELDS are still call clobbered if they are for
861 a global variable, so we *don't* clear their call clobberedness
862 just because they are tags, though we will clear it if they
863 aren't for global variables. */
864 if (TREE_CODE (var) == NAME_MEMORY_TAG
865 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
866 || !is_global_var (var))
867 clear_call_clobbered (var);
868 }
869
870 /* Clear flow-sensitive points-to information from each SSA name. */
871 for (i = 1; i < num_ssa_names; i++)
872 {
873 tree name = ssa_name (i);
874
875 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
876 continue;
877
878 if (SSA_NAME_PTR_INFO (name))
879 {
880 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
881
882 /* Clear all the flags but keep the name tag to
883 avoid creating new temporaries unnecessarily. If
884 this pointer is found to point to a subset or
885 superset of its former points-to set, then a new
886 tag will need to be created in create_name_tags. */
887 pi->pt_anything = 0;
888 pi->pt_null = 0;
889 pi->value_escapes_p = 0;
890 pi->is_dereferenced = 0;
891 if (pi->pt_vars)
892 bitmap_clear (pi->pt_vars);
893 }
894 }
895 }
896
897 /* Next time, we will need to reset alias information. */
898 aliases_computed_p = true;
899
900 return ai;
901 }
902
903
904 /* Deallocate memory used by alias analysis. */
905
906 static void
907 delete_alias_info (struct alias_info *ai)
908 {
909 size_t i;
910 referenced_var_iterator rvi;
911 tree var;
912
913 sbitmap_free (ai->ssa_names_visited);
914 ai->processed_ptrs = NULL;
915
916 for (i = 0; i < ai->num_addressable_vars; i++)
917 free (ai->addressable_vars[i]);
918
919 FOR_EACH_REFERENCED_VAR(var, rvi)
920 {
921 var_ann_t ann = var_ann (var);
922 NUM_REFERENCES_CLEAR (ann);
923 }
924
925 free (ai->addressable_vars);
926
927 for (i = 0; i < ai->num_pointers; i++)
928 free (ai->pointers[i]);
929 free (ai->pointers);
930
931 BITMAP_FREE (ai->written_vars);
932 BITMAP_FREE (ai->dereferenced_ptrs_store);
933 BITMAP_FREE (ai->dereferenced_ptrs_load);
934 bitmap_obstack_release (&alias_obstack);
935 free (ai);
936
937 delete_points_to_sets ();
938 }
939
940 /* Create name tags for all the pointers that have been dereferenced.
941 We only create a name tag for a pointer P if P is found to point to
942 a set of variables (so that we can alias them to *P) or if it is
943 the result of a call to malloc (which means that P cannot point to
944 anything else nor alias any other variable).
945
946 If two pointers P and Q point to the same set of variables, they
947 are assigned the same name tag. */
948
949 static void
950 create_name_tags (void)
951 {
952 size_t i;
953 VEC (tree, heap) *with_ptvars = NULL;
954 tree ptr;
955
956 /* Collect the list of pointers with a non-empty points-to set.  */
957 for (i = 1; i < num_ssa_names; i++)
958 {
959 tree ptr = ssa_name (i);
960 struct ptr_info_def *pi;
961
962 if (!ptr
963 || !POINTER_TYPE_P (TREE_TYPE (ptr))
964 || !SSA_NAME_PTR_INFO (ptr))
965 continue;
966
967 pi = SSA_NAME_PTR_INFO (ptr);
968
969 if (pi->pt_anything || !pi->is_dereferenced)
970 {
971 /* No name tags for pointers that have not been
972 dereferenced or point to an arbitrary location. */
973 pi->name_mem_tag = NULL_TREE;
974 continue;
975 }
976
977 /* Set pt_anything on the pointers without pt_vars filled in so
978 that they are assigned a symbol tag. */
979 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
980 VEC_safe_push (tree, heap, with_ptvars, ptr);
981 else
982 set_pt_anything (ptr);
983 }
984
985 /* If we didn't find any pointers with pt_vars set, we're done. */
986 if (!with_ptvars)
987 return;
988
989 /* Now go through the pointers with pt_vars, and find a name tag
990 with the same pt_vars as this pointer, or create one if one
991 doesn't exist. */
992 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
993 {
994 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
995 size_t j;
996 tree ptr2;
997 tree old_name_tag = pi->name_mem_tag;
998
999 /* If PTR points to a set of variables, check if we don't
1000 have another pointer Q with the same points-to set before
1001 creating a tag. If so, use Q's tag instead of creating a
1002 new one.
1003
1004 This is important for not creating unnecessary symbols
1005 and also for copy propagation. If we ever need to
1006 propagate PTR into Q or vice-versa, we would run into
1007 problems if they both had different name tags because
1008 they would have different SSA version numbers (which
1009 would force us to take the name tags in and out of SSA). */
1010 for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
1011 {
1012 struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);
1013
1014 if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
1015 {
1016 pi->name_mem_tag = qi->name_mem_tag;
1017 break;
1018 }
1019 }
1020
1021 /* If we didn't find a pointer with the same points-to set
1022 as PTR, create a new name tag if needed. */
1023 if (pi->name_mem_tag == NULL_TREE)
1024 pi->name_mem_tag = get_nmt_for (ptr);
1025
1026 /* If the new name tag computed for PTR is different than
1027 the old name tag that it used to have, then the old tag
1028 needs to be removed from the IL, so we mark it for
1029 renaming. */
1030 if (old_name_tag && old_name_tag != pi->name_mem_tag)
1031 mark_sym_for_renaming (old_name_tag);
1032
1033 TREE_THIS_VOLATILE (pi->name_mem_tag)
1034 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
1035
1036 /* Mark the new name tag for renaming. */
1037 mark_sym_for_renaming (pi->name_mem_tag);
1038 }
1039
1040 VEC_free (tree, heap, with_ptvars);
1041 }
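
/* Illustrative sketch, not part of GCC: how pointers with identical
   points-to sets end up sharing one name tag.  Points-to sets are
   modelled as plain bit masks and tags as small integers; every name
   below is hypothetical.

       #define NO_TAG (-1)

       // Assign tags[i] for each of the n pointers, reusing the tag of
       // any earlier pointer whose points-to mask is identical.
       static void
       assign_name_tags (const unsigned long *pt_masks, int n, int *tags)
       {
         int i, j, next_tag = 0;

         for (i = 0; i < n; i++)
           {
             tags[i] = NO_TAG;

             for (j = 0; j < i; j++)
               if (pt_masks[j] == pt_masks[i])
                 {
                   tags[i] = tags[j];   // same set => same tag
                   break;
                 }

             if (tags[i] == NO_TAG)
               tags[i] = next_tag++;    // first pointer with this set
           }
       }
   */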
1042
1043
1044 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1045 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1046 name tag and the variables it points-to are call-clobbered. Finally, if
1047 P_i escapes and we could not determine where it points to, then all the
1048 variables in the same alias set as *P_i are marked call-clobbered. This
1049 is necessary because we must assume that P_i may take the address of any
1050 variable in the same alias set. */
1051
1052 static void
1053 compute_flow_sensitive_aliasing (struct alias_info *ai)
1054 {
1055 size_t i;
1056
1057 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1058 {
1059 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1060 if (!find_what_p_points_to (ptr))
1061 set_pt_anything (ptr);
1062 }
1063
1064 create_name_tags ();
1065
1066 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1067 {
1068 unsigned j;
1069 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1070 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1071 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
1072 bitmap_iterator bi;
1073
1074
1075 /* Set up aliasing information for PTR's name memory tag (if it has
1076 one). Note that only pointers that have been dereferenced will
1077 have a name memory tag. */
1078 if (pi->name_mem_tag && pi->pt_vars)
1079 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
1080 {
1081 add_may_alias (pi->name_mem_tag, referenced_var (j));
1082 add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
1083 }
1084 }
1085 }
1086
1087
1088 /* Compute type-based alias sets. Traverse all the pointers and
1089 addressable variables found in setup_pointers_and_addressables.
1090
1091 For every pointer P in AI->POINTERS and addressable variable V in
1092 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1093 memory tag (SMT) if their alias sets conflict. V is then marked as
1094 an alias tag so that the operand scanner knows that statements
1095 containing V have aliased operands. */
1096
1097 static void
1098 compute_flow_insensitive_aliasing (struct alias_info *ai)
1099 {
1100 size_t i;
1101
1102 /* Initialize counter for the total number of virtual operands that
1103 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1104 threshold set by --param max-aliased-vops, we enable alias
1105 grouping. */
1106 ai->total_alias_vops = 0;
1107
1108 /* For every pointer P, determine which addressable variables may alias
1109 with P's symbol memory tag. */
1110 for (i = 0; i < ai->num_pointers; i++)
1111 {
1112 size_t j;
1113 struct alias_map_d *p_map = ai->pointers[i];
1114 tree tag = var_ann (p_map->var)->symbol_mem_tag;
1115 var_ann_t tag_ann = var_ann (tag);
1116
1117 p_map->total_alias_vops = 0;
1118 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1119
1120 for (j = 0; j < ai->num_addressable_vars; j++)
1121 {
1122 struct alias_map_d *v_map;
1123 var_ann_t v_ann;
1124 tree var;
1125 bool tag_stored_p, var_stored_p;
1126
1127 v_map = ai->addressable_vars[j];
1128 var = v_map->var;
1129 v_ann = var_ann (var);
1130
1131 /* Skip memory tags and variables that have never been
1132 written to. We also need to check if the variables are
1133 call-clobbered because they may be overwritten by
1134 function calls.
1135
1136 Note this is effectively random access to elements in
1137 the sparse bitset, which can be highly inefficient.
1138 So we first check the call_clobbered status of the
1139 tag and variable before querying the bitmap. */
1140 tag_stored_p = is_call_clobbered (tag)
1141 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1142 var_stored_p = is_call_clobbered (var)
1143 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1144 if (!tag_stored_p && !var_stored_p)
1145 continue;
1146
1147 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1148 {
1149 size_t num_tag_refs, num_var_refs;
1150
1151 num_tag_refs = NUM_REFERENCES (tag_ann);
1152 num_var_refs = NUM_REFERENCES (v_ann);
1153
1154 /* Add VAR to TAG's may-aliases set. */
1155
1156 /* We should never have a var with subvars here, because
1157 they shouldn't get into the set of addressable vars.  */
1158 gcc_assert (!var_can_have_subvars (var)
1159 || get_subvars_for_var (var) == NULL);
1160
1161 add_may_alias (tag, var);
1162 /* Update the bitmap used to represent TAG's alias set
1163 in case we need to group aliases. */
1164 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1165
1166 /* Update the total number of virtual operands due to
1167 aliasing. Since we are adding one more alias to TAG's
1168 may-aliases set, the total number of virtual operands due
1169 to aliasing will be increased by the number of references
1170 made to VAR and TAG (every reference to TAG will also
1171 count as a reference to VAR). */
1172 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1173 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1174
1175
1176 }
1177 }
1178 }
1179
1180 /* Since this analysis is based exclusively on symbols, it fails to
1181 handle cases where two pointers P and Q have different memory
1182 tags with conflicting alias set numbers but no aliased symbols in
1183 common.
1184
1185 For example, suppose that we have two memory tags SMT.1 and SMT.2
1186 such that
1187
1188 may-aliases (SMT.1) = { a }
1189 may-aliases (SMT.2) = { b }
1190
1191 and the alias set number of SMT.1 conflicts with that of SMT.2.
1192 Since they don't have symbols in common, loads and stores from
1193 SMT.1 and SMT.2 will seem independent of each other, which will
1194 lead to the optimizers making invalid transformations (see
1195 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1196
1197 To avoid this problem, we do a final traversal of AI->POINTERS
1198 looking for pairs of pointers that have no aliased symbols in
1199 common and yet have conflicting alias set numbers. */
1200 for (i = 0; i < ai->num_pointers; i++)
1201 {
1202 size_t j;
1203 struct alias_map_d *p_map1 = ai->pointers[i];
1204 tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1205 bitmap may_aliases1 = p_map1->may_aliases;
1206
1207 for (j = i + 1; j < ai->num_pointers; j++)
1208 {
1209 struct alias_map_d *p_map2 = ai->pointers[j];
1210 tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1211 bitmap may_aliases2 = p_map2->may_aliases;
1212
1213 /* If the pointers may not point to each other, do nothing. */
1214 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1215 continue;
1216
1217 /* The two pointers may alias each other. If they already have
1218 symbols in common, do nothing. */
1219 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1220 continue;
1221
1222 if (!bitmap_empty_p (may_aliases2))
1223 {
1224 unsigned int k;
1225 bitmap_iterator bi;
1226
1227 /* Add all the aliases for TAG2 into TAG1's alias set.
1228 FIXME, update grouping heuristic counters. */
1229 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1230 add_may_alias (tag1, referenced_var (k));
1231 bitmap_ior_into (may_aliases1, may_aliases2);
1232 }
1233 else
1234 {
1235 /* Since TAG2 does not have any aliases of its own, add
1236 TAG2 itself to the alias set of TAG1. */
1237 add_may_alias (tag1, tag2);
1238 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1239 }
1240 }
1241 }
1242
1243 if (dump_file)
1244 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1245 get_name (current_function_decl),
1246 ai->total_alias_vops);
1247 }
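
/* A small worked example of the accounting above, with invented numbers:
   suppose TAG is referenced 10 times and an addressable variable VAR is
   referenced 5 times.  When VAR is added to TAG's may-alias set,
   TOTAL_ALIAS_VOPS grows by the references made to VAR and TAG,
   i.e. by 5 + 10 = 15.  A second such variable with 7 references adds
   another 7 + 10 = 17, and so on; once the running total reaches
   MAX_ALIASED_VOPS, compute_may_aliases calls group_aliases.  */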
1248
1249
1250 /* Comparison function for qsort used in group_aliases. */
1251
1252 static int
1253 total_alias_vops_cmp (const void *p, const void *q)
1254 {
1255 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1256 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1257 long n1 = (*p1)->total_alias_vops;
1258 long n2 = (*p2)->total_alias_vops;
1259
1260 /* We want to sort in descending order. */
1261 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1262 }
1263
1264 /* Group all the aliases for TAG to make TAG represent all the
1265 variables in its alias set. Update the total number
1266 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1267 function will make TAG be the unique alias tag for all the
1268 variables in its may-aliases. So, given:
1269
1270 may-aliases(TAG) = { V1, V2, V3 }
1271
1272 This function will group the variables into:
1273
1274 may-aliases(V1) = { TAG }
1275 may-aliases(V2) = { TAG }
1276 may-aliases(V3) = { TAG } */
1277
1278 static void
1279 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1280 {
1281 unsigned int i;
1282 var_ann_t tag_ann = var_ann (tag);
1283 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1284 bitmap_iterator bi;
1285
1286 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1287 {
1288 tree var = referenced_var (i);
1289 var_ann_t ann = var_ann (var);
1290
1291 /* Make TAG the unique alias of VAR. */
1292 ann->is_aliased = 0;
1293 ann->may_aliases = NULL;
1294
1295 /* Note that VAR and TAG may be the same if the function has no
1296 addressable variables (see the discussion at the end of
1297 setup_pointers_and_addressables). */
1298 if (var != tag)
1299 add_may_alias (var, tag);
1300
1301 /* Reduce total number of virtual operands contributed
1302 by TAG on behalf of VAR. Notice that the references to VAR
1303 itself won't be removed. We will merely replace them with
1304 references to TAG. */
1305 ai->total_alias_vops -= num_tag_refs;
1306 }
1307
1308 /* We have reduced the number of virtual operands that TAG makes on
1309 behalf of all the variables formerly aliased with it. However,
1310 we have also "removed" all the virtual operands for TAG itself,
1311 so we add them back. */
1312 ai->total_alias_vops += num_tag_refs;
1313
1314 /* TAG no longer has any aliases. */
1315 tag_ann->may_aliases = NULL;
1316 }
1317
1318
1319 /* Group may-aliases sets to reduce the number of virtual operands due
1320 to aliasing.
1321
1322 1- Sort the list of pointers in decreasing number of contributed
1323 virtual operands.
1324
1325 2- Take the first entry in AI->POINTERS and revert the role of
1326 the memory tag and its aliases. Usually, whenever an aliased
1327 variable Vi is found to alias with a memory tag T, we add Vi
1328 to the may-aliases set for T. Meaning that after alias
1329 analysis, we will have:
1330
1331 may-aliases(T) = { V1, V2, V3, ..., Vn }
1332
1333 This means that every statement that references T, will get 'n'
1334 virtual operands for each of the Vi tags. But, when alias
1335 grouping is enabled, we make T an alias tag and add it to the
1336 alias set of all the Vi variables:
1337
1338 may-aliases(V1) = { T }
1339 may-aliases(V2) = { T }
1340 ...
1341 may-aliases(Vn) = { T }
1342
1343 This has two effects: (a) statements referencing T will only get
1344 a single virtual operand, and, (b) all the variables Vi will now
1345 appear to alias each other. So, we lose alias precision to
1346 improve compile time. But, in theory, a program with such a high
1347 level of aliasing should not be very optimizable in the first
1348 place.
1349
1350 3- Since variables may be in the alias set of more than one
1351 memory tag, the grouping done in step (2) needs to be extended
1352 to all the memory tags that have a non-empty intersection with
1353 the may-aliases set of tag T. For instance, if we originally
1354 had these may-aliases sets:
1355
1356 may-aliases(T) = { V1, V2, V3 }
1357 may-aliases(R) = { V2, V4 }
1358
1359 In step (2) we would have reverted the aliases for T as:
1360
1361 may-aliases(V1) = { T }
1362 may-aliases(V2) = { T }
1363 may-aliases(V3) = { T }
1364
1365 But note that now V2 is no longer aliased with R. We could
1366 add R to may-aliases(V2), but we are in the process of
1367 grouping aliases to reduce virtual operands so what we do is
1368 add V4 to the grouping to obtain:
1369
1370 may-aliases(V1) = { T }
1371 may-aliases(V2) = { T }
1372 may-aliases(V3) = { T }
1373 may-aliases(V4) = { T }
1374
1375 4- If the total number of virtual operands due to aliasing is
1376 still above the threshold set by max-aliased-vops, go back to (2). */
1377
1378 static void
1379 group_aliases (struct alias_info *ai)
1380 {
1381 size_t i;
1382
1383 /* Sort the POINTERS array in descending order of contributed
1384 virtual operands. */
1385 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1386 total_alias_vops_cmp);
1387
1388 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1389 and the tag's may-aliases set. */
1390 for (i = 0; i < ai->num_pointers; i++)
1391 {
1392 size_t j;
1393 tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
1394 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1395
1396 /* Skip tags that have been grouped already. */
1397 if (ai->pointers[i]->grouped_p)
1398 continue;
1399
1400 /* See if TAG1 had any aliases in common with other symbol tags.
1401 If we find a TAG2 with common aliases with TAG1, add TAG2's
1402 aliases into TAG1. */
1403 for (j = i + 1; j < ai->num_pointers; j++)
1404 {
1405 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1406
1407 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1408 {
1409 tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
1410
1411 bitmap_ior_into (tag1_aliases, tag2_aliases);
1412
1413 /* TAG2 does not need its aliases anymore. */
1414 bitmap_clear (tag2_aliases);
1415 var_ann (tag2)->may_aliases = NULL;
1416
1417 /* TAG1 is the unique alias of TAG2. */
1418 add_may_alias (tag2, tag1);
1419
1420 ai->pointers[j]->grouped_p = true;
1421 }
1422 }
1423
1424 /* Now group all the aliases we collected into TAG1. */
1425 group_aliases_into (tag1, tag1_aliases, ai);
1426
1427 /* If we've reduced total number of virtual operands below the
1428 threshold, stop. */
1429 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1430 break;
1431 }
1432
1433 /* Finally, all the variables that have been grouped cannot be in
1434 the may-alias set of name memory tags. Suppose that we have
1435 grouped the aliases in this code so that may-aliases(a) = SMT.20
1436
1437 p_5 = &a;
1438 ...
1439 # a_9 = V_MAY_DEF <a_8>
1440 p_5->field = 0
1441 ... Several modifications to SMT.20 ...
1442 # VUSE <a_9>
1443 x_30 = p_5->field
1444
1445 Since p_5 points to 'a', the optimizers will try to propagate 0
1446 into p_5->field, but that is wrong because there have been
1447 modifications to 'SMT.20' in between. To prevent this we have to
1448 replace 'a' with 'SMT.20' in the name tag of p_5. */
1449 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1450 {
1451 size_t j;
1452 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1453 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1454 VEC(tree,gc) *aliases;
1455 tree alias;
1456
1457 if (name_tag == NULL_TREE)
1458 continue;
1459
1460 aliases = var_ann (name_tag)->may_aliases;
1461 for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1462 {
1463 var_ann_t ann = var_ann (alias);
1464
1465 if ((!MTAG_P (alias)
1466 || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1467 && ann->may_aliases)
1468 {
1469 tree new_alias;
1470
1471 gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1472
1473 new_alias = VEC_index (tree, ann->may_aliases, 0);
1474 replace_may_alias (name_tag, j, new_alias);
1475 }
1476 }
1477 }
1478
1479 if (dump_file)
1480 fprintf (dump_file,
1481 "%s: Total number of aliased vops after grouping: %ld%s\n",
1482 get_name (current_function_decl),
1483 ai->total_alias_vops,
1484 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1485 }
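
/* A worked example of the effect of grouping, with invented numbers: if
   TAG has 50 aliases and is referenced 40 times, group_aliases_into
   subtracts those 40 virtual operands once for each of the 50 grouped
   variables and then adds the 40 for TAG itself back, a net reduction of
   (50 - 1) * 40 = 1960 in TOTAL_ALIAS_VOPS.  The price is precision:
   after grouping, the 50 variables all appear to alias one another
   through TAG.  */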
1486
1487
1488 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1489
1490 static void
1491 create_alias_map_for (tree var, struct alias_info *ai)
1492 {
1493 struct alias_map_d *alias_map;
1494 alias_map = XCNEW (struct alias_map_d);
1495 alias_map->var = var;
1496 alias_map->set = get_alias_set (var);
1497 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1498 }
1499
1500
1501 /* Create memory tags for all the dereferenced pointers and build the
1502 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1503 sets. Based on the address escape and points-to information collected
1504 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1505 variables whose address is not needed anymore. */
1506
1507 static void
1508 setup_pointers_and_addressables (struct alias_info *ai)
1509 {
1510 size_t n_vars, num_addressable_vars, num_pointers;
1511 referenced_var_iterator rvi;
1512 tree var;
1513 VEC (tree, heap) *varvec = NULL;
1514 safe_referenced_var_iterator srvi;
1515
1516 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1517 num_addressable_vars = num_pointers = 0;
1518
1519 FOR_EACH_REFERENCED_VAR (var, rvi)
1520 {
1521 if (may_be_aliased (var))
1522 num_addressable_vars++;
1523
1524 if (POINTER_TYPE_P (TREE_TYPE (var)))
1525 {
1526 /* Since we don't keep track of volatile variables, assume that
1527 these pointers are used in indirect store operations. */
1528 if (TREE_THIS_VOLATILE (var))
1529 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1530
1531 num_pointers++;
1532 }
1533 }
1534
1535 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1536 always going to be slightly bigger than we actually need them
1537 because some TREE_ADDRESSABLE variables will be marked
1538 non-addressable below and only pointers with unique symbol tags are
1539 going to be added to POINTERS. */
1540 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1541 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1542 ai->num_addressable_vars = 0;
1543 ai->num_pointers = 0;
1544
1545 /* Since we will be creating symbol memory tags within this loop,
1546 cache the value of NUM_REFERENCED_VARS to avoid processing the
1547 additional tags unnecessarily. */
1548 n_vars = num_referenced_vars;
1549
1550 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1551 {
1552 var_ann_t v_ann = var_ann (var);
1553 subvar_t svars;
1554
1555 /* Name memory tags already have flow-sensitive aliasing
1556 information, so they need not be processed by
1557 compute_flow_insensitive_aliasing. Similarly, symbol memory
1558 tags are already accounted for when we process their
1559 associated pointer.
1560
1561 Structure fields, on the other hand, have to have some of this
1562 information processed for them, but it's pointless to mark them
1563 non-addressable (since they are fake variables anyway). */
1564 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1565 continue;
1566
1567 /* Remove the ADDRESSABLE flag from every addressable variable whose
1568 address is not needed anymore. This is caused by the propagation
1569 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1570 removal of dead pointer assignments done by the early scalar
1571 cleanup passes. */
1572 if (TREE_ADDRESSABLE (var))
1573 {
1574 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1575 && TREE_CODE (var) != RESULT_DECL
1576 && !is_global_var (var))
1577 {
1578 bool okay_to_mark = true;
1579
1580 /* Since VAR is now a regular GIMPLE register, we will need
1581 to rename VAR into SSA afterwards. */
1582 mark_sym_for_renaming (var);
1583
1584 /* If VAR can have sub-variables, and any of its
1585 sub-variables has its address taken, then we cannot
1586 remove the addressable flag from VAR. */
1587 if (var_can_have_subvars (var)
1588 && (svars = get_subvars_for_var (var)))
1589 {
1590 subvar_t sv;
1591
1592 for (sv = svars; sv; sv = sv->next)
1593 {
1594 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1595 okay_to_mark = false;
1596 mark_sym_for_renaming (sv->var);
1597 }
1598 }
1599
1600 /* The address of VAR is not needed, remove the
1601 addressable bit, so that it can be optimized as a
1602 regular variable. */
1603 if (okay_to_mark)
1604 mark_non_addressable (var);
1605 }
1606 }
1607
1608 /* Global variables and addressable locals may be aliased. Create an
1609 entry in ADDRESSABLE_VARS for VAR. */
1610 if (may_be_aliased (var)
1611 && (!var_can_have_subvars (var)
1612 || get_subvars_for_var (var) == NULL))
1613 {
1614 create_alias_map_for (var, ai);
1615 mark_sym_for_renaming (var);
1616 }
1617
1618 /* Add pointer variables that have been dereferenced to the POINTERS
1619 array and create a symbol memory tag for them. */
1620 if (POINTER_TYPE_P (TREE_TYPE (var)))
1621 {
1622 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1623 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1624 {
1625 tree tag;
1626 var_ann_t t_ann;
1627
1628 /* If pointer VAR still doesn't have a memory tag
1629 associated with it, create it now or re-use an
1630 existing one. */
1631 tag = get_tmt_for (var, ai);
1632 t_ann = var_ann (tag);
1633
1634 /* The symbol tag will need to be renamed into SSA
1635 afterwards. Note that we cannot do this inside
1636 get_tmt_for because aliasing may run multiple times
1637 and we only create symbol tags the first time. */
1638 mark_sym_for_renaming (tag);
1639
1640 /* Similarly, if pointer VAR used to have another type
1641 tag, we will need to process it in the renamer to
1642 remove the stale virtual operands. */
1643 if (v_ann->symbol_mem_tag)
1644 mark_sym_for_renaming (v_ann->symbol_mem_tag);
1645
1646 /* Associate the tag with pointer VAR. */
1647 v_ann->symbol_mem_tag = tag;
1648
1649 /* If pointer VAR has been used in a store operation,
1650 then its memory tag must be marked as written-to. */
1651 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1652 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1653
1654 /* All the dereferences of pointer VAR count as
1655 references of TAG. Since TAG can be associated with
1656 several pointers, add the dereferences of VAR to the
1657 TAG. */
1658 NUM_REFERENCES_SET (t_ann,
1659 NUM_REFERENCES (t_ann)
1660 + NUM_REFERENCES (v_ann));
1661 }
1662 else
1663 {
1664 /* The pointer has not been dereferenced. If it had a
1665 symbol memory tag, remove it and mark the old tag for
1666 renaming to remove it out of the IL. */
1667 var_ann_t ann = var_ann (var);
1668 tree tag = ann->symbol_mem_tag;
1669 if (tag)
1670 {
1671 mark_sym_for_renaming (tag);
1672 ann->symbol_mem_tag = NULL_TREE;
1673 }
1674 }
1675 }
1676 }
1677 VEC_free (tree, heap, varvec);
1678 }
1679
1680
1681 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1682 every call site, we need to emit V_MAY_DEF expressions to represent the
1683 clobbering effects of the call for variables whose address escapes the
1684 current function.
1685
1686 One approach is to group all call-clobbered variables into a single
1687 representative that is used as an alias of every call-clobbered variable
1688 (.GLOBAL_VAR). This works well, but it ties the optimizer's hands because
1689 a reference to any call clobbered variable becomes a reference to .GLOBAL_VAR.
1690
1691 The second approach is to emit a clobbering V_MAY_DEF for every
1692 call-clobbered variable at call sites. This is the preferred way in terms
1693 of optimization opportunities but it may create too many V_MAY_DEF operands
1694 if there are many call clobbered variables and function calls in the
1695 function.
1696
1697 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1698 function calls found by the number of call-clobbered variables. If that
1699 product is beyond a certain threshold, as determined by the parameterized
1700 values shown below, we use .GLOBAL_VAR.
1701
1702 FIXME. This heuristic should be improved. One idea is to use several
1703 .GLOBAL_VARs of different types instead of a single one. The thresholds
1704 have been derived from a typical bootstrap cycle, including all target
1705 libraries. Compile times were found to increase by ~1% compared to using
1706 .GLOBAL_VAR. */
1707
1708 static void
1709 maybe_create_global_var (struct alias_info *ai)
1710 {
1711 unsigned i, n_clobbered;
1712 bitmap_iterator bi;
1713
1714 /* No need to create it, if we have one already. */
1715 if (global_var == NULL_TREE)
1716 {
1717 /* Count all the call-clobbered variables. */
1718 n_clobbered = 0;
1719 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1720 {
1721 n_clobbered++;
1722 }
1723
1724 /* If the number of virtual operands that would be needed to
1725 model all the call-clobbered variables is larger than
1726 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1727
1728 Also create .GLOBAL_VAR if there are no call-clobbered
1729 variables and the program contains a mixture of pure/const
1730 and regular function calls. This is to avoid the problem
1731 described in PR 20115:
1732
1733 int X;
1734 int func_pure (void) { return X; }
1735 int func_non_pure (int a) { X += a; }
1736 int foo ()
1737 {
1738 int a = func_pure ();
1739 func_non_pure (a);
1740 a = func_pure ();
1741 return a;
1742 }
1743
1744 Since foo() has no call-clobbered variables, there is
1745 no relationship between the calls to func_pure and
1746 func_non_pure. Since func_pure has no side-effects, value
1747 numbering optimizations elide the second call to func_pure.
1748 So, if we have some pure/const and some regular calls in the
1749 program we create .GLOBAL_VAR to avoid missing these
1750 relations. */
1751 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1752 || (n_clobbered == 0
1753 && ai->num_calls_found > 0
1754 && ai->num_pure_const_calls_found > 0
1755 && ai->num_calls_found > ai->num_pure_const_calls_found))
1756 create_global_var ();
1757 }
1758
1759 /* Mark all call-clobbered symbols for renaming. Since the initial
1760 rewrite into SSA ignored all call sites, we may need to rename
1761 .GLOBAL_VAR and the call-clobbered variables. */
1762 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1763 {
1764 tree var = referenced_var (i);
1765
1766 /* If the function has calls to clobbering functions and
1767 .GLOBAL_VAR has been created, make it an alias for all
1768 call-clobbered variables. */
1769 if (global_var && var != global_var)
1770 {
1771 add_may_alias (var, global_var);
1772 gcc_assert (!get_subvars_for_var (var));
1773 }
1774
1775 mark_sym_for_renaming (var);
1776 }
1777 }
1778
1779
1780 /* Return TRUE if pointer PTR may point to variable VAR.
1781
1782 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
1783 This is needed because when checking for type conflicts we are
1784 interested in the alias set of the memory location pointed-to by
1785 PTR. The alias set of PTR itself is irrelevant.
1786
1787 VAR_ALIAS_SET is the alias set for VAR. */
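
/* A sketch of the kind of query this function resolves (illustrative
   only; assuming strict aliasing, so that 'int' and 'double' end up in
   different alias sets):

	double d;
	int    i;

	void foo (int *p)
	{
	  *p = 0;
	}

   For the dereference of P, MEM is P's symbol memory tag and
   MEM_ALIAS_SET is the alias set of 'int'.  A query against 'd' can be
   answered "no alias" by the alias set conflict check below, while the
   same query against 'i' falls through and may return true.  */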
1788
1789 static bool
1790 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1791 tree var, HOST_WIDE_INT var_alias_set,
1792 bool alias_set_only)
1793 {
1794 tree mem;
1795
1796 alias_stats.alias_queries++;
1797 alias_stats.simple_queries++;
1798
1799 /* By convention, a variable cannot alias itself. */
1800 mem = var_ann (ptr)->symbol_mem_tag;
1801 if (mem == var)
1802 {
1803 alias_stats.alias_noalias++;
1804 alias_stats.simple_resolved++;
1805 return false;
1806 }
1807
1808 /* If flag_argument_noalias is greater than 2, pointer arguments may
1809 not point to anything else. */
1810 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
1811 {
1812 alias_stats.alias_noalias++;
1813 alias_stats.simple_resolved++;
1814 return false;
1815 }
1816
1817 /* If flag_argument_noalias is greater than 1, pointer arguments may
1818 not point to global variables. */
1819 if (flag_argument_noalias > 1 && is_global_var (var)
1820 && TREE_CODE (ptr) == PARM_DECL)
1821 {
1822 alias_stats.alias_noalias++;
1823 alias_stats.simple_resolved++;
1824 return false;
1825 }
1826
1827 /* If either MEM or VAR is a read-only global and the other one
1828 isn't, then PTR cannot point to VAR. */
1829 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1830 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1831 {
1832 alias_stats.alias_noalias++;
1833 alias_stats.simple_resolved++;
1834 return false;
1835 }
1836
1837 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
1838
1839 alias_stats.tbaa_queries++;
1840
1841 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1842 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1843 {
1844 alias_stats.alias_noalias++;
1845 alias_stats.tbaa_resolved++;
1846 return false;
1847 }
1848
1849 /* If var is a record or union type, ptr cannot point into var
1850 unless there is some explicit address operation in the
1851 program that can reference a field of the ptr's dereferenced
1852 type. This also assumes that the types of both var and ptr are
1853 contained within the compilation unit, and that there is no fancy
1854 addressing arithmetic associated with any of the types
1855 involved. */
1856
1857 if ((mem_alias_set != 0) && (var_alias_set != 0))
1858 {
1859 tree ptr_type = TREE_TYPE (ptr);
1860 tree var_type = TREE_TYPE (var);
1861
1862 /* The star count is -1 if the type at the end of the pointer_to
1863 chain is not a record or union type. */
1864 if ((!alias_set_only) &&
1865 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1866 {
1867 int ptr_star_count = 0;
1868
1869 /* Ipa_type_escape_star_count_of_interesting_type is a little too
1870 restrictive for the pointer type; we need to allow pointers to
1871 primitive types as long as those types cannot be pointers
1872 to everything. */
1873 while (POINTER_TYPE_P (ptr_type))
1874 /* Strip the *'s off. */
1875 {
1876 ptr_type = TREE_TYPE (ptr_type);
1877 ptr_star_count++;
1878 }
1879
1880 /* There does not appear to be a better test to see if the
1881 pointer type was one of the pointer to everything
1882 types. */
1883
1884 if (ptr_star_count > 0)
1885 {
1886 alias_stats.structnoaddress_queries++;
1887 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1888 TREE_TYPE (ptr)))
1889 {
1890 alias_stats.structnoaddress_resolved++;
1891 alias_stats.alias_noalias++;
1892 return false;
1893 }
1894 }
1895 else if (ptr_star_count == 0)
1896 {
1897 /* If ptr_type was not really a pointer type, it cannot
1898 alias. */
1899 alias_stats.structnoaddress_queries++;
1900 alias_stats.structnoaddress_resolved++;
1901 alias_stats.alias_noalias++;
1902 return false;
1903 }
1904 }
1905 }
1906
1907 alias_stats.alias_mayalias++;
1908 return true;
1909 }
1910
1911
1912 /* Add ALIAS to the set of variables that may alias VAR. */
1913
1914 static void
1915 add_may_alias (tree var, tree alias)
1916 {
1917 size_t i;
1918 var_ann_t v_ann = get_var_ann (var);
1919 var_ann_t a_ann = get_var_ann (alias);
1920 tree al;
1921
1922 /* Don't allow self-referential aliases. */
1923 gcc_assert (var != alias);
1924
1925 /* ALIAS must be addressable if it's being added to an alias set. */
1926 #if 1
1927 TREE_ADDRESSABLE (alias) = 1;
1928 #else
1929 gcc_assert (may_be_aliased (alias));
1930 #endif
1931
1932 if (v_ann->may_aliases == NULL)
1933 v_ann->may_aliases = VEC_alloc (tree, gc, 2);
1934
1935 /* Avoid adding duplicates. */
1936 for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
1937 if (alias == al)
1938 return;
1939
1940 VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
1941 a_ann->is_aliased = 1;
1942 }
1943
1944
1945 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1946
1947 static void
1948 replace_may_alias (tree var, size_t i, tree new_alias)
1949 {
1950 var_ann_t v_ann = var_ann (var);
1951 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
1952 }
1953
1954
1955 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1956
1957 static void
1958 set_pt_anything (tree ptr)
1959 {
1960 struct ptr_info_def *pi = get_ptr_info (ptr);
1961
1962 pi->pt_anything = 1;
1963 pi->pt_vars = NULL;
1964
1965 /* The pointer used to have a name tag, but we have now found it pointing
1966 to an arbitrary location. The name tag needs to be renamed and
1967 disassociated from PTR. */
1968 if (pi->name_mem_tag)
1969 {
1970 mark_sym_for_renaming (pi->name_mem_tag);
1971 pi->name_mem_tag = NULL_TREE;
1972 }
1973 }
1974
1975
1976 /* Determine whether STMT is an "escape" site from the current function. Escape
1977 sites are those statements which might expose the address of a variable
1978 outside the current function. STMT is an escape site iff:
1979
1980 1- STMT is a function call, or
1981 2- STMT is an __asm__ expression, or
1982 3- STMT is an assignment to a non-local variable, or
1983 4- STMT is a return statement.
1984
1985 AI points to the alias information collected so far.
1986
1987 Return the type of escape site found, if we found one, or NO_ESCAPE
1988 if none. */
1989
1990 enum escape_type
1991 is_escape_site (tree stmt, struct alias_info *ai)
1992 {
1993 tree call = get_call_expr_in (stmt);
1994 if (call != NULL_TREE)
1995 {
1996 ai->num_calls_found++;
1997
1998 if (!TREE_SIDE_EFFECTS (call))
1999 {
2000 ai->num_pure_const_calls_found++;
2001 return ESCAPE_TO_PURE_CONST;
2002 }
2003
2004 return ESCAPE_TO_CALL;
2005 }
2006 else if (TREE_CODE (stmt) == ASM_EXPR)
2007 return ESCAPE_TO_ASM;
2008 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2009 {
2010 tree lhs = TREE_OPERAND (stmt, 0);
2011
2012 /* Get to the base of _REF nodes. */
2013 if (TREE_CODE (lhs) != SSA_NAME)
2014 lhs = get_base_address (lhs);
2015
2016 /* If we couldn't recognize the LHS of the assignment, assume that it
2017 is a non-local store. */
2018 if (lhs == NULL_TREE)
2019 return ESCAPE_UNKNOWN;
2020
2021 /* If the RHS is a conversion between a pointer and an integer, the
2022 pointer escapes since we can't track the integer. */
2023 if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
2024 || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
2025 || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2026 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
2027 (TREE_OPERAND (stmt, 1), 0)))
2028 && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
2029 return ESCAPE_BAD_CAST;
2030
2031 /* If the LHS is an SSA name, it can't possibly represent a non-local
2032 memory store. */
2033 if (TREE_CODE (lhs) == SSA_NAME)
2034 return NO_ESCAPE;
2035
2036 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2037 local variable, we cannot be sure whether it will escape, because we
2038 don't have information about objects not in SSA form. Need to
2039 implement something along the lines of
2040
2041 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2042 Midkiff, ``Escape Analysis for Java,'' in Proceedings of the
2043 Conference on Object-Oriented Programming Systems, Languages, and
2044 Applications (OOPSLA), pp. 1-19, 1999. */
2045 return ESCAPE_STORED_IN_GLOBAL;
2046 }
2047 else if (TREE_CODE (stmt) == RETURN_EXPR)
2048 return ESCAPE_TO_RETURN;
2049
2050 return NO_ESCAPE;
2051 }
2052
2053 /* Create a new tag with tree code CODE and type TYPE, using PREFIX to
2054 build its name. Does NOT push it into the current binding. */
2055
2056 static tree
2057 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2058 {
2059 tree tmp_var;
2060 tree new_type;
2061
2062 /* Make the type of the variable writable. */
2063 new_type = build_type_variant (type, 0, 0);
2064 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
2065
2066 tmp_var = build_decl (code, create_tmp_var_name (prefix),
2067 new_type);
2068 /* Make the variable writable. */
2069 TREE_READONLY (tmp_var) = 0;
2070
2071 /* It doesn't start out global. */
2072 MTAG_GLOBAL (tmp_var) = 0;
2073 TREE_STATIC (tmp_var) = 0;
2074 TREE_USED (tmp_var) = 1;
2075
2076 return tmp_var;
2077 }
2078
2079 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2080 is considered to represent all the pointers whose pointed-to types are
2081 in the same alias set class. Otherwise, the tag represents a single
2082 SSA_NAME pointer variable. */
2083
2084 static tree
2085 create_memory_tag (tree type, bool is_type_tag)
2086 {
2087 var_ann_t ann;
2088 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2089 type, (is_type_tag) ? "SMT" : "NMT");
2090
2091 /* By default, memory tags are local variables. Alias analysis will
2092 determine whether they should be considered globals. */
2093 DECL_CONTEXT (tag) = current_function_decl;
2094
2095 /* Memory tags are by definition addressable. */
2096 TREE_ADDRESSABLE (tag) = 1;
2097
2098 ann = get_var_ann (tag);
2099 ann->symbol_mem_tag = NULL_TREE;
2100
2101 /* Add the tag to the symbol table. */
2102 add_referenced_tmp_var (tag);
2103
2104 return tag;
2105 }
2106
2107
2108 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2109 This is used if P_i has been found to point to a specific set of
2110 variables or to a non-aliased memory location like the address returned
2111 by malloc functions. */
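
/* For instance (illustrative only), given

	p_1 = malloc (n);
	*p_1 = 0;

   the SSA name p_1 can be given its own name memory tag (an NMT.*
   variable), so the store through p_1 is modeled as a definition of
   that tag only, rather than of the symbol memory tag shared by every
   pointer with p's type.  */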
2112
2113 static tree
2114 get_nmt_for (tree ptr)
2115 {
2116 struct ptr_info_def *pi = get_ptr_info (ptr);
2117 tree tag = pi->name_mem_tag;
2118
2119 if (tag == NULL_TREE)
2120 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2121 return tag;
2122 }
2123
2124
2125 /* Return the symbol memory tag associated with pointer PTR. A memory
2126 tag is an artificial variable that represents the memory location
2127 pointed-to by PTR. It is used to model the effects of pointer
2128 de-references on addressable variables.
2129
2130 AI points to the data gathered during alias analysis. This
2131 function populates the array AI->POINTERS. */
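
/* As an illustration (the tag name is hypothetical; strict aliasing is
   assumed), given two pointers whose pointed-to types are in the same
   alias set class:

	int *p, *q;
	...
	*p = 1;
	*q = 2;

   both dereferences are modeled with one shared symbol memory tag, say
   SMT.3, because only one tag is created per alias set class.  A
   'double *' dereference would be modeled with a different tag.  */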
2132
2133 static tree
2134 get_tmt_for (tree ptr, struct alias_info *ai)
2135 {
2136 size_t i;
2137 tree tag;
2138 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2139 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2140
2141 /* To avoid creating unnecessary memory tags, only create one memory tag
2142 per alias set class. Note that it may be tempting to group
2143 memory tags based on conflicting alias sets instead of
2144 equivalence. That would be wrong because alias sets are not
2145 necessarily transitive (as demonstrated by the libstdc++ test
2146 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2147 such that conflicts (A, B) == true and conflicts (A, C) == true,
2148 it does not necessarily follow that conflicts (B, C) == true. */
2149 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2150 {
2151 struct alias_map_d *curr = ai->pointers[i];
2152 tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
2153 if (tag_set == curr->set)
2154 {
2155 tag = curr_tag;
2156 break;
2157 }
2158 }
2159
2160 /* If no existing memory tag matches the alias set of PTR's pointed-to
2161 type, create a new tag for PTR and add it to the POINTERS array. */
2162 if (tag == NULL_TREE)
2163 {
2164 struct alias_map_d *alias_map;
2165
2166 /* If PTR did not have a symbol tag already, create a new SMT.*
2167 artificial variable representing the memory location
2168 pointed-to by PTR. */
2169 if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
2170 tag = create_memory_tag (tag_type, true);
2171 else
2172 tag = var_ann (ptr)->symbol_mem_tag;
2173
2174 /* Add PTR to the POINTERS array. Note that we are not interested in
2175 PTR's alias set. Instead, we cache the alias set for the memory that
2176 PTR points to. */
2177 alias_map = XCNEW (struct alias_map_d);
2178 alias_map->var = ptr;
2179 alias_map->set = tag_set;
2180 ai->pointers[ai->num_pointers++] = alias_map;
2181 }
2182
2183 /* If the pointed-to type is volatile, so is the tag. */
2184 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2185
2186 /* Make sure that the symbol tag has the same alias set as the
2187 pointed-to type. */
2188 gcc_assert (tag_set == get_alias_set (tag));
2189
2190 return tag;
2191 }
2192
2193
2194 /* Create GLOBAL_VAR, an artificial global variable to act as a
2195 representative of all the variables that may be clobbered by function
2196 calls. */
2197
2198 static void
2199 create_global_var (void)
2200 {
2201 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2202 void_type_node);
2203 DECL_ARTIFICIAL (global_var) = 1;
2204 TREE_READONLY (global_var) = 0;
2205 DECL_EXTERNAL (global_var) = 1;
2206 TREE_STATIC (global_var) = 1;
2207 TREE_USED (global_var) = 1;
2208 DECL_CONTEXT (global_var) = NULL_TREE;
2209 TREE_THIS_VOLATILE (global_var) = 0;
2210 TREE_ADDRESSABLE (global_var) = 0;
2211
2212 create_var_ann (global_var);
2213 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2214 add_referenced_tmp_var (global_var);
2215 mark_sym_for_renaming (global_var);
2216 }
2217
2218
2219 /* Dump alias statistics on FILE. */
2220
2221 static void
2222 dump_alias_stats (FILE *file)
2223 {
2224 const char *funcname
2225 = lang_hooks.decl_printable_name (current_function_decl, 2);
2226 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2227 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2228 fprintf (file, "Total alias mayalias results:\t%u\n",
2229 alias_stats.alias_mayalias);
2230 fprintf (file, "Total alias noalias results:\t%u\n",
2231 alias_stats.alias_noalias);
2232 fprintf (file, "Total simple queries:\t%u\n",
2233 alias_stats.simple_queries);
2234 fprintf (file, "Total simple resolved:\t%u\n",
2235 alias_stats.simple_resolved);
2236 fprintf (file, "Total TBAA queries:\t%u\n",
2237 alias_stats.tbaa_queries);
2238 fprintf (file, "Total TBAA resolved:\t%u\n",
2239 alias_stats.tbaa_resolved);
2240 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2241 alias_stats.structnoaddress_queries);
2242 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2243 alias_stats.structnoaddress_resolved);
2244 }
2245
2246
2247 /* Dump alias information on FILE. */
2248
2249 void
2250 dump_alias_info (FILE *file)
2251 {
2252 size_t i;
2253 const char *funcname
2254 = lang_hooks.decl_printable_name (current_function_decl, 2);
2255 referenced_var_iterator rvi;
2256 tree var;
2257
2258 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2259
2260 fprintf (file, "Aliased symbols\n\n");
2261
2262 FOR_EACH_REFERENCED_VAR (var, rvi)
2263 {
2264 if (may_be_aliased (var))
2265 dump_variable (file, var);
2266 }
2267
2268 fprintf (file, "\nDereferenced pointers\n\n");
2269
2270 FOR_EACH_REFERENCED_VAR (var, rvi)
2271 {
2272 var_ann_t ann = var_ann (var);
2273 if (ann->symbol_mem_tag)
2274 dump_variable (file, var);
2275 }
2276
2277 fprintf (file, "\nSymbol memory tags\n\n");
2278
2279 FOR_EACH_REFERENCED_VAR (var, rvi)
2280 {
2281 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2282 dump_variable (file, var);
2283 }
2284
2285 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2286
2287 fprintf (file, "SSA_NAME pointers\n\n");
2288 for (i = 1; i < num_ssa_names; i++)
2289 {
2290 tree ptr = ssa_name (i);
2291 struct ptr_info_def *pi;
2292
2293 if (ptr == NULL_TREE)
2294 continue;
2295
2296 pi = SSA_NAME_PTR_INFO (ptr);
2297 if (!SSA_NAME_IN_FREE_LIST (ptr)
2298 && pi
2299 && pi->name_mem_tag)
2300 dump_points_to_info_for (file, ptr);
2301 }
2302
2303 fprintf (file, "\nName memory tags\n\n");
2304
2305 FOR_EACH_REFERENCED_VAR (var, rvi)
2306 {
2307 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2308 dump_variable (file, var);
2309 }
2310
2311 fprintf (file, "\n");
2312 }
2313
2314
2315 /* Dump alias information on stderr. */
2316
2317 void
2318 debug_alias_info (void)
2319 {
2320 dump_alias_info (stderr);
2321 }
2322
2323
2324 /* Return the alias information associated with pointer T. It creates a
2325 new instance if none existed. */
2326
2327 struct ptr_info_def *
2328 get_ptr_info (tree t)
2329 {
2330 struct ptr_info_def *pi;
2331
2332 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2333
2334 pi = SSA_NAME_PTR_INFO (t);
2335 if (pi == NULL)
2336 {
2337 pi = GGC_NEW (struct ptr_info_def);
2338 memset ((void *)pi, 0, sizeof (*pi));
2339 SSA_NAME_PTR_INFO (t) = pi;
2340 }
2341
2342 return pi;
2343 }
2344
2345
2346 /* Dump points-to information for SSA_NAME PTR into FILE. */
2347
2348 void
2349 dump_points_to_info_for (FILE *file, tree ptr)
2350 {
2351 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2352
2353 print_generic_expr (file, ptr, dump_flags);
2354
2355 if (pi)
2356 {
2357 if (pi->name_mem_tag)
2358 {
2359 fprintf (file, ", name memory tag: ");
2360 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2361 }
2362
2363 if (pi->is_dereferenced)
2364 fprintf (file, ", is dereferenced");
2365
2366 if (pi->value_escapes_p)
2367 fprintf (file, ", its value escapes");
2368
2369 if (pi->pt_anything)
2370 fprintf (file, ", points-to anything");
2371
2372 if (pi->pt_null)
2373 fprintf (file, ", points-to NULL");
2374
2375 if (pi->pt_vars)
2376 {
2377 unsigned ix;
2378 bitmap_iterator bi;
2379
2380 fprintf (file, ", points-to vars: { ");
2381 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2382 {
2383 print_generic_expr (file, referenced_var (ix), dump_flags);
2384 fprintf (file, " ");
2385 }
2386 fprintf (file, "}");
2387 }
2388 }
2389
2390 fprintf (file, "\n");
2391 }
2392
2393
2394 /* Dump points-to information for VAR into stderr. */
2395
2396 void
2397 debug_points_to_info_for (tree var)
2398 {
2399 dump_points_to_info_for (stderr, var);
2400 }
2401
2402
2403 /* Dump points-to information into FILE. NOTE: This function is slow, as
2404 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2405
2406 void
2407 dump_points_to_info (FILE *file)
2408 {
2409 basic_block bb;
2410 block_stmt_iterator si;
2411 ssa_op_iter iter;
2412 const char *fname =
2413 lang_hooks.decl_printable_name (current_function_decl, 2);
2414 referenced_var_iterator rvi;
2415 tree var;
2416
2417 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2418
2419 /* First dump points-to information for the default definitions of
2420 pointer variables. This is necessary because default definitions are
2421 not part of the code. */
2422 FOR_EACH_REFERENCED_VAR (var, rvi)
2423 {
2424 if (POINTER_TYPE_P (TREE_TYPE (var)))
2425 {
2426 tree def = default_def (var);
2427 if (def)
2428 dump_points_to_info_for (file, def);
2429 }
2430 }
2431
2432 /* Dump points-to information for every pointer defined in the program. */
2433 FOR_EACH_BB (bb)
2434 {
2435 tree phi;
2436
2437 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2438 {
2439 tree ptr = PHI_RESULT (phi);
2440 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2441 dump_points_to_info_for (file, ptr);
2442 }
2443
2444 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2445 {
2446 tree stmt = bsi_stmt (si);
2447 tree def;
2448 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2449 if (POINTER_TYPE_P (TREE_TYPE (def)))
2450 dump_points_to_info_for (file, def);
2451 }
2452 }
2453
2454 fprintf (file, "\n");
2455 }
2456
2457
2458 /* Dump points-to information into stderr. */
2459
2460 void
2461 debug_points_to_info (void)
2462 {
2463 dump_points_to_info (stderr);
2464 }
2465
2466 /* Dump to FILE the list of variables that may be aliasing VAR. */
2467
2468 void
2469 dump_may_aliases_for (FILE *file, tree var)
2470 {
2471 VEC(tree, gc) *aliases;
2472
2473 if (TREE_CODE (var) == SSA_NAME)
2474 var = SSA_NAME_VAR (var);
2475
2476 aliases = var_ann (var)->may_aliases;
2477 if (aliases)
2478 {
2479 size_t i;
2480 tree al;
2481 fprintf (file, "{ ");
2482 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2483 {
2484 print_generic_expr (file, al, dump_flags);
2485 fprintf (file, " ");
2486 }
2487 fprintf (file, "}");
2488 }
2489 }
2490
2491
2492 /* Dump to stderr the list of variables that may be aliasing VAR. */
2493
2494 void
2495 debug_may_aliases_for (tree var)
2496 {
2497 dump_may_aliases_for (stderr, var);
2498 }
2499
2500 /* Return true if VAR may be aliased. */
2501
2502 bool
2503 may_be_aliased (tree var)
2504 {
2505 /* Obviously. */
2506 if (TREE_ADDRESSABLE (var))
2507 return true;
2508
2509 /* Globally visible variables can have their addresses taken by other
2510 translation units. */
2511
2512 if (MTAG_P (var)
2513 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2514 return true;
2515 else if (!MTAG_P (var)
2516 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2517 return true;
2518
2519 /* Automatic variables can't have their addresses escape any other way.
2520 This must be after the check for global variables, as extern declarations
2521 do not have TREE_STATIC set. */
2522 if (!TREE_STATIC (var))
2523 return false;
2524
2525 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2526 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2527 we can only be sure the variable isn't addressable if it's local to the
2528 current function. */
2529 if (flag_unit_at_a_time)
2530 return false;
2531 if (decl_function_context (var) == current_function_decl)
2532 return false;
2533
2534 return true;
2535 }
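
/* A few illustrative cases (assuming unit-at-a-time mode; none of this
   code is from a real test):

	int global;		<- may be aliased: TREE_PUBLIC is set
	static int file_local;	<- not aliased unless its address is
				   taken somewhere

	void foo (void)
	{
	  int a, b;
	  int *p = &a;		<- 'a' becomes TREE_ADDRESSABLE and so
				   may be aliased; 'b' may not
	  *p = 0;
	}  */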
2536
2537
2538 /* Given two symbols, return TRUE if one is in the alias set of the other. */
2539 bool
2540 is_aliased_with (tree tag, tree sym)
2541 {
2542 size_t i;
2543 VEC(tree,gc) *aliases;
2544 tree al;
2545
2546 if (var_ann (sym)->is_aliased)
2547 {
2548 aliases = var_ann (tag)->may_aliases;
2549
2550 if (aliases == NULL)
2551 return false;
2552
2553 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2554 if (al == sym)
2555 return true;
2556 }
2557 else
2558 {
2559 aliases = var_ann (sym)->may_aliases;
2560
2561 if (aliases == NULL)
2562 return false;
2563
2564 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2565 if (al == tag)
2566 return true;
2567 }
2568
2569 return false;
2570 }
2571
2572
2573 /* Add VAR to the list of may-aliases of PTR's symbol tag. If PTR
2574 doesn't already have a symbol tag, create one. */
2575
2576 void
2577 add_type_alias (tree ptr, tree var)
2578 {
2579 VEC(tree, gc) *aliases;
2580 tree tag, al;
2581 var_ann_t ann = var_ann (ptr);
2582 subvar_t svars;
2583 VEC (tree, heap) *varvec = NULL;
2584 unsigned i;
2585
2586 if (ann->symbol_mem_tag == NULL_TREE)
2587 {
2588 tree q = NULL_TREE;
2589 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2590 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2591 safe_referenced_var_iterator rvi;
2592
2593 /* PTR doesn't have a symbol tag, create a new one and add VAR to
2594 the new tag's alias set.
2595
2596 FIXME, This is slower than necessary. We need to determine
2597 whether there is another pointer Q with the same alias set as
2598 PTR. This could be sped up by having symbol tags associated
2599 with types. */
2600 FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
2601 {
2602 if (POINTER_TYPE_P (TREE_TYPE (q))
2603 && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
2604 {
2605 /* Found another pointer Q with the same alias set as
2606 the PTR's pointed-to type. If Q has a symbol tag, use
2607 it. Otherwise, create a new memory tag for PTR. */
2608 var_ann_t ann1 = var_ann (q);
2609 if (ann1->symbol_mem_tag)
2610 ann->symbol_mem_tag = ann1->symbol_mem_tag;
2611 else
2612 ann->symbol_mem_tag = create_memory_tag (tag_type, true);
2613 goto found_tag;
2614 }
2615 }
2616
2617 /* Couldn't find any other pointer with a symbol tag we could use.
2618 Create a new memory tag for PTR. */
2619 ann->symbol_mem_tag = create_memory_tag (tag_type, true);
2620 }
2621
2622 found_tag:
2623 /* If VAR is not already PTR's symbol tag, add it to the may-alias set
2624 for PTR's symbol tag. */
2625 gcc_assert (!MTAG_P (var));
2626 tag = ann->symbol_mem_tag;
2627
2628 /* If VAR has subvars, add the subvars to the tag instead of the
2629 actual var. */
2630 if (var_can_have_subvars (var)
2631 && (svars = get_subvars_for_var (var)))
2632 {
2633 subvar_t sv;
2634 for (sv = svars; sv; sv = sv->next)
2635 add_may_alias (tag, sv->var);
2636 }
2637 else
2638 add_may_alias (tag, var);
2639
2640 /* TAG and its set of aliases need to be marked for renaming. */
2641 mark_sym_for_renaming (tag);
2642 if ((aliases = var_ann (tag)->may_aliases) != NULL)
2643 {
2644 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2645 mark_sym_for_renaming (al);
2646 }
2647
2648 /* If we had grouped aliases, VAR may have aliases of its own. Mark
2649 them for renaming as well. Other statements referencing the
2650 aliases of VAR will need to be updated. */
2651 if ((aliases = var_ann (var)->may_aliases) != NULL)
2652 {
2653 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2654 mark_sym_for_renaming (al);
2655 }
2656 VEC_free (tree, heap, varvec);
2657 }
2658
2659
2660 /* Create a new symbol tag for PTR. Construct the may-alias list of the
2661 new tag so that it has the aliasing of VAR.
2662
2663 Note, the set of aliases represented by the new symbol tag are not marked
2664 for renaming. */
2665
2666 void
2667 new_type_alias (tree ptr, tree var)
2668 {
2669 var_ann_t p_ann = var_ann (ptr);
2670 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2671 var_ann_t v_ann = var_ann (var);
2672 tree tag;
2673 subvar_t svars;
2674
2675 gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2676 gcc_assert (!MTAG_P (var));
2677
2678 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2679 subvars, add the subvars to the tag instead of the actual var. */
2680 if (var_can_have_subvars (var)
2681 && (svars = get_subvars_for_var (var)))
2682 {
2683 subvar_t sv;
2684
2685 tag = create_memory_tag (tag_type, true);
2686 p_ann->symbol_mem_tag = tag;
2687
2688 for (sv = svars; sv; sv = sv->next)
2689 add_may_alias (tag, sv->var);
2690 }
2691 else
2692 {
2693 /* The following is based on code in add_stmt_operand to ensure that the
2694 same defs/uses/vdefs/vuses will be found after replacing a reference
2695 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2696 is the address of var. */
2697 VEC(tree, gc) *aliases = v_ann->may_aliases;
2698
2699 if ((aliases != NULL)
2700 && (VEC_length (tree, aliases) == 1))
2701 {
2702 tree ali = VEC_index (tree, aliases, 0);
2703
2704 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2705 {
2706 p_ann->symbol_mem_tag = ali;
2707 return;
2708 }
2709 }
2710
2711 tag = create_memory_tag (tag_type, true);
2712 p_ann->symbol_mem_tag = tag;
2713
2714 if (aliases == NULL)
2715 add_may_alias (tag, var);
2716 else
2717 {
2718 unsigned i;
2719 tree al;
2720
2721 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2722 add_may_alias (tag, al);
2723 }
2724 }
2725
2726 TREE_READONLY (tag) = TREE_READONLY (var);
2727 MTAG_GLOBAL (tag) = is_global_var (var);
2728 }
2729
2730
2731
2732 /* This represents the used range of a variable. */
2733
2734 typedef struct used_part
2735 {
2736 HOST_WIDE_INT minused;
2737 HOST_WIDE_INT maxused;
2738 /* True if we have an explicit use/def of some portion of this variable,
2739 even if it is all of it, e.g. a.b = 5 or temp = a.b. */
2740 bool explicit_uses;
2741 /* True if we have an implicit use/def of some portion of this
2742 variable. Implicit uses occur when we can't tell what part we
2743 are referencing, and have to make conservative assumptions. */
2744 bool implicit_uses;
2745 /* True if the structure is only written to or has its address taken. */
2746 bool write_only;
2747 } *used_part_t;
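
/* For example, given a hypothetical 'struct S { int a; int b; } s;'
   (with 'x' an int and 'p' a 'struct S *') and the statements

	s.b = 5;	<- explicit use of the bits holding 'b'; only a
			   store, so write_only can remain true
	x = s.a;	<- explicit use of the bits holding 'a'; a read,
			   so write_only becomes false
	p = &s;		<- implicit use of all of 's', since we do not
			   track what the address is used for

   the range recorded by minused and maxused ends up covering all of
   's', explicit_uses and implicit_uses are both true, and write_only
   is false.  (See find_used_portions below for how these fields are
   filled in.)  */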
2748
2749 /* A hash table mapping variable uids to used_part structures. */
2750
2751 static htab_t used_portions;
2752
2753 struct used_part_map
2754 {
2755 unsigned int uid;
2756 used_part_t to;
2757 };
2758
2759 /* Return true if the uids in the two used part maps are equal. */
2760
2761 static int
2762 used_part_map_eq (const void *va, const void *vb)
2763 {
2764 const struct used_part_map *a = (const struct used_part_map *) va;
2765 const struct used_part_map *b = (const struct used_part_map *) vb;
2766 return (a->uid == b->uid);
2767 }
2768
2769 /* Hash a used_part_map from its uid. */
2770
2771 static unsigned int
2772 used_part_map_hash (const void *item)
2773 {
2774 return ((const struct used_part_map *)item)->uid;
2775 }
2776
2777 /* Free a used part map element. */
2778
2779 static void
2780 free_used_part_map (void *item)
2781 {
2782 free (((struct used_part_map *)item)->to);
2783 free (item);
2784 }
2785
2786 /* Lookup a used_part structure for a UID. */
2787
2788 static used_part_t
2789 up_lookup (unsigned int uid)
2790 {
2791 struct used_part_map *h, in;
2792 in.uid = uid;
2793 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2794 if (!h)
2795 return NULL;
2796 return h->to;
2797 }
2798
2799 /* Insert the pair UID, TO into the used part hashtable. */
2800
2801 static void
2802 up_insert (unsigned int uid, used_part_t to)
2803 {
2804 struct used_part_map *h;
2805 void **loc;
2806
2807 h = XNEW (struct used_part_map);
2808 h->uid = uid;
2809 h->to = to;
2810 loc = htab_find_slot_with_hash (used_portions, h,
2811 uid, INSERT);
2812 if (*loc != NULL)
2813 free (*loc);
2814 *(struct used_part_map **) loc = h;
2815 }
2816
2817
2818 /* Given a variable uid, UID, get or create the entry in the used portions
2819 table for the variable. */
2820
2821 static used_part_t
2822 get_or_create_used_part_for (size_t uid)
2823 {
2824 used_part_t up;
2825 if ((up = up_lookup (uid)) == NULL)
2826 {
2827 up = XCNEW (struct used_part);
2828 up->minused = INT_MAX;
2829 up->maxused = 0;
2830 up->explicit_uses = false;
2831 up->implicit_uses = false;
2832 up->write_only = true;
2833 }
2834
2835 return up;
2836 }
2837
2838
2839 /* Create and return a structure sub-variable for field type FIELD at
2840 offset OFFSET, with size SIZE, of variable VAR. */
2841
2842 static tree
2843 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2844 unsigned HOST_WIDE_INT size)
2845 {
2846 var_ann_t ann;
2847 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2848
2849 /* We need to copy the various flags from VAR to SUBVAR, so that
2850 SUBVAR is considered global (is_global_var) iff the original variable was. */
2851 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2852 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2853 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2854 TREE_STATIC (subvar) = TREE_STATIC (var);
2855 TREE_READONLY (subvar) = TREE_READONLY (var);
2856
2857 /* Add the new variable to REFERENCED_VARS. */
2858 ann = get_var_ann (subvar);
2859 ann->symbol_mem_tag = NULL;
2860 add_referenced_tmp_var (subvar);
2861 SFT_PARENT_VAR (subvar) = var;
2862 SFT_OFFSET (subvar) = offset;
2863 SFT_SIZE (subvar) = size;
2864 return subvar;
2865 }
2866
2867
2868 /* Given an aggregate VAR, create the subvariables that represent its
2869 fields. */
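
/* For instance (the field tag names are hypothetical), for

	struct { int a; int b; } s;
	...
	s.a = 1;
	s.b = 2;

   two structure field tags can be created, say SFT.1 for the bits of
   's.a' and SFT.2 for the bits of 's.b', so that the two stores are
   modeled as definitions of two different symbols instead of two
   conflicting definitions of 's'.  */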
2870
2871 static void
2872 create_overlap_variables_for (tree var)
2873 {
2874 VEC(fieldoff_s,heap) *fieldstack = NULL;
2875 used_part_t up;
2876 size_t uid = DECL_UID (var);
2877
2878 up = up_lookup (uid);
2879 if (!up
2880 || up->write_only)
2881 return;
2882
2883 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2884 if (VEC_length (fieldoff_s, fieldstack) != 0)
2885 {
2886 subvar_t *subvars;
2887 fieldoff_s *fo;
2888 bool notokay = false;
2889 int fieldcount = 0;
2890 int i;
2891 HOST_WIDE_INT lastfooffset = -1;
2892 HOST_WIDE_INT lastfosize = -1;
2893 tree lastfotype = NULL_TREE;
2894
2895 /* Not all fields have DECL_SIZE set; for those that don't, we do not
2896 know their size and thus cannot handle them.
2897 The same is true of fields whose DECL_SIZE is not an integer
2898 constant (such as variable-sized fields).
2899 Fields whose offsets are not constant will have an offset < 0.
2900 We *could* handle fields that are constant-sized arrays, but
2901 currently don't. Doing so would require some extra changes to
2902 tree-ssa-operands.c. */
2903
2904 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2905 {
2906 if (!fo->size
2907 || TREE_CODE (fo->size) != INTEGER_CST
2908 || fo->offset < 0)
2909 {
2910 notokay = true;
2911 break;
2912 }
2913 fieldcount++;
2914 }
2915
2916 /* The current heuristic we use is as follows:
2917 If the variable has no used portions in this function, no
2918 structure vars are created for it.
2919 Otherwise,
2920 If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS
2921 fields, we always create structure vars for it.
2922 If the variable has SALIAS_MAX_IMPLICIT_FIELDS or more fields
2923 and some explicit uses, we create structure vars for it.
2924 If the variable has SALIAS_MAX_IMPLICIT_FIELDS or more fields
2925 and no explicit uses, we do not create structure vars for it.
2926 */
2927
2928 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2929 && !up->explicit_uses)
2930 {
2931 if (dump_file && (dump_flags & TDF_DETAILS))
2932 {
2933 fprintf (dump_file, "Variable ");
2934 print_generic_expr (dump_file, var, 0);
2935 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2936 }
2937 notokay = true;
2938 }
2939
2940 /* Bail out, if we can't create overlap variables. */
2941 if (notokay)
2942 {
2943 VEC_free (fieldoff_s, heap, fieldstack);
2944 return;
2945 }
2946
2947 /* Otherwise, create the variables. */
2948 subvars = lookup_subvars_for_var (var);
2949
2950 sort_fieldstack (fieldstack);
2951
2952 for (i = VEC_length (fieldoff_s, fieldstack);
2953 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2954 {
2955 subvar_t sv;
2956 HOST_WIDE_INT fosize;
2957 tree currfotype;
2958
2959 fosize = TREE_INT_CST_LOW (fo->size);
2960 currfotype = fo->type;
2961
2962 /* If this field isn't in the used portion,
2963 or it has the exact same offset and size as the last
2964 field, skip it. */
2965
2966 if (((fo->offset <= up->minused
2967 && fo->offset + fosize <= up->minused)
2968 || fo->offset >= up->maxused)
2969 || (fo->offset == lastfooffset
2970 && fosize == lastfosize
2971 && currfotype == lastfotype))
2972 continue;
2973 sv = GGC_NEW (struct subvar);
2974 sv->next = *subvars;
2975 sv->var = create_sft (var, fo->type, fo->offset, fosize);
2976
2977 if (dump_file)
2978 {
2979 fprintf (dump_file, "structure field tag %s created for var %s",
2980 get_name (sv->var), get_name (var));
2981 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
2982 SFT_OFFSET (sv->var));
2983 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
2984 SFT_SIZE (sv->var));
2985 fprintf (dump_file, "\n");
2986 }
2987
2988 lastfotype = currfotype;
2989 lastfooffset = fo->offset;
2990 lastfosize = fosize;
2991 *subvars = sv;
2992 }
2993
2994 /* Once we have created subvars, the original is no longer call
2995 clobbered on its own. Its call clobbered status depends
2996 completely on the call clobbered status of the subvars.
2997
2998 add_referenced_var in the above loop will take care of
2999 marking subvars of global variables as call clobbered for us
3000 to start, since they are global as well. */
3001 clear_call_clobbered (var);
3002 }
3003
3004 VEC_free (fieldoff_s, heap, fieldstack);
3005 }
3006
3007
3008 /* Find the conservative answer to the question of what portions of what
3009 structures are used by this statement. We assume that if we have a
3010 component ref with a known size + offset, we only need that part
3011 of the structure. For unknown cases, or cases where we do something
3012 to the whole structure, we assume we need to create fields for the
3013 entire structure. */
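
/* For example (illustrative only), when walking

	temp = a.b;

   the COMPONENT_REF 'a.b' has a known offset and size, so only that
   portion of 'a' needs to be recorded as used.  When walking

	p = &a;

   we cannot tell which parts of 'a' the address will be used to reach,
   so the ADDR_EXPR case below conservatively records all of 'a' as
   used.  */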
3014
3015 static tree
3016 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3017 {
3018 switch (TREE_CODE (*tp))
3019 {
3020 case MODIFY_EXPR:
3021 /* Recurse manually here to track whether the use is in the
3022 LHS of an assignment. */
3023 find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
3024 return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
3025 case REALPART_EXPR:
3026 case IMAGPART_EXPR:
3027 case COMPONENT_REF:
3028 case ARRAY_REF:
3029 {
3030 HOST_WIDE_INT bitsize;
3031 HOST_WIDE_INT bitmaxsize;
3032 HOST_WIDE_INT bitpos;
3033 tree ref;
3034 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3035 if (DECL_P (ref)
3036 && var_can_have_subvars (ref)
3037 && bitmaxsize != -1)
3038 {
3039 size_t uid = DECL_UID (ref);
3040 used_part_t up;
3041
3042 up = get_or_create_used_part_for (uid);
3043
3044 if (bitpos <= up->minused)
3045 up->minused = bitpos;
3046 if ((bitpos + bitmaxsize >= up->maxused))
3047 up->maxused = bitpos + bitmaxsize;
3048
3049 if (bitsize == bitmaxsize)
3050 up->explicit_uses = true;
3051 else
3052 up->implicit_uses = true;
3053 if (!lhs_p)
3054 up->write_only = false;
3055 up_insert (uid, up);
3056
3057 *walk_subtrees = 0;
3058 return NULL_TREE;
3059 }
3060 }
3061 break;
3062 /* This is here to make sure we mark the entire base variable as used
3063 when you take its address. Because our used portion analysis is
3064 simple, we aren't looking at casts or pointer arithmetic to see what
3065 happens when you take the address. */
3066 case ADDR_EXPR:
3067 {
3068 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3069
3070 if (var
3071 && DECL_P (var)
3072 && DECL_SIZE (var)
3073 && var_can_have_subvars (var)
3074 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3075 {
3076 used_part_t up;
3077 size_t uid = DECL_UID (var);
3078
3079 up = get_or_create_used_part_for (uid);
3080
3081 up->minused = 0;
3082 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3083 up->implicit_uses = true;
3084 if (!lhs_p)
3085 up->write_only = false;
3086
3087 up_insert (uid, up);
3088 *walk_subtrees = 0;
3089 return NULL_TREE;
3090 }
3091 }
3092 break;
3093 case CALL_EXPR:
3094 {
3095 tree *arg;
3096 for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
3097 {
3098 if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
3099 find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
3100 }
3101 *walk_subtrees = 0;
3102 return NULL_TREE;
3103 }
3104 case VAR_DECL:
3105 case PARM_DECL:
3106 case RESULT_DECL:
3107 {
3108 tree var = *tp;
3109 if (DECL_SIZE (var)
3110 && var_can_have_subvars (var)
3111 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3112 {
3113 used_part_t up;
3114 size_t uid = DECL_UID (var);
3115
3116 up = get_or_create_used_part_for (uid);
3117
3118 up->minused = 0;
3119 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3120 up->implicit_uses = true;
3121
3122 up_insert (uid, up);
3123 *walk_subtrees = 0;
3124 return NULL_TREE;
3125 }
3126 }
3127 break;
3128
3129 default:
3130 break;
3131
3132 }
3133 return NULL_TREE;
3134 }
3135
3136 /* Create structure field variables for structures used in this function. */
3137
3138 static unsigned int
3139 create_structure_vars (void)
3140 {
3141 basic_block bb;
3142 safe_referenced_var_iterator rvi;
3143 VEC (tree, heap) *varvec = NULL;
3144 tree var;
3145
3146 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3147 free_used_part_map);
3148
3149 FOR_EACH_BB (bb)
3150 {
3151 block_stmt_iterator bsi;
3152 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3153 {
3154 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3155 find_used_portions,
3156 NULL);
3157 }
3158 }
3159 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3160 {
3161 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3162 if (var
3163 && DECL_SIZE (var)
3164 && var_can_have_subvars (var)
3165 && !MTAG_P (var)
3166 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3167 create_overlap_variables_for (var);
3168 }
3169 htab_delete (used_portions);
3170 VEC_free (tree, heap, varvec);
3171 return 0;
3172 }
3173
3174 static bool
3175 gate_structure_vars (void)
3176 {
3177 return flag_tree_salias != 0;
3178 }
3179
3180 struct tree_opt_pass pass_create_structure_vars =
3181 {
3182 "salias", /* name */
3183 gate_structure_vars, /* gate */
3184 create_structure_vars, /* execute */
3185 NULL, /* sub */
3186 NULL, /* next */
3187 0, /* static_pass_number */
3188 0, /* tv_id */
3189 PROP_cfg, /* properties_required */
3190 0, /* properties_provided */
3191 0, /* properties_destroyed */
3192 0, /* todo_flags_start */
3193 TODO_dump_func, /* todo_flags_finish */
3194 0 /* letter */
3195 };
3196
3197 /* Reset the DECL_CALL_CLOBBERED flags on our referenced vars. In
3198 theory, this only needs to be done for globals. */
3199
3200 static unsigned int
3201 reset_cc_flags (void)
3202 {
3203 tree var;
3204 referenced_var_iterator rvi;
3205
3206 FOR_EACH_REFERENCED_VAR (var, rvi)
3207 DECL_CALL_CLOBBERED (var) = false;
3208 return 0;
3209 }
3210
3211 struct tree_opt_pass pass_reset_cc_flags =
3212 {
3213 NULL, /* name */
3214 NULL, /* gate */
3215 reset_cc_flags, /* execute */
3216 NULL, /* sub */
3217 NULL, /* next */
3218 0, /* static_pass_number */
3219 0, /* tv_id */
3220 PROP_referenced_vars |PROP_cfg, /* properties_required */
3221 0, /* properties_provided */
3222 0, /* properties_destroyed */
3223 0, /* todo_flags_start */
3224 0, /* todo_flags_finish */
3225 0 /* letter */
3226 };