tree-flow.h: Remove the prototype for is_aliased_with.
[gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49 #include "vecprim.h"
50 #include "pointer-set.h"
51
/* Structure to map a variable to its alias set.  */
struct alias_map_d
{
  /* The variable being mapped.  */
  tree var;

  /* VAR's alias set.  */
  HOST_WIDE_INT set;
};
59
60
/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  /* Total number of may-alias queries and how many of them were
     answered may-alias vs. no-alias.  */
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;

  /* Queries resolved by the quick, structural checks.  */
  unsigned int simple_queries;
  unsigned int simple_resolved;

  /* Queries resolved by type-based alias analysis (TBAA).  */
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;

  /* Queries resolved by the struct-no-address heuristic.  */
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};
74
75
76 /* Local variables. */
77 static struct alias_stats_d alias_stats;
78 static bitmap_obstack alias_bitmap_obstack;
79
80 /* Local functions. */
81 static void compute_flow_insensitive_aliasing (struct alias_info *);
82 static void finalize_ref_all_pointers (struct alias_info *);
83 static void dump_alias_stats (FILE *);
84 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
85 static tree create_memory_tag (tree type, bool is_type_tag);
86 static tree get_smt_for (tree, struct alias_info *);
87 static tree get_nmt_for (tree);
88 static void add_may_alias (tree, tree);
89 static struct alias_info *init_alias_info (void);
90 static void delete_alias_info (struct alias_info *);
91 static void compute_flow_sensitive_aliasing (struct alias_info *);
92 static void setup_pointers_and_addressables (struct alias_info *);
93 static void create_global_var (void);
94 static void maybe_create_global_var (void);
95 static void set_pt_anything (tree);
96
97 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
98
99
100
101 /* Return memory reference stats for symbol VAR. Create a new slot in
102 cfun->gimple_df->mem_sym_stats if needed. */
103
104 static struct mem_sym_stats_d *
105 get_mem_sym_stats_for (tree var)
106 {
107 void **slot;
108 struct mem_sym_stats_d *stats;
109 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
110
111 gcc_assert (map);
112
113 slot = pointer_map_insert (map, var);
114 if (*slot == NULL)
115 {
116 stats = XCNEW (struct mem_sym_stats_d);
117 stats->var = var;
118 *slot = (void *) stats;
119 }
120 else
121 stats = (struct mem_sym_stats_d *) *slot;
122
123 return stats;
124 }
125
126
127 /* Set MPT to be the memory partition associated with symbol SYM. */
128
129 static inline void
130 set_memory_partition (tree sym, tree mpt)
131 {
132 #if defined ENABLE_CHECKING
133 if (mpt)
134 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
135 && !is_gimple_reg (sym));
136 #endif
137
138 var_ann (sym)->mpt = mpt;
139 if (mpt)
140 {
141 if (MPT_SYMBOLS (mpt) == NULL)
142 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
143
144 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
145
146 /* MPT inherits the call-clobbering attributes from SYM. */
147 if (is_call_clobbered (sym))
148 {
149 MTAG_GLOBAL (mpt) = 1;
150 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
151 }
152 }
153 }
154
155
156 /* Mark variable VAR as being non-addressable. */
157
158 static void
159 mark_non_addressable (tree var)
160 {
161 tree mpt;
162
163 if (!TREE_ADDRESSABLE (var))
164 return;
165
166 mpt = memory_partition (var);
167
168 if (!MTAG_P (var))
169 var_ann (var)->call_clobbered = false;
170
171 bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
172 TREE_ADDRESSABLE (var) = 0;
173
174 if (mpt)
175 {
176 /* Note that it's possible for a symbol to have an associated
177 MPT and the MPT have a NULL empty set. During
178 init_alias_info, all MPTs get their sets cleared out, but the
179 symbols still point to the old MPTs that used to hold them.
180 This is done so that compute_memory_partitions can now which
181 symbols are losing or changing partitions and mark them for
182 renaming. */
183 if (MPT_SYMBOLS (mpt))
184 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
185 set_memory_partition (var, NULL_TREE);
186 }
187 }
188
189
190 /* qsort comparison function to sort type/name tags by DECL_UID. */
191
192 static int
193 sort_tags_by_id (const void *pa, const void *pb)
194 {
195 tree a = *(tree *)pa;
196 tree b = *(tree *)pb;
197
198 return DECL_UID (a) - DECL_UID (b);
199 }
200
201 /* Initialize WORKLIST to contain those memory tags that are marked call
202 clobbered. Initialized WORKLIST2 to contain the reasons these
203 memory tags escaped. */
204
205 static void
206 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
207 VEC (int, heap) **worklist2)
208 {
209 referenced_var_iterator rvi;
210 tree curr;
211
212 FOR_EACH_REFERENCED_VAR (curr, rvi)
213 {
214 if (MTAG_P (curr) && is_call_clobbered (curr))
215 {
216 VEC_safe_push (tree, heap, *worklist, curr);
217 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
218 }
219 }
220 }
221
222 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
223 ALIAS is not already marked call clobbered, and is a memory
224 tag. */
225
226 static void
227 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
228 VEC (int, heap) **worklist2,
229 int reason)
230 {
231 if (MTAG_P (alias) && !is_call_clobbered (alias))
232 {
233 VEC_safe_push (tree, heap, *worklist, alias);
234 VEC_safe_push (int, heap, *worklist2, reason);
235 }
236 }
237
238 /* Mark aliases of TAG as call clobbered, and place any tags on the
239 alias list that were not already call clobbered on WORKLIST. */
240
241 static void
242 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
243 VEC (int, heap) **worklist2)
244 {
245 bitmap aliases;
246 bitmap_iterator bi;
247 unsigned int i;
248 tree entry;
249 var_ann_t ta = var_ann (tag);
250
251 if (!MTAG_P (tag))
252 return;
253 aliases = may_aliases (tag);
254 if (!aliases)
255 return;
256
257 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
258 {
259 entry = referenced_var (i);
260 if (!unmodifiable_var_p (entry))
261 {
262 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
263 mark_call_clobbered (entry, ta->escape_mask);
264 }
265 }
266 }
267
268 /* Tags containing global vars need to be marked as global.
269 Tags containing call clobbered vars need to be marked as call
270 clobbered. */
271
272 static void
273 compute_tag_properties (void)
274 {
275 referenced_var_iterator rvi;
276 tree tag;
277 bool changed = true;
278 VEC (tree, heap) *taglist = NULL;
279
280 FOR_EACH_REFERENCED_VAR (tag, rvi)
281 {
282 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
283 continue;
284 VEC_safe_push (tree, heap, taglist, tag);
285 }
286
287 /* We sort the taglist by DECL_UID, for two reasons.
288 1. To get a sequential ordering to make the bitmap accesses
289 faster.
290 2. Because of the way we compute aliases, it's more likely that
291 an earlier tag is included in a later tag, and this will reduce
292 the number of iterations.
293
294 If we had a real tag graph, we would just topo-order it and be
295 done with it. */
296 qsort (VEC_address (tree, taglist),
297 VEC_length (tree, taglist),
298 sizeof (tree),
299 sort_tags_by_id);
300
301 /* Go through each tag not marked as global, and if it aliases
302 global vars, mark it global.
303
304 If the tag contains call clobbered vars, mark it call
305 clobbered.
306
307 This loop iterates because tags may appear in the may-aliases
308 list of other tags when we group. */
309
310 while (changed)
311 {
312 unsigned int k;
313
314 changed = false;
315 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
316 {
317 bitmap ma;
318 bitmap_iterator bi;
319 unsigned int i;
320 tree entry;
321 bool tagcc = is_call_clobbered (tag);
322 bool tagglobal = MTAG_GLOBAL (tag);
323
324 if (tagcc && tagglobal)
325 continue;
326
327 ma = may_aliases (tag);
328 if (!ma)
329 continue;
330
331 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
332 {
333 entry = referenced_var (i);
334 /* Call clobbered entries cause the tag to be marked
335 call clobbered. */
336 if (!tagcc && is_call_clobbered (entry))
337 {
338 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
339 tagcc = true;
340 changed = true;
341 }
342
343 /* Global vars cause the tag to be marked global. */
344 if (!tagglobal && is_global_var (entry))
345 {
346 MTAG_GLOBAL (tag) = true;
347 changed = true;
348 tagglobal = true;
349 }
350
351 /* Early exit once both global and cc are set, since the
352 loop can't do any more than that. */
353 if (tagcc && tagglobal)
354 break;
355 }
356 }
357 }
358 VEC_free (tree, heap, taglist);
359 }
360
361 /* Set up the initial variable clobbers and globalness.
362 When this function completes, only tags whose aliases need to be
363 clobbered will be set clobbered. Tags clobbered because they
364 contain call clobbered vars are handled in compute_tag_properties. */
365
366 static void
367 set_initial_properties (struct alias_info *ai)
368 {
369 unsigned int i;
370 referenced_var_iterator rvi;
371 tree var;
372 tree ptr;
373
374 FOR_EACH_REFERENCED_VAR (var, rvi)
375 {
376 if (is_global_var (var)
377 && (!var_can_have_subvars (var)
378 || get_subvars_for_var (var) == NULL))
379 {
380 if (!unmodifiable_var_p (var))
381 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
382 }
383 else if (TREE_CODE (var) == PARM_DECL
384 && gimple_default_def (cfun, var)
385 && POINTER_TYPE_P (TREE_TYPE (var)))
386 {
387 tree def = gimple_default_def (cfun, var);
388 get_ptr_info (def)->value_escapes_p = 1;
389 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
390 }
391 }
392
393 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
394 {
395 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
396 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
397
398 if (pi->value_escapes_p)
399 {
400 /* If PTR escapes then its associated memory tags and
401 pointed-to variables are call-clobbered. */
402 if (pi->name_mem_tag)
403 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
404
405 if (tag)
406 mark_call_clobbered (tag, pi->escape_mask);
407
408 if (pi->pt_vars)
409 {
410 bitmap_iterator bi;
411 unsigned int j;
412 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
413 if (!unmodifiable_var_p (referenced_var (j)))
414 mark_call_clobbered (referenced_var (j), pi->escape_mask);
415 }
416 }
417
418 /* If the name tag is call clobbered, so is the symbol tag
419 associated with the base VAR_DECL. */
420 if (pi->name_mem_tag
421 && tag
422 && is_call_clobbered (pi->name_mem_tag))
423 mark_call_clobbered (tag, pi->escape_mask);
424
425 /* Name tags and symbol tags that we don't know where they point
426 to, might point to global memory, and thus, are clobbered.
427
428 FIXME: This is not quite right. They should only be
429 clobbered if value_escapes_p is true, regardless of whether
430 they point to global memory or not.
431 So removing this code and fixing all the bugs would be nice.
432 It is the cause of a bunch of clobbering. */
433 if ((pi->pt_global_mem || pi->pt_anything)
434 && pi->is_dereferenced && pi->name_mem_tag)
435 {
436 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
437 MTAG_GLOBAL (pi->name_mem_tag) = true;
438 }
439
440 if ((pi->pt_global_mem || pi->pt_anything)
441 && pi->is_dereferenced
442 && tag)
443 {
444 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
445 MTAG_GLOBAL (tag) = true;
446 }
447 }
448 }
449
450 /* Compute which variables need to be marked call clobbered because
451 their tag is call clobbered, and which tags need to be marked
452 global because they contain global variables. */
453
454 static void
455 compute_call_clobbered (struct alias_info *ai)
456 {
457 VEC (tree, heap) *worklist = NULL;
458 VEC(int,heap) *worklist2 = NULL;
459
460 set_initial_properties (ai);
461 init_transitive_clobber_worklist (&worklist, &worklist2);
462 while (VEC_length (tree, worklist) != 0)
463 {
464 tree curr = VEC_pop (tree, worklist);
465 int reason = VEC_pop (int, worklist2);
466
467 mark_call_clobbered (curr, reason);
468 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
469 }
470 VEC_free (tree, heap, worklist);
471 VEC_free (int, heap, worklist2);
472 compute_tag_properties ();
473 }
474
475
476 /* Dump memory partition information to FILE. */
477
478 static void
479 dump_memory_partitions (FILE *file)
480 {
481 unsigned i, npart;
482 unsigned long nsyms;
483 tree mpt;
484
485 fprintf (file, "\nMemory partitions\n\n");
486 for (i = 0, npart = 0, nsyms = 0;
487 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
488 i++)
489 {
490 if (mpt)
491 {
492 bitmap syms = MPT_SYMBOLS (mpt);
493 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
494
495 fprintf (file, "#%u: ", i);
496 print_generic_expr (file, mpt, 0);
497 fprintf (file, ": %lu elements: ", n);
498 dump_decl_set (file, syms);
499 npart++;
500 nsyms += n;
501 }
502 }
503
504 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
505 }
506
507
/* Dump memory partition information to stderr; callable from the
   debugger.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}
515
516
517 /* Return true if memory partitioning is required given the memory
518 reference estimates in STATS. */
519
520 static inline bool
521 need_to_partition_p (struct mem_ref_stats_d *stats)
522 {
523 long num_vops = stats->num_vuses + stats->num_vdefs;
524 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
525 return (num_vops > (long) MAX_ALIASED_VOPS
526 && avg_vops > (long) AVG_ALIASED_VOPS);
527 }
528
529
530 /* Count the actual number of virtual operators in CFUN. Note that
531 this is only meaningful after virtual operands have been populated,
532 so it should be invoked at the end of compute_may_aliases.
533
534 The number of virtual operators are stored in *NUM_VDEFS_P and
535 *NUM_VUSES_P, the number of partitioned symbols in
536 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
537 *NUM_UNPARTITIONED_P.
538
539 If any of these pointers is NULL the corresponding count is not
540 computed. */
541
542 static void
543 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
544 long *num_partitioned_p, long *num_unpartitioned_p)
545 {
546 block_stmt_iterator bsi;
547 basic_block bb;
548 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
549 referenced_var_iterator rvi;
550 tree sym;
551
552 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
553
554 if (num_vuses_p || num_vdefs_p)
555 FOR_EACH_BB (bb)
556 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
557 {
558 tree stmt = bsi_stmt (bsi);
559 if (stmt_references_memory_p (stmt))
560 {
561 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
562 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
563 }
564 }
565
566 if (num_partitioned_p || num_unpartitioned_p)
567 FOR_EACH_REFERENCED_VAR (sym, rvi)
568 {
569 if (is_gimple_reg (sym))
570 continue;
571
572 if (memory_partition (sym))
573 num_partitioned++;
574 else
575 num_unpartitioned++;
576 }
577
578 if (num_vdefs_p)
579 *num_vdefs_p = num_vdefs;
580
581 if (num_vuses_p)
582 *num_vuses_p = num_vuses;
583
584 if (num_partitioned_p)
585 *num_partitioned_p = num_partitioned;
586
587 if (num_unpartitioned_p)
588 *num_unpartitioned_p = num_unpartitioned;
589 }
590
591
592 /* Dump memory reference stats for function CFUN to FILE. */
593
594 void
595 dump_mem_ref_stats (FILE *file)
596 {
597 long actual_num_vuses, actual_num_vdefs;
598 long num_partitioned, num_unpartitioned;
599 struct mem_ref_stats_d *stats;
600
601 stats = gimple_mem_ref_stats (cfun);
602
603 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
604 &num_unpartitioned);
605
606 fprintf (file, "\nMemory reference statistics for %s\n\n",
607 lang_hooks.decl_printable_name (current_function_decl, 2));
608
609 fprintf (file, "Number of memory statements: %ld\n",
610 stats->num_mem_stmts);
611 fprintf (file, "Number of call sites: %ld\n",
612 stats->num_call_sites);
613 fprintf (file, "Number of pure/const call sites: %ld\n",
614 stats->num_pure_const_call_sites);
615 fprintf (file, "Number of asm sites: %ld\n",
616 stats->num_asm_sites);
617 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
618 stats->num_vuses,
619 (stats->num_mem_stmts)
620 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
621 : 0);
622 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
623 actual_num_vuses,
624 (stats->num_mem_stmts)
625 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
626 : 0);
627
628 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
629 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
630
631 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
632 stats->num_vdefs,
633 (stats->num_mem_stmts)
634 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
635 : 0);
636 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
637 actual_num_vdefs,
638 (stats->num_mem_stmts)
639 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
640 : 0);
641
642 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
643 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
644
645 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
646 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
647 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
648 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
649 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
650 }
651
652
/* Dump memory reference stats for the current function to stderr;
   callable from the debugger.  */

void
debug_mem_ref_stats (void)
{
  dump_mem_ref_stats (stderr);
}
660
661
662 /* Dump memory reference stats for variable VAR to FILE. */
663
664 static void
665 dump_mem_sym_stats (FILE *file, tree var)
666 {
667 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
668
669 if (stats == NULL)
670 return;
671
672 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
673 "direct reads: %3ld, direct writes: %3ld, "
674 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
675 stats->frequency_reads, stats->frequency_writes,
676 stats->num_direct_reads, stats->num_direct_writes,
677 stats->num_indirect_reads, stats->num_indirect_writes);
678 print_generic_expr (file, stats->var, 0);
679 fprintf (file, ", tags: ");
680 dump_decl_set (file, stats->parent_tags);
681 }
682
683
684 /* Dump memory reference stats for variable VAR to stderr. */
685
686 void
687 debug_mem_sym_stats (tree var)
688 {
689 dump_mem_sym_stats (stderr, var);
690 }
691
692
693 /* Dump memory reference stats for all memory symbols to FILE. */
694
695 static void
696 dump_all_mem_sym_stats (FILE *file)
697 {
698 referenced_var_iterator rvi;
699 tree sym;
700
701 FOR_EACH_REFERENCED_VAR (sym, rvi)
702 {
703 if (is_gimple_reg (sym))
704 continue;
705
706 dump_mem_sym_stats (file, sym);
707 }
708 }
709
710
/* Dump memory reference stats for all memory symbols to stderr;
   callable from the debugger.  */

void
debug_all_mem_sym_stats (void)
{
  dump_all_mem_sym_stats (stderr);
}
718
719
720 /* Dump the MP_INFO array to FILE. */
721
722 static void
723 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
724 {
725 unsigned i;
726 mem_sym_stats_t mp_p;
727
728 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
729 if (!mp_p->partitioned_p)
730 dump_mem_sym_stats (file, mp_p->var);
731 }
732
733
734 /* Dump the MP_INFO array to stderr. */
735
736 void
737 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
738 {
739 dump_mp_info (stderr, mp_info);
740 }
741
742
743 /* Update memory reference stats for symbol VAR in statement STMT.
744 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
745 that VAR is read/written in STMT (indirect reads/writes are not
746 recorded by this function, see compute_memory_partitions). */
747
748 void
749 update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
750 long num_direct_writes)
751 {
752 mem_sym_stats_t stats;
753
754 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
755
756 stats = get_mem_sym_stats_for (var);
757
758 stats->num_direct_reads += num_direct_reads;
759 stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
760 * num_direct_reads);
761
762 stats->num_direct_writes += num_direct_writes;
763 stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
764 * num_direct_writes);
765 }
766
767
768 /* The list is sorted by increasing partitioning score (PSCORE).
769 This score is computed such that symbols with high scores are
770 those that are least likely to be partitioned. Given a symbol
771 MP->VAR, PSCORE(S) is the result of the following weighted sum
772
773 PSCORE(S) = FW * 64 + FR * 32
774 + DW * 16 + DR * 8
775 + IW * 4 + IR * 2
776 + NO_ALIAS
777
778 where
779
780 FW Execution frequency of writes to S
781 FR Execution frequency of reads from S
782 DW Number of direct writes to S
783 DR Number of direct reads from S
784 IW Number of indirect writes to S
785 IR Number of indirect reads from S
786 NO_ALIAS State of the NO_ALIAS* flags
787
788 The basic idea here is that symbols that are frequently
789 written-to in hot paths of the code are the last to be considered
790 for partitioning. */
791
792 static inline long
793 pscore (mem_sym_stats_t mp)
794 {
795 return mp->frequency_writes * 64 + mp->frequency_reads * 32
796 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
797 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
798 + var_ann (mp->var)->noalias_state;
799 }
800
801
802 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
803 be partitioned before MP2->VAR, 0 if they are the same or 1 if
804 MP1->VAR should be partitioned after MP2->VAR. */
805
806 static inline int
807 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
808 {
809 long pscore1 = pscore (mp1);
810 long pscore2 = pscore (mp2);
811
812 if (pscore1 < pscore2)
813 return -1;
814 else if (pscore1 > pscore2)
815 return 1;
816 else
817 return 0;
818 }
819
820
821 /* Comparison routine for qsort. The list is sorted by increasing
822 partitioning score (PSCORE). This score is computed such that
823 symbols with high scores are those that are least likely to be
824 partitioned. */
825
826 static int
827 mp_info_cmp (const void *p, const void *q)
828 {
829 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
830 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
831 return compare_mp_info_entries (e1, e2);
832 }
833
834
835 /* Sort the array of reference counts used to compute memory partitions.
836 Elements are sorted in ascending order of execution frequency and
837 descending order of virtual operators needed. */
838
839 static inline void
840 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
841 {
842 unsigned num = VEC_length (mem_sym_stats_t, list);
843
844 if (num < 2)
845 return;
846
847 if (num == 2)
848 {
849 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
850 VEC_index (mem_sym_stats_t, list, 1)) > 0)
851 {
852 /* Swap elements if they are in the wrong order. */
853 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
854 VEC_replace (mem_sym_stats_t, list, 0,
855 VEC_index (mem_sym_stats_t, list, 1));
856 VEC_replace (mem_sym_stats_t, list, 1, tmp);
857 }
858
859 return;
860 }
861
862 /* There are 3 or more elements, call qsort. */
863 qsort (VEC_address (mem_sym_stats_t, list),
864 VEC_length (mem_sym_stats_t, list),
865 sizeof (mem_sym_stats_t),
866 mp_info_cmp);
867 }
868
869
870 /* Return the memory partition tag (MPT) associated with memory
871 symbol SYM. */
872
873 static tree
874 get_mpt_for (tree sym)
875 {
876 tree mpt;
877
878 /* Don't create a new tag unnecessarily. */
879 mpt = memory_partition (sym);
880 if (mpt == NULL_TREE)
881 {
882 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
883 TREE_ADDRESSABLE (mpt) = 0;
884 add_referenced_var (mpt);
885 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
886 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
887 set_memory_partition (sym, mpt);
888 }
889
890 return mpt;
891 }
892
893
894 /* Add MP_P->VAR to a memory partition and return the partition. */
895
896 static tree
897 find_partition_for (mem_sym_stats_t mp_p)
898 {
899 unsigned i;
900 VEC(tree,heap) *mpt_table;
901 tree mpt;
902
903 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
904 mpt = NULL_TREE;
905
906 /* Find an existing partition for MP_P->VAR. */
907 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
908 {
909 mem_sym_stats_t mpt_stats;
910
911 /* If MPT does not have any symbols yet, use it. */
912 if (MPT_SYMBOLS (mpt) == NULL)
913 break;
914
915 /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
916 but avoid grouping clobbered variables with non-clobbered
917 variables (otherwise, this tends to creates a single memory
918 partition because other call-clobbered variables may have
919 common parent tags with non-clobbered ones). */
920 mpt_stats = get_mem_sym_stats_for (mpt);
921 if (mp_p->parent_tags
922 && mpt_stats->parent_tags
923 && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
924 && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
925 break;
926
927 /* If no common parent tags are found, see if both MPT and
928 MP_P->VAR are call-clobbered. */
929 if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
930 break;
931 }
932
933 if (mpt == NULL_TREE)
934 mpt = get_mpt_for (mp_p->var);
935 else
936 set_memory_partition (mp_p->var, mpt);
937
938 mp_p->partitioned_p = true;
939
940 mark_sym_for_renaming (mp_p->var);
941 mark_sym_for_renaming (mpt);
942
943 return mpt;
944 }
945
946
947 /* Rewrite the alias set for TAG to use the newly created partitions.
948 If TAG is NULL, rewrite the set of call-clobbered variables.
949 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
950 TAG. */
951
952 static void
953 rewrite_alias_set_for (tree tag, bitmap new_aliases)
954 {
955 bitmap_iterator bi;
956 unsigned i;
957 tree mpt, sym;
958
959 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
960 {
961 sym = referenced_var (i);
962 mpt = memory_partition (sym);
963 if (mpt)
964 bitmap_set_bit (new_aliases, DECL_UID (mpt));
965 else
966 bitmap_set_bit (new_aliases, DECL_UID (sym));
967 }
968
969 /* Rebuild the may-alias array for TAG. */
970 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
971 }
972
973
974 /* Determine how many virtual operands can be saved by partitioning
975 MP_P->VAR into MPT. When a symbol S is thrown inside a partition
976 P, every virtual operand that used to reference S will now
977 reference P. Whether it reduces the number of virtual operands
978 depends on:
979
980 1- Direct references to S are never saved. Instead of the virtual
981 operand to S, we will now have a virtual operand to P.
982
983 2- Indirect references to S are reduced only for those memory tags
984 holding S that already had other symbols partitioned into P.
985 For instance, if a memory tag T has the alias set { a b S c },
986 the first time we partition S into P, the alias set will become
987 { a b P c }, so no virtual operands will be saved. However, if
988 we now partition symbol 'c' into P, then the alias set for T
989 will become { a b P }, so we will be saving one virtual operand
990 for every indirect reference to 'c'.
991
992 3- Is S is call-clobbered, we save as many virtual operands as
993 call/asm sites exist in the code, but only if other
994 call-clobbered symbols have been grouped into P. The first
995 call-clobbered symbol that we group does not produce any
996 savings.
997
998 MEM_REF_STATS points to CFUN's memory reference information. */
999
1000 static void
1001 estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
1002 mem_sym_stats_t mp_p, tree mpt)
1003 {
1004 unsigned i;
1005 bitmap_iterator bi;
1006 mem_sym_stats_t mpt_stats;
1007
1008 /* We should only get symbols with indirect references here. */
1009 gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);
1010
1011 /* Note that the only statistics we keep for MPT is the set of
1012 parent tags to know which memory tags have had alias members
1013 partitioned, and the indicator has_call_clobbered_vars.
1014 Reference counts are not important for MPT. */
1015 mpt_stats = get_mem_sym_stats_for (mpt);
1016
1017 /* Traverse all the parent tags for MP_P->VAR. For every tag T, if
1018 partition P is already grouping aliases of T, then reduce the
1019 number of virtual operands by the number of direct references
1020 to T. */
1021 if (mp_p->parent_tags)
1022 {
1023 if (mpt_stats->parent_tags == NULL)
1024 mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1025
1026 EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
1027 {
1028 if (bitmap_bit_p (mpt_stats->parent_tags, i))
1029 {
1030 /* Partition MPT is already partitioning symbols in the
1031 alias set for TAG. This means that we are now saving
1032 1 virtual operand for every direct reference to TAG. */
1033 tree tag = referenced_var (i);
1034 mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
1035 mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
1036 mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
1037 }
1038 else
1039 {
1040 /* This is the first symbol in tag I's alias set that is
1041 being grouped under MPT. We will not save any
1042 virtual operands this time, but record that MPT is
1043 grouping a symbol from TAG's alias set so that the
1044 next time we get the savings. */
1045 bitmap_set_bit (mpt_stats->parent_tags, i);
1046 }
1047 }
1048 }
1049
1050 /* If MP_P->VAR is call-clobbered, and MPT is already grouping
1051 call-clobbered symbols, then we will save as many virtual
1052 operands as asm/call sites there are. */
1053 if (is_call_clobbered (mp_p->var))
1054 {
1055 if (mpt_stats->has_call_clobbered_vars)
1056 mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
1057 + mem_ref_stats->num_asm_sites;
1058 else
1059 mpt_stats->has_call_clobbered_vars = true;
1060 }
1061 }
1062
1063
/* Helper for compute_memory_partitions.  Transfer reference counts
   from pointers to their pointed-to sets.  Counters for pointers were
   computed by update_alias_info.  MEM_REF_STATS points to CFUN's
   memory reference information.  */

static void
update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
{
  unsigned i;
  bitmap_iterator bi;
  mem_sym_stats_t sym_stats;

  /* Walk every SSA name looking for dereferenced pointers.  SSA name
     0 is never a valid name, so the scan starts at 1.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr;
      struct ptr_info_def *pi;

      ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr))
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->is_dereferenced)
	{
	  unsigned j;
	  bitmap_iterator bj;
	  tree tag;
	  mem_sym_stats_t ptr_stats, tag_stats;

	  /* If PTR has flow-sensitive points-to information, use
	     PTR's name tag, otherwise use the symbol tag associated
	     with PTR's symbol.  */
	  if (pi->name_mem_tag)
	    tag = pi->name_mem_tag;
	  else
	    tag = symbol_mem_tag (SSA_NAME_VAR (ptr));

	  ptr_stats = get_mem_sym_stats_for (ptr);
	  tag_stats = get_mem_sym_stats_for (tag);

	  /* TAG has as many direct references as dereferences we
	     found for its parent pointer.  */
	  tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
	  tag_stats->num_direct_writes += ptr_stats->num_direct_writes;

	  /* All the dereferences of pointer PTR are considered direct
	     references to PTR's memory tag (TAG).  In turn,
	     references to TAG will become virtual operands for every
	     symbol in TAG's alias set.  So, for every symbol ALIAS in
	     TAG's alias set, add as many indirect references to ALIAS
	     as direct references there are for TAG.  */
	  if (MTAG_ALIASES (tag))
	    EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
	      {
		tree alias = referenced_var (j);
		sym_stats = get_mem_sym_stats_for (alias);

		/* All the direct references to TAG are indirect references
		   to ALIAS.  */
		sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
		sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
		sym_stats->frequency_reads += ptr_stats->frequency_reads;
		sym_stats->frequency_writes += ptr_stats->frequency_writes;

		/* Indicate that TAG is one of ALIAS's parent tags.  */
		if (sym_stats->parent_tags == NULL)
		  sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
		bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
	      }
	}
    }

  /* Call-clobbered symbols are indirectly written at every
     call/asm site.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      tree sym = referenced_var (i);
      sym_stats = get_mem_sym_stats_for (sym);
      sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
	                                + mem_ref_stats->num_asm_sites;
    }

  /* Addressable symbols are indirectly written at some ASM sites.
     Since only ASM sites that clobber memory actually affect
     addressable symbols, this is an over-estimation.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
    {
      tree sym = referenced_var (i);
      sym_stats = get_mem_sym_stats_for (sym);
      sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
    }
}
1155
1156
/* Helper for compute_memory_partitions.  Add all memory symbols to
   *MP_INFO_P and compute the initial estimate for the total number of
   virtual operands needed.  MEM_REF_STATS points to CFUN's memory
   reference information.  On exit, *TAGS_P will contain the list of
   memory tags whose alias set need to be rewritten after
   partitioning.  */

static void
build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
	       VEC(mem_sym_stats_t,heap) **mp_info_p,
	       VEC(tree,heap) **tags_p)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      mem_sym_stats_t sym_stats;
      tree old_mpt;

      /* We are only interested in memory symbols other than MPTs.
	 GIMPLE registers never need virtual operands, and partitions
	 themselves must not be re-partitioned.  */
      if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
	continue;

      /* Collect memory tags into the TAGS array so that we can
	 rewrite their alias sets after partitioning.  */
      if (MTAG_P (var) && MTAG_ALIASES (var))
	VEC_safe_push (tree, heap, *tags_p, var);

      /* Since we are going to re-compute partitions, any symbols that
	 used to belong to a partition must be detached from it and
	 marked for renaming.  */
      if ((old_mpt = memory_partition (var)) != NULL)
	{
	  mark_sym_for_renaming (old_mpt);
	  set_memory_partition (var, NULL_TREE);
	  mark_sym_for_renaming (var);
	}

      sym_stats = get_mem_sym_stats_for (var);

      /* Add VAR's reference info to MP_INFO.  Note that the only
	 symbols that make sense to partition are those that have
	 indirect references.  If a symbol S is always directly
	 referenced, partitioning it will not reduce the number of
	 virtual operands.  The only symbols that are profitable to
	 partition are those that belong to alias sets and/or are
	 call-clobbered.  */
      if (sym_stats->num_indirect_reads > 0
	  || sym_stats->num_indirect_writes > 0)
	VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);

      /* Update the number of estimated VOPS.  Note that direct
	 references to memory tags are always counted as indirect
	 references to their alias set members, so if a memory tag has
	 aliases, do not count its direct references to avoid double
	 accounting.  */
      if (!MTAG_P (var) || !MTAG_ALIASES (var))
	{
	  mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
	  mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
	}

      /* Indirect references are always accounted for, regardless of
	 whether VAR is a tag.  */
      mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
      mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
    }
}
1224
1225
1226 /* Compute memory partitions. A memory partition (MPT) is an
1227 arbitrary grouping of memory symbols, such that references to one
1228 member of the group is considered a reference to all the members of
1229 the group.
1230
1231 As opposed to alias sets in memory tags, the grouping into
1232 partitions is completely arbitrary and only done to reduce the
1233 number of virtual operands. The only rule that needs to be
1234 observed when creating memory partitions is that given two memory
1235 partitions MPT.i and MPT.j, they must not contain symbols in
1236 common.
1237
1238 Memory partitions are used when putting the program into Memory-SSA
1239 form. In particular, in Memory-SSA PHI nodes are not computed for
1240 individual memory symbols. They are computed for memory
1241 partitions. This reduces the amount of PHI nodes in the SSA graph
1242 at the expense of precision (i.e., it makes unrelated stores affect
1243 each other).
1244
1245 However, it is possible to increase precision by changing this
1246 partitioning scheme. For instance, if the partitioning scheme is
1247 such that get_mpt_for is the identity function (that is,
1248 get_mpt_for (s) = s), this will result in ultimate precision at the
1249 expense of huge SSA webs.
1250
1251 At the other extreme, a partitioning scheme that groups all the
1252 symbols in the same set results in minimal SSA webs and almost
1253 total loss of precision.
1254
   The partitioning heuristic uses three parameters to decide the
1256 order in which symbols are processed. The list of symbols is
1257 sorted so that symbols that are more likely to be partitioned are
1258 near the top of the list:
1259
   - Execution frequency.  If a memory reference is in a frequently
1261 executed code path, grouping it into a partition may block useful
1262 transformations and cause sub-optimal code generation. So, the
1263 partition heuristic tries to avoid grouping symbols with high
1264 execution frequency scores. Execution frequency is taken
1265 directly from the basic blocks where every reference is made (see
1266 update_mem_sym_stats_from_stmt), which in turn uses the
1267 profile guided machinery, so if the program is compiled with PGO
1268 enabled, more accurate partitioning decisions will be made.
1269
1270 - Number of references. Symbols with few references in the code,
1271 are partitioned before symbols with many references.
1272
1273 - NO_ALIAS attributes. Symbols with any of the NO_ALIAS*
1274 attributes are partitioned after symbols marked MAY_ALIAS.
1275
1276 Once the list is sorted, the partitioning proceeds as follows:
1277
1278 1- For every symbol S in MP_INFO, create a new memory partition MP,
1279 if necessary. To avoid memory partitions that contain symbols
1280 from non-conflicting alias sets, memory partitions are
1281 associated to the memory tag that holds S in its alias set. So,
1282 when looking for a memory partition for S, the memory partition
1283 associated with one of the memory tags holding S is chosen. If
1284 none exists, a new one is created.
1285
1286 2- Add S to memory partition MP.
1287
1288 3- Reduce by 1 the number of VOPS for every memory tag holding S.
1289
1290 4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
1291 average number of VOPS per statement is less than
1292 AVG_ALIASED_VOPS, stop. Otherwise, go to the next symbol in the
1293 list. */
1294
static void
compute_memory_partitions (void)
{
  tree tag;
  unsigned i;
  mem_sym_stats_t mp_p;
  VEC(mem_sym_stats_t,heap) *mp_info;
  bitmap new_aliases;
  VEC(tree,heap) *tags;
  struct mem_ref_stats_d *mem_ref_stats;
  int prev_max_aliased_vops;

  mem_ref_stats = gimple_mem_ref_stats (cfun);

  /* The VOP counters are recomputed from scratch below; they must
     start at zero.  */
  gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);

  /* Nothing to do if the function makes no memory references.  */
  if (mem_ref_stats->num_mem_stmts == 0)
    return;

  timevar_push (TV_MEMORY_PARTITIONING);

  mp_info = NULL;
  tags = NULL;

  /* MAX_ALIASED_VOPS may be bumped below; remember the original value
     so it can be restored before returning.  */
  prev_max_aliased_vops = MAX_ALIASED_VOPS;

  /* Since we clearly cannot lower the number of virtual operators
     below the total number of memory statements in the function, we
     may need to adjust MAX_ALIASED_VOPS beforehand.  */
  if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
    MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;

  /* Update reference stats for all the pointed-to variables and
     memory tags.  */
  update_reference_counts (mem_ref_stats);

  /* Add all the memory symbols to MP_INFO.  */
  build_mp_info (mem_ref_stats, &mp_info, &tags);

  /* No partitions required if we are below the threshold.  */
  if (!need_to_partition_p (mem_ref_stats))
    {
      if (dump_file)
	fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
		 get_name (current_function_decl));
      goto done;
    }

  /* Sort the MP_INFO array so that symbols that should be partitioned
     first are near the top of the list.  */
  sort_mp_info (mp_info);

  if (dump_file)
    {
      fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
	       get_name (current_function_decl));
      fprintf (dump_file, "Memory symbol references before partitioning:\n");
      dump_mp_info (dump_file, mp_info);
    }

  /* Create partitions for variables in MP_INFO until we have enough
     to lower the total number of VOPS below MAX_ALIASED_VOPS or if
     the average number of VOPS per statement is below
     AVG_ALIASED_VOPS.  */
  for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
    {
      tree mpt;

      /* If we are below the threshold, stop.  */
      if (!need_to_partition_p (mem_ref_stats))
	break;

      mpt = find_partition_for (mp_p);
      estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
    }

  /* After partitions have been created, rewrite alias sets to use
     them instead of the original symbols.  This way, if the alias set
     was computed as { a b c d e f }, and the subset { b e f } was
     grouped into partition MPT.3, then the new alias set for the tag
     will be { a c d MPT.3 }.

     Note that this is not strictly necessary.  The operand scanner
     will always check if a symbol belongs to a partition when adding
     virtual operands.  However, by reducing the size of the alias
     sets to be scanned, the work needed inside the operand scanner is
     significantly reduced.  */
  new_aliases = BITMAP_ALLOC (NULL);

  for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
    {
      rewrite_alias_set_for (tag, new_aliases);
      /* NEW_ALIASES is a scratch bitmap reused for every tag.  */
      bitmap_clear (new_aliases);
    }

  BITMAP_FREE (new_aliases);

  if (dump_file)
    {
      fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
      dump_mp_info (dump_file, mp_info);
    }

 done:
  /* Free allocated memory.  */
  VEC_free (mem_sym_stats_t, heap, mp_info);
  VEC_free (tree, heap, tags);

  /* Restore the param to the value it had on entry.  */
  MAX_ALIASED_VOPS = prev_max_aliased_vops;

  timevar_pop (TV_MEMORY_PARTITIONING);
}
1405
1406
1407 /* Compute may-alias information for every variable referenced in function
1408 FNDECL.
1409
1410 Alias analysis proceeds in 3 main phases:
1411
1412 1- Points-to and escape analysis.
1413
1414 This phase walks the use-def chains in the SSA web looking for three
1415 things:
1416
1417 * Assignments of the form P_i = &VAR
1418 * Assignments of the form P_i = malloc()
1419 * Pointers and ADDR_EXPR that escape the current function.
1420
1421 The concept of 'escaping' is the same one used in the Java world. When
1422 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
1423 outside of the current function. So, assignment to global variables,
1424 function arguments and returning a pointer are all escape sites, as are
1425 conversions between pointers and integers.
1426
1427 This is where we are currently limited. Since not everything is renamed
1428 into SSA, we lose track of escape properties when a pointer is stashed
1429 inside a field in a structure, for instance. In those cases, we are
1430 assuming that the pointer does escape.
1431
1432 We use escape analysis to determine whether a variable is
1433 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
1434 is call-clobbered. If a pointer P_i escapes, then all the variables
1435 pointed-to by P_i (and its memory tag) also escape.
1436
1437 2- Compute flow-sensitive aliases
1438
1439 We have two classes of memory tags. Memory tags associated with the
1440 pointed-to data type of the pointers in the program. These tags are
1441 called "symbol memory tag" (SMT). The other class are those associated
1442 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
1443 when adding operands for an INDIRECT_REF *P_i, we will first check
1444 whether P_i has a name tag, if it does we use it, because that will have
1445 more precise aliasing information. Otherwise, we use the standard symbol
1446 tag.
1447
1448 In this phase, we go through all the pointers we found in points-to
1449 analysis and create alias sets for the name memory tags associated with
1450 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
1451 it points to and its tag.
1452
1453
1454 3- Compute flow-insensitive aliases
1455
1456 This pass will compare the alias set of every symbol memory tag and
1457 every addressable variable found in the program. Given a symbol
1458 memory tag SMT and an addressable variable V. If the alias sets of
1459 SMT and V conflict (as computed by may_alias_p), then V is marked
1460 as an alias tag and added to the alias set of SMT.
1461
1462 For instance, consider the following function:
1463
1464 foo (int i)
1465 {
1466 int *p, a, b;
1467
1468 if (i > 10)
1469 p = &a;
1470 else
1471 p = &b;
1472
1473 *p = 3;
1474 a = b + 2;
1475 return *p;
1476 }
1477
1478 After aliasing analysis has finished, the symbol memory tag for pointer
1479 'p' will have two aliases, namely variables 'a' and 'b'. Every time
1480 pointer 'p' is dereferenced, we want to mark the operation as a
1481 potential reference to 'a' and 'b'.
1482
1483 foo (int i)
1484 {
1485 int *p, a, b;
1486
1487 if (i_2 > 10)
1488 p_4 = &a;
1489 else
1490 p_6 = &b;
1491 # p_1 = PHI <p_4(1), p_6(2)>;
1492
1493 # a_7 = VDEF <a_3>;
1494 # b_8 = VDEF <b_5>;
1495 *p_1 = 3;
1496
1497 # a_9 = VDEF <a_7>
1498 # VUSE <b_8>
1499 a_9 = b_8 + 2;
1500
1501 # VUSE <a_9>;
1502 # VUSE <b_8>;
1503 return *p_1;
1504 }
1505
1506 In certain cases, the list of may aliases for a pointer may grow too
1507 large. This may cause an explosion in the number of virtual operands
1508 inserted in the code. Resulting in increased memory consumption and
1509 compilation time.
1510
1511 When the number of virtual operands needed to represent aliased
1512 loads and stores grows too large (configurable with option --param
1513 max-aliased-vops and --param avg-aliased-vops), alias sets are
1514 grouped to avoid severe compile-time slow downs and memory
1515 consumption. See compute_memory_partitions. */
1516
static unsigned int
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if it is passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* If the program makes no reference to global variables, but it
     contains a mixture of pure and non-pure functions, then we need
     to create use-def and def-def links between these functions to
     avoid invalid transformations on them.  */
  maybe_create_global_var ();

  /* If the program contains ref-all pointers, finalize may-alias information
     for them.  This pass needs to be run after call-clobbering information
     has been computed.  */
  if (ai->ref_all_symbol_mem_tag)
    finalize_ref_all_pointers (ai);

  /* Compute memory partitions for every memory variable.  */
  compute_memory_partitions ();

  /* Remove partitions with no symbols.  Partitions may end up with an
     empty MPT_SYMBOLS set if a previous round of alias analysis
     needed to partition more symbols.  Since we don't need those
     partitions anymore, remove them to free up the space.  */
  {
    tree mpt;
    unsigned i;
    VEC(tree,heap) *mpt_table;

    mpt_table = gimple_ssa_operands (cfun)->mpt_table;
    i = 0;
    while (i < VEC_length (tree, mpt_table))
      {
	mpt = VEC_index (tree, mpt_table, i);
	if (MPT_SYMBOLS (mpt) == NULL)
	  /* I is deliberately not advanced here: the removal
	     backfills slot I, which must be examined next.  */
	  VEC_unordered_remove (tree, mpt_table, i);
	else
	  i++;
      }
  }

  /* Populate all virtual operands and newly promoted register operands.  */
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	update_stmt_if_modified (bsi_stmt (bsi));
  }

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_mem_ref_stats (dump_file);
      dump_alias_info (dump_file);
      dump_points_to_info (dump_file);

      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);

      if (dump_flags & TDF_DETAILS)
	dump_referenced_vars (dump_file);
    }

  /* Report strict aliasing violations.  */
  strict_aliasing_warning_backend ();

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  return 0;
}
1617
1618
/* Pass descriptor for the tree may-alias pass, executed by the pass
   manager.  Requires a CFG and SSA form; provides PROP_alias.  */

struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_ggc_collect
    | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
1639
1640
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  Passed as the DATA argument to
   count_ptr_derefs via walk_tree.  */
struct count_ptr_d
{
  /* The pointer whose dereferences are being counted.  */
  tree ptr;

  /* Number of dereferences of PTR found so far.  */
  unsigned count;
};
1648
1649
1650 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1651 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1652
1653 static tree
1654 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1655 {
1656 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1657
1658 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1659 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1660 the address of 'fld' as 'ptr + offsetof(fld)'. */
1661 if (TREE_CODE (*tp) == ADDR_EXPR)
1662 {
1663 *walk_subtrees = 0;
1664 return NULL_TREE;
1665 }
1666
1667 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1668 count_p->count++;
1669
1670 return NULL_TREE;
1671 }
1672
1673
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  All three counters are
   reset to zero on entry.  */

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	}
      else if (TREE_CODE (stmt) == RETURN_EXPR)
	{
	  /* 'return <expr>' with an embedded assignment; count inside
	     the assignment itself.  */
	  tree e = TREE_OPERAND (stmt, 0);
	  lhs = GIMPLE_STMT_OPERAND (e, 0);
	  rhs = GIMPLE_STMT_OPERAND (e, 1);
	}
      else if (TREE_CODE (stmt) == ASM_EXPR)
	{
	  /* For ASMs, output operands are treated as stores and input
	     operands as loads.  */
	  lhs = ASM_OUTPUTS (stmt);
	  rhs = ASM_INPUTS (stmt);
	}
      else
	{
	  /* A bare CALL_EXPR: every dereference inside it counts as a
	     load.  */
	  lhs = NULL_TREE;
	  rhs = stmt;
	}

      if (lhs
	  && (TREE_CODE (lhs) == TREE_LIST
	      || EXPR_P (lhs)
	      || GIMPLE_STMT_P (lhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&lhs, count_ptr_derefs, &count, NULL);
	  *num_stores_p = count.count;
	}

      if (rhs
	  && (TREE_CODE (rhs) == TREE_LIST
	      || EXPR_P (rhs)
	      || GIMPLE_STMT_P (rhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&rhs, count_ptr_derefs, &count, NULL);
	  *num_loads_p = count.count;
	}
    }

  /* Every counted dereference is also an SSA use of PTR, so the total
     number of uses must cover the loads and stores found above.  */
  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
1760
1761
1762 /* Helper for delete_mem_ref_stats. Free all the slots in the
1763 mem_sym_stats map. */
1764
1765 static bool
1766 delete_mem_sym_stats (void *key ATTRIBUTE_UNUSED, void **value,
1767 void *data ATTRIBUTE_UNUSED)
1768 {
1769 XDELETE (*value);
1770 *value = NULL;
1771 return false;
1772 }
1773
1774
1775 /* Remove memory references stats for function FN. */
1776
1777 void
1778 delete_mem_ref_stats (struct function *fn)
1779 {
1780 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
1781 {
1782 pointer_map_traverse (gimple_mem_ref_stats (fn)->mem_sym_stats,
1783 delete_mem_sym_stats, NULL);
1784 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1785 }
1786
1787 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
1788 }
1789
1790
1791 /* Initialize memory reference stats. */
1792
1793 static void
1794 init_mem_ref_stats (void)
1795 {
1796 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
1797
1798 if (mem_ref_stats->mem_sym_stats)
1799 delete_mem_ref_stats (cfun);
1800
1801 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
1802 mem_ref_stats->mem_sym_stats = pointer_map_create ();
1803 }
1804
1805
/* Initialize the data structures used for alias analysis.  Returns a
   newly allocated struct alias_info which the caller must release
   with delete_alias_info.  If aliasing was computed on a previous
   run, all per-symbol and per-SSA-name alias state is cleared so the
   analysis starts from scratch.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  /* XCNEW zero-fills, so all sets below start empty.  */
  ai = XCNEW (struct alias_info);
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  ai->processed_ptrs = VEC_alloc (tree, heap, 50);
  ai->written_vars = pointer_set_create ();
  ai->dereferenced_ptrs_store = pointer_set_create ();
  ai->dereferenced_ptrs_load = pointer_set_create ();

  /* Clear out all memory reference stats.  */
  init_mem_ref_stats ();

  /* If aliases have been computed before, clear existing information.  */
  if (gimple_aliases_computed_p (cfun))
    {
      unsigned i;

      /* Release all the bitmaps allocated on the alias obstack in one
	 go; they are re-allocated as needed during this run.  */
      bitmap_obstack_release (&alias_bitmap_obstack);

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (gimple_addressable_vars (cfun));

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	{
	  if (is_gimple_reg (var))
	    continue;

	  if (MTAG_P (var))
	    MTAG_ALIASES (var) = NULL;

	  /* Memory partition information will be computed from scratch.  */
	  if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
	    MPT_SYMBOLS (var) = NULL;

	  /* Since we are about to re-discover call-clobbered
	     variables, clear the call-clobbered flag.  Variables that
	     are intrinsically call-clobbered (globals, local statics,
	     etc) will not be marked by the aliasing code, so we can't
	     remove them from CALL_CLOBBERED_VARS.

	     NB: STRUCT_FIELDS are still call clobbered if they are
	     for a global variable, so we *don't* clear their call
	     clobberedness just because they are tags, though we will
	     clear it if they aren't for global variables.  */
	  if (TREE_CODE (var) == NAME_MEMORY_TAG
	      || TREE_CODE (var) == SYMBOL_MEMORY_TAG
	      || TREE_CODE (var) == MEMORY_PARTITION_TAG
	      || !is_global_var (var))
	    clear_call_clobbered (var);
	}

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_null = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }
  else
    {
      /* If this is the first time we compute aliasing information,
	 every non-register symbol will need to be put into SSA form
	 (the initial SSA form only operates on GIMPLE registers).  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	if (!is_gimple_reg (var))
	  mark_sym_for_renaming (var);
    }

  /* Next time, we will need to reset alias information.  */
  cfun->gimple_df->aliases_computed_p = true;
  bitmap_obstack_initialize (&alias_bitmap_obstack);

  return ai;
}
1910
1911
1912 /* Deallocate memory used by alias analysis. */
1913
1914 static void
1915 delete_alias_info (struct alias_info *ai)
1916 {
1917 size_t i;
1918
1919 sbitmap_free (ai->ssa_names_visited);
1920
1921 VEC_free (tree, heap, ai->processed_ptrs);
1922
1923 for (i = 0; i < ai->num_addressable_vars; i++)
1924 free (ai->addressable_vars[i]);
1925 free (ai->addressable_vars);
1926
1927 for (i = 0; i < ai->num_pointers; i++)
1928 free (ai->pointers[i]);
1929 free (ai->pointers);
1930
1931 pointer_set_destroy (ai->written_vars);
1932 pointer_set_destroy (ai->dereferenced_ptrs_store);
1933 pointer_set_destroy (ai->dereferenced_ptrs_load);
1934 free (ai);
1935
1936 delete_points_to_sets ();
1937 }
1938
1939
1940 /* Used for hashing to identify pointer infos with identical
1941 pt_vars bitmaps. */
1942
1943 static int
1944 eq_ptr_info (const void *p1, const void *p2)
1945 {
1946 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
1947 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
1948 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
1949 }
1950
1951 static hashval_t
1952 ptr_info_hash (const void *p)
1953 {
1954 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
1955 return bitmap_hash (n->pt_vars);
1956 }
1957
1958
/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag (deduplicated via a hash table keyed
   on the points-to bitmap).  */

static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;
  htab_t ptr_hash;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (!ptr
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || !SSA_NAME_PTR_INFO (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      /* Set pt_anything on the pointers without pt_vars filled in so
	 that they are assigned a symbol tag.  */
      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
	VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
	set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);

  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree old_name_tag = pi->name_mem_tag;
      struct ptr_info_def **slot;

      /* If PTR points to a set of variables, check if we don't
	 have another pointer Q with the same points-to set before
	 creating a tag.  If so, use Q's tag instead of creating a
	 new one.

	 This is important for not creating unnecessary symbols
	 and also for copy propagation.  If we ever need to
	 propagate PTR into Q or vice-versa, we would run into
	 problems if they both had different name tags because
	 they would have different SSA version numbers (which
	 would force us to take the name tags in and out of SSA).  */
      slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
      if (*slot)
	pi->name_mem_tag = (*slot)->name_mem_tag;
      else
	{
	  *slot = pi;
	  /* If we didn't find a pointer with the same points-to set
	     as PTR, create a new name tag if needed.  */
	  if (pi->name_mem_tag == NULL_TREE)
	    pi->name_mem_tag = get_nmt_for (ptr);
	}

      /* If the new name tag computed for PTR is different than
	 the old name tag that it used to have, then the old tag
	 needs to be removed from the IL, so we mark it for
	 renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
	mark_sym_for_renaming (old_name_tag);

      /* Propagate volatility of the pointed-to type onto the tag.  */
      TREE_THIS_VOLATILE (pi->name_mem_tag)
	|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }

  htab_delete (ptr_hash);

  VEC_free (tree, heap, with_ptvars);
}
2061
2062
2063 /* Union the alias set SET into the may-aliases for TAG. */
2064
2065 static void
2066 union_alias_set_into (tree tag, bitmap set)
2067 {
2068 bitmap ma = MTAG_ALIASES (tag);
2069
2070 if (bitmap_empty_p (set))
2071 return;
2072
2073 if (!ma)
2074 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2075 bitmap_ior_into (ma, set);
2076 }
2077
2078
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;
  tree ptr;

  set_used_smts ();

  /* Retrieve points-to information for every processed pointer; a
     pointer without usable information is treated as pointing to an
     arbitrary location.  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      if (!find_what_p_points_to (ptr))
	set_pt_anything (ptr);
    }

  create_name_tags ();

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	{
	  if (!bitmap_empty_p (pi->pt_vars))
	    {
	      union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
	      union_alias_set_into (tag, pi->pt_vars);
	      /* A symbol tag must never alias itself; remove it from
		 its own alias set if the union above put it there.  */
	      bitmap_clear_bit (MTAG_ALIASES (tag), DECL_UID (tag));

	      /* It may be the case that this the tag uid was the only
		 bit we had set in the aliases list, and in this case,
		 we don't want to keep an empty bitmap, as this
		 asserts in tree-ssa-operands.c .  */
	      if (bitmap_empty_p (MTAG_ALIASES (tag)))
		BITMAP_FREE (MTAG_ALIASES (tag));
	    }
	}
    }
}
2129
2130
2131 /* Return TRUE if at least one symbol in TAG2's alias set is also
2132 present in TAG1's alias set. */
2133
2134 static bool
2135 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2136 {
2137
2138 /* This is the old behavior of have_common_aliases_p, which is to
2139 return false if both sets are empty, or one set is and the other
2140 isn't. */
2141 if ((tag1aliases == NULL && tag2aliases != NULL)
2142 || (tag2aliases == NULL && tag1aliases != NULL)
2143 || (tag1aliases == NULL && tag2aliases == NULL))
2144 return false;
2145
2146 return bitmap_intersect_p (tag1aliases, tag2aliases);
2147 }
2148
/* Compute type-based alias sets.  Traverse all the pointers and
   addressable variables found in setup_pointers_and_addressables.

   For every pointer P in AI->POINTERS and addressable variable V in
   AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
   memory tag (SMT) if their alias sets conflict.  V is then marked as
   an aliased symbol so that the operand scanner knows that statements
   containing V have aliased operands.  */

static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* For every pointer P, determine which addressable variables may alias
     with P's symbol memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = symbol_mem_tag (p_map->var);
      tree var;

      /* Call-clobbering information is not finalized yet at this point.  */
      if (PTR_IS_REF_ALL (p_map->var))
	continue;

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  bool tag_stored_p, var_stored_p;

	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.  */
	  tag_stored_p = pointer_set_contains (ai->written_vars, tag)
			 || is_call_clobbered (tag);
	  var_stored_p = pointer_set_contains (ai->written_vars, var)
			 || is_call_clobbered (var);
	  if (!tag_stored_p && !var_stored_p)
	    continue;

	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
	    {
	      /* We should never have a var with subvars here, because
	         they shouldn't get into the set of addressable vars */
	      gcc_assert (!var_can_have_subvars (var)
			  || get_subvars_for_var (var) == NULL);

	      /* Add VAR to TAG's may-aliases set.  */
	      add_may_alias (tag, var);
	    }
	}
    }

  /* Since this analysis is based exclusively on symbols, it fails to
     handle cases where two pointers P and Q have different memory
     tags with conflicting alias set numbers but no aliased symbols in
     common.

     For example, suppose that we have two memory tags SMT.1 and SMT.2
     such that

     		may-aliases (SMT.1) = { a }
		may-aliases (SMT.2) = { b }

     and the alias set number of SMT.1 conflicts with that of SMT.2.
     Since they don't have symbols in common, loads and stores from
     SMT.1 and SMT.2 will seem independent of each other, which will
     lead to the optimizers making invalid transformations (see
     testsuite/gcc.c-torture/execute/pr15262-[12].c).

     To avoid this problem, we do a final traversal of AI->POINTERS
     looking for pairs of pointers that have no aliased symbols in
     common and yet have conflicting alias set numbers.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map1 = ai->pointers[i];
      tree tag1 = symbol_mem_tag (p_map1->var);
      bitmap may_aliases1 = MTAG_ALIASES (tag1);

      if (PTR_IS_REF_ALL (p_map1->var))
	continue;

      /* Start at I + 1 so each unordered pair is examined only once.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  struct alias_map_d *p_map2 = ai->pointers[j];
	  tree tag2 = symbol_mem_tag (p_map2->var);
	  bitmap may_aliases2 = may_aliases (tag2);

	  if (PTR_IS_REF_ALL (p_map2->var))
	    continue;

	  /* If the pointers may not point to each other, do nothing.  */
	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
	    continue;

	  /* The two pointers may alias each other.  If they already have
	     symbols in common, do nothing.  */
	  if (have_common_aliases_p (may_aliases1, may_aliases2))
	    continue;

	  if (may_aliases2 && !bitmap_empty_p (may_aliases2))
	    {
	      union_alias_set_into (tag1, may_aliases2);
	    }
	  else
	    {
	      /* Since TAG2 does not have any aliases of its own, add
		 TAG2 itself to the alias set of TAG1.  */
	      add_may_alias (tag1, tag2);
	    }
	}

    }
}
2272
2273
2274 /* Finalize may-alias information for ref-all pointers. Traverse all
2275 the addressable variables found in setup_pointers_and_addressables.
2276
2277 If flow-sensitive alias analysis has attached a name memory tag to
2278 a ref-all pointer, we will use it for the dereferences because that
2279 will have more precise aliasing information. But if there is no
2280 name tag, we will use a special symbol tag that aliases all the
2281 call-clobbered addressable variables. */
2282
2283 static void
2284 finalize_ref_all_pointers (struct alias_info *ai)
2285 {
2286 size_t i;
2287
2288 /* First add the real call-clobbered variables. */
2289 for (i = 0; i < ai->num_addressable_vars; i++)
2290 {
2291 tree var = ai->addressable_vars[i]->var;
2292 if (is_call_clobbered (var))
2293 add_may_alias (ai->ref_all_symbol_mem_tag, var);
2294 }
2295
2296 /* Then add the call-clobbered pointer memory tags. See
2297 compute_flow_insensitive_aliasing for the rationale. */
2298 for (i = 0; i < ai->num_pointers; i++)
2299 {
2300 tree ptr = ai->pointers[i]->var, tag;
2301 if (PTR_IS_REF_ALL (ptr))
2302 continue;
2303 tag = symbol_mem_tag (ptr);
2304 if (is_call_clobbered (tag))
2305 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
2306 }
2307
2308 }
2309
2310
2311 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
2312
2313 static void
2314 create_alias_map_for (tree var, struct alias_info *ai)
2315 {
2316 struct alias_map_d *alias_map;
2317 alias_map = XCNEW (struct alias_map_d);
2318 alias_map->var = var;
2319 alias_map->set = get_alias_set (var);
2320 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2321 }
2322
2323
/* Create memory tags for all the dereferenced pointers and build the
   ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
   sets.  Based on the address escape and points-to information collected
   earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
   variables whose address is not needed anymore.  */

static void
setup_pointers_and_addressables (struct alias_info *ai)
{
  size_t num_addressable_vars, num_pointers;
  referenced_var_iterator rvi;
  tree var;
  VEC (tree, heap) *varvec = NULL;
  safe_referenced_var_iterator srvi;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    pointer_set_insert (ai->dereferenced_ptrs_store, var);

	  num_pointers++;
	}
    }

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique symbol tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
  ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Use the safe iterator: the loop body may change the set of
     referenced variables (mark_non_addressable, tag creation).  */
  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
    {
      subvar_t svars;

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_flow_insensitive_aliasing.  Similarly, symbol memory
	 tags are already accounted for when we process their
	 associated pointer.

	 Structure fields, on the other hand, have to have some of this
	 information processed for them, but it's pointless to mark them
	 non-addressable (since they are fake variables anyway).  */
      if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
	continue;

      /* Remove the ADDRESSABLE flag from every addressable variable whose
         address is not needed anymore.  This is caused by the propagation
         of ADDR_EXPR constants into INDIRECT_REF expressions and the
         removal of dead pointer assignments done by the early scalar
         cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	{
	  if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
	      && TREE_CODE (var) != RESULT_DECL
	      && !is_global_var (var))
	    {
	      bool okay_to_mark = true;

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      mark_sym_for_renaming (var);

	      /* If VAR can have sub-variables, and any of its
		 sub-variables has its address taken, then we cannot
		 remove the addressable flag from VAR.  */
	      if (var_can_have_subvars (var)
		  && (svars = get_subvars_for_var (var)))
		{
		  subvar_t sv;

		  for (sv = svars; sv; sv = sv->next)
		    {
		      if (bitmap_bit_p (gimple_addressable_vars (cfun),
					DECL_UID (sv->var)))
			okay_to_mark = false;
		      mark_sym_for_renaming (sv->var);
		    }
		}

	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
	      if (okay_to_mark)
		{
		  /* The memory partition holding VAR will no longer
		     contain VAR, and statements referencing it will need
		     to be updated.  */
		  if (memory_partition (var))
		    mark_sym_for_renaming (memory_partition (var));

		  mark_non_addressable (var);
		}
	    }
	}

      /* Global variables and addressable locals may be aliased.  Create an
         entry in ADDRESSABLE_VARS for VAR.  */
      if (may_be_aliased (var))
	{
	  /* Variables with sub-variables are represented by their
	     subvars, so skip the alias map entry for the parent.  */
	  if (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL)
	    create_alias_map_for (var, ai);

	  mark_sym_for_renaming (var);
	}

      /* Add pointer variables that have been dereferenced to the POINTERS
         array and create a symbol memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
	       || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
	    {
	      tree tag, old_tag;
	      var_ann_t t_ann;

	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_smt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The symbol tag will need to be renamed into SSA
		 afterwards. Note that we cannot do this inside
		 get_smt_for because aliasing may run multiple times
		 and we only create symbol tags the first time.  */
	      mark_sym_for_renaming (tag);

	      /* Similarly, if pointer VAR used to have another type
		 tag, we will need to process it in the renamer to
		 remove the stale virtual operands.  */
	      old_tag = symbol_mem_tag (var);
	      if (old_tag)
		mark_sym_for_renaming (old_tag);

	      /* Associate the tag with pointer VAR.  */
	      set_symbol_mem_tag (var, tag);

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
		pointer_set_insert (ai->written_vars, tag);
	    }
	  else
	    {
	      /* The pointer has not been dereferenced.  If it had a
		 symbol memory tag, remove it and mark the old tag for
		 renaming to remove it out of the IL.  */
	      tree tag = symbol_mem_tag (var);
	      if (tag)
		{
		  mark_sym_for_renaming (tag);
		  set_symbol_mem_tag (var, NULL_TREE);
		}
	    }
	}
    }

  VEC_free (tree, heap, varvec);
}
2499
2500
2501 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2502 semantics. If the function makes no references to global
2503 variables and contains at least one call to a non-pure function,
2504 then we need to mark the side-effects of the call using .GLOBAL_VAR
2505 to represent all possible global memory referenced by the callee. */
2506
2507 static void
2508 maybe_create_global_var (void)
2509 {
2510 /* No need to create it, if we have one already. */
2511 if (gimple_global_var (cfun) == NULL_TREE)
2512 {
2513 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2514
2515 /* Create .GLOBAL_VAR if there are no call-clobbered
2516 variables and the program contains a mixture of pure/const
2517 and regular function calls. This is to avoid the problem
2518 described in PR 20115:
2519
2520 int X;
2521 int func_pure (void) { return X; }
2522 int func_non_pure (int a) { X += a; }
2523 int foo ()
2524 {
2525 int a = func_pure ();
2526 func_non_pure (a);
2527 a = func_pure ();
2528 return a;
2529 }
2530
2531 Since foo() has no call-clobbered variables, there is
2532 no relationship between the calls to func_pure and
2533 func_non_pure. Since func_pure has no side-effects, value
2534 numbering optimizations elide the second call to func_pure.
2535 So, if we have some pure/const and some regular calls in the
2536 program we create .GLOBAL_VAR to avoid missing these
2537 relations. */
2538 if (bitmap_count_bits (gimple_call_clobbered_vars (cfun)) == 0
2539 && stats->num_call_sites > 0
2540 && stats->num_pure_const_call_sites > 0
2541 && stats->num_call_sites > stats->num_pure_const_call_sites)
2542 create_global_var ();
2543 }
2544 }
2545
2546
/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
   This is needed because when checking for type conflicts we are
   interested in the alias set of the memory location pointed-to by
   PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.

   ALIAS_SET_ONLY, when true, restricts the query to alias-set based
   disambiguation and skips the ipa-type-escape structure-field
   analysis below.  Each early-out updates the alias_stats counters
   used by dump_alias_stats.  */

static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set,
	     bool alias_set_only)
{
  tree mem;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  */
  mem = symbol_mem_tag (ptr);
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 2, pointer arguments may
     not point to anything else.  */
  if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 1, pointer arguments may
     not point to global variables.  */
  if (flag_argument_noalias > 1 && is_global_var (var)
      && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If either MEM or VAR is a read-only global and the other one
     isn't, then PTR cannot point to VAR.  */
  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);

  alias_stats.tbaa_queries++;

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
    {
      alias_stats.alias_noalias++;
      alias_stats.tbaa_resolved++;
      return false;
    }

  /* If VAR is a record or union type, PTR cannot point into VAR
     unless there is some explicit address operation in the
     program that can reference a field of the type pointed-to by PTR.
     This also assumes that the types of both VAR and PTR are
     contained within the compilation unit, and that there is no fancy
     addressing arithmetic associated with any of the types
     involved.  */
  if (mem_alias_set != 0 && var_alias_set != 0)
    {
      tree ptr_type = TREE_TYPE (ptr);
      tree var_type = TREE_TYPE (var);

      /* The star count is -1 if the type at the end of the pointer_to
	 chain is not a record or union type.  */
      if ((!alias_set_only) &&
	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
	{
	  int ptr_star_count = 0;

	  /* ipa_type_escape_star_count_of_interesting_type is a
	     little too restrictive for the pointer type, need to
	     allow pointers to primitive types as long as those types
	     cannot be pointers to everything.  */
	  while (POINTER_TYPE_P (ptr_type))
	    {
	      /* Strip the *s off.  */
	      ptr_type = TREE_TYPE (ptr_type);
	      ptr_star_count++;
	    }

	  /* There does not appear to be a better test to see if the
	     pointer type was one of the pointer to everything
	     types.  */
	  if (ptr_star_count > 0)
	    {
	      alias_stats.structnoaddress_queries++;
	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
							    TREE_TYPE (ptr)))
		{
		  alias_stats.structnoaddress_resolved++;
		  alias_stats.alias_noalias++;
		  return false;
		}
	    }
	  else if (ptr_star_count == 0)
	    {
	      /* If PTR_TYPE was not really a pointer to type, it cannot
		 alias.  */
	      alias_stats.structnoaddress_queries++;
	      alias_stats.structnoaddress_resolved++;
	      alias_stats.alias_noalias++;
	      return false;
	    }
	}
    }

  /* No disambiguation applied; conservatively assume they may alias.  */
  alias_stats.alias_mayalias++;
  return true;
}
2675
2676
2677 /* Add ALIAS to the set of variables that may alias VAR. */
2678
2679 static void
2680 add_may_alias (tree var, tree alias)
2681 {
2682 /* Don't allow self-referential aliases. */
2683 gcc_assert (var != alias);
2684
2685 /* ALIAS must be addressable if it's being added to an alias set. */
2686 #if 1
2687 TREE_ADDRESSABLE (alias) = 1;
2688 #else
2689 gcc_assert (may_be_aliased (alias));
2690 #endif
2691
2692 /* VAR must be a symbol or a name tag. */
2693 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
2694 || TREE_CODE (var) == NAME_MEMORY_TAG);
2695
2696 if (MTAG_ALIASES (var) == NULL)
2697 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
2698
2699 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
2700 }
2701
2702
2703 /* Mark pointer PTR as pointing to an arbitrary memory location. */
2704
2705 static void
2706 set_pt_anything (tree ptr)
2707 {
2708 struct ptr_info_def *pi = get_ptr_info (ptr);
2709
2710 pi->pt_anything = 1;
2711 pi->pt_vars = NULL;
2712
2713 /* The pointer used to have a name tag, but we now found it pointing
2714 to an arbitrary location. The name tag needs to be renamed and
2715 disassociated from PTR. */
2716 if (pi->name_mem_tag)
2717 {
2718 mark_sym_for_renaming (pi->name_mem_tag);
2719 pi->name_mem_tag = NULL_TREE;
2720 }
2721 }
2722
2723
/* Return true if STMT is an "escape" site from the current function.  Escape
   sites those statements which might expose the address of a variable
   outside the current function.  STMT is an escape site iff:

   	1- STMT is a function call, or
	2- STMT is an __asm__ expression, or
	3- STMT is an assignment to a non-local variable, or
	4- STMT is a return statement.

   Return the type of escape site found, if we found one, or NO_ESCAPE
   if none.  */

enum escape_type
is_escape_site (tree stmt)
{
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
    {
      /* A call with no side effects (pure/const) cannot leak an
	 address into writable memory.  */
      if (!TREE_SIDE_EFFECTS (call))
	return ESCAPE_TO_PURE_CONST;

      return ESCAPE_TO_CALL;
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
    {
      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
	  || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
	  || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	{
	  tree from
	    = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
	  tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));

	  /* If the RHS is a conversion between a pointer and an integer, the
	     pointer escapes since we can't track the integer.  */
	  if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
	    return ESCAPE_BAD_CAST;

	  /* Same if the RHS is a conversion between a regular pointer and a
	     ref-all pointer since we can't track the SMT of the former.  */
	  if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
	      && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
	    return ESCAPE_BAD_CAST;
	}

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	return NO_ESCAPE;

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;

  return NO_ESCAPE;
}
2803
2804 /* Create a new memory tag of type TYPE.
2805 Does NOT push it into the current binding. */
2806
2807 tree
2808 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2809 {
2810 tree tmp_var;
2811
2812 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
2813
2814 /* Make the variable writable. */
2815 TREE_READONLY (tmp_var) = 0;
2816
2817 /* It doesn't start out global. */
2818 MTAG_GLOBAL (tmp_var) = 0;
2819 TREE_STATIC (tmp_var) = 0;
2820 TREE_USED (tmp_var) = 1;
2821
2822 return tmp_var;
2823 }
2824
2825 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2826 is considered to represent all the pointers whose pointed-to types are
2827 in the same alias set class. Otherwise, the tag represents a single
2828 SSA_NAME pointer variable. */
2829
2830 static tree
2831 create_memory_tag (tree type, bool is_type_tag)
2832 {
2833 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2834 type, (is_type_tag) ? "SMT" : "NMT");
2835
2836 /* By default, memory tags are local variables. Alias analysis will
2837 determine whether they should be considered globals. */
2838 DECL_CONTEXT (tag) = current_function_decl;
2839
2840 /* Memory tags are by definition addressable. */
2841 TREE_ADDRESSABLE (tag) = 1;
2842
2843 set_symbol_mem_tag (tag, NULL_TREE);
2844
2845 /* Add the tag to the symbol table. */
2846 add_referenced_var (tag);
2847
2848 return tag;
2849 }
2850
2851
2852 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2853 This is used if P_i has been found to point to a specific set of
2854 variables or to a non-aliased memory location like the address returned
2855 by malloc functions. */
2856
2857 static tree
2858 get_nmt_for (tree ptr)
2859 {
2860 struct ptr_info_def *pi = get_ptr_info (ptr);
2861 tree tag = pi->name_mem_tag;
2862
2863 if (tag == NULL_TREE)
2864 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2865 return tag;
2866 }
2867
2868
/* Return the symbol memory tag associated to pointer PTR.  A memory
   tag is an artificial variable that represents the memory location
   pointed-to by PTR.  It is used to model the effects of pointer
   de-references on addressable variables.

   AI points to the data gathered during alias analysis.  This
   function populates the array AI->POINTERS.  */

static tree
get_smt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* We use a unique memory tag for all the ref-all pointers.  */
  if (PTR_IS_REF_ALL (ptr))
    {
      if (!ai->ref_all_symbol_mem_tag)
	ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
      return ai->ref_all_symbol_mem_tag;
    }

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = symbol_mem_tag (curr->var);
      /* Share the tag of any previously processed pointer with the
	 exact same pointed-to alias set.  */
      if (tag_set == curr->set)
	{
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a symbol tag already, create a new SMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      tag = symbol_mem_tag (ptr);
      if (tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the symbol tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));

  return tag;
}
2943
2944
2945 /* Create GLOBAL_VAR, an artificial global variable to act as a
2946 representative of all the variables that may be clobbered by function
2947 calls. */
2948
2949 static void
2950 create_global_var (void)
2951 {
2952 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2953 void_type_node);
2954 DECL_ARTIFICIAL (global_var) = 1;
2955 TREE_READONLY (global_var) = 0;
2956 DECL_EXTERNAL (global_var) = 1;
2957 TREE_STATIC (global_var) = 1;
2958 TREE_USED (global_var) = 1;
2959 DECL_CONTEXT (global_var) = NULL_TREE;
2960 TREE_THIS_VOLATILE (global_var) = 0;
2961 TREE_ADDRESSABLE (global_var) = 0;
2962
2963 create_var_ann (global_var);
2964 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2965 add_referenced_var (global_var);
2966 mark_sym_for_renaming (global_var);
2967 cfun->gimple_df->global_var = global_var;
2968 }
2969
2970
2971 /* Dump alias statistics on FILE. */
2972
2973 static void
2974 dump_alias_stats (FILE *file)
2975 {
2976 const char *funcname
2977 = lang_hooks.decl_printable_name (current_function_decl, 2);
2978 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2979 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2980 fprintf (file, "Total alias mayalias results:\t%u\n",
2981 alias_stats.alias_mayalias);
2982 fprintf (file, "Total alias noalias results:\t%u\n",
2983 alias_stats.alias_noalias);
2984 fprintf (file, "Total simple queries:\t%u\n",
2985 alias_stats.simple_queries);
2986 fprintf (file, "Total simple resolved:\t%u\n",
2987 alias_stats.simple_resolved);
2988 fprintf (file, "Total TBAA queries:\t%u\n",
2989 alias_stats.tbaa_queries);
2990 fprintf (file, "Total TBAA resolved:\t%u\n",
2991 alias_stats.tbaa_resolved);
2992 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2993 alias_stats.structnoaddress_queries);
2994 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2995 alias_stats.structnoaddress_resolved);
2996 }
2997
2998
2999 /* Dump alias information on FILE. */
3000
3001 void
3002 dump_alias_info (FILE *file)
3003 {
3004 size_t i;
3005 const char *funcname
3006 = lang_hooks.decl_printable_name (current_function_decl, 2);
3007 referenced_var_iterator rvi;
3008 tree var;
3009
3010 fprintf (file, "\nAlias information for %s\n\n", funcname);
3011
3012 dump_memory_partitions (file);
3013
3014 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3015
3016 fprintf (file, "Aliased symbols\n\n");
3017
3018 FOR_EACH_REFERENCED_VAR (var, rvi)
3019 {
3020 if (may_be_aliased (var))
3021 dump_variable (file, var);
3022 }
3023
3024 fprintf (file, "\nDereferenced pointers\n\n");
3025
3026 FOR_EACH_REFERENCED_VAR (var, rvi)
3027 if (symbol_mem_tag (var))
3028 dump_variable (file, var);
3029
3030 fprintf (file, "\nSymbol memory tags\n\n");
3031
3032 FOR_EACH_REFERENCED_VAR (var, rvi)
3033 {
3034 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3035 dump_variable (file, var);
3036 }
3037
3038 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3039
3040 fprintf (file, "SSA_NAME pointers\n\n");
3041 for (i = 1; i < num_ssa_names; i++)
3042 {
3043 tree ptr = ssa_name (i);
3044 struct ptr_info_def *pi;
3045
3046 if (ptr == NULL_TREE)
3047 continue;
3048
3049 pi = SSA_NAME_PTR_INFO (ptr);
3050 if (!SSA_NAME_IN_FREE_LIST (ptr)
3051 && pi
3052 && pi->name_mem_tag)
3053 dump_points_to_info_for (file, ptr);
3054 }
3055
3056 fprintf (file, "\nName memory tags\n\n");
3057
3058 FOR_EACH_REFERENCED_VAR (var, rvi)
3059 {
3060 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3061 dump_variable (file, var);
3062 }
3063
3064 fprintf (file, "\n");
3065 }
3066
3067
3068 /* Dump alias information on stderr. */
3069
3070 void
3071 debug_alias_info (void)
3072 {
3073 dump_alias_info (stderr);
3074 }
3075
3076
3077 /* Return the alias information associated with pointer T. It creates a
3078 new instance if none existed. */
3079
3080 struct ptr_info_def *
3081 get_ptr_info (tree t)
3082 {
3083 struct ptr_info_def *pi;
3084
3085 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3086
3087 pi = SSA_NAME_PTR_INFO (t);
3088 if (pi == NULL)
3089 {
3090 pi = GGC_CNEW (struct ptr_info_def);
3091 SSA_NAME_PTR_INFO (t) = pi;
3092 }
3093
3094 return pi;
3095 }
3096
3097
3098 /* Dump points-to information for SSA_NAME PTR into FILE. */
3099
3100 void
3101 dump_points_to_info_for (FILE *file, tree ptr)
3102 {
3103 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3104
3105 print_generic_expr (file, ptr, dump_flags);
3106
3107 if (pi)
3108 {
3109 if (pi->name_mem_tag)
3110 {
3111 fprintf (file, ", name memory tag: ");
3112 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3113 }
3114
3115 if (pi->is_dereferenced)
3116 fprintf (file, ", is dereferenced (R=%ld, W=%ld)",
3117 get_mem_sym_stats_for (ptr)->num_direct_reads,
3118 get_mem_sym_stats_for (ptr)->num_direct_writes);
3119
3120 if (pi->value_escapes_p)
3121 fprintf (file, ", its value escapes");
3122
3123 if (pi->pt_anything)
3124 fprintf (file, ", points-to anything");
3125
3126 if (pi->pt_null)
3127 fprintf (file, ", points-to NULL");
3128
3129 if (pi->pt_vars)
3130 {
3131 fprintf (file, ", points-to vars: ");
3132 dump_decl_set (file, pi->pt_vars);
3133 }
3134 }
3135
3136 fprintf (file, "\n");
3137 }
3138
3139
3140 /* Dump points-to information for VAR into stderr. */
3141
3142 void
3143 debug_points_to_info_for (tree var)
3144 {
3145 dump_points_to_info_for (stderr, var);
3146 }
3147
3148
3149 /* Dump points-to information into FILE. NOTE: This function is slow, as
3150 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3151
3152 void
3153 dump_points_to_info (FILE *file)
3154 {
3155 basic_block bb;
3156 block_stmt_iterator si;
3157 ssa_op_iter iter;
3158 const char *fname =
3159 lang_hooks.decl_printable_name (current_function_decl, 2);
3160 referenced_var_iterator rvi;
3161 tree var;
3162
3163 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3164
3165 /* First dump points-to information for the default definitions of
3166 pointer variables. This is necessary because default definitions are
3167 not part of the code. */
3168 FOR_EACH_REFERENCED_VAR (var, rvi)
3169 {
3170 if (POINTER_TYPE_P (TREE_TYPE (var)))
3171 {
3172 tree def = gimple_default_def (cfun, var);
3173 if (def)
3174 dump_points_to_info_for (file, def);
3175 }
3176 }
3177
3178 /* Dump points-to information for every pointer defined in the program. */
3179 FOR_EACH_BB (bb)
3180 {
3181 tree phi;
3182
3183 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3184 {
3185 tree ptr = PHI_RESULT (phi);
3186 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3187 dump_points_to_info_for (file, ptr);
3188 }
3189
3190 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
3191 {
3192 tree stmt = bsi_stmt (si);
3193 tree def;
3194 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3195 if (TREE_CODE (def) == SSA_NAME
3196 && POINTER_TYPE_P (TREE_TYPE (def)))
3197 dump_points_to_info_for (file, def);
3198 }
3199 }
3200
3201 fprintf (file, "\n");
3202 }
3203
3204
3205 /* Dump points-to info pointed to by PTO into STDERR. */
3206
3207 void
3208 debug_points_to_info (void)
3209 {
3210 dump_points_to_info (stderr);
3211 }
3212
3213 /* Dump to FILE the list of variables that may be aliasing VAR. */
3214
3215 void
3216 dump_may_aliases_for (FILE *file, tree var)
3217 {
3218 bitmap aliases;
3219
3220 aliases = MTAG_ALIASES (var);
3221 if (aliases)
3222 {
3223 bitmap_iterator bi;
3224 unsigned int i;
3225 tree al;
3226
3227 fprintf (file, "{ ");
3228 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3229 {
3230 al = referenced_var (i);
3231 print_generic_expr (file, al, dump_flags);
3232 fprintf (file, " ");
3233 }
3234 fprintf (file, "}");
3235 }
3236 }
3237
3238
3239 /* Dump to stderr the list of variables that may be aliasing VAR. */
3240
3241 void
3242 debug_may_aliases_for (tree var)
3243 {
3244 dump_may_aliases_for (stderr, var);
3245 }
3246
3247
3248 /* Return true if VAR may be aliased. */
3249
3250 bool
3251 may_be_aliased (tree var)
3252 {
3253 /* Obviously. */
3254 if (TREE_ADDRESSABLE (var))
3255 return true;
3256
3257 /* Globally visible variables can have their addresses taken by other
3258 translation units. */
3259 if (MTAG_P (var)
3260 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
3261 return true;
3262 else if (!MTAG_P (var)
3263 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3264 return true;
3265
3266 /* Automatic variables can't have their addresses escape any other
3267 way. This must be after the check for global variables, as
3268 extern declarations do not have TREE_STATIC set. */
3269 if (!TREE_STATIC (var))
3270 return false;
3271
3272 /* If we're in unit-at-a-time mode, then we must have seen all
3273 occurrences of address-of operators, and so we can trust
3274 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
3275 isn't addressable if it's local to the current function. */
3276 if (flag_unit_at_a_time)
3277 return false;
3278
3279 if (decl_function_context (var) == current_function_decl)
3280 return false;
3281
3282 return true;
3283 }
3284
3285 /* The following is based on code in add_stmt_operand to ensure that the
3286 same defs/uses/vdefs/vuses will be found after replacing a reference
3287 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3288 is the address of var. Return a memtag for the ptr, after adding the
3289 proper may_aliases to it (which are the aliases of var, if it has any,
3290 or var itself). */
3291
3292 static tree
3293 add_may_alias_for_new_tag (tree tag, tree var)
3294 {
3295 bitmap aliases = NULL;
3296
3297 if (MTAG_P (var))
3298 aliases = may_aliases (var);
3299
3300 /* Case 1: |aliases| == 1 */
3301 if (aliases && bitmap_count_bits (aliases) == 1)
3302 {
3303 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3304 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3305 return ali;
3306 }
3307
3308 /* Case 2: |aliases| == 0 */
3309 if (aliases == NULL)
3310 add_may_alias (tag, var);
3311 else
3312 {
3313 /* Case 3: |aliases| > 1 */
3314 union_alias_set_into (tag, aliases);
3315 }
3316 return tag;
3317 }
3318
3319 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
3320 tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
3321 according to the location accessed by EXPR.
3322
3323 Note, the set of aliases represented by the new symbol tag are not marked
3324 for renaming. */
3325
3326 void
3327 new_type_alias (tree ptr, tree var, tree expr)
3328 {
3329 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3330 tree tag;
3331 subvar_t svars;
3332 tree ali = NULL_TREE;
3333 HOST_WIDE_INT offset, size, maxsize;
3334 tree ref;
3335 VEC (tree, heap) *overlaps = NULL;
3336 subvar_t sv;
3337 unsigned int len;
3338
3339 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3340 gcc_assert (!MTAG_P (var));
3341
3342 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3343 gcc_assert (ref);
3344
3345 tag = create_memory_tag (tag_type, true);
3346 set_symbol_mem_tag (ptr, tag);
3347
3348 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
3349 subvars, add the subvars to the tag instead of the actual var. */
3350 if (var_can_have_subvars (ref)
3351 && (svars = get_subvars_for_var (ref)))
3352 {
3353 for (sv = svars; sv; sv = sv->next)
3354 {
3355 bool exact;
3356
3357 if (overlap_subvar (offset, maxsize, sv->var, &exact))
3358 VEC_safe_push (tree, heap, overlaps, sv->var);
3359 }
3360 gcc_assert (overlaps != NULL);
3361 }
3362 else if (var_can_have_subvars (var)
3363 && (svars = get_subvars_for_var (var)))
3364 {
3365 /* If the REF is not a direct access to VAR (e.g., it is a dereference
3366 of a pointer), we should scan the virtual operands of REF the same
3367 way as tree-ssa-operands do. At the moment, this is somewhat
3368 difficult, so we just give up and add all the subvars of VAR.
3369 On mem-ssa branch, the scanning for virtual operands have been
3370 split from the rest of tree-ssa-operands, so it should be much
3371 easier to fix this problem correctly once mem-ssa is merged. */
3372 for (sv = svars; sv; sv = sv->next)
3373 VEC_safe_push (tree, heap, overlaps, sv->var);
3374
3375 gcc_assert (overlaps != NULL);
3376 }
3377 else
3378 ali = add_may_alias_for_new_tag (tag, var);
3379
3380 len = VEC_length (tree, overlaps);
3381 if (len > 0)
3382 {
3383 if (dump_file && (dump_flags & TDF_DETAILS))
3384 fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
3385
3386 if (len == 1)
3387 ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
3388 else if (len > 1)
3389 {
3390 unsigned int k;
3391 tree sv_var;
3392
3393 for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
3394 {
3395 ali = add_may_alias_for_new_tag (tag, sv_var);
3396
3397 if (ali != tag)
3398 {
3399 /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
3400 took place. Since more than one svar was found, we add
3401 'ali' as one of the may_aliases of the new tag. */
3402 add_may_alias (tag, ali);
3403 ali = tag;
3404 }
3405 }
3406 }
3407 VEC_free (tree, heap, overlaps);
3408 }
3409
3410 set_symbol_mem_tag (ptr, ali);
3411 TREE_READONLY (tag) = TREE_READONLY (var);
3412 MTAG_GLOBAL (tag) = is_global_var (var);
3413 }
3414
3415 /* This represents the used range of a variable. */
3416
3417 typedef struct used_part
3418 {
3419 HOST_WIDE_INT minused;
3420 HOST_WIDE_INT maxused;
3421 /* True if we have an explicit use/def of some portion of this variable,
3422 even if it is all of it. i.e. a.b = 5 or temp = a.b. */
3423 bool explicit_uses;
3424 /* True if we have an implicit use/def of some portion of this
3425 variable. Implicit uses occur when we can't tell what part we
3426 are referencing, and have to make conservative assumptions. */
3427 bool implicit_uses;
3428 /* True if the structure is only written to or taken its address. */
3429 bool write_only;
3430 } *used_part_t;
3431
3432 /* An array of used_part structures, indexed by variable uid. */
3433
3434 static htab_t used_portions;
3435
3436 struct used_part_map
3437 {
3438 unsigned int uid;
3439 used_part_t to;
3440 };
3441
3442 /* Return true if the uid in the two used part maps are equal. */
3443
3444 static int
3445 used_part_map_eq (const void *va, const void *vb)
3446 {
3447 const struct used_part_map *a = (const struct used_part_map *) va;
3448 const struct used_part_map *b = (const struct used_part_map *) vb;
3449 return (a->uid == b->uid);
3450 }
3451
3452 /* Hash a from uid in a used_part_map. */
3453
3454 static unsigned int
3455 used_part_map_hash (const void *item)
3456 {
3457 return ((const struct used_part_map *)item)->uid;
3458 }
3459
3460 /* Free a used part map element. */
3461
3462 static void
3463 free_used_part_map (void *item)
3464 {
3465 free (((struct used_part_map *)item)->to);
3466 free (item);
3467 }
3468
3469 /* Lookup a used_part structure for a UID. */
3470
3471 static used_part_t
3472 up_lookup (unsigned int uid)
3473 {
3474 struct used_part_map *h, in;
3475 in.uid = uid;
3476 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
3477 if (!h)
3478 return NULL;
3479 return h->to;
3480 }
3481
3482 /* Insert the pair UID, TO into the used part hashtable. */
3483
3484 static void
3485 up_insert (unsigned int uid, used_part_t to)
3486 {
3487 struct used_part_map *h;
3488 void **loc;
3489
3490 h = XNEW (struct used_part_map);
3491 h->uid = uid;
3492 h->to = to;
3493 loc = htab_find_slot_with_hash (used_portions, h,
3494 uid, INSERT);
3495 if (*loc != NULL)
3496 free (*loc);
3497 *(struct used_part_map **) loc = h;
3498 }
3499
3500
3501 /* Given a variable uid, UID, get or create the entry in the used portions
3502 table for the variable. */
3503
3504 static used_part_t
3505 get_or_create_used_part_for (size_t uid)
3506 {
3507 used_part_t up;
3508 if ((up = up_lookup (uid)) == NULL)
3509 {
3510 up = XCNEW (struct used_part);
3511 up->minused = INT_MAX;
3512 up->maxused = 0;
3513 up->explicit_uses = false;
3514 up->implicit_uses = false;
3515 up->write_only = true;
3516 }
3517
3518 return up;
3519 }
3520
3521
3522 /* Create and return a structure sub-variable for field type FIELD at
3523 offset OFFSET, with size SIZE, of variable VAR. */
3524
3525 static tree
3526 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
3527 unsigned HOST_WIDE_INT size)
3528 {
3529 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
3530
3531 /* We need to copy the various flags from VAR to SUBVAR, so that
3532 they are is_global_var iff the original variable was. */
3533 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
3534 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
3535 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
3536 TREE_STATIC (subvar) = TREE_STATIC (var);
3537 TREE_READONLY (subvar) = TREE_READONLY (var);
3538 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
3539
3540 /* Add the new variable to REFERENCED_VARS. */
3541 set_symbol_mem_tag (subvar, NULL);
3542 add_referenced_var (subvar);
3543 SFT_PARENT_VAR (subvar) = var;
3544 SFT_OFFSET (subvar) = offset;
3545 SFT_SIZE (subvar) = size;
3546 return subvar;
3547 }
3548
3549
3550 /* Given an aggregate VAR, create the subvariables that represent its
3551 fields. */
3552
3553 static void
3554 create_overlap_variables_for (tree var)
3555 {
3556 VEC(fieldoff_s,heap) *fieldstack = NULL;
3557 used_part_t up;
3558 size_t uid = DECL_UID (var);
3559
3560 up = up_lookup (uid);
3561 if (!up
3562 || up->write_only)
3563 return;
3564
3565 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
3566 if (VEC_length (fieldoff_s, fieldstack) != 0)
3567 {
3568 subvar_t *subvars;
3569 fieldoff_s *fo;
3570 bool notokay = false;
3571 int fieldcount = 0;
3572 int i;
3573 HOST_WIDE_INT lastfooffset = -1;
3574 HOST_WIDE_INT lastfosize = -1;
3575 tree lastfotype = NULL_TREE;
3576
3577 /* Not all fields have DECL_SIZE set, and those that don't, we don't
3578 know their size, and thus, can't handle.
3579 The same is true of fields with DECL_SIZE that is not an integer
3580 constant (such as variable sized fields).
3581 Fields with offsets which are not constant will have an offset < 0
3582 We *could* handle fields that are constant sized arrays, but
3583 currently don't. Doing so would require some extra changes to
3584 tree-ssa-operands.c. */
3585
3586 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3587 {
3588 if (!fo->size
3589 || TREE_CODE (fo->size) != INTEGER_CST
3590 || fo->offset < 0)
3591 {
3592 notokay = true;
3593 break;
3594 }
3595 fieldcount++;
3596 }
3597
3598 /* The current heuristic we use is as follows:
3599 If the variable has no used portions in this function, no
3600 structure vars are created for it.
3601 Otherwise,
3602 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
3603 we always create structure vars for them.
3604 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3605 some explicit uses, we create structure vars for them.
3606 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3607 no explicit uses, we do not create structure vars for them.
3608 */
3609
3610 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
3611 && !up->explicit_uses)
3612 {
3613 if (dump_file && (dump_flags & TDF_DETAILS))
3614 {
3615 fprintf (dump_file, "Variable ");
3616 print_generic_expr (dump_file, var, 0);
3617 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
3618 }
3619 notokay = true;
3620 }
3621
3622 /* Bail out, if we can't create overlap variables. */
3623 if (notokay)
3624 {
3625 VEC_free (fieldoff_s, heap, fieldstack);
3626 return;
3627 }
3628
3629 /* Otherwise, create the variables. */
3630 subvars = lookup_subvars_for_var (var);
3631
3632 sort_fieldstack (fieldstack);
3633
3634 for (i = VEC_length (fieldoff_s, fieldstack);
3635 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
3636 {
3637 subvar_t sv;
3638 HOST_WIDE_INT fosize;
3639 tree currfotype;
3640
3641 fosize = TREE_INT_CST_LOW (fo->size);
3642 currfotype = fo->type;
3643
3644 /* If this field isn't in the used portion,
3645 or it has the exact same offset and size as the last
3646 field, skip it. */
3647
3648 if (((fo->offset <= up->minused
3649 && fo->offset + fosize <= up->minused)
3650 || fo->offset >= up->maxused)
3651 || (fo->offset == lastfooffset
3652 && fosize == lastfosize
3653 && currfotype == lastfotype))
3654 continue;
3655 sv = GGC_NEW (struct subvar);
3656 sv->next = *subvars;
3657 sv->var = create_sft (var, fo->type, fo->offset, fosize);
3658
3659 if (dump_file)
3660 {
3661 fprintf (dump_file, "structure field tag %s created for var %s",
3662 get_name (sv->var), get_name (var));
3663 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3664 SFT_OFFSET (sv->var));
3665 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3666 SFT_SIZE (sv->var));
3667 fprintf (dump_file, "\n");
3668 }
3669
3670 lastfotype = currfotype;
3671 lastfooffset = fo->offset;
3672 lastfosize = fosize;
3673 *subvars = sv;
3674 }
3675
3676 /* Once we have created subvars, the original is no longer call
3677 clobbered on its own. Its call clobbered status depends
3678 completely on the call clobbered status of the subvars.
3679
3680 add_referenced_var in the above loop will take care of
3681 marking subvars of global variables as call clobbered for us
3682 to start, since they are global as well. */
3683 clear_call_clobbered (var);
3684 }
3685
3686 VEC_free (fieldoff_s, heap, fieldstack);
3687 }
3688
3689
3690 /* Find the conservative answer to the question of what portions of what
3691 structures are used by this statement. We assume that if we have a
3692 component ref with a known size + offset, that we only need that part
3693 of the structure. For unknown cases, or cases where we do something
3694 to the whole structure, we assume we need to create fields for the
3695 entire structure. */
3696
3697 static tree
3698 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3699 {
3700 switch (TREE_CODE (*tp))
3701 {
3702 case GIMPLE_MODIFY_STMT:
3703 /* Recurse manually here to track whether the use is in the
3704 LHS of an assignment. */
3705 find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
3706 return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
3707 walk_subtrees, NULL);
3708 case REALPART_EXPR:
3709 case IMAGPART_EXPR:
3710 case COMPONENT_REF:
3711 case ARRAY_REF:
3712 {
3713 HOST_WIDE_INT bitsize;
3714 HOST_WIDE_INT bitmaxsize;
3715 HOST_WIDE_INT bitpos;
3716 tree ref;
3717 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3718 if (DECL_P (ref)
3719 && var_can_have_subvars (ref)
3720 && bitmaxsize != -1)
3721 {
3722 size_t uid = DECL_UID (ref);
3723 used_part_t up;
3724
3725 up = get_or_create_used_part_for (uid);
3726
3727 if (bitpos <= up->minused)
3728 up->minused = bitpos;
3729 if ((bitpos + bitmaxsize >= up->maxused))
3730 up->maxused = bitpos + bitmaxsize;
3731
3732 if (bitsize == bitmaxsize)
3733 up->explicit_uses = true;
3734 else
3735 up->implicit_uses = true;
3736 if (!lhs_p)
3737 up->write_only = false;
3738 up_insert (uid, up);
3739
3740 *walk_subtrees = 0;
3741 return NULL_TREE;
3742 }
3743 }
3744 break;
3745 /* This is here to make sure we mark the entire base variable as used
3746 when you take its address. Because our used portion analysis is
3747 simple, we aren't looking at casts or pointer arithmetic to see what
3748 happens when you take the address. */
3749 case ADDR_EXPR:
3750 {
3751 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3752
3753 if (var
3754 && DECL_P (var)
3755 && DECL_SIZE (var)
3756 && var_can_have_subvars (var)
3757 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3758 {
3759 used_part_t up;
3760 size_t uid = DECL_UID (var);
3761
3762 up = get_or_create_used_part_for (uid);
3763
3764 up->minused = 0;
3765 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3766 up->implicit_uses = true;
3767 if (!lhs_p)
3768 up->write_only = false;
3769
3770 up_insert (uid, up);
3771 *walk_subtrees = 0;
3772 return NULL_TREE;
3773 }
3774 }
3775 break;
3776 case CALL_EXPR:
3777 {
3778 int i;
3779 int nargs = call_expr_nargs (*tp);
3780 for (i = 0; i < nargs; i++)
3781 {
3782 tree *arg = &CALL_EXPR_ARG (*tp, i);
3783 if (TREE_CODE (*arg) != ADDR_EXPR)
3784 find_used_portions (arg, walk_subtrees, NULL);
3785 }
3786 *walk_subtrees = 0;
3787 return NULL_TREE;
3788 }
3789 case VAR_DECL:
3790 case PARM_DECL:
3791 case RESULT_DECL:
3792 {
3793 tree var = *tp;
3794 if (DECL_SIZE (var)
3795 && var_can_have_subvars (var)
3796 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3797 {
3798 used_part_t up;
3799 size_t uid = DECL_UID (var);
3800
3801 up = get_or_create_used_part_for (uid);
3802
3803 up->minused = 0;
3804 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3805 up->implicit_uses = true;
3806
3807 up_insert (uid, up);
3808 *walk_subtrees = 0;
3809 return NULL_TREE;
3810 }
3811 }
3812 break;
3813
3814 default:
3815 break;
3816
3817 }
3818 return NULL_TREE;
3819 }
3820
3821 /* Create structure field variables for structures used in this function. */
3822
3823 static unsigned int
3824 create_structure_vars (void)
3825 {
3826 basic_block bb;
3827 safe_referenced_var_iterator rvi;
3828 VEC (tree, heap) *varvec = NULL;
3829 tree var;
3830
3831 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3832 free_used_part_map);
3833
3834 FOR_EACH_BB (bb)
3835 {
3836 block_stmt_iterator bsi;
3837 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3838 {
3839 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3840 find_used_portions,
3841 NULL);
3842 }
3843 }
3844 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3845 {
3846 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3847 if (var
3848 && DECL_SIZE (var)
3849 && var_can_have_subvars (var)
3850 && !MTAG_P (var)
3851 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3852 create_overlap_variables_for (var);
3853 }
3854 htab_delete (used_portions);
3855 VEC_free (tree, heap, varvec);
3856
3857 /* Update SSA operands of statements mentioning variables we split. */
3858 if (gimple_in_ssa_p (cfun))
3859 FOR_EACH_BB (bb)
3860 {
3861 block_stmt_iterator bsi;
3862 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3863 {
3864 tree stmt = bsi_stmt (bsi);
3865 bool update = false;
3866 unsigned int i;
3867 bitmap_iterator bi;
3868
3869 if (STORED_SYMS (stmt))
3870 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3871 {
3872 tree sym = referenced_var_lookup (i);
3873 if (get_subvars_for_var (sym))
3874 {
3875 update=true;
3876 break;
3877 }
3878 }
3879
3880 if (LOADED_SYMS (stmt) && !update)
3881 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3882 {
3883 tree sym = referenced_var_lookup (i);
3884 if (get_subvars_for_var (sym))
3885 {
3886 update=true;
3887 break;
3888 }
3889 }
3890
3891 if (stmt_ann (stmt)->addresses_taken && !update)
3892 EXECUTE_IF_SET_IN_BITMAP (stmt_ann (stmt)->addresses_taken,
3893 0, i, bi)
3894 {
3895 tree sym = referenced_var_lookup (i);
3896 if (get_subvars_for_var (sym))
3897 {
3898 update=true;
3899 break;
3900 }
3901 }
3902
3903 if (update)
3904 update_stmt (stmt);
3905 }
3906 }
3907
3908 return 0;
3909 }
3910
3911 static bool
3912 gate_structure_vars (void)
3913 {
3914 return flag_tree_salias != 0;
3915 }
3916
3917 struct tree_opt_pass pass_create_structure_vars =
3918 {
3919 "salias", /* name */
3920 gate_structure_vars, /* gate */
3921 create_structure_vars, /* execute */
3922 NULL, /* sub */
3923 NULL, /* next */
3924 0, /* static_pass_number */
3925 0, /* tv_id */
3926 PROP_cfg, /* properties_required */
3927 0, /* properties_provided */
3928 0, /* properties_destroyed */
3929 0, /* todo_flags_start */
3930 TODO_dump_func, /* todo_flags_finish */
3931 0 /* letter */
3932 };
3933
3934 /* Reset the call_clobbered flags on our referenced vars. In
3935 theory, this only needs to be done for globals. */
3936
3937 static unsigned int
3938 reset_cc_flags (void)
3939 {
3940 tree var;
3941 referenced_var_iterator rvi;
3942
3943 FOR_EACH_REFERENCED_VAR (var, rvi)
3944 var_ann (var)->call_clobbered = false;
3945 return 0;
3946 }
3947
3948 struct tree_opt_pass pass_reset_cc_flags =
3949 {
3950 NULL, /* name */
3951 NULL, /* gate */
3952 reset_cc_flags, /* execute */
3953 NULL, /* sub */
3954 NULL, /* next */
3955 0, /* static_pass_number */
3956 0, /* tv_id */
3957 PROP_referenced_vars |PROP_cfg, /* properties_required */
3958 0, /* properties_provided */
3959 0, /* properties_destroyed */
3960 0, /* todo_flags_start */
3961 0, /* todo_flags_finish */
3962 0 /* letter */
3963 };