gcc/tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49 #include "vecprim.h"
50 #include "pointer-set.h"
51
52 /* Structure to map a variable to its alias set. */
53 struct alias_map_d
54 {
55 /* Variable and its alias set. */
56 tree var;
57 HOST_WIDE_INT set;
58 };
59
60
61 /* Data structures used for computing memory partitions. */
62
63 struct mp_info_def
64 {
65 /* Symbol or memory tag. */
66 tree var;
67
68 /* Number of virtual operators needed to represent references to VAR. */
69 long num_vops;
70 };
71
72 typedef struct mp_info_def *mp_info_t;
73 DEF_VEC_P(mp_info_t);
74 DEF_VEC_ALLOC_P(mp_info_t, heap);
75
76 /* Counters used to display statistics on alias analysis. */
77 struct alias_stats_d
78 {
79 unsigned int alias_queries;
80 unsigned int alias_mayalias;
81 unsigned int alias_noalias;
82 unsigned int simple_queries;
83 unsigned int simple_resolved;
84 unsigned int tbaa_queries;
85 unsigned int tbaa_resolved;
86 unsigned int structnoaddress_queries;
87 unsigned int structnoaddress_resolved;
88 };
89
90
91 /* Local variables. */
92 static struct alias_stats_d alias_stats;
93 static bitmap_obstack alias_bitmap_obstack;
94
95 /* Local functions. */
96 static void compute_flow_insensitive_aliasing (struct alias_info *);
97 static void finalize_ref_all_pointers (struct alias_info *);
98 static void dump_alias_stats (FILE *);
99 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
100 static tree create_memory_tag (tree type, bool is_type_tag);
101 static tree get_smt_for (tree, struct alias_info *);
102 static tree get_nmt_for (tree);
103 static void add_may_alias (tree, tree);
104 static struct alias_info *init_alias_info (void);
105 static void delete_alias_info (struct alias_info *);
106 static void compute_flow_sensitive_aliasing (struct alias_info *);
107 static void setup_pointers_and_addressables (struct alias_info *);
108 static void create_global_var (void);
109 static void maybe_create_global_var (struct alias_info *ai);
110 static void set_pt_anything (tree ptr);
111
112 void dump_mp_info (FILE *, VEC(mp_info_t,heap) *mp_info);
113 void debug_mp_info (VEC(mp_info_t,heap) *mp_info);
114
115 /* Global declarations. */
116
117 /* Mark variable VAR as being non-addressable. */
118
119 static void
120 mark_non_addressable (tree var)
121 {
122 tree mpt;
123
124 if (!TREE_ADDRESSABLE (var))
125 return;
126
127 mpt = memory_partition (var);
128
129 if (!MTAG_P (var))
130 var_ann (var)->call_clobbered = false;
131
132 bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
133 TREE_ADDRESSABLE (var) = 0;
134
135 if (mpt)
136 {
137 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
138 set_memory_partition (var, NULL_TREE);
139 }
140 }
141
142
143 /* qsort comparison function to sort type/name tags by DECL_UID. */
144
145 static int
146 sort_tags_by_id (const void *pa, const void *pb)
147 {
148 tree a = *(tree *)pa;
149 tree b = *(tree *)pb;
150
151 return DECL_UID (a) - DECL_UID (b);
152 }
153
154 /* Initialize WORKLIST to contain those memory tags that are marked call
155 clobbered. Initialize WORKLIST2 to contain the reasons these
156 memory tags escaped. */
157
158 static void
159 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
160 VEC (int, heap) **worklist2)
161 {
162 referenced_var_iterator rvi;
163 tree curr;
164
165 FOR_EACH_REFERENCED_VAR (curr, rvi)
166 {
167 if (MTAG_P (curr) && is_call_clobbered (curr))
168 {
169 VEC_safe_push (tree, heap, *worklist, curr);
170 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
171 }
172 }
173 }
174
175 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
176 ALIAS is not already marked call clobbered, and is a memory
177 tag. */
178
179 static void
180 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
181 VEC (int, heap) **worklist2,
182 int reason)
183 {
184 if (MTAG_P (alias) && !is_call_clobbered (alias))
185 {
186 VEC_safe_push (tree, heap, *worklist, alias);
187 VEC_safe_push (int, heap, *worklist2, reason);
188 }
189 }
190
191 /* Mark aliases of TAG as call clobbered, and place any tags on the
192 alias list that were not already call clobbered on WORKLIST. */
193
194 static void
195 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
196 VEC (int, heap) **worklist2)
197 {
198 bitmap aliases;
199 bitmap_iterator bi;
200 unsigned int i;
201 tree entry;
202 var_ann_t ta = var_ann (tag);
203
204 if (!MTAG_P (tag))
205 return;
206 aliases = may_aliases (tag);
207 if (!aliases)
208 return;
209
210 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
211 {
212 entry = referenced_var (i);
213 if (!unmodifiable_var_p (entry))
214 {
215 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
216 mark_call_clobbered (entry, ta->escape_mask);
217 }
218 }
219 }
220
221 /* Tags containing global vars need to be marked as global.
222 Tags containing call clobbered vars need to be marked as call
223 clobbered. */
224
225 static void
226 compute_tag_properties (void)
227 {
228 referenced_var_iterator rvi;
229 tree tag;
230 bool changed = true;
231 VEC (tree, heap) *taglist = NULL;
232
233 FOR_EACH_REFERENCED_VAR (tag, rvi)
234 {
235 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
236 continue;
237 VEC_safe_push (tree, heap, taglist, tag);
238 }
239
240 /* We sort the taglist by DECL_UID, for two reasons.
241 1. To get a sequential ordering to make the bitmap accesses
242 faster.
243 2. Because of the way we compute aliases, it's more likely that
244 an earlier tag is included in a later tag, and this will reduce
245 the number of iterations.
246
247 If we had a real tag graph, we would just topo-order it and be
248 done with it. */
249 qsort (VEC_address (tree, taglist),
250 VEC_length (tree, taglist),
251 sizeof (tree),
252 sort_tags_by_id);
253
254 /* Go through each tag not marked as global, and if it aliases
255 global vars, mark it global.
256
257 If the tag contains call clobbered vars, mark it call
258 clobbered.
259
260 This loop iterates because tags may appear in the may-aliases
261 list of other tags when we group. */
262
263 while (changed)
264 {
265 unsigned int k;
266
267 changed = false;
268 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
269 {
270 bitmap ma;
271 bitmap_iterator bi;
272 unsigned int i;
273 tree entry;
274 bool tagcc = is_call_clobbered (tag);
275 bool tagglobal = MTAG_GLOBAL (tag);
276
277 if (tagcc && tagglobal)
278 continue;
279
280 ma = may_aliases (tag);
281 if (!ma)
282 continue;
283
284 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
285 {
286 entry = referenced_var (i);
287 /* Call clobbered entries cause the tag to be marked
288 call clobbered. */
289 if (!tagcc && is_call_clobbered (entry))
290 {
291 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
292 tagcc = true;
293 changed = true;
294 }
295
296 /* Global vars cause the tag to be marked global. */
297 if (!tagglobal && is_global_var (entry))
298 {
299 MTAG_GLOBAL (tag) = true;
300 changed = true;
301 tagglobal = true;
302 }
303
304 /* Early exit once both global and cc are set, since the
305 loop can't do any more than that. */
306 if (tagcc && tagglobal)
307 break;
308 }
309 }
310 }
311 VEC_free (tree, heap, taglist);
312 }
313
314 /* Set up the initial variable clobbers and globalness.
315 When this function completes, only tags whose aliases need to be
316 clobbered will be set clobbered. Tags clobbered because they
317 contain call clobbered vars are handled in compute_tag_properties. */
318
319 static void
320 set_initial_properties (struct alias_info *ai)
321 {
322 unsigned int i;
323 referenced_var_iterator rvi;
324 tree var;
325 tree ptr;
326
327 FOR_EACH_REFERENCED_VAR (var, rvi)
328 {
329 if (is_global_var (var)
330 && (!var_can_have_subvars (var)
331 || get_subvars_for_var (var) == NULL))
332 {
333 if (!unmodifiable_var_p (var))
334 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
335 }
336 else if (TREE_CODE (var) == PARM_DECL
337 && gimple_default_def (cfun, var)
338 && POINTER_TYPE_P (TREE_TYPE (var)))
339 {
340 tree def = gimple_default_def (cfun, var);
341 get_ptr_info (def)->value_escapes_p = 1;
342 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
343 }
344 }
345
346 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
347 {
348 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
349 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
350
351 if (pi->value_escapes_p)
352 {
353 /* If PTR escapes then its associated memory tags and
354 pointed-to variables are call-clobbered. */
355 if (pi->name_mem_tag)
356 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
357
358 if (tag)
359 mark_call_clobbered (tag, pi->escape_mask);
360
361 if (pi->pt_vars)
362 {
363 bitmap_iterator bi;
364 unsigned int j;
365 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
366 if (!unmodifiable_var_p (referenced_var (j)))
367 mark_call_clobbered (referenced_var (j), pi->escape_mask);
368 }
369 }
370
371 /* If the name tag is call clobbered, so is the symbol tag
372 associated with the base VAR_DECL. */
373 if (pi->name_mem_tag
374 && tag
375 && is_call_clobbered (pi->name_mem_tag))
376 mark_call_clobbered (tag, pi->escape_mask);
377
378 /* Name tags and symbol tags whose pointed-to sets are unknown
379 might point to global memory and thus are clobbered.
380
381 FIXME: This is not quite right. They should only be
382 clobbered if value_escapes_p is true, regardless of whether
383 they point to global memory or not.
384 So removing this code and fixing all the bugs would be nice.
385 It is the cause of a bunch of clobbering. */
386 if ((pi->pt_global_mem || pi->pt_anything)
387 && pi->is_dereferenced && pi->name_mem_tag)
388 {
389 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
390 MTAG_GLOBAL (pi->name_mem_tag) = true;
391 }
392
393 if ((pi->pt_global_mem || pi->pt_anything)
394 && pi->is_dereferenced
395 && tag)
396 {
397 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
398 MTAG_GLOBAL (tag) = true;
399 }
400 }
401 }
402
403 /* Compute which variables need to be marked call clobbered because
404 their tag is call clobbered, and which tags need to be marked
405 global because they contain global variables. */
406
407 static void
408 compute_call_clobbered (struct alias_info *ai)
409 {
410 VEC (tree, heap) *worklist = NULL;
411 VEC(int,heap) *worklist2 = NULL;
412
413 set_initial_properties (ai);
414 init_transitive_clobber_worklist (&worklist, &worklist2);
415 while (VEC_length (tree, worklist) != 0)
416 {
417 tree curr = VEC_pop (tree, worklist);
418 int reason = VEC_pop (int, worklist2);
419
420 mark_call_clobbered (curr, reason);
421 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
422 }
423 VEC_free (tree, heap, worklist);
424 VEC_free (int, heap, worklist2);
425 compute_tag_properties ();
426 }
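/* For illustration only (a hypothetical example, not taken from any
   testcase): suppose SMT.1 is initially call clobbered because its
   pointer escapes, with may-aliases (SMT.1) = { a, SMT.2 } and
   may-aliases (SMT.2) = { b }.  The worklist starts as [SMT.1].
   Popping SMT.1 marks 'a' and SMT.2 call clobbered and pushes SMT.2
   (a memory tag that was not yet clobbered); popping SMT.2 then
   marks 'b' call clobbered.  The escape reason recorded in WORKLIST2
   is propagated to each alias as it is marked.  */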
427
428 /* Dump the MP_INFO array to FILE. */
429
430 void
431 dump_mp_info (FILE *file, VEC(mp_info_t,heap) *mp_info)
432 {
433 unsigned i;
434 mp_info_t mp_p;
435
436 for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
437 {
438 fprintf (file, "%6ld\t", mp_p->num_vops);
439 if (mp_p->var == NULL_TREE)
440 {
441 fprintf (file, "CALL-CLOBBERED SYMBOLS: ");
442 dump_decl_set (file, gimple_call_clobbered_vars (cfun));
443 }
444 else
445 dump_variable (file, mp_p->var);
446 }
447 }
448
449
450 /* Dump the MP_INFO array to stderr. */
451
452 void
453 debug_mp_info (VEC(mp_info_t,heap) *mp_info)
454 {
455 dump_mp_info (stderr, mp_info);
456 }
457
458
459 /* Comparison function for qsort used in sort_mp_info. */
460
461 static int
462 mp_info_cmp (const void *p, const void *q)
463 {
464 mp_info_t e1 = *((const mp_info_t *) p);
465 mp_info_t e2 = *((const mp_info_t *) q);
466
467 /* We want to sort in decreasing order. */
468 if (e1->num_vops < e2->num_vops)
469 return 1;
470 else if (e1->num_vops > e2->num_vops)
471 return -1;
472 else
473 return 0;
474 }
475
476
477 /* Sort the array of reference counts used to compute memory partitions.
478 Elements are sorted in descending order of virtual operators needed. */
479
480 static inline void
481 sort_mp_info (VEC(mp_info_t,heap) *list)
482 {
483 unsigned num = VEC_length (mp_info_t, list);
484
485 if (num < 2)
486 return;
487
488 if (num == 2)
489 {
490 if (VEC_index (mp_info_t, list, 0)->num_vops
491 < VEC_index (mp_info_t, list, 1)->num_vops)
492 {
493 /* Swap elements if they are in the wrong order. */
494 mp_info_t tmp = VEC_index (mp_info_t, list, 0);
495 VEC_replace (mp_info_t, list, 0, VEC_index (mp_info_t, list, 1));
496 VEC_replace (mp_info_t, list, 1, tmp);
497 }
498
499 return;
500 }
501
502 /* There are 3 or more elements, call qsort. */
503 qsort (VEC_address (mp_info_t, list), VEC_length (mp_info_t, list),
504 sizeof (mp_info_t), mp_info_cmp);
505 }
506
507
508 /* Create a new partition to hold all the symbols aliased with
509 MP_P->VAR. If MP_P->VAR is NULL, it partitions the call-clobbered
510 variables. Only symbols that are not already in another partition
511 are added to the new partition created for MP_P->VAR. */
512
513 static void
514 create_partition_for (mp_info_t mp_p)
515 {
516 bitmap_iterator bi;
517 tree mpt, sym;
518 bitmap aliases;
519 unsigned i;
520
521 if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
522 return;
523
524 if (mp_p->var == NULL_TREE)
525 {
526 bitmap_iterator bi;
527 bitmap tmp;
528
529 /* Since the partitions we create for call-clobbered variables
530 will also be marked call-clobbered, make a copy of the
531 original set to avoid confusing the iterator. */
532 tmp = BITMAP_ALLOC (NULL);
533 bitmap_copy (tmp, gimple_call_clobbered_vars (cfun));
534
535 /* Process call-clobbered symbols when no MP_P->VAR is given. */
536 mpt = NULL_TREE;
537 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
538 {
539 tree sym = referenced_var (i);
540 if (memory_partition (sym) == NULL_TREE)
541 {
542 if (mpt == NULL_TREE)
543 {
544 mpt = get_mpt_for (sym);
545 mp_p->num_vops++;
546 }
547
548 mark_sym_for_renaming (mpt);
549 mark_sym_for_renaming (sym);
550 set_memory_partition (sym, mpt);
551 }
552
553 mp_p->num_vops--;
554
555 /* If we have already grouped enough, stop. */
556 if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
557 break;
558 }
559
560 BITMAP_FREE (tmp);
561 }
562 else
563 {
564 aliases = may_aliases (mp_p->var);
565 gcc_assert (!bitmap_empty_p (aliases));
566
567 mpt = NULL_TREE;
568 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
569 {
570 sym = referenced_var (i);
571 /* Only set the memory partition for aliased symbol SYM if
572 SYM does not belong to another partition. */
573 if (memory_partition (sym) == NULL_TREE)
574 {
575 if (mpt == NULL_TREE)
576 {
577 mpt = get_mpt_for (mp_p->var);
578 mp_p->num_vops++;
579 }
580
581 mark_sym_for_renaming (mpt);
582 mark_sym_for_renaming (sym);
583 set_memory_partition (sym, mpt);
584 }
585
586 mp_p->num_vops--;
587
588 /* If we have already grouped enough, stop. */
589 if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
590 break;
591 }
592
593 if (mpt)
594 mark_call_clobbered (mpt, ESCAPE_UNKNOWN);
595 }
596 }
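/* For illustration only (hypothetical symbols): if MP_P->VAR is SMT.5
   with may-aliases { a, b, c, d } and 'b' already belongs to MPT.1,
   then a new partition, say MPT.2, is created and 'a', 'c' and 'd'
   are placed in it, while 'b' keeps its existing partition.  Every
   symbol visited decrements MP_P->num_vops, the loop stops as soon as
   the estimate drops to MAX_ALIASED_VOPS, and the newly created
   partition is marked call-clobbered.  */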
597
598
599 /* Rewrite the alias set for TAG to use the newly created partitions.
600 If TAG is NULL, rewrite the set of call-clobbered variables.
601 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
602 TAG. */
603
604 static void
605 rewrite_alias_set_for (tree tag, bitmap new_aliases)
606 {
607 bitmap_iterator bi;
608 unsigned i;
609 tree mpt, sym;
610
611 if (tag == NULL_TREE)
612 {
613 /* Do not rewrite CALL_CLOBBERED_VARS. If a symbol S is taken
614 out of this set, the optimizers will no longer consider S as
615 call-clobbered, and that may lead to wrong transformations
616 (e.g., pass_tail_calls explicitly examines all the symbols in
617 the function to determine if it should enable tail-call
618 marking). */
619 return;
620 }
621 else
622 {
623 /* Create a new alias set for TAG with the new partitions. */
624
625 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
626 {
627 sym = referenced_var (i);
628 mpt = memory_partition (sym);
629 if (mpt)
630 bitmap_set_bit (new_aliases, DECL_UID (mpt));
631 else
632 bitmap_set_bit (new_aliases, DECL_UID (sym));
633 }
634
635 /* Rebuild the may-alias array for TAG. */
636 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
637 }
638 }
639
640
641 /* Compute memory partitions.
642
643 The partitioning is straightforward:
644
645 1- All the memory tags and call-clobbered variables that cause virtual
646 operators are collected into the MP_INFO table together with the
647 number of virtual operands that would be needed to represent all
648 the members in the alias set.
649
650 2- MP_INFO is sorted in decreasing order of virtual operators.
651
652 3- For every memory tag T in MP_INFO, a new partition MP is created.
653
654 4- All the symbols S in T's alias set are examined. If S is not
655 already in another partition then S is added to partition MP.
656
657 5- The estimate of VOPS is updated; if it falls below
658 MAX_ALIASED_VOPS, we stop. */
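/* For illustration only (hypothetical numbers): with
   --param max-aliased-vops=10, suppose MP_INFO contains

	SMT.3	num_vops = 25
	SMT.7	num_vops = 12
	SMT.9	num_vops = 4

   After sorting, partitions are created for SMT.3 and SMT.7 only;
   their aliased symbols that are not yet in a partition are moved
   into new MPT variables until each estimate drops to the threshold.
   SMT.9 is already below the threshold, so the loop stops there.  */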
659
660 static void
661 compute_memory_partitions (void)
662 {
663 referenced_var_iterator rvi;
664 tree var;
665 unsigned i;
666 struct mp_info_def mp;
667 mp_info_t mp_p;
668 VEC(mp_info_t,heap) *mp_info;
669 long max_num_vops = 0;
670 bitmap new_aliases;
671
672 timevar_push (TV_MEMORY_PARTITIONING);
673
674 mp_info = NULL;
675 max_num_vops = 0;
676
677 /* Add reference counts for all the call-clobbered variables. */
678 if (!bitmap_empty_p (gimple_call_clobbered_vars (cfun)))
679 {
680 mp.var = NULL_TREE;
681 mp.num_vops = bitmap_count_bits (gimple_call_clobbered_vars (cfun));
682 max_num_vops = mp.num_vops;
683 mp_p = xcalloc (1, sizeof (*mp_p));
684 *mp_p = mp;
685 VEC_safe_push (mp_info_t, heap, mp_info, mp_p);
686 }
687
688 /* Add reference counts for all the symbol tags. */
689 FOR_EACH_REFERENCED_VAR (var, rvi)
690 {
691 if (TREE_CODE (var) != SYMBOL_MEMORY_TAG
692 && TREE_CODE (var) != NAME_MEMORY_TAG)
693 continue;
694
695 /* Each reference to VAR will produce as many VOPs as elements
696 exist in its alias set. */
697 mp.var = var;
698 if (!may_aliases (var))
699 mp.num_vops = 0;
700 else
701 mp.num_vops = bitmap_count_bits (may_aliases (var));
702
703 /* No point grouping singleton alias sets. */
704 if (mp.num_vops <= 1)
705 continue;
706
707 mp_p = xcalloc (1, sizeof (*mp_p));
708 *mp_p = mp;
709 VEC_safe_push (mp_info_t, heap, mp_info, mp_p);
710
711 if (mp.num_vops > max_num_vops)
712 max_num_vops = mp.num_vops;
713 }
714
715 if (dump_file)
716 {
717 fprintf (dump_file, "\n%s: Maximum number of VOPS needed per statement: "
718 "%ld\n", get_name (current_function_decl), max_num_vops);
719 }
720
721 /* No partitions required if we are below the threshold. */
722 if (max_num_vops <= (long) MAX_ALIASED_VOPS)
723 goto done;
724
725 /* Sort the MP_INFO array in order of decreasing number of
726 virtual operands. */
727 sort_mp_info (mp_info);
728
729 if (dump_file)
730 {
731 fprintf (dump_file, "\nVOPS generated by pointer dereferences "
732 "before partitioning:\n");
733 dump_mp_info (dump_file, mp_info);
734 }
735
736 /* Now that we have all the VOP generating tags in the MP_INFO array
737 sorted by decreasing number of VOPS, create memory partitions and
738 group aliased symbols into those partitions. */
739 for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
740 {
741 /* Stop processing if we are already below the threshold. */
742 if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
743 break;
744
745 create_partition_for (mp_p);
746 }
747
748 /* After partitions have been created, rewrite alias sets to use
749 them instead of the original symbols. This way, if the alias set
750 was computed as { a b c d e f }, and the subset { b e f } was
751 grouped into partition MPT.3, then the new alias set for the tag
752 will be { a c d MPT.3 }. */
753 new_aliases = BITMAP_ALLOC (NULL);
754
755 for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
756 {
757 rewrite_alias_set_for (mp_p->var, new_aliases);
758 bitmap_clear (new_aliases);
759 }
760
761 BITMAP_FREE (new_aliases);
762
763 if (dump_file)
764 {
765 fprintf (dump_file, "\nVOPS generated by pointer dereferences "
766 "after partitioning:\n");
767 dump_mp_info (dump_file, mp_info);
768 }
769
770 done:
771 /* Free allocated memory. */
772 for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
773 free (mp_p);
774 VEC_free (mp_info_t, heap, mp_info);
775
776 timevar_pop (TV_MEMORY_PARTITIONING);
777 }
778
779
780 /* Compute may-alias information for every variable referenced in function
781 FNDECL.
782
783 Alias analysis proceeds in 3 main phases:
784
785 1- Points-to and escape analysis.
786
787 This phase walks the use-def chains in the SSA web looking for three
788 things:
789
790 * Assignments of the form P_i = &VAR
791 * Assignments of the form P_i = malloc()
792 * Pointers and ADDR_EXPR that escape the current function.
793
794 The concept of 'escaping' is the same one used in the Java world. When
795 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
796 outside of the current function. So, assignment to global variables,
797 function arguments and returning a pointer are all escape sites, as are
798 conversions between pointers and integers.
799
800 This is where we are currently limited. Since not everything is renamed
801 into SSA, we lose track of escape properties when a pointer is stashed
802 inside a field in a structure, for instance. In those cases, we are
803 assuming that the pointer does escape.
804
805 We use escape analysis to determine whether a variable is
806 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
807 is call-clobbered. If a pointer P_i escapes, then all the variables
808 pointed-to by P_i (and its memory tag) also escape.
809
810 2- Compute flow-sensitive aliases
811
812 We have two classes of memory tags. The first class is associated
813 with the pointed-to data type of the pointers in the program; these
814 tags are called "symbol memory tags" (SMT). The other class is
815 associated with SSA_NAMEs and is called "name memory tags" (NMT).
816 The basic idea is that when adding operands for an INDIRECT_REF *P_i,
817 we first check whether P_i has a name tag; if it does, we use it,
818 because it will have more precise aliasing information. Otherwise,
819 we use the standard symbol tag.
820
821 In this phase, we go through all the pointers we found in points-to
822 analysis and create alias sets for the name memory tags associated with
823 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
824 it points to and its tag.
825
826
827 3- Compute flow-insensitive aliases
828
829 This pass will compare the alias set of every symbol memory tag and
830 every addressable variable found in the program. Given a symbol
831 memory tag SMT and an addressable variable V. If the alias sets of
832 SMT and V conflict (as computed by may_alias_p), then V is marked
833 as an alias tag and added to the alias set of SMT.
834
835 For instance, consider the following function:
836
837 foo (int i)
838 {
839 int *p, a, b;
840
841 if (i > 10)
842 p = &a;
843 else
844 p = &b;
845
846 *p = 3;
847 a = b + 2;
848 return *p;
849 }
850
851 After aliasing analysis has finished, the symbol memory tag for pointer
852 'p' will have two aliases, namely variables 'a' and 'b'. Every time
853 pointer 'p' is dereferenced, we want to mark the operation as a
854 potential reference to 'a' and 'b'.
855
856 foo (int i)
857 {
858 int *p, a, b;
859
860 if (i_2 > 10)
861 p_4 = &a;
862 else
863 p_6 = &b;
864 # p_1 = PHI <p_4(1), p_6(2)>;
865
866 # a_7 = VDEF <a_3>;
867 # b_8 = VDEF <b_5>;
868 *p_1 = 3;
869
870 # a_9 = VDEF <a_7>
871 # VUSE <b_8>
872 a_9 = b_8 + 2;
873
874 # VUSE <a_9>;
875 # VUSE <b_8>;
876 return *p_1;
877 }
878
879 In certain cases, the list of may aliases for a pointer may grow too
880 large. This may cause an explosion in the number of virtual operands
881 inserted in the code, resulting in increased memory consumption and
882 compilation time.
883
884 When the number of virtual operands needed to represent aliased
885 loads and stores grows too large (configurable with @option{--param
886 max-aliased-vops}), alias sets are grouped to avoid severe
887 compile-time slowdowns and memory consumption. See compute_memory_partitions. */
888
889 static unsigned int
890 compute_may_aliases (void)
891 {
892 struct alias_info *ai;
893
894 memset (&alias_stats, 0, sizeof (alias_stats));
895
896 /* Initialize aliasing information. */
897 ai = init_alias_info ();
898
899 /* For each pointer P_i, determine the sets of variables that P_i may
900 point-to. For every addressable variable V, determine whether the
901 address of V escapes the current function, making V call-clobbered
902 (i.e., whether &V is stored in a global variable or if it is passed as a
903 function call argument). */
904 compute_points_to_sets (ai);
905
906 /* Collect all pointers and addressable variables, compute alias sets,
907 create memory tags for pointers and promote variables whose address is
908 not needed anymore. */
909 setup_pointers_and_addressables (ai);
910
911 /* Compute type-based flow-insensitive aliasing for all the type
912 memory tags. */
913 compute_flow_insensitive_aliasing (ai);
914
915 /* Compute flow-sensitive, points-to based aliasing for all the name
916 memory tags. */
917 compute_flow_sensitive_aliasing (ai);
918
919 /* Compute call clobbering information. */
920 compute_call_clobbered (ai);
921
922 /* If the program makes no reference to global variables, but it
923 contains a mixture of pure and non-pure functions, then we need
924 to create use-def and def-def links between these functions to
925 avoid invalid transformations on them. */
926 maybe_create_global_var (ai);
927
928 /* If the program contains ref-all pointers, finalize may-alias information
929 for them. This pass needs to be run after call-clobbering information
930 has been computed. */
931 if (ai->ref_all_symbol_mem_tag)
932 finalize_ref_all_pointers (ai);
933
934 /* Compute memory partitions for every memory variable. */
935 compute_memory_partitions ();
936
937 /* Debugging dumps. */
938 if (dump_file)
939 {
940 dump_referenced_vars (dump_file);
941 if (dump_flags & TDF_STATS)
942 dump_alias_stats (dump_file);
943 dump_points_to_info (dump_file);
944 dump_alias_info (dump_file);
945 }
946
947 /* Deallocate memory used by aliasing data structures. */
948 delete_alias_info (ai);
949
950 {
951 block_stmt_iterator bsi;
952 basic_block bb;
953 FOR_EACH_BB (bb)
954 {
955 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
956 {
957 update_stmt_if_modified (bsi_stmt (bsi));
958 }
959 }
960 }
961 return 0;
962 }
963
964
965 struct tree_opt_pass pass_may_alias =
966 {
967 "alias", /* name */
968 NULL, /* gate */
969 compute_may_aliases, /* execute */
970 NULL, /* sub */
971 NULL, /* next */
972 0, /* static_pass_number */
973 TV_TREE_MAY_ALIAS, /* tv_id */
974 PROP_cfg | PROP_ssa, /* properties_required */
975 PROP_alias, /* properties_provided */
976 0, /* properties_destroyed */
977 0, /* todo_flags_start */
978 TODO_dump_func | TODO_update_ssa
979 | TODO_ggc_collect | TODO_verify_ssa
980 | TODO_verify_stmts, /* todo_flags_finish */
981 0 /* letter */
982 };
983
984
985 /* Data structure used to count the number of dereferences to PTR
986 inside an expression. */
987 struct count_ptr_d
988 {
989 tree ptr;
990 unsigned count;
991 };
992
993
994 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
995 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
996
997 static tree
998 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
999 {
1000 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1001
1002 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1003 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1004 the address of 'fld' as 'ptr + offsetof(fld)'. */
1005 if (TREE_CODE (*tp) == ADDR_EXPR)
1006 {
1007 *walk_subtrees = 0;
1008 return NULL_TREE;
1009 }
1010
1011 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1012 count_p->count++;
1013
1014 return NULL_TREE;
1015 }
1016
1017
1018 /* Count the number of direct and indirect uses for pointer PTR in
1019 statement STMT. The two counts are stored in *NUM_USES_P and
1020 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
1021 least one of those dereferences is a store operation. */
1022
1023 void
1024 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
1025 unsigned *num_derefs_p, bool *is_store)
1026 {
1027 ssa_op_iter i;
1028 tree use;
1029
1030 *num_uses_p = 0;
1031 *num_derefs_p = 0;
1032 *is_store = false;
1033
1034 /* Find out the total number of uses of PTR in STMT. */
1035 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
1036 if (use == ptr)
1037 (*num_uses_p)++;
1038
1039 /* Now count the number of indirect references to PTR. This is
1040 truly awful, but we don't have much choice. There are no parent
1041 pointers inside INDIRECT_REFs, so an expression like
1042 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
1043 find all the indirect and direct uses of x_1 inside. The only
1044 shortcut we can take is the fact that GIMPLE only allows
1045 INDIRECT_REFs inside the expressions below. */
1046 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1047 || (TREE_CODE (stmt) == RETURN_EXPR
1048 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1049 || TREE_CODE (stmt) == ASM_EXPR
1050 || TREE_CODE (stmt) == CALL_EXPR)
1051 {
1052 tree lhs, rhs;
1053
1054 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1055 {
1056 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1057 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1058 }
1059 else if (TREE_CODE (stmt) == RETURN_EXPR)
1060 {
1061 tree e = TREE_OPERAND (stmt, 0);
1062 lhs = GIMPLE_STMT_OPERAND (e, 0);
1063 rhs = GIMPLE_STMT_OPERAND (e, 1);
1064 }
1065 else if (TREE_CODE (stmt) == ASM_EXPR)
1066 {
1067 lhs = ASM_OUTPUTS (stmt);
1068 rhs = ASM_INPUTS (stmt);
1069 }
1070 else
1071 {
1072 lhs = NULL_TREE;
1073 rhs = stmt;
1074 }
1075
1076 if (lhs && (TREE_CODE (lhs) == TREE_LIST
1077 || EXPR_P (lhs) || GIMPLE_STMT_P (lhs)))
1078 {
1079 struct count_ptr_d count;
1080 count.ptr = ptr;
1081 count.count = 0;
1082 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
1083 *is_store = true;
1084 *num_derefs_p = count.count;
1085 }
1086
1087 if (rhs && (TREE_CODE (rhs) == TREE_LIST
1088 || EXPR_P (rhs) || GIMPLE_STMT_P (rhs)))
1089 {
1090 struct count_ptr_d count;
1091 count.ptr = ptr;
1092 count.count = 0;
1093 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
1094 *num_derefs_p += count.count;
1095 }
1096 }
1097
1098 gcc_assert (*num_uses_p >= *num_derefs_p);
1099 }
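/* For illustration only, using the statement mentioned in the comment
   above and assuming x_1 is a pointer SSA name:

	*x_1 = foo (x_1, *x_1);

   count_uses_and_derefs (x_1, stmt, &u, &d, &s) would set u = 3
   (x_1 appears three times as a USE operand), d = 2 (one INDIRECT_REF
   on the LHS and one in the argument list) and s = true, since the
   LHS dereference is a store.  */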
1100
1101 /* Initialize the data structures used for alias analysis. */
1102
1103 static struct alias_info *
1104 init_alias_info (void)
1105 {
1106 struct alias_info *ai;
1107 referenced_var_iterator rvi;
1108 tree var;
1109
1110 ai = XCNEW (struct alias_info);
1111 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
1112 sbitmap_zero (ai->ssa_names_visited);
1113 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
1114 ai->written_vars = pointer_set_create ();
1115 ai->dereferenced_ptrs_store = pointer_set_create ();
1116 ai->dereferenced_ptrs_load = pointer_set_create ();
1117
1118 /* If aliases have been computed before, clear existing information. */
1119 if (gimple_aliases_computed_p (cfun))
1120 {
1121 unsigned i;
1122
1123 bitmap_obstack_release (&alias_bitmap_obstack);
1124
1125 /* Similarly, clear the set of addressable variables. In this
1126 case, we can just clear the set because addressability is
1127 only computed here. */
1128 bitmap_clear (gimple_addressable_vars (cfun));
1129
1130 /* Clear flow-insensitive alias information from each symbol. */
1131 FOR_EACH_REFERENCED_VAR (var, rvi)
1132 {
1133 if (MTAG_P (var))
1134 MTAG_ALIASES (var) = NULL;
1135
1136 /* Since we are about to re-discover call-clobbered
1137 variables, clear the call-clobbered flag. Variables that
1138 are intrinsically call-clobbered (globals, local statics,
1139 etc) will not be marked by the aliasing code, so we can't
1140 remove them from CALL_CLOBBERED_VARS.
1141
1142 NB: STRUCT_FIELDS are still call clobbered if they are for
1143 a global variable, so we *don't* clear their call clobberedness
1144 just because they are tags, though we will clear it if they
1145 aren't for global variables. */
1146 if (TREE_CODE (var) == NAME_MEMORY_TAG
1147 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
1148 || !is_global_var (var))
1149 clear_call_clobbered (var);
1150 }
1151
1152 /* Clear flow-sensitive points-to information from each SSA name. */
1153 for (i = 1; i < num_ssa_names; i++)
1154 {
1155 tree name = ssa_name (i);
1156
1157 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
1158 continue;
1159
1160 if (SSA_NAME_PTR_INFO (name))
1161 {
1162 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
1163
1164 /* Clear all the flags but keep the name tag to
1165 avoid creating new temporaries unnecessarily. If
1166 this pointer is found to point to a subset or
1167 superset of its former points-to set, then a new
1168 tag will need to be created in create_name_tags. */
1169 pi->pt_anything = 0;
1170 pi->pt_null = 0;
1171 pi->value_escapes_p = 0;
1172 pi->is_dereferenced = 0;
1173 if (pi->pt_vars)
1174 bitmap_clear (pi->pt_vars);
1175 }
1176 }
1177 }
1178 else
1179 {
1180 /* If this is the first time we compute aliasing information,
1181 every non-register symbol will need to be put into SSA form
1182 (the initial SSA form only operates on GIMPLE registers). */
1183 FOR_EACH_REFERENCED_VAR (var, rvi)
1184 if (!is_gimple_reg (var))
1185 mark_sym_for_renaming (var);
1186 }
1187
1188 /* Next time, we will need to reset alias information. */
1189 cfun->gimple_df->aliases_computed_p = true;
1190 bitmap_obstack_initialize (&alias_bitmap_obstack);
1191
1192 return ai;
1193 }
1194
1195
1196 /* Deallocate memory used by alias analysis. */
1197
1198 static void
1199 delete_alias_info (struct alias_info *ai)
1200 {
1201 size_t i;
1202
1203 sbitmap_free (ai->ssa_names_visited);
1204
1205 VEC_free (tree, heap, ai->processed_ptrs);
1206
1207 for (i = 0; i < ai->num_addressable_vars; i++)
1208 free (ai->addressable_vars[i]);
1209 free (ai->addressable_vars);
1210
1211 for (i = 0; i < ai->num_pointers; i++)
1212 free (ai->pointers[i]);
1213 free (ai->pointers);
1214
1215 pointer_set_destroy (ai->written_vars);
1216 pointer_set_destroy (ai->dereferenced_ptrs_store);
1217 pointer_set_destroy (ai->dereferenced_ptrs_load);
1218 free (ai);
1219
1220 delete_points_to_sets ();
1221 }
1222
1223 /* Used for hashing to identify pointer infos with identical
1224 pt_vars bitmaps. */
1225 static int
1226 eq_ptr_info (const void *p1, const void *p2)
1227 {
1228 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
1229 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
1230 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
1231 }
1232
1233 static hashval_t
1234 ptr_info_hash (const void *p)
1235 {
1236 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
1237 return bitmap_hash (n->pt_vars);
1238 }
1239
1240 /* Create name tags for all the pointers that have been dereferenced.
1241 We only create a name tag for a pointer P if P is found to point to
1242 a set of variables (so that we can alias them to *P) or if it is
1243 the result of a call to malloc (which means that P cannot point to
1244 anything else nor alias any other variable).
1245
1246 If two pointers P and Q point to the same set of variables, they
1247 are assigned the same name tag. */
1248
1249 static void
1250 create_name_tags (void)
1251 {
1252 size_t i;
1253 VEC (tree, heap) *with_ptvars = NULL;
1254 tree ptr;
1255 htab_t ptr_hash;
1256
1257 /* Collect the list of pointers with a non-empty points to set. */
1258 for (i = 1; i < num_ssa_names; i++)
1259 {
1260 tree ptr = ssa_name (i);
1261 struct ptr_info_def *pi;
1262
1263 if (!ptr
1264 || !POINTER_TYPE_P (TREE_TYPE (ptr))
1265 || !SSA_NAME_PTR_INFO (ptr))
1266 continue;
1267
1268 pi = SSA_NAME_PTR_INFO (ptr);
1269
1270 if (pi->pt_anything || !pi->is_dereferenced)
1271 {
1272 /* No name tags for pointers that have not been
1273 dereferenced or point to an arbitrary location. */
1274 pi->name_mem_tag = NULL_TREE;
1275 continue;
1276 }
1277
1278 /* Set pt_anything on the pointers without pt_vars filled in so
1279 that they are assigned a symbol tag. */
1280 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
1281 VEC_safe_push (tree, heap, with_ptvars, ptr);
1282 else
1283 set_pt_anything (ptr);
1284 }
1285
1286 /* If we didn't find any pointers with pt_vars set, we're done. */
1287 if (!with_ptvars)
1288 return;
1289
1290 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
1291 /* Now go through the pointers with pt_vars, and find a name tag
1292 with the same pt_vars as this pointer, or create one if one
1293 doesn't exist. */
1294 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
1295 {
1296 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1297 tree old_name_tag = pi->name_mem_tag;
1298 struct ptr_info_def **slot;
1299
1300 /* If PTR points to a set of variables, check if we don't
1301 have another pointer Q with the same points-to set before
1302 creating a tag. If so, use Q's tag instead of creating a
1303 new one.
1304
1305 This is important for not creating unnecessary symbols
1306 and also for copy propagation. If we ever need to
1307 propagate PTR into Q or vice-versa, we would run into
1308 problems if they both had different name tags because
1309 they would have different SSA version numbers (which
1310 would force us to take the name tags in and out of SSA). */
1311
1312 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
1313 if (*slot)
1314 pi->name_mem_tag = (*slot)->name_mem_tag;
1315 else
1316 {
1317 *slot = pi;
1318 /* If we didn't find a pointer with the same points-to set
1319 as PTR, create a new name tag if needed. */
1320 if (pi->name_mem_tag == NULL_TREE)
1321 pi->name_mem_tag = get_nmt_for (ptr);
1322 }
1323
1324 /* If the new name tag computed for PTR is different than
1325 the old name tag that it used to have, then the old tag
1326 needs to be removed from the IL, so we mark it for
1327 renaming. */
1328 if (old_name_tag && old_name_tag != pi->name_mem_tag)
1329 mark_sym_for_renaming (old_name_tag);
1330
1331 TREE_THIS_VOLATILE (pi->name_mem_tag)
1332 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
1333
1334 /* Mark the new name tag for renaming. */
1335 mark_sym_for_renaming (pi->name_mem_tag);
1336 }
1337 htab_delete (ptr_hash);
1338
1339 VEC_free (tree, heap, with_ptvars);
1340 }
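/* For illustration only (a hypothetical situation): if p_1 and q_2
   are both dereferenced and points-to analysis computed

	pt_vars (p_1) = { a, b }
	pt_vars (q_2) = { a, b }

   then both pointers hash to the same slot in PTR_HASH and share a
   single name memory tag, say NMT.4.  A third pointer r_3 with
   pt_anything set gets no name tag at all and falls back to its
   symbol memory tag when operands are added.  */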
1341
1342 /* Union the alias set SET into the may-aliases for TAG */
1343
1344 static void
1345 union_alias_set_into (tree tag, bitmap set)
1346 {
1347 bitmap ma = MTAG_ALIASES (tag);
1348
1349 if (bitmap_empty_p (set))
1350 return;
1351
1352 if (!ma)
1353 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
1354 bitmap_ior_into (ma, set);
1355 }
1356
1357
1358 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1359 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1360 name tag and the variables it points-to are call-clobbered. Finally, if
1361 P_i escapes and we could not determine where it points to, then all the
1362 variables in the same alias set as *P_i are marked call-clobbered. This
1363 is necessary because we must assume that P_i may take the address of any
1364 variable in the same alias set. */
1365
1366 static void
1367 compute_flow_sensitive_aliasing (struct alias_info *ai)
1368 {
1369 size_t i;
1370 tree ptr;
1371
1372 set_used_smts ();
1373
1374 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1375 {
1376 if (!find_what_p_points_to (ptr))
1377 set_pt_anything (ptr);
1378 }
1379
1380 create_name_tags ();
1381
1382 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1383 {
1384 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1385 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
1386
1387 /* Set up aliasing information for PTR's name memory tag (if it has
1388 one). Note that only pointers that have been dereferenced will
1389 have a name memory tag. */
1390 if (pi->name_mem_tag && pi->pt_vars)
1391 {
1392 if (!bitmap_empty_p (pi->pt_vars))
1393 {
1394 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
1395 union_alias_set_into (tag, pi->pt_vars);
1396 bitmap_clear_bit (MTAG_ALIASES (tag), DECL_UID (tag));
1397
1398 /* It may be the case that the tag's UID was the only
1399 bit we had set in the aliases list, and in this case,
1400 we don't want to keep an empty bitmap, as this
1401 triggers an assert in tree-ssa-operands.c. */
1402 if (bitmap_empty_p (MTAG_ALIASES (tag)))
1403 BITMAP_FREE (MTAG_ALIASES (tag));
1404 }
1405 }
1406 }
1407 }
1408
1409
1410 /* Return TRUE if at least one symbol in TAG2's alias set is also
1411 present in TAG1's alias set. */
1412
1413 static bool
1414 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
1415 {
1416
1417 /* This is the old behavior of have_common_aliases_p, which is to
1418 return false if both sets are empty, or one set is and the other
1419 isn't. */
1420 if ((tag1aliases == NULL && tag2aliases != NULL)
1421 || (tag2aliases == NULL && tag1aliases != NULL)
1422 || (tag1aliases == NULL && tag2aliases == NULL))
1423 return false;
1424
1425 return bitmap_intersect_p (tag1aliases, tag2aliases);
1426 }
1427
1428 /* Compute type-based alias sets. Traverse all the pointers and
1429 addressable variables found in setup_pointers_and_addressables.
1430
1431 For every pointer P in AI->POINTERS and addressable variable V in
1432 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1433 memory tag (SMT) if their alias sets conflict. V is then marked as
1434 an aliased symbol so that the operand scanner knows that statements
1435 containing V have aliased operands. */
1436
1437 static void
1438 compute_flow_insensitive_aliasing (struct alias_info *ai)
1439 {
1440 size_t i;
1441
1442 /* For every pointer P, determine which addressable variables may alias
1443 with P's symbol memory tag. */
1444 for (i = 0; i < ai->num_pointers; i++)
1445 {
1446 size_t j;
1447 struct alias_map_d *p_map = ai->pointers[i];
1448 tree tag = symbol_mem_tag (p_map->var);
1449 tree var;
1450
1451 /* Call-clobbering information is not finalized yet at this point. */
1452 if (PTR_IS_REF_ALL (p_map->var))
1453 continue;
1454
1455 for (j = 0; j < ai->num_addressable_vars; j++)
1456 {
1457 struct alias_map_d *v_map;
1458 var_ann_t v_ann;
1459 bool tag_stored_p, var_stored_p;
1460
1461 v_map = ai->addressable_vars[j];
1462 var = v_map->var;
1463 v_ann = var_ann (var);
1464
1465 /* Skip memory tags and variables that have never been
1466 written to. We also need to check if the variables are
1467 call-clobbered because they may be overwritten by
1468 function calls. */
1469 tag_stored_p = pointer_set_contains (ai->written_vars, tag)
1470 || is_call_clobbered (tag);
1471 var_stored_p = pointer_set_contains (ai->written_vars, var)
1472 || is_call_clobbered (var);
1473 if (!tag_stored_p && !var_stored_p)
1474 continue;
1475
1476 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1477 {
1478 /* We should never have a var with subvars here, because
1479 they shouldn't get into the set of addressable vars. */
1480 gcc_assert (!var_can_have_subvars (var)
1481 || get_subvars_for_var (var) == NULL);
1482
1483 /* Add VAR to TAG's may-aliases set. */
1484 add_may_alias (tag, var);
1485 }
1486 }
1487 }
1488
1489 /* Since this analysis is based exclusively on symbols, it fails to
1490 handle cases where two pointers P and Q have different memory
1491 tags with conflicting alias set numbers but no aliased symbols in
1492 common.
1493
1494 For example, suppose that we have two memory tags SMT.1 and SMT.2
1495 such that
1496
1497 may-aliases (SMT.1) = { a }
1498 may-aliases (SMT.2) = { b }
1499
1500 and the alias set number of SMT.1 conflicts with that of SMT.2.
1501 Since they don't have symbols in common, loads and stores from
1502 SMT.1 and SMT.2 will seem independent of each other, which will
1503 lead to the optimizers making invalid transformations (see
1504 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1505
1506 To avoid this problem, we do a final traversal of AI->POINTERS
1507 looking for pairs of pointers that have no aliased symbols in
1508 common and yet have conflicting alias set numbers. */
1509 for (i = 0; i < ai->num_pointers; i++)
1510 {
1511 size_t j;
1512 struct alias_map_d *p_map1 = ai->pointers[i];
1513 tree tag1 = symbol_mem_tag (p_map1->var);
1514 bitmap may_aliases1 = MTAG_ALIASES (tag1);
1515
1516 if (PTR_IS_REF_ALL (p_map1->var))
1517 continue;
1518
1519 for (j = i + 1; j < ai->num_pointers; j++)
1520 {
1521 struct alias_map_d *p_map2 = ai->pointers[j];
1522 tree tag2 = symbol_mem_tag (p_map2->var);
1523 bitmap may_aliases2 = may_aliases (tag2);
1524
1525 if (PTR_IS_REF_ALL (p_map2->var))
1526 continue;
1527
1528 /* If the pointers may not point to each other, do nothing. */
1529 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1530 continue;
1531
1532 /* The two pointers may alias each other. If they already have
1533 symbols in common, do nothing. */
1534 if (have_common_aliases_p (may_aliases1, may_aliases2))
1535 continue;
1536
1537 if (may_aliases2 && !bitmap_empty_p (may_aliases2))
1538 {
1539 union_alias_set_into (tag1, may_aliases2);
1540 }
1541 else
1542 {
1543 /* Since TAG2 does not have any aliases of its own, add
1544 TAG2 itself to the alias set of TAG1. */
1545 add_may_alias (tag1, tag2);
1546 }
1547 }
1548
1549 }
1550 }
1551
1552
1553 /* Finalize may-alias information for ref-all pointers. Traverse all
1554 the addressable variables found in setup_pointers_and_addressables.
1555
1556 If flow-sensitive alias analysis has attached a name memory tag to
1557 a ref-all pointer, we will use it for the dereferences because that
1558 will have more precise aliasing information. But if there is no
1559 name tag, we will use a special symbol tag that aliases all the
1560 call-clobbered addressable variables. */
1561
1562 static void
1563 finalize_ref_all_pointers (struct alias_info *ai)
1564 {
1565 size_t i;
1566
1567 /* First add the real call-clobbered variables. */
1568 for (i = 0; i < ai->num_addressable_vars; i++)
1569 {
1570 tree var = ai->addressable_vars[i]->var;
1571 if (is_call_clobbered (var))
1572 add_may_alias (ai->ref_all_symbol_mem_tag, var);
1573 }
1574
1575 /* Then add the call-clobbered pointer memory tags. See
1576 compute_flow_insensitive_aliasing for the rationale. */
1577 for (i = 0; i < ai->num_pointers; i++)
1578 {
1579 tree ptr = ai->pointers[i]->var, tag;
1580 if (PTR_IS_REF_ALL (ptr))
1581 continue;
1582 tag = symbol_mem_tag (ptr);
1583 if (is_call_clobbered (tag))
1584 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
1585 }
1586
1587 }
1588
1589
1590 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1591
1592 static void
1593 create_alias_map_for (tree var, struct alias_info *ai)
1594 {
1595 struct alias_map_d *alias_map;
1596 alias_map = XCNEW (struct alias_map_d);
1597 alias_map->var = var;
1598 alias_map->set = get_alias_set (var);
1599 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1600 }
1601
1602
1603 /* Create memory tags for all the dereferenced pointers and build the
1604 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1605 sets. Based on the address escape and points-to information collected
1606 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1607 variables whose address is not needed anymore. */
1608
1609 static void
1610 setup_pointers_and_addressables (struct alias_info *ai)
1611 {
1612 size_t num_addressable_vars, num_pointers;
1613 referenced_var_iterator rvi;
1614 tree var;
1615 VEC (tree, heap) *varvec = NULL;
1616 safe_referenced_var_iterator srvi;
1617
1618 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1619 num_addressable_vars = num_pointers = 0;
1620
1621 FOR_EACH_REFERENCED_VAR (var, rvi)
1622 {
1623 if (may_be_aliased (var))
1624 num_addressable_vars++;
1625
1626 if (POINTER_TYPE_P (TREE_TYPE (var)))
1627 {
1628 /* Since we don't keep track of volatile variables, assume that
1629 these pointers are used in indirect store operations. */
1630 if (TREE_THIS_VOLATILE (var))
1631 pointer_set_insert (ai->dereferenced_ptrs_store, var);
1632
1633 num_pointers++;
1634 }
1635 }
1636
1637 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1638 always going to be slightly bigger than we actually need them
1639 because some TREE_ADDRESSABLE variables will be marked
1640 non-addressable below and only pointers with unique symbol tags are
1641 going to be added to POINTERS. */
1642 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1643 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1644 ai->num_addressable_vars = 0;
1645 ai->num_pointers = 0;
1646
1647 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1648 {
1649 subvar_t svars;
1650
1651 /* Name memory tags already have flow-sensitive aliasing
1652 information, so they need not be processed by
1653 compute_flow_insensitive_aliasing. Similarly, symbol memory
1654 tags are already accounted for when we process their
1655 associated pointer.
1656
1657 Structure fields, on the other hand, have to have some of this
1658 information processed for them, but it's pointless to mark them
1659 non-addressable (since they are fake variables anyway). */
1660 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1661 continue;
1662
1663 /* Remove the ADDRESSABLE flag from every addressable variable whose
1664 address is not needed anymore. This is caused by the propagation
1665 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1666 removal of dead pointer assignments done by the early scalar
1667 cleanup passes. */
1668 if (TREE_ADDRESSABLE (var))
1669 {
1670 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
1671 && TREE_CODE (var) != RESULT_DECL
1672 && !is_global_var (var))
1673 {
1674 bool okay_to_mark = true;
1675
1676 /* Since VAR is now a regular GIMPLE register, we will need
1677 to rename VAR into SSA afterwards. */
1678 mark_sym_for_renaming (var);
1679
1680 /* If VAR can have sub-variables, and any of its
1681 sub-variables has its address taken, then we cannot
1682 remove the addressable flag from VAR. */
1683 if (var_can_have_subvars (var)
1684 && (svars = get_subvars_for_var (var)))
1685 {
1686 subvar_t sv;
1687
1688 for (sv = svars; sv; sv = sv->next)
1689 {
1690 if (bitmap_bit_p (gimple_addressable_vars (cfun),
1691 DECL_UID (sv->var)))
1692 okay_to_mark = false;
1693 mark_sym_for_renaming (sv->var);
1694 }
1695 }
1696
1697 /* The address of VAR is not needed, remove the
1698 addressable bit, so that it can be optimized as a
1699 regular variable. */
1700 if (okay_to_mark)
1701 {
1702 /* The memory partition holding VAR will no longer
1703 contain VAR, and statements referencing it will need
1704 to be updated. */
1705 if (memory_partition (var))
1706 mark_sym_for_renaming (memory_partition (var));
1707
1708 mark_non_addressable (var);
1709 }
1710 }
1711 }
1712
1713 /* Global variables and addressable locals may be aliased. Create an
1714 entry in ADDRESSABLE_VARS for VAR. */
1715 if (may_be_aliased (var))
1716 {
1717 if (!var_can_have_subvars (var)
1718 || get_subvars_for_var (var) == NULL)
1719 create_alias_map_for (var, ai);
1720
1721 mark_sym_for_renaming (var);
1722 }
1723
1724 /* Add pointer variables that have been dereferenced to the POINTERS
1725 array and create a symbol memory tag for them. */
1726 if (POINTER_TYPE_P (TREE_TYPE (var)))
1727 {
1728 if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
1729 || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
1730 {
1731 tree tag, old_tag;
1732 var_ann_t t_ann;
1733
1734 /* If pointer VAR still doesn't have a memory tag
1735 associated with it, create it now or re-use an
1736 existing one. */
1737 tag = get_smt_for (var, ai);
1738 t_ann = var_ann (tag);
1739
1740 /* The symbol tag will need to be renamed into SSA
1741 afterwards. Note that we cannot do this inside
1742 get_smt_for because aliasing may run multiple times
1743 and we only create symbol tags the first time. */
1744 mark_sym_for_renaming (tag);
1745
1746 /* Similarly, if pointer VAR used to have another type
1747 tag, we will need to process it in the renamer to
1748 remove the stale virtual operands. */
1749 old_tag = symbol_mem_tag (var);
1750 if (old_tag)
1751 mark_sym_for_renaming (old_tag);
1752
1753 /* Associate the tag with pointer VAR. */
1754 set_symbol_mem_tag (var, tag);
1755
1756 /* If pointer VAR has been used in a store operation,
1757 then its memory tag must be marked as written-to. */
1758 if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
1759 pointer_set_insert (ai->written_vars, tag);
1760 }
1761 else
1762 {
1763 /* The pointer has not been dereferenced. If it had a
1764 symbol memory tag, remove it and mark the old tag for
1765 renaming to remove it from the IL. */
1766 tree tag = symbol_mem_tag (var);
1767 if (tag)
1768 {
1769 mark_sym_for_renaming (tag);
1770 set_symbol_mem_tag (var, NULL_TREE);
1771 }
1772 }
1773 }
1774 }
1775
1776 VEC_free (tree, heap, varvec);
1777 }
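/* For illustration only (a hypothetical fragment):

	int a;
	int *p = &a;
	return *p;

   After the early cleanup passes propagate '&a' into '*p' and remove
   the dead assignment to 'p', the address of 'a' is no longer needed,
   so DECL_UID (a) is not in the addressable-vars bitmap anymore.
   setup_pointers_and_addressables then clears TREE_ADDRESSABLE on 'a'
   and marks it for renaming, so 'a' becomes a regular GIMPLE register
   in SSA form.  */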
1778
1779
1780 /* Determine whether to use .GLOBAL_VAR to model call clobbering
1781 semantics. If the function makes no references to global
1782 variables and contains at least one call to a non-pure function,
1783 then we need to mark the side-effects of the call using .GLOBAL_VAR
1784 to represent all possible global memory referenced by the callee. */
1785
1786 static void
1787 maybe_create_global_var (struct alias_info *ai)
1788 {
1789 /* No need to create it, if we have one already. */
1790 if (gimple_global_var (cfun) == NULL_TREE)
1791 {
1792 /* Create .GLOBAL_VAR if there are no call-clobbered
1793 variables and the program contains a mixture of pure/const
1794 and regular function calls. This is to avoid the problem
1795 described in PR 20115:
1796
1797 int X;
1798 int func_pure (void) { return X; }
1799 int func_non_pure (int a) { X += a; }
1800 int foo ()
1801 {
1802 int a = func_pure ();
1803 func_non_pure (a);
1804 a = func_pure ();
1805 return a;
1806 }
1807
1808 Since foo() has no call-clobbered variables, there is
1809 no relationship between the calls to func_pure and
1810 func_non_pure. Since func_pure has no side-effects, value
1811 numbering optimizations elide the second call to func_pure.
1812 So, if we have some pure/const and some regular calls in the
1813 program we create .GLOBAL_VAR to avoid missing these
1814 relations. */
1815 if (bitmap_count_bits (gimple_call_clobbered_vars (cfun)) == 0
1816 && ai->num_calls_found > 0
1817 && ai->num_pure_const_calls_found > 0
1818 && ai->num_calls_found > ai->num_pure_const_calls_found)
1819 create_global_var ();
1820 }
1821 }
1822
1823
1824 /* Return TRUE if pointer PTR may point to variable VAR.
1825
1826 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
1827 This is needed because when checking for type conflicts we are
1828 interested in the alias set of the memory location pointed-to by
1829 PTR. The alias set of PTR itself is irrelevant.
1830
1831 VAR_ALIAS_SET is the alias set for VAR. */
1832
1833 static bool
1834 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1835 tree var, HOST_WIDE_INT var_alias_set,
1836 bool alias_set_only)
1837 {
1838 tree mem;
1839
1840 alias_stats.alias_queries++;
1841 alias_stats.simple_queries++;
1842
1843 /* By convention, a variable cannot alias itself. */
1844 mem = symbol_mem_tag (ptr);
1845 if (mem == var)
1846 {
1847 alias_stats.alias_noalias++;
1848 alias_stats.simple_resolved++;
1849 return false;
1850 }
1851
1852   /* If flag_argument_noalias > 2 (-fargument-noalias-anything), pointer
1853      arguments are assumed not to alias any other storage.  */
1854 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
1855 {
1856 alias_stats.alias_noalias++;
1857 alias_stats.simple_resolved++;
1858 return false;
1859 }
1860
1861   /* If flag_argument_noalias > 1 (-fargument-noalias-global or stronger),
1862      pointer arguments may not point to global variables.  */
1863 if (flag_argument_noalias > 1 && is_global_var (var)
1864 && TREE_CODE (ptr) == PARM_DECL)
1865 {
1866 alias_stats.alias_noalias++;
1867 alias_stats.simple_resolved++;
1868 return false;
1869 }
1870
1871 /* If either MEM or VAR is a read-only global and the other one
1872 isn't, then PTR cannot point to VAR. */
1873 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1874 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1875 {
1876 alias_stats.alias_noalias++;
1877 alias_stats.simple_resolved++;
1878 return false;
1879 }
1880
1881 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
1882
1883 alias_stats.tbaa_queries++;
1884
1885 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1886 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1887 {
1888 alias_stats.alias_noalias++;
1889 alias_stats.tbaa_resolved++;
1890 return false;
1891 }
1892
1893 /* If VAR is a record or union type, PTR cannot point into VAR
1894 unless there is some explicit address operation in the
1895 program that can reference a field of the type pointed-to by PTR.
1896 This also assumes that the types of both VAR and PTR are
1897 contained within the compilation unit, and that there is no fancy
1898 addressing arithmetic associated with any of the types
1899 involved. */
1900 if (mem_alias_set != 0 && var_alias_set != 0)
1901 {
1902 tree ptr_type = TREE_TYPE (ptr);
1903 tree var_type = TREE_TYPE (var);
1904
1905 /* The star count is -1 if the type at the end of the pointer_to
1906 chain is not a record or union type. */
1907 if ((!alias_set_only) &&
1908 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1909 {
1910 int ptr_star_count = 0;
1911
1912 /* ipa_type_escape_star_count_of_interesting_type is a
1913 little too restrictive for the pointer type, need to
1914 allow pointers to primitive types as long as those types
1915 cannot be pointers to everything. */
1916 while (POINTER_TYPE_P (ptr_type))
1917 {
1918 /* Strip the *s off. */
1919 ptr_type = TREE_TYPE (ptr_type);
1920 ptr_star_count++;
1921 }
1922
1923 /* There does not appear to be a better test to see if the
1924 pointer type was one of the pointer to everything
1925 types. */
1926 if (ptr_star_count > 0)
1927 {
1928 alias_stats.structnoaddress_queries++;
1929 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1930 TREE_TYPE (ptr)))
1931 {
1932 alias_stats.structnoaddress_resolved++;
1933 alias_stats.alias_noalias++;
1934 return false;
1935 }
1936 }
1937 else if (ptr_star_count == 0)
1938 {
1939 /* If PTR_TYPE was not really a pointer to type, it cannot
1940 alias. */
1941 alias_stats.structnoaddress_queries++;
1942 alias_stats.structnoaddress_resolved++;
1943 alias_stats.alias_noalias++;
1944 return false;
1945 }
1946 }
1947 }
1948
1949 alias_stats.alias_mayalias++;
1950 return true;
1951 }
1952
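/* Editorial illustration (not part of the original sources): a sketch of how
   the TBAA check above typically resolves queries, assuming the usual C
   alias-set assignments.

	double d;
	int *p;
	char *q;

   A dereference of P cannot refer to D because the alias sets of "int" and
   "double" do not conflict, so may_alias_p returns false for that pair.  A
   dereference of Q, however, may refer to D, because character types get the
   alias set that conflicts with every other set (alias set 0).  */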
1953
1954 /* Add ALIAS to the set of variables that may alias VAR. */
1955
1956 static void
1957 add_may_alias (tree var, tree alias)
1958 {
1959
1960 /* Don't allow self-referential aliases. */
1961 gcc_assert (var != alias);
1962
1963 /* ALIAS must be addressable if it's being added to an alias set. */
1964 #if 1
1965 TREE_ADDRESSABLE (alias) = 1;
1966 #else
1967 gcc_assert (may_be_aliased (alias));
1968 #endif
1969
1970 /* VAR must be a symbol or a name tag. */
1971 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1972 || TREE_CODE (var) == NAME_MEMORY_TAG);
1973
1974 if (MTAG_ALIASES (var) == NULL)
1975 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
1976
1977 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
1978 }
1979
1980
1981 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1982
1983 static void
1984 set_pt_anything (tree ptr)
1985 {
1986 struct ptr_info_def *pi = get_ptr_info (ptr);
1987
1988 pi->pt_anything = 1;
1989 pi->pt_vars = NULL;
1990
1991 /* The pointer used to have a name tag, but we now found it pointing
1992 to an arbitrary location. The name tag needs to be renamed and
1993 disassociated from PTR. */
1994 if (pi->name_mem_tag)
1995 {
1996 mark_sym_for_renaming (pi->name_mem_tag);
1997 pi->name_mem_tag = NULL_TREE;
1998 }
1999 }
2000
2001
2002 /* Determine whether STMT is an "escape" site from the current function.
2003    Escape sites are those statements which might expose the address of a
2004    variable outside the current function.  STMT is an escape site iff:
2005
2006 1- STMT is a function call, or
2007 2- STMT is an __asm__ expression, or
2008 3- STMT is an assignment to a non-local variable, or
2009 4- STMT is a return statement.
2010
2011 Return the type of escape site found, if we found one, or NO_ESCAPE
2012 if none. */
2013
2014 enum escape_type
2015 is_escape_site (tree stmt)
2016 {
2017 tree call = get_call_expr_in (stmt);
2018 if (call != NULL_TREE)
2019 {
2020 if (!TREE_SIDE_EFFECTS (call))
2021 return ESCAPE_TO_PURE_CONST;
2022
2023 return ESCAPE_TO_CALL;
2024 }
2025 else if (TREE_CODE (stmt) == ASM_EXPR)
2026 return ESCAPE_TO_ASM;
2027 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2028 {
2029 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
2030
2031 /* Get to the base of _REF nodes. */
2032 if (TREE_CODE (lhs) != SSA_NAME)
2033 lhs = get_base_address (lhs);
2034
2035 /* If we couldn't recognize the LHS of the assignment, assume that it
2036 is a non-local store. */
2037 if (lhs == NULL_TREE)
2038 return ESCAPE_UNKNOWN;
2039
2040 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
2041 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
2042 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2043 {
2044 tree from
2045 = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
2046 tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
2047
2048 /* If the RHS is a conversion between a pointer and an integer, the
2049 pointer escapes since we can't track the integer. */
2050 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
2051 return ESCAPE_BAD_CAST;
2052
2053 /* Same if the RHS is a conversion between a regular pointer and a
2054 ref-all pointer since we can't track the SMT of the former. */
2055 if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
2056 && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
2057 return ESCAPE_BAD_CAST;
2058 }
2059
2060 /* If the LHS is an SSA name, it can't possibly represent a non-local
2061 memory store. */
2062 if (TREE_CODE (lhs) == SSA_NAME)
2063 return NO_ESCAPE;
2064
2065 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2066 	 local variable, we cannot be sure whether it will escape, because we
2067 don't have information about objects not in SSA form. Need to
2068 implement something along the lines of
2069
2070 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2071 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2072 Conference on Object-Oriented Programming Systems, Languages, and
2073 Applications (OOPSLA), pp. 1-19, 1999. */
2074 return ESCAPE_STORED_IN_GLOBAL;
2075 }
2076 else if (TREE_CODE (stmt) == RETURN_EXPR)
2077 return ESCAPE_TO_RETURN;
2078
2079 return NO_ESCAPE;
2080 }
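
/* Editorial illustration (not from the original sources): statements that the
   classification above maps to each escape kind, using hypothetical names.

	foo (p);                    ESCAPE_TO_CALL (ESCAPE_TO_PURE_CONST if the
	                            call has no side effects)
	__asm__ ("" : : "r" (p));   ESCAPE_TO_ASM
	global_ptr = p;             ESCAPE_STORED_IN_GLOBAL (store to a non-SSA LHS)
	i = (long) p;               ESCAPE_BAD_CAST (pointer converted to integer)
	return p;                   ESCAPE_TO_RETURN  */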
2081
2082 /* Create a new memory tag of type TYPE.
2083 Does NOT push it into the current binding. */
2084
2085 tree
2086 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2087 {
2088 tree tmp_var;
2089
2090 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
2091
2092 /* Make the variable writable. */
2093 TREE_READONLY (tmp_var) = 0;
2094
2095 /* It doesn't start out global. */
2096 MTAG_GLOBAL (tmp_var) = 0;
2097 TREE_STATIC (tmp_var) = 0;
2098 TREE_USED (tmp_var) = 1;
2099
2100 return tmp_var;
2101 }
2102
2103 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2104 is considered to represent all the pointers whose pointed-to types are
2105 in the same alias set class. Otherwise, the tag represents a single
2106 SSA_NAME pointer variable. */
2107
2108 static tree
2109 create_memory_tag (tree type, bool is_type_tag)
2110 {
2111 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2112 type, (is_type_tag) ? "SMT" : "NMT");
2113
2114 /* By default, memory tags are local variables. Alias analysis will
2115 determine whether they should be considered globals. */
2116 DECL_CONTEXT (tag) = current_function_decl;
2117
2118 /* Memory tags are by definition addressable. */
2119 TREE_ADDRESSABLE (tag) = 1;
2120
2121 set_symbol_mem_tag (tag, NULL_TREE);
2122
2123 /* Add the tag to the symbol table. */
2124 add_referenced_var (tag);
2125
2126 return tag;
2127 }
2128
2129
2130 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2131 This is used if P_i has been found to point to a specific set of
2132 variables or to a non-aliased memory location like the address returned
2133 by malloc functions. */
2134
2135 static tree
2136 get_nmt_for (tree ptr)
2137 {
2138 struct ptr_info_def *pi = get_ptr_info (ptr);
2139 tree tag = pi->name_mem_tag;
2140
2141 if (tag == NULL_TREE)
2142 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2143 return tag;
2144 }
2145
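/* Editorial illustration (not from the original sources): if points-to
   analysis determines that an SSA_NAME pointer p_1 can only point to the
   variables "a" and "b", as in

	int a, b;
	int *p_1 = cond_2 ? &a : &b;
	... = *p_1;

   then p_1 gets a name memory tag (say NMT.7) whose may-alias set is { a, b },
   and the virtual operands for *p_1 mention NMT.7 rather than the coarser
   symbol memory tag of the pointer.  The tag name NMT.7 is made up for the
   example.  */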
2146
2147 /* Return the symbol memory tag associated with pointer PTR.  A memory
2148 tag is an artificial variable that represents the memory location
2149 pointed-to by PTR. It is used to model the effects of pointer
2150 de-references on addressable variables.
2151
2152 AI points to the data gathered during alias analysis. This
2153 function populates the array AI->POINTERS. */
2154
2155 static tree
2156 get_smt_for (tree ptr, struct alias_info *ai)
2157 {
2158 size_t i;
2159 tree tag;
2160 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2161 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2162
2163 /* We use a unique memory tag for all the ref-all pointers. */
2164 if (PTR_IS_REF_ALL (ptr))
2165 {
2166 if (!ai->ref_all_symbol_mem_tag)
2167 ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
2168 return ai->ref_all_symbol_mem_tag;
2169 }
2170
2171 /* To avoid creating unnecessary memory tags, only create one memory tag
2172 per alias set class. Note that it may be tempting to group
2173 memory tags based on conflicting alias sets instead of
2174 equivalence. That would be wrong because alias sets are not
2175 necessarily transitive (as demonstrated by the libstdc++ test
2176 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2177 such that conflicts (A, B) == true and conflicts (A, C) == true,
2178 it does not necessarily follow that conflicts (B, C) == true. */
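      /* Editorial note (not part of the original sources): a concrete
	 instance, assuming the usual C alias-set assignments:

		char  *a;	(alias set 0, conflicts with everything)
		int   *b;
		float *c;

	 The set for "char" conflicts with the sets for both "int" and "float",
	 yet the sets for "int" and "float" do not conflict with each other, so
	 the conflict relation is not transitive.  */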
2179 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2180 {
2181 struct alias_map_d *curr = ai->pointers[i];
2182 tree curr_tag = symbol_mem_tag (curr->var);
2183 if (tag_set == curr->set)
2184 {
2185 tag = curr_tag;
2186 break;
2187 }
2188 }
2189
2190 /* If VAR cannot alias with any of the existing memory tags, create a new
2191 tag for PTR and add it to the POINTERS array. */
2192 if (tag == NULL_TREE)
2193 {
2194 struct alias_map_d *alias_map;
2195
2196       /* If PTR did not have a symbol tag already, create a new SMT, an
2197 artificial variable representing the memory location
2198 pointed-to by PTR. */
2199 tag = symbol_mem_tag (ptr);
2200 if (tag == NULL_TREE)
2201 tag = create_memory_tag (tag_type, true);
2202
2203 /* Add PTR to the POINTERS array. Note that we are not interested in
2204 PTR's alias set. Instead, we cache the alias set for the memory that
2205 PTR points to. */
2206 alias_map = XCNEW (struct alias_map_d);
2207 alias_map->var = ptr;
2208 alias_map->set = tag_set;
2209 ai->pointers[ai->num_pointers++] = alias_map;
2210 }
2211
2212 /* If the pointed-to type is volatile, so is the tag. */
2213 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2214
2215 /* Make sure that the symbol tag has the same alias set as the
2216 pointed-to type. */
2217 gcc_assert (tag_set == get_alias_set (tag));
2218
2219 return tag;
2220 }
2221
2222
2223 /* Create GLOBAL_VAR, an artificial global variable to act as a
2224 representative of all the variables that may be clobbered by function
2225 calls. */
2226
2227 static void
2228 create_global_var (void)
2229 {
2230 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2231 void_type_node);
2232 DECL_ARTIFICIAL (global_var) = 1;
2233 TREE_READONLY (global_var) = 0;
2234 DECL_EXTERNAL (global_var) = 1;
2235 TREE_STATIC (global_var) = 1;
2236 TREE_USED (global_var) = 1;
2237 DECL_CONTEXT (global_var) = NULL_TREE;
2238 TREE_THIS_VOLATILE (global_var) = 0;
2239 TREE_ADDRESSABLE (global_var) = 0;
2240
2241 create_var_ann (global_var);
2242 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2243 add_referenced_var (global_var);
2244 mark_sym_for_renaming (global_var);
2245 cfun->gimple_df->global_var = global_var;
2246 }
2247
2248
2249 /* Dump alias statistics on FILE. */
2250
2251 static void
2252 dump_alias_stats (FILE *file)
2253 {
2254 const char *funcname
2255 = lang_hooks.decl_printable_name (current_function_decl, 2);
2256 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2257 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2258 fprintf (file, "Total alias mayalias results:\t%u\n",
2259 alias_stats.alias_mayalias);
2260 fprintf (file, "Total alias noalias results:\t%u\n",
2261 alias_stats.alias_noalias);
2262 fprintf (file, "Total simple queries:\t%u\n",
2263 alias_stats.simple_queries);
2264 fprintf (file, "Total simple resolved:\t%u\n",
2265 alias_stats.simple_resolved);
2266 fprintf (file, "Total TBAA queries:\t%u\n",
2267 alias_stats.tbaa_queries);
2268 fprintf (file, "Total TBAA resolved:\t%u\n",
2269 alias_stats.tbaa_resolved);
2270 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2271 alias_stats.structnoaddress_queries);
2272 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2273 alias_stats.structnoaddress_resolved);
2274 }
2275
2276
2277 /* Dump alias information on FILE. */
2278
2279 void
2280 dump_alias_info (FILE *file)
2281 {
2282 size_t i;
2283 const char *funcname
2284 = lang_hooks.decl_printable_name (current_function_decl, 2);
2285 referenced_var_iterator rvi;
2286 tree var;
2287
2288 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2289
2290 fprintf (file, "Aliased symbols\n\n");
2291
2292 FOR_EACH_REFERENCED_VAR (var, rvi)
2293 {
2294 if (may_be_aliased (var))
2295 dump_variable (file, var);
2296 }
2297
2298 fprintf (file, "\nDereferenced pointers\n\n");
2299
2300 FOR_EACH_REFERENCED_VAR (var, rvi)
2301 if (symbol_mem_tag (var))
2302 dump_variable (file, var);
2303
2304 fprintf (file, "\nSymbol memory tags\n\n");
2305
2306 FOR_EACH_REFERENCED_VAR (var, rvi)
2307 {
2308 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2309 dump_variable (file, var);
2310 }
2311
2312 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2313
2314 fprintf (file, "SSA_NAME pointers\n\n");
2315 for (i = 1; i < num_ssa_names; i++)
2316 {
2317 tree ptr = ssa_name (i);
2318 struct ptr_info_def *pi;
2319
2320 if (ptr == NULL_TREE)
2321 continue;
2322
2323 pi = SSA_NAME_PTR_INFO (ptr);
2324 if (!SSA_NAME_IN_FREE_LIST (ptr)
2325 && pi
2326 && pi->name_mem_tag)
2327 dump_points_to_info_for (file, ptr);
2328 }
2329
2330 fprintf (file, "\nName memory tags\n\n");
2331
2332 FOR_EACH_REFERENCED_VAR (var, rvi)
2333 {
2334 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2335 dump_variable (file, var);
2336 }
2337
2338 dump_memory_partitions (file);
2339
2340 fprintf (file, "\n");
2341 }
2342
2343
2344 /* Dump alias information on stderr. */
2345
2346 void
2347 debug_alias_info (void)
2348 {
2349 dump_alias_info (stderr);
2350 }
2351
2352
2353 /* Return the alias information associated with pointer T. It creates a
2354 new instance if none existed. */
2355
2356 struct ptr_info_def *
2357 get_ptr_info (tree t)
2358 {
2359 struct ptr_info_def *pi;
2360
2361 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2362
2363 pi = SSA_NAME_PTR_INFO (t);
2364 if (pi == NULL)
2365 {
2366 pi = GGC_CNEW (struct ptr_info_def);
2367 SSA_NAME_PTR_INFO (t) = pi;
2368 }
2369
2370 return pi;
2371 }
2372
2373
2374 /* Dump points-to information for SSA_NAME PTR into FILE. */
2375
2376 void
2377 dump_points_to_info_for (FILE *file, tree ptr)
2378 {
2379 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2380
2381 print_generic_expr (file, ptr, dump_flags);
2382
2383 if (pi)
2384 {
2385 if (pi->name_mem_tag)
2386 {
2387 fprintf (file, ", name memory tag: ");
2388 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2389 }
2390
2391 if (pi->is_dereferenced)
2392 fprintf (file, ", is dereferenced");
2393
2394 if (pi->value_escapes_p)
2395 fprintf (file, ", its value escapes");
2396
2397 if (pi->pt_anything)
2398 fprintf (file, ", points-to anything");
2399
2400 if (pi->pt_null)
2401 fprintf (file, ", points-to NULL");
2402
2403 if (pi->pt_vars)
2404 {
2405 fprintf (file, ", points-to vars: ");
2406 dump_decl_set (file, pi->pt_vars);
2407 }
2408 }
2409
2410 fprintf (file, "\n");
2411 }
2412
2413
2414 /* Dump points-to information for VAR into stderr. */
2415
2416 void
2417 debug_points_to_info_for (tree var)
2418 {
2419 dump_points_to_info_for (stderr, var);
2420 }
2421
2422
2423 /* Dump points-to information into FILE. NOTE: This function is slow, as
2424 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2425
2426 void
2427 dump_points_to_info (FILE *file)
2428 {
2429 basic_block bb;
2430 block_stmt_iterator si;
2431 ssa_op_iter iter;
2432 const char *fname =
2433 lang_hooks.decl_printable_name (current_function_decl, 2);
2434 referenced_var_iterator rvi;
2435 tree var;
2436
2437 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2438
2439 /* First dump points-to information for the default definitions of
2440 pointer variables. This is necessary because default definitions are
2441 not part of the code. */
2442 FOR_EACH_REFERENCED_VAR (var, rvi)
2443 {
2444 if (POINTER_TYPE_P (TREE_TYPE (var)))
2445 {
2446 tree def = gimple_default_def (cfun, var);
2447 if (def)
2448 dump_points_to_info_for (file, def);
2449 }
2450 }
2451
2452 /* Dump points-to information for every pointer defined in the program. */
2453 FOR_EACH_BB (bb)
2454 {
2455 tree phi;
2456
2457 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2458 {
2459 tree ptr = PHI_RESULT (phi);
2460 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2461 dump_points_to_info_for (file, ptr);
2462 }
2463
2464 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2465 {
2466 tree stmt = bsi_stmt (si);
2467 tree def;
2468 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2469 if (POINTER_TYPE_P (TREE_TYPE (def)))
2470 dump_points_to_info_for (file, def);
2471 }
2472 }
2473
2474 fprintf (file, "\n");
2475 }
2476
2477
2478 /* Dump points-to information into stderr.  */
2479
2480 void
2481 debug_points_to_info (void)
2482 {
2483 dump_points_to_info (stderr);
2484 }
2485
2486 /* Dump to FILE the list of variables that may be aliasing VAR. */
2487
2488 void
2489 dump_may_aliases_for (FILE *file, tree var)
2490 {
2491 bitmap aliases;
2492
2493 aliases = MTAG_ALIASES (var);
2494 if (aliases)
2495 {
2496 bitmap_iterator bi;
2497 unsigned int i;
2498 tree al;
2499
2500 fprintf (file, "{ ");
2501 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
2502 {
2503 al = referenced_var (i);
2504 print_generic_expr (file, al, dump_flags);
2505 fprintf (file, " ");
2506 }
2507 fprintf (file, "}");
2508 }
2509 }
2510
2511
2512 /* Dump to stderr the list of variables that may be aliasing VAR. */
2513
2514 void
2515 debug_may_aliases_for (tree var)
2516 {
2517 dump_may_aliases_for (stderr, var);
2518 }
2519
2520
2521 /* Return true if VAR may be aliased. */
2522
2523 bool
2524 may_be_aliased (tree var)
2525 {
2526 /* Obviously. */
2527 if (TREE_ADDRESSABLE (var))
2528 return true;
2529
2530 /* Globally visible variables can have their addresses taken by other
2531 translation units. */
2532 if (MTAG_P (var)
2533 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2534 return true;
2535 else if (!MTAG_P (var)
2536 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2537 return true;
2538
2539 /* Automatic variables can't have their addresses escape any other
2540 way. This must be after the check for global variables, as
2541 extern declarations do not have TREE_STATIC set. */
2542 if (!TREE_STATIC (var))
2543 return false;
2544
2545 /* If we're in unit-at-a-time mode, then we must have seen all
2546 occurrences of address-of operators, and so we can trust
2547 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
2548 isn't addressable if it's local to the current function. */
2549 if (flag_unit_at_a_time)
2550 return false;
2551
2552 if (decl_function_context (var) == current_function_decl)
2553 return false;
2554
2555 return true;
2556 }
2557
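/* Editorial illustration (not from the original sources): typical answers
   from may_be_aliased under the rules above.

	int g;                    may be aliased: globally visible.
	void f (void)
	{
	  int x, y;
	  int *p = &x;            x may be aliased: TREE_ADDRESSABLE is set.
	  ...                     y is not: local, address never taken.
	}                                                                    */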
2558
2559 /* Given two symbols, return TRUE if one is in the alias set of the
2560 other. */
2561
2562 bool
2563 is_aliased_with (tree tag, tree sym)
2564 {
2565 bitmap aliases;
2566
2567 if (MTAG_P (tag))
2568 {
2569 aliases = MTAG_ALIASES (tag);
2570
2571 if (aliases == NULL)
2572 return false;
2573
2574 return bitmap_bit_p (aliases, DECL_UID (sym));
2575 }
2576 else
2577 {
2578 gcc_assert (MTAG_P (sym));
2579 aliases = MTAG_ALIASES (sym);
2580
2581 if (aliases == NULL)
2582 return false;
2583
2584 return bitmap_bit_p (aliases, DECL_UID (tag));
2585 }
2586
2587 return false;
2588 }
2589
2590 /* The following is based on code in add_stmt_operand to ensure that the
2591 same defs/uses/vdefs/vuses will be found after replacing a reference
2592 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2593 is the address of var. Return a memtag for the ptr, after adding the
2594 proper may_aliases to it (which are the aliases of var, if it has any,
2595 or var itself). */
2596
2597 static tree
2598 add_may_alias_for_new_tag (tree tag, tree var)
2599 {
2600 bitmap aliases = NULL;
2601
2602 if (MTAG_P (var))
2603 aliases = may_aliases (var);
2604
2605 /* Case 1: |aliases| == 1 */
2606 if (aliases && bitmap_count_bits (aliases) == 1)
2607 {
2608 tree ali = referenced_var (bitmap_first_set_bit (aliases));
2609 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2610 return ali;
2611 }
2612
2613 /* Case 2: |aliases| == 0 */
2614 if (aliases == NULL)
2615 add_may_alias (tag, var);
2616 else
2617 {
2618 /* Case 3: |aliases| > 1 */
2619 union_alias_set_into (tag, aliases);
2620 }
2621 return tag;
2622 }
2623
2624 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
2625 tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
2626 according to the location accessed by EXPR.
2627
2628    Note that the set of aliases represented by the new symbol tag is not marked
2629 for renaming. */
2630
2631 void
2632 new_type_alias (tree ptr, tree var, tree expr)
2633 {
2634 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2635 tree tag;
2636 subvar_t svars;
2637 tree ali = NULL_TREE;
2638 HOST_WIDE_INT offset, size, maxsize;
2639 tree ref;
2640 VEC (tree, heap) *overlaps = NULL;
2641 subvar_t sv;
2642 unsigned int len;
2643
2644 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
2645 gcc_assert (!MTAG_P (var));
2646
2647 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2648 gcc_assert (ref);
2649
2650 tag = create_memory_tag (tag_type, true);
2651 set_symbol_mem_tag (ptr, tag);
2652
2653 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2654 subvars, add the subvars to the tag instead of the actual var. */
2655 if (var_can_have_subvars (ref)
2656 && (svars = get_subvars_for_var (ref)))
2657 {
2658 for (sv = svars; sv; sv = sv->next)
2659 {
2660 bool exact;
2661
2662 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2663 VEC_safe_push (tree, heap, overlaps, sv->var);
2664 }
2665 gcc_assert (overlaps != NULL);
2666 }
2667 else if (var_can_have_subvars (var)
2668 && (svars = get_subvars_for_var (var)))
2669 {
2670 /* If the REF is not a direct access to VAR (e.g., it is a dereference
2671 of a pointer), we should scan the virtual operands of REF the same
2672 way as tree-ssa-operands do. At the moment, this is somewhat
2673 difficult, so we just give up and add all the subvars of VAR.
2674 	 On the mem-ssa branch, the scanning for virtual operands has been
2675 split from the rest of tree-ssa-operands, so it should be much
2676 easier to fix this problem correctly once mem-ssa is merged. */
2677 for (sv = svars; sv; sv = sv->next)
2678 VEC_safe_push (tree, heap, overlaps, sv->var);
2679
2680 gcc_assert (overlaps != NULL);
2681 }
2682 else
2683 ali = add_may_alias_for_new_tag (tag, var);
2684
2685 len = VEC_length (tree, overlaps);
2686 if (len > 0)
2687 {
2688 if (dump_file && (dump_flags & TDF_DETAILS))
2689 fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
2690
2691 if (len == 1)
2692 ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
2693 else if (len > 1)
2694 {
2695 unsigned int k;
2696 tree sv_var;
2697
2698 for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
2699 {
2700 ali = add_may_alias_for_new_tag (tag, sv_var);
2701
2702 if (ali != tag)
2703 {
2704 /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
2705 took place. Since more than one svar was found, we add
2706 'ali' as one of the may_aliases of the new tag. */
2707 add_may_alias (tag, ali);
2708 ali = tag;
2709 }
2710 }
2711 }
2712 VEC_free (tree, heap, overlaps);
2713 }
2714
2715 set_symbol_mem_tag (ptr, ali);
2716 TREE_READONLY (tag) = TREE_READONLY (var);
2717 MTAG_GLOBAL (tag) = is_global_var (var);
2718 }
2719
2720 /* This represents the used range of a variable. */
2721
2722 typedef struct used_part
2723 {
2724 HOST_WIDE_INT minused;
2725 HOST_WIDE_INT maxused;
2726 /* True if we have an explicit use/def of some portion of this variable,
2727      even if it is all of it, e.g. a.b = 5 or temp = a.b.  */
2728 bool explicit_uses;
2729 /* True if we have an implicit use/def of some portion of this
2730 variable. Implicit uses occur when we can't tell what part we
2731 are referencing, and have to make conservative assumptions. */
2732 bool implicit_uses;
2733   /* True if the structure is only written to or has its address taken.  */
2734 bool write_only;
2735 } *used_part_t;
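
/* Editorial illustration (not from the original sources): how the used range
   is filled in by find_used_portions below.  Assuming a target where "int" is
   32 bits wide and the struct has no padding, for

	struct S { int a; int b; } s;
	...
	s.b = 1;

   the entry for "s" records minused == 32 and maxused == 64 (the bit range of
   field "b"), explicit_uses == true because the exact extent of the access is
   known, and write_only stays true as long as "s" is only ever stored to.  */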
2736
2737 /* An array of used_part structures, indexed by variable uid. */
2738
2739 static htab_t used_portions;
2740
2741 struct used_part_map
2742 {
2743 unsigned int uid;
2744 used_part_t to;
2745 };
2746
2747 /* Return true if the uids in the two used part maps are equal.  */
2748
2749 static int
2750 used_part_map_eq (const void *va, const void *vb)
2751 {
2752 const struct used_part_map *a = (const struct used_part_map *) va;
2753 const struct used_part_map *b = (const struct used_part_map *) vb;
2754 return (a->uid == b->uid);
2755 }
2756
2757 /* Hash a used_part_map from its uid.  */
2758
2759 static unsigned int
2760 used_part_map_hash (const void *item)
2761 {
2762 return ((const struct used_part_map *)item)->uid;
2763 }
2764
2765 /* Free a used part map element. */
2766
2767 static void
2768 free_used_part_map (void *item)
2769 {
2770 free (((struct used_part_map *)item)->to);
2771 free (item);
2772 }
2773
2774 /* Lookup a used_part structure for a UID. */
2775
2776 static used_part_t
2777 up_lookup (unsigned int uid)
2778 {
2779 struct used_part_map *h, in;
2780 in.uid = uid;
2781 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2782 if (!h)
2783 return NULL;
2784 return h->to;
2785 }
2786
2787 /* Insert the pair UID, TO into the used part hashtable. */
2788
2789 static void
2790 up_insert (unsigned int uid, used_part_t to)
2791 {
2792 struct used_part_map *h;
2793 void **loc;
2794
2795 h = XNEW (struct used_part_map);
2796 h->uid = uid;
2797 h->to = to;
2798 loc = htab_find_slot_with_hash (used_portions, h,
2799 uid, INSERT);
2800 if (*loc != NULL)
2801 free (*loc);
2802 *(struct used_part_map **) loc = h;
2803 }
2804
2805
2806 /* Given a variable uid, UID, get or create the entry in the used portions
2807 table for the variable. */
2808
2809 static used_part_t
2810 get_or_create_used_part_for (size_t uid)
2811 {
2812 used_part_t up;
2813 if ((up = up_lookup (uid)) == NULL)
2814 {
2815 up = XCNEW (struct used_part);
2816 up->minused = INT_MAX;
2817 up->maxused = 0;
2818 up->explicit_uses = false;
2819 up->implicit_uses = false;
2820 up->write_only = true;
2821 }
2822
2823 return up;
2824 }
2825
2826
2827 /* Create and return a structure sub-variable for field type FIELD at
2828 offset OFFSET, with size SIZE, of variable VAR. */
2829
2830 static tree
2831 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2832 unsigned HOST_WIDE_INT size)
2833 {
2834 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2835
2836 /* We need to copy the various flags from VAR to SUBVAR, so that
2837      the subvar is considered global (is_global_var) iff the original variable was.  */
2838 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2839 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2840 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2841 TREE_STATIC (subvar) = TREE_STATIC (var);
2842 TREE_READONLY (subvar) = TREE_READONLY (var);
2843 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
2844
2845 /* Add the new variable to REFERENCED_VARS. */
2846 set_symbol_mem_tag (subvar, NULL);
2847 add_referenced_var (subvar);
2848 SFT_PARENT_VAR (subvar) = var;
2849 SFT_OFFSET (subvar) = offset;
2850 SFT_SIZE (subvar) = size;
2851 return subvar;
2852 }
2853
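/* Editorial illustration (not from the original sources): the kind of
   structure field tags the code below creates.  Assuming 32-bit "int" and
   "float" and no padding, if both fields of

	struct S { int a; float b; } s;

   are used in the current function, create_overlap_variables_for produces two
   subvars for "s": one SFT covering bits [0, 32) for "s.a" and another
   covering bits [32, 64) for "s.b".  References to "s.a" then only get
   virtual operands for the first tag instead of for all of "s".  */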
2854
2855 /* Given an aggregate VAR, create the subvariables that represent its
2856 fields. */
2857
2858 static void
2859 create_overlap_variables_for (tree var)
2860 {
2861 VEC(fieldoff_s,heap) *fieldstack = NULL;
2862 used_part_t up;
2863 size_t uid = DECL_UID (var);
2864
2865 up = up_lookup (uid);
2866 if (!up
2867 || up->write_only)
2868 return;
2869
2870 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2871 if (VEC_length (fieldoff_s, fieldstack) != 0)
2872 {
2873 subvar_t *subvars;
2874 fieldoff_s *fo;
2875 bool notokay = false;
2876 int fieldcount = 0;
2877 int i;
2878 HOST_WIDE_INT lastfooffset = -1;
2879 HOST_WIDE_INT lastfosize = -1;
2880 tree lastfotype = NULL_TREE;
2881
2882       /* Not all fields have DECL_SIZE set, and for those that don't we don't
2883 	 know their size and thus cannot handle them.
2884 	 The same is true of fields with DECL_SIZE that is not an integer
2885 	 constant (such as variable sized fields).
2886 	 Fields with offsets which are not constant will have an offset < 0.
2887 We *could* handle fields that are constant sized arrays, but
2888 currently don't. Doing so would require some extra changes to
2889 tree-ssa-operands.c. */
2890
2891 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2892 {
2893 if (!fo->size
2894 || TREE_CODE (fo->size) != INTEGER_CST
2895 || fo->offset < 0)
2896 {
2897 notokay = true;
2898 break;
2899 }
2900 fieldcount++;
2901 }
2902
2903 /* The current heuristic we use is as follows:
2904 If the variable has no used portions in this function, no
2905 structure vars are created for it.
2906 Otherwise,
2907 	 If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
2908 	 we always create structure vars for it.
2909 	 If the variable has SALIAS_MAX_IMPLICIT_FIELDS or more fields and
2910 	 some explicit uses, we create structure vars for it.
2911 	 If the variable has SALIAS_MAX_IMPLICIT_FIELDS or more fields and
2912 	 no explicit uses, we do not create structure vars for it.
2913 */
2914
2915 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2916 && !up->explicit_uses)
2917 {
2918 if (dump_file && (dump_flags & TDF_DETAILS))
2919 {
2920 fprintf (dump_file, "Variable ");
2921 print_generic_expr (dump_file, var, 0);
2922 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2923 }
2924 notokay = true;
2925 }
2926
2927 /* Bail out, if we can't create overlap variables. */
2928 if (notokay)
2929 {
2930 VEC_free (fieldoff_s, heap, fieldstack);
2931 return;
2932 }
2933
2934 /* Otherwise, create the variables. */
2935 subvars = lookup_subvars_for_var (var);
2936
2937 sort_fieldstack (fieldstack);
2938
2939 for (i = VEC_length (fieldoff_s, fieldstack);
2940 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2941 {
2942 subvar_t sv;
2943 HOST_WIDE_INT fosize;
2944 tree currfotype;
2945
2946 fosize = TREE_INT_CST_LOW (fo->size);
2947 currfotype = fo->type;
2948
2949 /* If this field isn't in the used portion,
2950 or it has the exact same offset and size as the last
2951 field, skip it. */
2952
2953 if (((fo->offset <= up->minused
2954 && fo->offset + fosize <= up->minused)
2955 || fo->offset >= up->maxused)
2956 || (fo->offset == lastfooffset
2957 && fosize == lastfosize
2958 && currfotype == lastfotype))
2959 continue;
2960 sv = GGC_NEW (struct subvar);
2961 sv->next = *subvars;
2962 sv->var = create_sft (var, fo->type, fo->offset, fosize);
2963
2964 if (dump_file)
2965 {
2966 fprintf (dump_file, "structure field tag %s created for var %s",
2967 get_name (sv->var), get_name (var));
2968 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
2969 SFT_OFFSET (sv->var));
2970 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
2971 SFT_SIZE (sv->var));
2972 fprintf (dump_file, "\n");
2973 }
2974
2975 lastfotype = currfotype;
2976 lastfooffset = fo->offset;
2977 lastfosize = fosize;
2978 *subvars = sv;
2979 }
2980
2981 /* Once we have created subvars, the original is no longer call
2982 clobbered on its own. Its call clobbered status depends
2983 completely on the call clobbered status of the subvars.
2984
2985 add_referenced_var in the above loop will take care of
2986 marking subvars of global variables as call clobbered for us
2987 to start, since they are global as well. */
2988 clear_call_clobbered (var);
2989 }
2990
2991 VEC_free (fieldoff_s, heap, fieldstack);
2992 }
2993
2994
2995 /* Find the conservative answer to the question of what portions of what
2996 structures are used by this statement. We assume that if we have a
2997    component ref with a known size + offset, we only need that part
2998 of the structure. For unknown cases, or cases where we do something
2999 to the whole structure, we assume we need to create fields for the
3000 entire structure. */
3001
3002 static tree
3003 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3004 {
3005 switch (TREE_CODE (*tp))
3006 {
3007 case GIMPLE_MODIFY_STMT:
3008 /* Recurse manually here to track whether the use is in the
3009 LHS of an assignment. */
3010 find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
3011 return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
3012 walk_subtrees, NULL);
3013 case REALPART_EXPR:
3014 case IMAGPART_EXPR:
3015 case COMPONENT_REF:
3016 case ARRAY_REF:
3017 {
3018 HOST_WIDE_INT bitsize;
3019 HOST_WIDE_INT bitmaxsize;
3020 HOST_WIDE_INT bitpos;
3021 tree ref;
3022 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3023 if (DECL_P (ref)
3024 && var_can_have_subvars (ref)
3025 && bitmaxsize != -1)
3026 {
3027 size_t uid = DECL_UID (ref);
3028 used_part_t up;
3029
3030 up = get_or_create_used_part_for (uid);
3031
3032 if (bitpos <= up->minused)
3033 up->minused = bitpos;
3034 if ((bitpos + bitmaxsize >= up->maxused))
3035 up->maxused = bitpos + bitmaxsize;
3036
3037 if (bitsize == bitmaxsize)
3038 up->explicit_uses = true;
3039 else
3040 up->implicit_uses = true;
3041 if (!lhs_p)
3042 up->write_only = false;
3043 up_insert (uid, up);
3044
3045 *walk_subtrees = 0;
3046 return NULL_TREE;
3047 }
3048 }
3049 break;
3050 /* This is here to make sure we mark the entire base variable as used
3051 when you take its address. Because our used portion analysis is
3052 simple, we aren't looking at casts or pointer arithmetic to see what
3053 happens when you take the address. */
3054 case ADDR_EXPR:
3055 {
3056 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3057
3058 if (var
3059 && DECL_P (var)
3060 && DECL_SIZE (var)
3061 && var_can_have_subvars (var)
3062 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3063 {
3064 used_part_t up;
3065 size_t uid = DECL_UID (var);
3066
3067 up = get_or_create_used_part_for (uid);
3068
3069 up->minused = 0;
3070 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3071 up->implicit_uses = true;
3072 if (!lhs_p)
3073 up->write_only = false;
3074
3075 up_insert (uid, up);
3076 *walk_subtrees = 0;
3077 return NULL_TREE;
3078 }
3079 }
3080 break;
3081 case CALL_EXPR:
3082 {
3083 int i;
3084 int nargs = call_expr_nargs (*tp);
3085 for (i = 0; i < nargs; i++)
3086 {
3087 tree *arg = &CALL_EXPR_ARG (*tp, i);
3088 if (TREE_CODE (*arg) != ADDR_EXPR)
3089 find_used_portions (arg, walk_subtrees, NULL);
3090 }
3091 *walk_subtrees = 0;
3092 return NULL_TREE;
3093 }
3094 case VAR_DECL:
3095 case PARM_DECL:
3096 case RESULT_DECL:
3097 {
3098 tree var = *tp;
3099 if (DECL_SIZE (var)
3100 && var_can_have_subvars (var)
3101 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3102 {
3103 used_part_t up;
3104 size_t uid = DECL_UID (var);
3105
3106 up = get_or_create_used_part_for (uid);
3107
3108 up->minused = 0;
3109 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3110 up->implicit_uses = true;
3111
3112 up_insert (uid, up);
3113 *walk_subtrees = 0;
3114 return NULL_TREE;
3115 }
3116 }
3117 break;
3118
3119 default:
3120 break;
3121
3122 }
3123 return NULL_TREE;
3124 }
3125
3126 /* Create structure field variables for structures used in this function. */
3127
3128 static unsigned int
3129 create_structure_vars (void)
3130 {
3131 basic_block bb;
3132 safe_referenced_var_iterator rvi;
3133 VEC (tree, heap) *varvec = NULL;
3134 tree var;
3135
3136 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3137 free_used_part_map);
3138
3139 FOR_EACH_BB (bb)
3140 {
3141 block_stmt_iterator bsi;
3142 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3143 {
3144 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3145 find_used_portions,
3146 NULL);
3147 }
3148 }
3149 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3150 {
3151 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3152 if (var
3153 && DECL_SIZE (var)
3154 && var_can_have_subvars (var)
3155 && !MTAG_P (var)
3156 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3157 create_overlap_variables_for (var);
3158 }
3159 htab_delete (used_portions);
3160 VEC_free (tree, heap, varvec);
3161
3162 /* Update SSA operands of statements mentioning variables we split. */
3163 if (gimple_in_ssa_p (cfun))
3164 FOR_EACH_BB (bb)
3165 {
3166 block_stmt_iterator bsi;
3167 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3168 {
3169 tree stmt = bsi_stmt (bsi);
3170 bool update = false;
3171 unsigned int i;
3172 bitmap_iterator bi;
3173
3174 if (STORED_SYMS (stmt))
3175 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3176 {
3177 tree sym = referenced_var_lookup (i);
3178 if (get_subvars_for_var (sym))
3179 {
3180 update=true;
3181 break;
3182 }
3183 }
3184
3185 if (LOADED_SYMS (stmt) && !update)
3186 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3187 {
3188 tree sym = referenced_var_lookup (i);
3189 if (get_subvars_for_var (sym))
3190 {
3191 update=true;
3192 break;
3193 }
3194 }
3195
3196 if (stmt_ann (stmt)->addresses_taken && !update)
3197 EXECUTE_IF_SET_IN_BITMAP (stmt_ann (stmt)->addresses_taken,
3198 0, i, bi)
3199 {
3200 tree sym = referenced_var_lookup (i);
3201 if (get_subvars_for_var (sym))
3202 {
3203 update=true;
3204 break;
3205 }
3206 }
3207
3208 if (update)
3209 update_stmt (stmt);
3210 }
3211 }
3212
3213 return 0;
3214 }
3215
3216 static bool
3217 gate_structure_vars (void)
3218 {
3219 return flag_tree_salias != 0;
3220 }
3221
3222 struct tree_opt_pass pass_create_structure_vars =
3223 {
3224 "salias", /* name */
3225 gate_structure_vars, /* gate */
3226 create_structure_vars, /* execute */
3227 NULL, /* sub */
3228 NULL, /* next */
3229 0, /* static_pass_number */
3230 0, /* tv_id */
3231 PROP_cfg, /* properties_required */
3232 0, /* properties_provided */
3233 0, /* properties_destroyed */
3234 0, /* todo_flags_start */
3235 TODO_dump_func, /* todo_flags_finish */
3236 0 /* letter */
3237 };
3238
3239 /* Reset the call_clobbered flags on our referenced vars. In
3240 theory, this only needs to be done for globals. */
3241
3242 static unsigned int
3243 reset_cc_flags (void)
3244 {
3245 tree var;
3246 referenced_var_iterator rvi;
3247
3248 FOR_EACH_REFERENCED_VAR (var, rvi)
3249 var_ann (var)->call_clobbered = false;
3250 return 0;
3251 }
3252
3253 struct tree_opt_pass pass_reset_cc_flags =
3254 {
3255 NULL, /* name */
3256 NULL, /* gate */
3257 reset_cc_flags, /* execute */
3258 NULL, /* sub */
3259 NULL, /* next */
3260 0, /* static_pass_number */
3261 0, /* tv_id */
3262 PROP_referenced_vars |PROP_cfg, /* properties_required */
3263 0, /* properties_provided */
3264 0, /* properties_destroyed */
3265 0, /* todo_flags_start */
3266 0, /* todo_flags_finish */
3267 0 /* letter */
3268 };