tree-ssa-operands.h (ssa_call_clobbered_cache_valid): Remove.
[gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49
/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   aliasing.  */
static bitmap_obstack alias_obstack;

/* 'true' after aliases have been computed (see compute_may_aliases).  */
bool aliases_computed_p;

/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  bitmap may_aliases;
};


/* Counters used to display statistics on alias analysis.  Dumped by
   dump_alias_stats when the TDF_STATS dump flag is set.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};


/* Local variables.  */
static struct alias_stats_d alias_stats;

/* Local functions.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);

/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.  */
tree global_var;

/* Declare VEC (int, heap), used below for the escape-reason worklists
   that parallel the tree worklists.  */
DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);
140
141 /* qsort comparison function to sort type/name tags by DECL_UID. */
142
143 static int
144 sort_tags_by_id (const void *pa, const void *pb)
145 {
146 tree a = *(tree *)pa;
147 tree b = *(tree *)pb;
148
149 return DECL_UID (a) - DECL_UID (b);
150 }
151
152 /* Initialize WORKLIST to contain those memory tags that are marked call
153 clobbered. Initialized WORKLIST2 to contain the reasons these
154 memory tags escaped. */
155
156 static void
157 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
158 VEC (int, heap) **worklist2)
159 {
160 referenced_var_iterator rvi;
161 tree curr;
162
163 FOR_EACH_REFERENCED_VAR (curr, rvi)
164 {
165 if (MTAG_P (curr) && is_call_clobbered (curr))
166 {
167 VEC_safe_push (tree, heap, *worklist, curr);
168 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
169 }
170 }
171 }
172
173 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
174 ALIAS is not already marked call clobbered, and is a memory
175 tag. */
176
177 static void
178 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
179 VEC (int, heap) **worklist2,
180 int reason)
181 {
182 if (MTAG_P (alias) && !is_call_clobbered (alias))
183 {
184 VEC_safe_push (tree, heap, *worklist, alias);
185 VEC_safe_push (int, heap, *worklist2, reason);
186 }
187 }
188
189 /* Mark aliases of TAG as call clobbered, and place any tags on the
190 alias list that were not already call clobbered on WORKLIST. */
191
192 static void
193 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
194 VEC (int, heap) **worklist2)
195 {
196 unsigned int i;
197 VEC (tree, gc) *ma;
198 tree entry;
199 var_ann_t ta = var_ann (tag);
200
201 if (!MTAG_P (tag))
202 return;
203 ma = may_aliases (tag);
204 if (!ma)
205 return;
206
207 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
208 {
209 if (!unmodifiable_var_p (entry))
210 {
211 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
212 mark_call_clobbered (entry, ta->escape_mask);
213 }
214 }
215 }
216
/* Tags containing global vars need to be marked as global.
   Tags containing call clobbered vars need to be marked as call
   clobbered.  Iterates to a fixed point because tags may appear in the
   may-aliases lists of other tags.  */

static void
compute_tag_properties (void)
{
  referenced_var_iterator rvi;
  tree tag;
  bool changed = true;
  VEC (tree, heap) *taglist = NULL;

  /* Collect every memory tag except struct field tags.  */
  FOR_EACH_REFERENCED_VAR (tag, rvi)
    {
      if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
	continue;
      VEC_safe_push (tree, heap, taglist, tag);
    }

  /* We sort the taglist by DECL_UID, for two reasons.
     1. To get a sequential ordering to make the bitmap accesses
     faster.
     2. Because of the way we compute aliases, it's more likely that
     an earlier tag is included in a later tag, and this will reduce
     the number of iterations.

     If we had a real tag graph, we would just topo-order it and be
     done with it.  */
  qsort (VEC_address (tree, taglist),
	 VEC_length (tree, taglist),
	 sizeof (tree),
	 sort_tags_by_id);

  /* Go through each tag not marked as global, and if it aliases
     global vars, mark it global.

     If the tag contains call clobbered vars, mark it call
     clobbered.

     This loop iterates because tags may appear in the may-aliases
     list of other tags when we group.  */

  while (changed)
    {
      unsigned int k;

      changed = false;
      for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
	{
	  VEC (tree, gc) *ma;
	  unsigned int i;
	  tree entry;
	  bool tagcc = is_call_clobbered (tag);
	  bool tagglobal = MTAG_GLOBAL (tag);

	  /* Both properties already set; nothing left to learn.  */
	  if (tagcc && tagglobal)
	    continue;

	  ma = may_aliases (tag);
	  if (!ma)
	    continue;

	  for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
	    {
	      /* Call clobbered entries cause the tag to be marked
		 call clobbered.  */
	      if (!tagcc && is_call_clobbered (entry))
		{
		  mark_call_clobbered (tag, var_ann (entry)->escape_mask);
		  tagcc = true;
		  changed = true;
		}

	      /* Global vars cause the tag to be marked global.  */
	      if (!tagglobal && is_global_var (entry))
		{
		  MTAG_GLOBAL (tag) = true;
		  changed = true;
		  tagglobal = true;
		}

	      /* Early exit once both global and cc are set, since the
		 loop can't do any more than that.  */
	      if (tagcc && tagglobal)
		break;
	    }
	}
    }
  VEC_free (tree, heap, taglist);
}
307
/* Set up the initial variable clobbers and globalness.
   When this function completes, only tags whose aliases need to be
   clobbered will be set clobbered.  Tags clobbered because they
   contain call clobbered vars are handled in compute_tag_properties.  */

static void
set_initial_properties (struct alias_info *ai)
{
  unsigned int i;
  referenced_var_iterator rvi;
  tree var;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_global_var (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	{
	  /* Globals without subvariables are clobbered by any call,
	     unless they are read-only.  */
	  if (!unmodifiable_var_p (var))
	    mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
	}
      else if (TREE_CODE (var) == PARM_DECL
	       && default_def (var)
	       && POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* The default def of a pointer argument is assumed to
	     escape: the caller still holds what it points to.  */
	  tree def = default_def (var);
	  get_ptr_info (def)->value_escapes_p = 1;
	  get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
	}
    }

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));

      if (pi->value_escapes_p)
	{
	  /* If PTR escapes then its associated memory tags and
	     pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);

	  if (v_ann->type_mem_tag)
	    mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);

	  if (pi->pt_vars)
	    {
	      bitmap_iterator bi;
	      unsigned int j;
	      EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
		if (!unmodifiable_var_p (referenced_var (j)))
		  mark_call_clobbered (referenced_var (j), pi->escape_mask);
	    }
	}
      /* If the name tag is call clobbered, so is the type tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && v_ann->type_mem_tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);

      /* Name tags and type tags that we don't know where they point
	 to, might point to global memory, and thus, are clobbered.

	 FIXME:  This is not quite right.  They should only be
	 clobbered if value_escapes_p is true, regardless of whether
	 they point to global memory or not.
	 So removing this code and fixing all the bugs would be nice.
	 It is the cause of a bunch of clobbering.  */
      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->is_dereferenced && pi->name_mem_tag)
	{
	  mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (pi->name_mem_tag) = true;
	}

      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->is_dereferenced && v_ann->type_mem_tag)
	{
	  mark_call_clobbered (v_ann->type_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (v_ann->type_mem_tag) = true;
	}
    }
}
394
395 /* Compute which variables need to be marked call clobbered because
396 their tag is call clobbered, and which tags need to be marked
397 global because they contain global variables. */
398
399 static void
400 compute_call_clobbered (struct alias_info *ai)
401 {
402 VEC (tree, heap) *worklist = NULL;
403 VEC(int,heap) *worklist2 = NULL;
404
405 set_initial_properties (ai);
406 init_transitive_clobber_worklist (&worklist, &worklist2);
407 while (VEC_length (tree, worklist) != 0)
408 {
409 tree curr = VEC_pop (tree, worklist);
410 int reason = VEC_pop (int, worklist2);
411
412 mark_call_clobbered (curr, reason);
413 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
414 }
415 VEC_free (tree, heap, worklist);
416 VEC_free (int, heap, worklist2);
417 compute_tag_properties ();
418 }
419
/* Compute may-alias information for every variable referenced in function
   FNDECL.

   Alias analysis proceeds in 3 main phases:

   1- Points-to and escape analysis.

   This phase walks the use-def chains in the SSA web looking for three
   things:

	* Assignments of the form P_i = &VAR
	* Assignments of the form P_i = malloc()
	* Pointers and ADDR_EXPR that escape the current function.

   The concept of 'escaping' is the same one used in the Java world.  When
   a pointer or an ADDR_EXPR escapes, it means that it has been exposed
   outside of the current function.  So, assignment to global variables,
   function arguments and returning a pointer are all escape sites, as are
   conversions between pointers and integers.

   This is where we are currently limited.  Since not everything is renamed
   into SSA, we lose track of escape properties when a pointer is stashed
   inside a field in a structure, for instance.  In those cases, we are
   assuming that the pointer does escape.

   We use escape analysis to determine whether a variable is
   call-clobbered.  Simply put, if an ADDR_EXPR escapes, then the variable
   is call-clobbered.  If a pointer P_i escapes, then all the variables
   pointed-to by P_i (and its memory tag) also escape.

   2- Compute flow-sensitive aliases

   We have two classes of memory tags.  Memory tags associated with the
   pointed-to data type of the pointers in the program.  These tags are
   called "type memory tag" (TMT).  The other class are those associated
   with SSA_NAMEs, called "name memory tag" (NMT).  The basic idea is that
   when adding operands for an INDIRECT_REF *P_i, we will first check
   whether P_i has a name tag, if it does we use it, because that will have
   more precise aliasing information.  Otherwise, we use the standard type
   tag.

   In this phase, we go through all the pointers we found in points-to
   analysis and create alias sets for the name memory tags associated with
   each pointer P_i.  If P_i escapes, we mark call-clobbered the variables
   it points to and its tag.


   3- Compute flow-insensitive aliases

   This pass will compare the alias set of every type memory tag and every
   addressable variable found in the program.  Given a type memory tag TMT
   and an addressable variable V.  If the alias sets of TMT and V conflict
   (as computed by may_alias_p), then V is marked as an alias tag and added
   to the alias set of TMT.

   For instance, consider the following function:

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i > 10)
	        p = &a;
	      else
	        p = &b;

	      *p = 3;
	      a = b + 2;
	      return *p;
	    }

   After aliasing analysis has finished, the type memory tag for pointer
   'p' will have two aliases, namely variables 'a' and 'b'.  Every time
   pointer 'p' is dereferenced, we want to mark the operation as a
   potential reference to 'a' and 'b'.

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i_2 > 10)
		p_4 = &a;
	      else
		p_6 = &b;
	      # p_1 = PHI <p_4(1), p_6(2)>;

	      # a_7 = V_MAY_DEF <a_3>;
	      # b_8 = V_MAY_DEF <b_5>;
	      *p_1 = 3;

	      # a_9 = V_MAY_DEF <a_7>
	      # VUSE <b_8>
	      a_9 = b_8 + 2;

	      # VUSE <a_9>;
	      # VUSE <b_8>;
	      return *p_1;
	    }

   In certain cases, the list of may aliases for a pointer may grow too
   large.  This may cause an explosion in the number of virtual operands
   inserted in the code.  Resulting in increased memory consumption and
   compilation time.

   When the number of virtual operands needed to represent aliased
   loads and stores grows too large (configurable with @option{--param
   max-aliased-vops}), alias sets are grouped to avoid severe
   compile-time slow downs and memory consumption.  See group_aliases.  */

static void
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if its passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered type tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  /* Update any statements that were marked modified above so their
     operand caches reflect the new alias information.  */
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  {
	    update_stmt_if_modified (bsi_stmt (bsi));
	  }
      }
  }

}
601
/* Pass descriptor for the may-alias pass; compute_may_aliases is the
   execute hook.  */
struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
620
621
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  /* The pointer whose dereferences are being counted.  */
  tree ptr;
  /* Number of (ALIGN/MISALIGNED_)INDIRECT_REFs of PTR seen so far.  */
  unsigned count;
};
629
630
631 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
632 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
633
634 static tree
635 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
636 {
637 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
638
639 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
640 pointer 'ptr' is *not* dereferenced, it is simply used to compute
641 the address of 'fld' as 'ptr + offsetof(fld)'. */
642 if (TREE_CODE (*tp) == ADDR_EXPR)
643 {
644 *walk_subtrees = 0;
645 return NULL_TREE;
646 }
647
648 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
649 count_p->count++;
650
651 return NULL_TREE;
652 }
653
654
655 /* Count the number of direct and indirect uses for pointer PTR in
656 statement STMT. The two counts are stored in *NUM_USES_P and
657 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
658 least one of those dereferences is a store operation. */
659
660 void
661 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
662 unsigned *num_derefs_p, bool *is_store)
663 {
664 ssa_op_iter i;
665 tree use;
666
667 *num_uses_p = 0;
668 *num_derefs_p = 0;
669 *is_store = false;
670
671 /* Find out the total number of uses of PTR in STMT. */
672 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
673 if (use == ptr)
674 (*num_uses_p)++;
675
676 /* Now count the number of indirect references to PTR. This is
677 truly awful, but we don't have much choice. There are no parent
678 pointers inside INDIRECT_REFs, so an expression like
679 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
680 find all the indirect and direct uses of x_1 inside. The only
681 shortcut we can take is the fact that GIMPLE only allows
682 INDIRECT_REFs inside the expressions below. */
683 if (TREE_CODE (stmt) == MODIFY_EXPR
684 || (TREE_CODE (stmt) == RETURN_EXPR
685 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
686 || TREE_CODE (stmt) == ASM_EXPR
687 || TREE_CODE (stmt) == CALL_EXPR)
688 {
689 tree lhs, rhs;
690
691 if (TREE_CODE (stmt) == MODIFY_EXPR)
692 {
693 lhs = TREE_OPERAND (stmt, 0);
694 rhs = TREE_OPERAND (stmt, 1);
695 }
696 else if (TREE_CODE (stmt) == RETURN_EXPR)
697 {
698 tree e = TREE_OPERAND (stmt, 0);
699 lhs = TREE_OPERAND (e, 0);
700 rhs = TREE_OPERAND (e, 1);
701 }
702 else if (TREE_CODE (stmt) == ASM_EXPR)
703 {
704 lhs = ASM_OUTPUTS (stmt);
705 rhs = ASM_INPUTS (stmt);
706 }
707 else
708 {
709 lhs = NULL_TREE;
710 rhs = stmt;
711 }
712
713 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
714 {
715 struct count_ptr_d count;
716 count.ptr = ptr;
717 count.count = 0;
718 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
719 *is_store = true;
720 *num_derefs_p = count.count;
721 }
722
723 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
724 {
725 struct count_ptr_d count;
726 count.ptr = ptr;
727 count.count = 0;
728 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
729 *num_derefs_p += count.count;
730 }
731 }
732
733 gcc_assert (*num_uses_p >= *num_derefs_p);
734 }
735
/* Initialize the data structures used for alias analysis.  Allocates a
   fresh struct alias_info; if a previous aliasing run left information
   behind (aliases_computed_p), clear it first so this run starts from
   a clean slate.  The caller releases the result with
   delete_alias_info.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  bitmap_obstack_initialize (&alias_obstack);
  ai = XCNEW (struct alias_info);
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
  ai->written_vars = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      unsigned i;

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	{
	  var_ann_t ann = var_ann (var);

	  ann->is_alias_tag = 0;
	  ann->may_aliases = NULL;
	  NUM_REFERENCES_CLEAR (ann);

	  /* Since we are about to re-discover call-clobbered
	     variables, clear the call-clobbered flag.  Variables that
	     are intrinsically call-clobbered (globals, local statics,
	     etc) will not be marked by the aliasing code, so we can't
	     remove them from CALL_CLOBBERED_VARS.

	     NB: STRUCT_FIELDS are still call clobbered if they are for
	     a global variable, so we *don't* clear their call clobberedness
	     just because they are tags, though we will clear it if they
	     aren't for global variables.  */
	  if (TREE_CODE (var) == NAME_MEMORY_TAG
	      || TREE_CODE (var) == TYPE_MEMORY_TAG
	      || !is_global_var (var))
	    clear_call_clobbered (var);
	}

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_null = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}
821
822
823 /* Deallocate memory used by alias analysis. */
824
825 static void
826 delete_alias_info (struct alias_info *ai)
827 {
828 size_t i;
829 referenced_var_iterator rvi;
830 tree var;
831
832 sbitmap_free (ai->ssa_names_visited);
833 ai->processed_ptrs = NULL;
834
835 for (i = 0; i < ai->num_addressable_vars; i++)
836 free (ai->addressable_vars[i]);
837
838 FOR_EACH_REFERENCED_VAR(var, rvi)
839 {
840 var_ann_t ann = var_ann (var);
841 NUM_REFERENCES_CLEAR (ann);
842 }
843
844 free (ai->addressable_vars);
845
846 for (i = 0; i < ai->num_pointers; i++)
847 free (ai->pointers[i]);
848 free (ai->pointers);
849
850 BITMAP_FREE (ai->written_vars);
851 BITMAP_FREE (ai->dereferenced_ptrs_store);
852 BITMAP_FREE (ai->dereferenced_ptrs_load);
853 bitmap_obstack_release (&alias_obstack);
854 free (ai);
855
856 delete_points_to_sets ();
857 }
858
/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.  */

static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      /* Skip SSA names that are not pointers or carry no points-to
	 information.  */
      if (!ptr
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || !SSA_NAME_PTR_INFO (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      /* Set pt_anything on the pointers without pt_vars filled in so
	 that they are assigned a type tag.  */

      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
	VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
	set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      size_t j;
      tree ptr2;
      tree old_name_tag = pi->name_mem_tag;

      /* If PTR points to a set of variables, check if we don't
	 have another pointer Q with the same points-to set before
	 creating a tag.  If so, use Q's tag instead of creating a
	 new one.

	 This is important for not creating unnecessary symbols
	 and also for copy propagation.  If we ever need to
	 propagate PTR into Q or vice-versa, we would run into
	 problems if they both had different name tags because
	 they would have different SSA version numbers (which
	 would force us to take the name tags in and out of SSA).  */
      for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
	{
	  struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);

	  if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
	    {
	      pi->name_mem_tag = qi->name_mem_tag;
	      break;
	    }
	}

      /* If we didn't find a pointer with the same points-to set
	 as PTR, create a new name tag if needed.  */
      if (pi->name_mem_tag == NULL_TREE)
	pi->name_mem_tag = get_nmt_for (ptr);

      /* If the new name tag computed for PTR is different than
	 the old name tag that it used to have, then the old tag
	 needs to be removed from the IL, so we mark it for
	 renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
	mark_sym_for_renaming (old_name_tag);

      /* A volatile pointed-to type makes accesses through the tag
	 volatile as well.  */
      TREE_THIS_VOLATILE (pi->name_mem_tag)
	|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }

  VEC_free (tree, heap, with_ptvars);
}
962
963
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Resolve points-to sets; pointers we cannot resolve fall back to
     pointing anywhere.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      if (!find_what_p_points_to (ptr))
	set_pt_anything (ptr);
    }

  create_name_tags ();

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      unsigned j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
      bitmap_iterator bi;

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
	  {
	    add_may_alias (pi->name_mem_tag, referenced_var (j));
	    add_may_alias (v_ann->type_mem_tag, referenced_var (j));
	  }
    }
}
1006
1007
1008 /* Compute type-based alias sets. Traverse all the pointers and
1009 addressable variables found in setup_pointers_and_addressables.
1010
1011 For every pointer P in AI->POINTERS and addressable variable V in
1012 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's type
1013 memory tag (TMT) if their alias sets conflict. V is then marked as
1014 an alias tag so that the operand scanner knows that statements
1015 containing V have aliased operands. */
1016
static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --params max-alias-vops, we enable alias
     grouping.  */
  ai->total_alias_vops = 0;

  /* For every pointer P, determine which addressable variables may alias
     with P's type memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = var_ann (p_map->var)->type_mem_tag;
      var_ann_t tag_ann = var_ann (tag);

      /* Start this tag's per-pointer vop counter and alias bitmap from
	 scratch; both feed the grouping heuristic in group_aliases.  */
      p_map->total_alias_vops = 0;
      p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  tree var;
	  bool tag_stored_p, var_stored_p;

	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.

	     Note this is effectively random accessing elements in
	     the sparse bitset, which can be highly inefficient.
	     So we first check the call_clobbered status of the
	     tag and variable before querying the bitmap.  */
	  tag_stored_p = is_call_clobbered (tag)
	    || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
	  var_stored_p = is_call_clobbered (var)
	    || bitmap_bit_p (ai->written_vars, DECL_UID (var));
	  if (!tag_stored_p && !var_stored_p)
	    continue;

	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
	    {
	      size_t num_tag_refs, num_var_refs;

	      num_tag_refs = NUM_REFERENCES (tag_ann);
	      num_var_refs = NUM_REFERENCES (v_ann);

	      /* Add VAR to TAG's may-aliases set.  */

	      /* We should never have a var with subvars here, because
	         they shouldn't get into the set of addressable vars.  */
	      gcc_assert (!var_can_have_subvars (var)
			  || get_subvars_for_var (var) == NULL);

	      add_may_alias (tag, var);

	      /* Update the bitmap used to represent TAG's alias set
	         in case we need to group aliases.  */
	      bitmap_set_bit (p_map->may_aliases, DECL_UID (var));

	      /* Update the total number of virtual operands due to
	         aliasing.  Since we are adding one more alias to TAG's
	         may-aliases set, the total number of virtual operands due
	         to aliasing will be increased by the number of references
	         made to VAR and TAG (every reference to TAG will also
	         count as a reference to VAR).  */
	      ai->total_alias_vops += (num_var_refs + num_tag_refs);
	      p_map->total_alias_vops += (num_var_refs + num_tag_refs);
	    }
	}
    }

  /* Since this analysis is based exclusively on symbols, it fails to
     handle cases where two pointers P and Q have different memory
     tags with conflicting alias set numbers but no aliased symbols in
     common.

     For example, suppose that we have two memory tags TMT.1 and TMT.2
     such that

     		may-aliases (TMT.1) = { a }
     		may-aliases (TMT.2) = { b }

     and the alias set number of TMT.1 conflicts with that of TMT.2.
     Since they don't have symbols in common, loads and stores from
     TMT.1 and TMT.2 will seem independent of each other, which will
     lead to the optimizers making invalid transformations (see
     testsuite/gcc.c-torture/execute/pr15262-[12].c).

     To avoid this problem, we do a final traversal of AI->POINTERS
     looking for pairs of pointers that have no aliased symbols in
     common and yet have conflicting alias set numbers.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map1 = ai->pointers[i];
      tree tag1 = var_ann (p_map1->var)->type_mem_tag;
      bitmap may_aliases1 = p_map1->may_aliases;

      /* Only pairs after I need checking; earlier pairs were handled
	 in a previous iteration.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  struct alias_map_d *p_map2 = ai->pointers[j];
	  tree tag2 = var_ann (p_map2->var)->type_mem_tag;
	  bitmap may_aliases2 = p_map2->may_aliases;

	  /* If the pointers may not point to each other, do nothing.  */
	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
	    continue;

	  /* The two pointers may alias each other.  If they already have
	     symbols in common, do nothing.  */
	  if (bitmap_intersect_p (may_aliases1, may_aliases2))
	    continue;

	  if (!bitmap_empty_p (may_aliases2))
	    {
	      unsigned int k;
	      bitmap_iterator bi;

	      /* Add all the aliases for TAG2 into TAG1's alias set.
		 FIXME, update grouping heuristic counters.  */
	      EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
		add_may_alias (tag1, referenced_var (k));
	      bitmap_ior_into (may_aliases1, may_aliases2);
	    }
	  else
	    {
	      /* Since TAG2 does not have any aliases of its own, add
		 TAG2 itself to the alias set of TAG1.  */
	      add_may_alias (tag1, tag2);
	      bitmap_set_bit (may_aliases1, DECL_UID (tag2));
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops);
}
1168
1169
1170 /* Comparison function for qsort used in group_aliases. */
1171
1172 static int
1173 total_alias_vops_cmp (const void *p, const void *q)
1174 {
1175 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1176 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1177 long n1 = (*p1)->total_alias_vops;
1178 long n2 = (*p2)->total_alias_vops;
1179
1180 /* We want to sort in descending order. */
1181 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1182 }
1183
1184 /* Group all the aliases for TAG to make TAG represent all the
1185 variables in its alias set. Update the total number
1186 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1187 function will make TAG be the unique alias tag for all the
1188 variables in its may-aliases. So, given:
1189
1190 may-aliases(TAG) = { V1, V2, V3 }
1191
1192 This function will group the variables into:
1193
1194 may-aliases(V1) = { TAG }
1195 may-aliases(V2) = { TAG }
1196 may-aliases(V2) = { TAG } */
1197
1198 static void
1199 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1200 {
1201 unsigned int i;
1202 var_ann_t tag_ann = var_ann (tag);
1203 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1204 bitmap_iterator bi;
1205
1206 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1207 {
1208 tree var = referenced_var (i);
1209 var_ann_t ann = var_ann (var);
1210
1211 /* Make TAG the unique alias of VAR. */
1212 ann->is_alias_tag = 0;
1213 ann->may_aliases = NULL;
1214
1215 /* Note that VAR and TAG may be the same if the function has no
1216 addressable variables (see the discussion at the end of
1217 setup_pointers_and_addressables). */
1218 if (var != tag)
1219 add_may_alias (var, tag);
1220
1221 /* Reduce total number of virtual operands contributed
1222 by TAG on behalf of VAR. Notice that the references to VAR
1223 itself won't be removed. We will merely replace them with
1224 references to TAG. */
1225 ai->total_alias_vops -= num_tag_refs;
1226 }
1227
1228 /* We have reduced the number of virtual operands that TAG makes on
1229 behalf of all the variables formerly aliased with it. However,
1230 we have also "removed" all the virtual operands for TAG itself,
1231 so we add them back. */
1232 ai->total_alias_vops += num_tag_refs;
1233
1234 /* TAG no longer has any aliases. */
1235 tag_ann->may_aliases = NULL;
1236 }
1237
1238
1239 /* Group may-aliases sets to reduce the number of virtual operands due
1240 to aliasing.
1241
1242 1- Sort the list of pointers in decreasing number of contributed
1243 virtual operands.
1244
1245 2- Take the first entry in AI->POINTERS and revert the role of
1246 the memory tag and its aliases. Usually, whenever an aliased
1247 variable Vi is found to alias with a memory tag T, we add Vi
1248 to the may-aliases set for T. Meaning that after alias
1249 analysis, we will have:
1250
1251 may-aliases(T) = { V1, V2, V3, ..., Vn }
1252
1253 This means that every statement that references T, will get 'n'
1254 virtual operands for each of the Vi tags. But, when alias
1255 grouping is enabled, we make T an alias tag and add it to the
1256 alias set of all the Vi variables:
1257
1258 may-aliases(V1) = { T }
1259 may-aliases(V2) = { T }
1260 ...
1261 may-aliases(Vn) = { T }
1262
1263 This has two effects: (a) statements referencing T will only get
1264 a single virtual operand, and, (b) all the variables Vi will now
1265 appear to alias each other. So, we lose alias precision to
1266 improve compile time. But, in theory, a program with such a high
1267 level of aliasing should not be very optimizable in the first
1268 place.
1269
1270 3- Since variables may be in the alias set of more than one
1271 memory tag, the grouping done in step (2) needs to be extended
1272 to all the memory tags that have a non-empty intersection with
1273 the may-aliases set of tag T. For instance, if we originally
1274 had these may-aliases sets:
1275
1276 may-aliases(T) = { V1, V2, V3 }
1277 may-aliases(R) = { V2, V4 }
1278
1279 In step (2) we would have reverted the aliases for T as:
1280
1281 may-aliases(V1) = { T }
1282 may-aliases(V2) = { T }
1283 may-aliases(V3) = { T }
1284
1285 But note that now V2 is no longer aliased with R. We could
1286 add R to may-aliases(V2), but we are in the process of
1287 grouping aliases to reduce virtual operands so what we do is
1288 add V4 to the grouping to obtain:
1289
1290 may-aliases(V1) = { T }
1291 may-aliases(V2) = { T }
1292 may-aliases(V3) = { T }
1293 may-aliases(V4) = { T }
1294
1295 4- If the total number of virtual operands due to aliasing is
1296 still above the threshold set by max-alias-vops, go back to (2). */
1297
static void
group_aliases (struct alias_info *ai)
{
  size_t i;

  /* Sort the POINTERS array in descending order of contributed
     virtual operands, so that the worst offenders are grouped
     first.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
	 total_alias_vops_cmp);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
      bitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)
	continue;

      /* See if TAG1 had any aliases in common with other type tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1 (step 3 in the comment above).  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  bitmap tag2_aliases = ai->pointers[j]->may_aliases;

	  if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
	    {
	      tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;

	      bitmap_ior_into (tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      bitmap_clear (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;
	    }
	}

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)
	break;
    }

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = TMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to TMT.20 ...
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'TMT.20' in between.  To prevent this we have to
     replace 'a' with 'TMT.20' in the name tag of p_5.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      size_t j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      VEC(tree,gc) *aliases;
      tree alias;

      if (name_tag == NULL_TREE)
	continue;

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
	{
	  var_ann_t ann = var_ann (alias);

	  /* A non-tag alias (or a structure field tag) that itself
	     has aliases was grouped above; redirect the name tag to
	     the grouped variable's unique alias.  */
	  if ((!MTAG_P (alias)
	       || TREE_CODE (alias) == STRUCT_FIELD_TAG)
	      && ann->may_aliases)
	    {
	      tree new_alias;

	      /* Grouping left every grouped variable with exactly one
		 alias (its group's tag).  */
	      gcc_assert (VEC_length (tree, ann->may_aliases) == 1);

	      new_alias = VEC_index (tree, ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file,
	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}
1406
1407
1408 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1409
1410 static void
1411 create_alias_map_for (tree var, struct alias_info *ai)
1412 {
1413 struct alias_map_d *alias_map;
1414 alias_map = XCNEW (struct alias_map_d);
1415 alias_map->var = var;
1416 alias_map->set = get_alias_set (var);
1417 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1418 }
1419
1420
1421 /* Create memory tags for all the dereferenced pointers and build the
1422 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1423 sets. Based on the address escape and points-to information collected
1424 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1425 variables whose address is not needed anymore. */
1426
1427 static void
1428 setup_pointers_and_addressables (struct alias_info *ai)
1429 {
1430 size_t n_vars, num_addressable_vars, num_pointers;
1431 referenced_var_iterator rvi;
1432 tree var;
1433 VEC (tree, heap) *varvec = NULL;
1434 safe_referenced_var_iterator srvi;
1435
1436 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1437 num_addressable_vars = num_pointers = 0;
1438
1439 FOR_EACH_REFERENCED_VAR (var, rvi)
1440 {
1441 if (may_be_aliased (var))
1442 num_addressable_vars++;
1443
1444 if (POINTER_TYPE_P (TREE_TYPE (var)))
1445 {
1446 /* Since we don't keep track of volatile variables, assume that
1447 these pointers are used in indirect store operations. */
1448 if (TREE_THIS_VOLATILE (var))
1449 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1450
1451 num_pointers++;
1452 }
1453 }
1454
1455 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1456 always going to be slightly bigger than we actually need them
1457 because some TREE_ADDRESSABLE variables will be marked
1458 non-addressable below and only pointers with unique type tags are
1459 going to be added to POINTERS. */
1460 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1461 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1462 ai->num_addressable_vars = 0;
1463 ai->num_pointers = 0;
1464
1465 /* Since we will be creating type memory tags within this loop, cache the
1466 value of NUM_REFERENCED_VARS to avoid processing the additional tags
1467 unnecessarily. */
1468 n_vars = num_referenced_vars;
1469
1470 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1471 {
1472 var_ann_t v_ann = var_ann (var);
1473 subvar_t svars;
1474
1475 /* Name memory tags already have flow-sensitive aliasing
1476 information, so they need not be processed by
1477 compute_flow_insensitive_aliasing. Similarly, type memory
1478 tags are already accounted for when we process their
1479 associated pointer.
1480
1481 Structure fields, on the other hand, have to have some of this
1482 information processed for them, but it's pointless to mark them
1483 non-addressable (since they are fake variables anyway). */
1484 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1485 continue;
1486
1487 /* Remove the ADDRESSABLE flag from every addressable variable whose
1488 address is not needed anymore. This is caused by the propagation
1489 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1490 removal of dead pointer assignments done by the early scalar
1491 cleanup passes. */
1492 if (TREE_ADDRESSABLE (var))
1493 {
1494 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1495 && TREE_CODE (var) != RESULT_DECL
1496 && !is_global_var (var))
1497 {
1498 bool okay_to_mark = true;
1499
1500 /* Since VAR is now a regular GIMPLE register, we will need
1501 to rename VAR into SSA afterwards. */
1502 mark_sym_for_renaming (var);
1503
1504 /* If VAR can have sub-variables, and any of its
1505 sub-variables has its address taken, then we cannot
1506 remove the addressable flag from VAR. */
1507 if (var_can_have_subvars (var)
1508 && (svars = get_subvars_for_var (var)))
1509 {
1510 subvar_t sv;
1511
1512 for (sv = svars; sv; sv = sv->next)
1513 {
1514 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1515 okay_to_mark = false;
1516 mark_sym_for_renaming (sv->var);
1517 }
1518 }
1519
1520 /* The address of VAR is not needed, remove the
1521 addressable bit, so that it can be optimized as a
1522 regular variable. */
1523 if (okay_to_mark)
1524 mark_non_addressable (var);
1525 }
1526 }
1527
1528 /* Global variables and addressable locals may be aliased. Create an
1529 entry in ADDRESSABLE_VARS for VAR. */
1530 if (may_be_aliased (var)
1531 && (!var_can_have_subvars (var)
1532 || get_subvars_for_var (var) == NULL))
1533 {
1534 create_alias_map_for (var, ai);
1535 mark_sym_for_renaming (var);
1536 }
1537
1538 /* Add pointer variables that have been dereferenced to the POINTERS
1539 array and create a type memory tag for them. */
1540 if (POINTER_TYPE_P (TREE_TYPE (var)))
1541 {
1542 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1543 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1544 {
1545 tree tag;
1546 var_ann_t t_ann;
1547
1548 /* If pointer VAR still doesn't have a memory tag
1549 associated with it, create it now or re-use an
1550 existing one. */
1551 tag = get_tmt_for (var, ai);
1552 t_ann = var_ann (tag);
1553
1554 /* The type tag will need to be renamed into SSA
1555 afterwards. Note that we cannot do this inside
1556 get_tmt_for because aliasing may run multiple times
1557 and we only create type tags the first time. */
1558 mark_sym_for_renaming (tag);
1559
1560 /* Similarly, if pointer VAR used to have another type
1561 tag, we will need to process it in the renamer to
1562 remove the stale virtual operands. */
1563 if (v_ann->type_mem_tag)
1564 mark_sym_for_renaming (v_ann->type_mem_tag);
1565
1566 /* Associate the tag with pointer VAR. */
1567 v_ann->type_mem_tag = tag;
1568
1569 /* If pointer VAR has been used in a store operation,
1570 then its memory tag must be marked as written-to. */
1571 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1572 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1573
1574 /* All the dereferences of pointer VAR count as
1575 references of TAG. Since TAG can be associated with
1576 several pointers, add the dereferences of VAR to the
1577 TAG. */
1578 NUM_REFERENCES_SET (t_ann,
1579 NUM_REFERENCES (t_ann)
1580 + NUM_REFERENCES (v_ann));
1581 }
1582 else
1583 {
1584 /* The pointer has not been dereferenced. If it had a
1585 type memory tag, remove it and mark the old tag for
1586 renaming to remove it out of the IL. */
1587 var_ann_t ann = var_ann (var);
1588 tree tag = ann->type_mem_tag;
1589 if (tag)
1590 {
1591 mark_sym_for_renaming (tag);
1592 ann->type_mem_tag = NULL_TREE;
1593 }
1594 }
1595 }
1596 }
1597 VEC_free (tree, heap, varvec);
1598 }
1599
1600
1601 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1602 every call site, we need to emit V_MAY_DEF expressions to represent the
1603 clobbering effects of the call for variables whose address escapes the
1604 current function.
1605
1606 One approach is to group all call-clobbered variables into a single
1607 representative that is used as an alias of every call-clobbered variable
   (.GLOBAL_VAR).  This works well, but it ties the optimizer's hands because
   a reference to any call-clobbered variable is a reference to .GLOBAL_VAR.
1610
1611 The second approach is to emit a clobbering V_MAY_DEF for every
1612 call-clobbered variable at call sites. This is the preferred way in terms
1613 of optimization opportunities but it may create too many V_MAY_DEF operands
1614 if there are many call clobbered variables and function calls in the
1615 function.
1616
1617 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1618 function calls found by the number of call-clobbered variables. If that
1619 product is beyond a certain threshold, as determined by the parameterized
1620 values shown below, we use .GLOBAL_VAR.
1621
1622 FIXME. This heuristic should be improved. One idea is to use several
1623 .GLOBAL_VARs of different types instead of a single one. The thresholds
1624 have been derived from a typical bootstrap cycle, including all target
   libraries.  Compile times were found to increase by ~1% compared to using
1626 .GLOBAL_VAR. */
1627
static void
maybe_create_global_var (struct alias_info *ai)
{
  unsigned i, n_clobbered;
  bitmap_iterator bi;

  /* No need to create it, if we have one already.  */
  if (global_var == NULL_TREE)
    {
      /* Count all the call-clobbered variables.  */
      n_clobbered = 0;
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  n_clobbered++;
	}

      /* If the number of virtual operands that would be needed to
	 model all the call-clobbered variables is larger than
	 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.

	 Also create .GLOBAL_VAR if there are no call-clobbered
	 variables and the program contains a mixture of pure/const
	 and regular function calls.  This is to avoid the problem
	 described in PR 20115:

	      int X;
	      int func_pure (void) { return X; }
	      int func_non_pure (int a) { X += a; }
	      int foo ()
	      {
	 	int a = func_pure ();
		func_non_pure (a);
		a = func_pure ();
		return a;
	      }

	 Since foo() has no call-clobbered variables, there is
	 no relationship between the calls to func_pure and
	 func_non_pure.  Since func_pure has no side-effects, value
	 numbering optimizations elide the second call to func_pure.
	 So, if we have some pure/const and some regular calls in the
	 program we create .GLOBAL_VAR to avoid missing these
	 relations.  */
      if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
	  || (n_clobbered == 0
	      && ai->num_calls_found > 0
	      && ai->num_pure_const_calls_found > 0
	      && ai->num_calls_found > ai->num_pure_const_calls_found))
	create_global_var ();
    }

  /* Mark all call-clobbered symbols for renaming.  Since the initial
     rewrite into SSA ignored all call sites, we may need to rename
     .GLOBAL_VAR and the call-clobbered variables.   */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);

      /* If the function has calls to clobbering functions and
	 .GLOBAL_VAR has been created, make it an alias for all
	 call-clobbered variables.  */
      if (global_var && var != global_var)
	{
	  subvar_t svars;
	  add_may_alias (var, global_var);
	  /* Sub-variables of VAR alias through it, so they must be
	     renamed as well.  */
	  if (var_can_have_subvars (var)
	      && (svars = get_subvars_for_var (var)))
	    {
	      subvar_t sv;
	      for (sv = svars; sv; sv = sv->next)
		mark_sym_for_renaming (sv->var);
	    }
	}

      mark_sym_for_renaming (var);
    }
}
1705
1706
1707 /* Return TRUE if pointer PTR may point to variable VAR.
1708
1709 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
1710 This is needed because when checking for type conflicts we are
1711 interested in the alias set of the memory location pointed-to by
1712 PTR. The alias set of PTR itself is irrelevant.
1713
1714 VAR_ALIAS_SET is the alias set for VAR. */
1715
static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set,
	     bool alias_set_only)
{
  tree mem;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  The comparison is
     made against PTR's type memory tag, which stands for the memory
     PTR points to.  */
  mem = var_ann (ptr)->type_mem_tag;
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 1, pointer arguments may
     not point to global variables.  */
  if (flag_argument_noalias > 1 && is_global_var (var)
      && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If either MEM or VAR is a read-only global and the other one
     isn't, then PTR cannot point to VAR.  */
  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  gcc_assert (TREE_CODE (mem) == TYPE_MEMORY_TAG);

  alias_stats.tbaa_queries++;

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
    {
      alias_stats.alias_noalias++;
      alias_stats.tbaa_resolved++;
      return false;
    }

  /* If VAR is a record or union type, PTR cannot point into VAR
     unless there is some explicit address operation in the
     program that can reference a field of the type pointed-to by PTR.
     This also assumes that the types of both VAR and PTR are
     contained within the compilation unit, and that there is no fancy
     addressing arithmetic associated with any of the types
     involved.  */

  if ((mem_alias_set != 0) && (var_alias_set != 0))
    {
      tree ptr_type = TREE_TYPE (ptr);
      tree var_type = TREE_TYPE (var);

      /* The star count is -1 if the type at the end of the pointer_to
	 chain is not a record or union type.  */
      if ((!alias_set_only) &&
	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
	{
	  int ptr_star_count = 0;

	  /* ipa_type_escape_star_count_of_interesting_type is a little too
	     restrictive for the pointer type, need to allow pointers to
	     primitive types as long as those types cannot be pointers
	     to everything.  */
	  while (POINTER_TYPE_P (ptr_type))
	    /* Strip the *'s off.  */
	    {
	      ptr_type = TREE_TYPE (ptr_type);
	      ptr_star_count++;
	    }

	  /* There does not appear to be a better test to see if the
	     pointer type was one of the pointer to everything
	     types.  */

	  if (ptr_star_count > 0)
	    {
	      alias_stats.structnoaddress_queries++;
	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
							    TREE_TYPE (ptr)))
		{
		  alias_stats.structnoaddress_resolved++;
		  alias_stats.alias_noalias++;
		  return false;
		}
	    }
	  else if (ptr_star_count == 0)
	    {
	      /* If PTR_TYPE was not really a pointer to type, it cannot
		 alias.  */
	      alias_stats.structnoaddress_queries++;
	      alias_stats.structnoaddress_resolved++;
	      alias_stats.alias_noalias++;
	      return false;
	    }
	}
    }

  /* No disambiguation succeeded; conservatively assume aliasing.  */
  alias_stats.alias_mayalias++;
  return true;
}
1828
1829
1830 /* Add ALIAS to the set of variables that may alias VAR. */
1831
static void
add_may_alias (tree var, tree alias)
{
  size_t i;
  var_ann_t v_ann = get_var_ann (var);
  var_ann_t a_ann = get_var_ann (alias);
  tree al;

  /* Don't allow self-referential aliases.  */
  gcc_assert (var != alias);

  /* ALIAS must be addressable if it's being added to an alias set.
     NOTE(review): the assertion form below is disabled in favor of
     forcing the addressable bit on, which papers over callers that
     pass non-addressable symbols -- confirm callers before switching
     to the assert.  */
#if 1
  TREE_ADDRESSABLE (alias) = 1;
#else
  gcc_assert (may_be_aliased (alias));
#endif

  /* Lazily allocate the may-aliases vector on first use.  */
  if (v_ann->may_aliases == NULL)
    v_ann->may_aliases = VEC_alloc (tree, gc, 2);

  /* Avoid adding duplicates.  */
  for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
    if (alias == al)
      return;

  VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
  a_ann->is_alias_tag = 1;
}
1861
1862
1863 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1864
1865 static void
1866 replace_may_alias (tree var, size_t i, tree new_alias)
1867 {
1868 var_ann_t v_ann = var_ann (var);
1869 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
1870 }
1871
1872
1873 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1874
1875 static void
1876 set_pt_anything (tree ptr)
1877 {
1878 struct ptr_info_def *pi = get_ptr_info (ptr);
1879
1880 pi->pt_anything = 1;
1881 pi->pt_vars = NULL;
1882
1883 /* The pointer used to have a name tag, but we now found it pointing
1884 to an arbitrary location. The name tag needs to be renamed and
1885 disassociated from PTR. */
1886 if (pi->name_mem_tag)
1887 {
1888 mark_sym_for_renaming (pi->name_mem_tag);
1889 pi->name_mem_tag = NULL_TREE;
1890 }
1891 }
1892
1893
1894 /* Return true if STMT is an "escape" site from the current function. Escape
   sites are those statements that might expose the address of a variable
1896 outside the current function. STMT is an escape site iff:
1897
1898 1- STMT is a function call, or
1899 2- STMT is an __asm__ expression, or
1900 3- STMT is an assignment to a non-local variable, or
1901 4- STMT is a return statement.
1902
1903 AI points to the alias information collected so far.
1904
1905 Return the type of escape site found, if we found one, or NO_ESCAPE
1906 if none. */
1907
enum escape_type
is_escape_site (tree stmt, struct alias_info *ai)
{
  /* Check for a call first: this must take precedence over the
     MODIFY_EXPR case below, since a statement can be both.  */
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
    {
      ai->num_calls_found++;

      /* Calls without side effects cannot clobber memory; count them
	 separately (used by maybe_create_global_var).  */
      if (!TREE_SIDE_EFFECTS (call))
	{
	  ai->num_pure_const_calls_found++;
	  return ESCAPE_TO_PURE_CONST;
	}

      return ESCAPE_TO_CALL;
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      tree lhs = TREE_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      /* If the RHS is a conversion between a pointer and an integer, the
	 pointer escapes since we can't track the integer.  */
      if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
	   || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
					(TREE_OPERAND (stmt, 1), 0)))
	  && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
	return ESCAPE_BAD_CAST;

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	return NO_ESCAPE;

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;

  return NO_ESCAPE;
}
1970
1971 /* Create a new memory tag of type TYPE.
1972 Does NOT push it into the current binding. */
1973
1974 static tree
1975 create_tag_raw (enum tree_code code, tree type, const char *prefix)
1976 {
1977 tree tmp_var;
1978 tree new_type;
1979
1980 /* Make the type of the variable writable. */
1981 new_type = build_type_variant (type, 0, 0);
1982 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
1983
1984 tmp_var = build_decl (code, create_tmp_var_name (prefix),
1985 type);
1986 /* Make the variable writable. */
1987 TREE_READONLY (tmp_var) = 0;
1988
1989 /* It doesn't start out global. */
1990 MTAG_GLOBAL (tmp_var) = 0;
1991 TREE_STATIC (tmp_var) = 0;
1992 TREE_USED (tmp_var) = 1;
1993
1994 return tmp_var;
1995 }
1996
1997 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
1998 is considered to represent all the pointers whose pointed-to types are
1999 in the same alias set class. Otherwise, the tag represents a single
2000 SSA_NAME pointer variable. */
2001
2002 static tree
2003 create_memory_tag (tree type, bool is_type_tag)
2004 {
2005 var_ann_t ann;
2006 tree tag = create_tag_raw (is_type_tag ? TYPE_MEMORY_TAG : NAME_MEMORY_TAG,
2007 type, (is_type_tag) ? "TMT" : "NMT");
2008
2009 /* By default, memory tags are local variables. Alias analysis will
2010 determine whether they should be considered globals. */
2011 DECL_CONTEXT (tag) = current_function_decl;
2012
2013 /* Memory tags are by definition addressable. */
2014 TREE_ADDRESSABLE (tag) = 1;
2015
2016 ann = get_var_ann (tag);
2017 ann->type_mem_tag = NULL_TREE;
2018
2019 /* Add the tag to the symbol table. */
2020 add_referenced_tmp_var (tag);
2021
2022 return tag;
2023 }
2024
2025
2026 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2027 This is used if P_i has been found to point to a specific set of
2028 variables or to a non-aliased memory location like the address returned
2029 by malloc functions. */
2030
2031 static tree
2032 get_nmt_for (tree ptr)
2033 {
2034 struct ptr_info_def *pi = get_ptr_info (ptr);
2035 tree tag = pi->name_mem_tag;
2036
2037 if (tag == NULL_TREE)
2038 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2039 return tag;
2040 }
2041
2042
/* Return the type memory tag associated to pointer PTR.  A memory tag is an
   artificial variable that represents the memory location pointed-to by
   PTR.  It is used to model the effects of pointer de-references on
   addressable variables.

   AI points to the data gathered during alias analysis.  This function
   populates the array AI->POINTERS (each call may append one entry, so
   it has a side effect on AI).  */

static tree
get_tmt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = var_ann (curr->var)->type_mem_tag;
      /* An existing tag is reused only when both the alias set and the
	 read-only-ness of the pointed-to type match.  */
      if (tag_set == curr->set
	  && TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (curr_tag)))
	{
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a type tag already, create a new TMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->type_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
      else
	tag = var_ann (ptr)->type_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the type tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));

  /* If PTR's pointed-to type is read-only, then TAG's type must also
     be read-only.  */
  gcc_assert (TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (tag)));

  return tag;
}
2115
2116
2117 /* Create GLOBAL_VAR, an artificial global variable to act as a
2118 representative of all the variables that may be clobbered by function
2119 calls. */
2120
2121 static void
2122 create_global_var (void)
2123 {
2124 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2125 void_type_node);
2126 DECL_ARTIFICIAL (global_var) = 1;
2127 TREE_READONLY (global_var) = 0;
2128 DECL_EXTERNAL (global_var) = 1;
2129 TREE_STATIC (global_var) = 1;
2130 TREE_USED (global_var) = 1;
2131 DECL_CONTEXT (global_var) = NULL_TREE;
2132 TREE_THIS_VOLATILE (global_var) = 0;
2133 TREE_ADDRESSABLE (global_var) = 0;
2134
2135 create_var_ann (global_var);
2136 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2137 add_referenced_tmp_var (global_var);
2138 mark_sym_for_renaming (global_var);
2139 }
2140
2141
2142 /* Dump alias statistics on FILE. */
2143
2144 static void
2145 dump_alias_stats (FILE *file)
2146 {
2147 const char *funcname
2148 = lang_hooks.decl_printable_name (current_function_decl, 2);
2149 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2150 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2151 fprintf (file, "Total alias mayalias results:\t%u\n",
2152 alias_stats.alias_mayalias);
2153 fprintf (file, "Total alias noalias results:\t%u\n",
2154 alias_stats.alias_noalias);
2155 fprintf (file, "Total simple queries:\t%u\n",
2156 alias_stats.simple_queries);
2157 fprintf (file, "Total simple resolved:\t%u\n",
2158 alias_stats.simple_resolved);
2159 fprintf (file, "Total TBAA queries:\t%u\n",
2160 alias_stats.tbaa_queries);
2161 fprintf (file, "Total TBAA resolved:\t%u\n",
2162 alias_stats.tbaa_resolved);
2163 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2164 alias_stats.structnoaddress_queries);
2165 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2166 alias_stats.structnoaddress_resolved);
2167 }
2168
2169
2170 /* Dump alias information on FILE. */
2171
2172 void
2173 dump_alias_info (FILE *file)
2174 {
2175 size_t i;
2176 const char *funcname
2177 = lang_hooks.decl_printable_name (current_function_decl, 2);
2178 referenced_var_iterator rvi;
2179 tree var;
2180
2181 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2182
2183 fprintf (file, "Aliased symbols\n\n");
2184
2185 FOR_EACH_REFERENCED_VAR (var, rvi)
2186 {
2187 if (may_be_aliased (var))
2188 dump_variable (file, var);
2189 }
2190
2191 fprintf (file, "\nDereferenced pointers\n\n");
2192
2193 FOR_EACH_REFERENCED_VAR (var, rvi)
2194 {
2195 var_ann_t ann = var_ann (var);
2196 if (ann->type_mem_tag)
2197 dump_variable (file, var);
2198 }
2199
2200 fprintf (file, "\nType memory tags\n\n");
2201
2202 FOR_EACH_REFERENCED_VAR (var, rvi)
2203 {
2204 if (TREE_CODE (var) == TYPE_MEMORY_TAG)
2205 dump_variable (file, var);
2206 }
2207
2208 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2209
2210 fprintf (file, "SSA_NAME pointers\n\n");
2211 for (i = 1; i < num_ssa_names; i++)
2212 {
2213 tree ptr = ssa_name (i);
2214 struct ptr_info_def *pi;
2215
2216 if (ptr == NULL_TREE)
2217 continue;
2218
2219 pi = SSA_NAME_PTR_INFO (ptr);
2220 if (!SSA_NAME_IN_FREE_LIST (ptr)
2221 && pi
2222 && pi->name_mem_tag)
2223 dump_points_to_info_for (file, ptr);
2224 }
2225
2226 fprintf (file, "\nName memory tags\n\n");
2227
2228 FOR_EACH_REFERENCED_VAR (var, rvi)
2229 {
2230 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2231 dump_variable (file, var);
2232 }
2233
2234 fprintf (file, "\n");
2235 }
2236
2237
/* Dump alias information on stderr.  Convenience wrapper intended to
   be called from a debugger.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
2245
2246
2247 /* Return the alias information associated with pointer T. It creates a
2248 new instance if none existed. */
2249
2250 struct ptr_info_def *
2251 get_ptr_info (tree t)
2252 {
2253 struct ptr_info_def *pi;
2254
2255 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2256
2257 pi = SSA_NAME_PTR_INFO (t);
2258 if (pi == NULL)
2259 {
2260 pi = GGC_NEW (struct ptr_info_def);
2261 memset ((void *)pi, 0, sizeof (*pi));
2262 SSA_NAME_PTR_INFO (t) = pi;
2263 }
2264
2265 return pi;
2266 }
2267
2268
2269 /* Dump points-to information for SSA_NAME PTR into FILE. */
2270
2271 void
2272 dump_points_to_info_for (FILE *file, tree ptr)
2273 {
2274 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2275
2276 print_generic_expr (file, ptr, dump_flags);
2277
2278 if (pi)
2279 {
2280 if (pi->name_mem_tag)
2281 {
2282 fprintf (file, ", name memory tag: ");
2283 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2284 }
2285
2286 if (pi->is_dereferenced)
2287 fprintf (file, ", is dereferenced");
2288
2289 if (pi->value_escapes_p)
2290 fprintf (file, ", its value escapes");
2291
2292 if (pi->pt_anything)
2293 fprintf (file, ", points-to anything");
2294
2295 if (pi->pt_null)
2296 fprintf (file, ", points-to NULL");
2297
2298 if (pi->pt_vars)
2299 {
2300 unsigned ix;
2301 bitmap_iterator bi;
2302
2303 fprintf (file, ", points-to vars: { ");
2304 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2305 {
2306 print_generic_expr (file, referenced_var (ix), dump_flags);
2307 fprintf (file, " ");
2308 }
2309 fprintf (file, "}");
2310 }
2311 }
2312
2313 fprintf (file, "\n");
2314 }
2315
2316
/* Dump points-to information for VAR into stderr.  Convenience wrapper
   intended to be called from a debugger.  */

void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
2324
2325
2326 /* Dump points-to information into FILE. NOTE: This function is slow, as
2327 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2328
2329 void
2330 dump_points_to_info (FILE *file)
2331 {
2332 basic_block bb;
2333 block_stmt_iterator si;
2334 ssa_op_iter iter;
2335 const char *fname =
2336 lang_hooks.decl_printable_name (current_function_decl, 2);
2337 referenced_var_iterator rvi;
2338 tree var;
2339
2340 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2341
2342 /* First dump points-to information for the default definitions of
2343 pointer variables. This is necessary because default definitions are
2344 not part of the code. */
2345 FOR_EACH_REFERENCED_VAR (var, rvi)
2346 {
2347 if (POINTER_TYPE_P (TREE_TYPE (var)))
2348 {
2349 tree def = default_def (var);
2350 if (def)
2351 dump_points_to_info_for (file, def);
2352 }
2353 }
2354
2355 /* Dump points-to information for every pointer defined in the program. */
2356 FOR_EACH_BB (bb)
2357 {
2358 tree phi;
2359
2360 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2361 {
2362 tree ptr = PHI_RESULT (phi);
2363 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2364 dump_points_to_info_for (file, ptr);
2365 }
2366
2367 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2368 {
2369 tree stmt = bsi_stmt (si);
2370 tree def;
2371 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2372 if (POINTER_TYPE_P (TREE_TYPE (def)))
2373 dump_points_to_info_for (file, def);
2374 }
2375 }
2376
2377 fprintf (file, "\n");
2378 }
2379
2380
/* Dump points-to information for the current function into stderr.
   Convenience wrapper intended to be called from a debugger.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
2388
2389 /* Dump to FILE the list of variables that may be aliasing VAR. */
2390
2391 void
2392 dump_may_aliases_for (FILE *file, tree var)
2393 {
2394 VEC(tree, gc) *aliases;
2395
2396 if (TREE_CODE (var) == SSA_NAME)
2397 var = SSA_NAME_VAR (var);
2398
2399 aliases = var_ann (var)->may_aliases;
2400 if (aliases)
2401 {
2402 size_t i;
2403 tree al;
2404 fprintf (file, "{ ");
2405 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2406 {
2407 print_generic_expr (file, al, dump_flags);
2408 fprintf (file, " ");
2409 }
2410 fprintf (file, "}");
2411 }
2412 }
2413
2414
/* Dump to stderr the list of variables that may be aliasing VAR.
   Convenience wrapper intended to be called from a debugger.  */

void
debug_may_aliases_for (tree var)
{
  dump_may_aliases_for (stderr, var);
}
2422
2423 /* Return true if VAR may be aliased. */
2424
2425 bool
2426 may_be_aliased (tree var)
2427 {
2428 /* Obviously. */
2429 if (TREE_ADDRESSABLE (var))
2430 return true;
2431
2432 /* Globally visible variables can have their addresses taken by other
2433 translation units. */
2434
2435 if (MTAG_P (var)
2436 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2437 return true;
2438 else if (!MTAG_P (var)
2439 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2440 return true;
2441
2442 /* Automatic variables can't have their addresses escape any other way.
2443 This must be after the check for global variables, as extern declarations
2444 do not have TREE_STATIC set. */
2445 if (!TREE_STATIC (var))
2446 return false;
2447
2448 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2449 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2450 we can only be sure the variable isn't addressable if it's local to the
2451 current function. */
2452 if (flag_unit_at_a_time)
2453 return false;
2454 if (decl_function_context (var) == current_function_decl)
2455 return false;
2456
2457 return true;
2458 }
2459
2460
2461 /* Given two symbols return TRUE if one is in the alias set of the other. */
2462 bool
2463 is_aliased_with (tree tag, tree sym)
2464 {
2465 size_t i;
2466 VEC(tree,gc) *aliases;
2467 tree al;
2468
2469 if (var_ann (sym)->is_alias_tag)
2470 {
2471 aliases = var_ann (tag)->may_aliases;
2472
2473 if (aliases == NULL)
2474 return false;
2475
2476 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2477 if (al == sym)
2478 return true;
2479 }
2480 else
2481 {
2482 aliases = var_ann (sym)->may_aliases;
2483
2484 if (aliases == NULL)
2485 return false;
2486
2487 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2488 if (al == tag)
2489 return true;
2490 }
2491
2492 return false;
2493 }
2494
2495
/* Add VAR to the list of may-aliases of PTR's type tag.  If PTR
   doesn't already have a type tag, create one (reusing another
   pointer's tag when one with the same alias set exists).  As a side
   effect, the tag and all affected aliases are marked for SSA
   renaming.  */

void
add_type_alias (tree ptr, tree var)
{
  VEC(tree, gc) *aliases;
  tree tag, al;
  var_ann_t ann = var_ann (ptr);
  subvar_t svars;
  VEC (tree, heap) *varvec = NULL;
  unsigned i;

  if (ann->type_mem_tag == NULL_TREE)
    {
      tree q = NULL_TREE;
      tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
      HOST_WIDE_INT tag_set = get_alias_set (tag_type);
      safe_referenced_var_iterator rvi;

      /* PTR doesn't have a type tag, create a new one and add VAR to
	 the new tag's alias set.

	 FIXME, This is slower than necessary.  We need to determine
	 whether there is another pointer Q with the same alias set as
	 PTR.  This could be sped up by having type tags associated
	 with types.  */
      FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (q))
	      && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
	    {
	      /* Found another pointer Q with the same alias set as
		 the PTR's pointed-to type.  If Q has a type tag, use
		 it.  Otherwise, create a new memory tag for PTR.  */
	      var_ann_t ann1 = var_ann (q);
	      if (ann1->type_mem_tag)
		ann->type_mem_tag = ann1->type_mem_tag;
	      else
		ann->type_mem_tag = create_memory_tag (tag_type, true);
	      /* Skip the fallback tag creation below.  */
	      goto found_tag;
	    }
	}

      /* Couldn't find any other pointer with a type tag we could use.
	 Create a new memory tag for PTR.  */
      ann->type_mem_tag = create_memory_tag (tag_type, true);
    }

found_tag:
  /* If VAR is not already PTR's type tag, add it to the may-alias set
     for PTR's type tag.  VAR itself must not be a memory tag.  */
  gcc_assert (!MTAG_P (var));
  tag = ann->type_mem_tag;

  /* If VAR has subvars, add the subvars to the tag instead of the
     actual var.  */
  if (var_can_have_subvars (var)
      && (svars = get_subvars_for_var (var)))
    {
      subvar_t sv;
      for (sv = svars; sv; sv = sv->next)
	add_may_alias (tag, sv->var);
    }
  else
    add_may_alias (tag, var);

  /* TAG and its set of aliases need to be marked for renaming.  */
  mark_sym_for_renaming (tag);
  if ((aliases = var_ann (tag)->may_aliases) != NULL)
    {
      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	mark_sym_for_renaming (al);
    }

  /* If we had grouped aliases, VAR may have aliases of its own.  Mark
     them for renaming as well.  Other statements referencing the
     aliases of VAR will need to be updated.  */
  if ((aliases = var_ann (var)->may_aliases) != NULL)
    {
      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	mark_sym_for_renaming (al);
    }
  VEC_free (tree, heap, varvec);
}
2581
2582
/* Create a new type tag for PTR.  Construct the may-alias list of this type
   tag so that it has the aliasing of VAR.

   Note, the set of aliases represented by the new type tag are not marked
   for renaming.  */

void
new_type_alias (tree ptr, tree var)
{
  var_ann_t p_ann = var_ann (ptr);
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  var_ann_t v_ann = var_ann (var);
  tree tag;
  subvar_t svars;

  /* PTR must not already have a type tag, and VAR must not itself be a
     memory tag.  */
  gcc_assert (p_ann->type_mem_tag == NULL_TREE);
  gcc_assert (!MTAG_P (var));

  /* Add VAR to the may-alias set of PTR's new type tag.  If VAR has
     subvars, add the subvars to the tag instead of the actual var.  */
  if (var_can_have_subvars (var)
      && (svars = get_subvars_for_var (var)))
    {
      subvar_t sv;

      tag = create_memory_tag (tag_type, true);
      p_ann->type_mem_tag = tag;

      for (sv = svars; sv; sv = sv->next)
	add_may_alias (tag, sv->var);
    }
  else
    {
      /* The following is based on code in add_stmt_operand to ensure that the
	 same defs/uses/vdefs/vuses will be found after replacing a reference
	 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
	 is the address of var.  */
      VEC(tree, gc) *aliases = v_ann->may_aliases;

      /* If VAR has exactly one alias and it is already a type memory
	 tag, reuse that tag directly instead of creating a new one.  */
      if ((aliases != NULL)
	  && (VEC_length (tree, aliases) == 1))
	{
	  tree ali = VEC_index (tree, aliases, 0);

	  if (TREE_CODE (ali) == TYPE_MEMORY_TAG)
	    {
	      p_ann->type_mem_tag = ali;
	      return;
	    }
	}

      tag = create_memory_tag (tag_type, true);
      p_ann->type_mem_tag = tag;

      if (aliases == NULL)
	add_may_alias (tag, var);
      else
	{
	  unsigned i;
	  tree al;

	  /* Give the new tag the same aliases VAR had.  */
	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    add_may_alias (tag, al);
	}
    }
}
2649
2650
2651
/* This represents the used range of a variable.  */

typedef struct used_part
{
  /* Lowest and highest offsets touched by any use of the variable.
     The callers in find_used_portions record bit positions here.  */
  HOST_WIDE_INT minused;
  HOST_WIDE_INT maxused;
  /* True if we have an explicit use/def of some portion of this variable,
     even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
  bool explicit_uses;
  /* True if we have an implicit use/def of some portion of this
     variable.  Implicit uses occur when we can't tell what part we
     are referencing, and have to make conservative assumptions.  */
  bool implicit_uses;
  /* True if the structure is only written to or taken its address.  */
  bool write_only;
} *used_part_t;

/* A hash table of used_part structures, keyed by variable uid (see
   used_part_map below; the old "array" wording was stale).  */

static htab_t used_portions;

/* Hash table entry mapping a variable UID to its used_part record.  */

struct used_part_map
{
  /* DECL_UID of the variable.  */
  unsigned int uid;
  /* The recorded used range for that variable.  */
  used_part_t to;
};
2678
2679 /* Return true if the uid in the two used part maps are equal. */
2680
2681 static int
2682 used_part_map_eq (const void *va, const void *vb)
2683 {
2684 const struct used_part_map *a = (const struct used_part_map *) va;
2685 const struct used_part_map *b = (const struct used_part_map *) vb;
2686 return (a->uid == b->uid);
2687 }
2688
2689 /* Hash a from uid in a used_part_map. */
2690
2691 static unsigned int
2692 used_part_map_hash (const void *item)
2693 {
2694 return ((const struct used_part_map *)item)->uid;
2695 }
2696
2697 /* Free a used part map element. */
2698
2699 static void
2700 free_used_part_map (void *item)
2701 {
2702 free (((struct used_part_map *)item)->to);
2703 free (item);
2704 }
2705
2706 /* Lookup a used_part structure for a UID. */
2707
2708 static used_part_t
2709 up_lookup (unsigned int uid)
2710 {
2711 struct used_part_map *h, in;
2712 in.uid = uid;
2713 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2714 if (!h)
2715 return NULL;
2716 return h->to;
2717 }
2718
2719 /* Insert the pair UID, TO into the used part hashtable. */
2720
2721 static void
2722 up_insert (unsigned int uid, used_part_t to)
2723 {
2724 struct used_part_map *h;
2725 void **loc;
2726
2727 h = XNEW (struct used_part_map);
2728 h->uid = uid;
2729 h->to = to;
2730 loc = htab_find_slot_with_hash (used_portions, h,
2731 uid, INSERT);
2732 if (*loc != NULL)
2733 free (*loc);
2734 *(struct used_part_map **) loc = h;
2735 }
2736
2737
2738 /* Given a variable uid, UID, get or create the entry in the used portions
2739 table for the variable. */
2740
2741 static used_part_t
2742 get_or_create_used_part_for (size_t uid)
2743 {
2744 used_part_t up;
2745 if ((up = up_lookup (uid)) == NULL)
2746 {
2747 up = XCNEW (struct used_part);
2748 up->minused = INT_MAX;
2749 up->maxused = 0;
2750 up->explicit_uses = false;
2751 up->implicit_uses = false;
2752 up->write_only = true;
2753 }
2754
2755 return up;
2756 }
2757
2758
2759 /* Create and return a structure sub-variable for field type FIELD of
2760 variable VAR. */
2761
2762 static tree
2763 create_sft (tree var, tree field)
2764 {
2765 var_ann_t ann;
2766 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2767
2768 /* We need to copy the various flags from VAR to SUBVAR, so that
2769 they are is_global_var iff the original variable was. */
2770 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2771 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2772 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2773 TREE_STATIC (subvar) = TREE_STATIC (var);
2774 TREE_READONLY (subvar) = TREE_READONLY (var);
2775
2776 /* Add the new variable to REFERENCED_VARS. */
2777 ann = get_var_ann (subvar);
2778 ann->type_mem_tag = NULL;
2779 add_referenced_tmp_var (subvar);
2780 SFT_PARENT_VAR (subvar) = var;
2781
2782 return subvar;
2783 }
2784
2785
/* Given an aggregate VAR, create the subvariables that represent its
   fields.  Only variables with a recorded, non-write-only used range
   (see find_used_portions) are considered.  */

static void
create_overlap_variables_for (tree var)
{
  VEC(fieldoff_s,heap) *fieldstack = NULL;
  used_part_t up;
  size_t uid = DECL_UID (var);

  /* Nothing to do if we never saw a read of any part of VAR.  */
  up = up_lookup (uid);
  if (!up
      || up->write_only)
    return;

  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
  if (VEC_length (fieldoff_s, fieldstack) != 0)
    {
      subvar_t *subvars;
      fieldoff_s *fo;
      bool notokay = false;
      int fieldcount = 0;
      int i;
      HOST_WIDE_INT lastfooffset = -1;
      HOST_WIDE_INT lastfosize = -1;
      tree lastfotype = NULL_TREE;

      /* Not all fields have DECL_SIZE set, and those that don't, we don't
	 know their size, and thus, can't handle.
	 The same is true of fields with DECL_SIZE that is not an integer
	 constant (such as variable sized fields).
	 Fields with offsets which are not constant will have an offset < 0
	 We *could* handle fields that are constant sized arrays, but
	 currently don't.  Doing so would require some extra changes to
	 tree-ssa-operands.c.  */

      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	  fieldcount++;
	}

      /* The current heuristic we use is as follows:
	 If the variable has no used portions in this function, no
	 structure vars are created for it.
	 Otherwise,
	 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
	 we always create structure vars for them.
	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	 some explicit uses, we create structure vars for them.
	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	 no explicit uses, we do not create structure vars for them.
      */

      if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
	  && !up->explicit_uses)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Variable ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
	    }
	  notokay = true;
	}

      /* Bail out, if we can't create overlap variables.  */
      if (notokay)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return;
	}

      /* Otherwise, create the variables.  */
      subvars = lookup_subvars_for_var (var);

      sort_fieldstack (fieldstack);

      /* Walk the sorted field stack from the last element down to the
	 first, prepending each created subvar to the list.  */
      for (i = VEC_length (fieldoff_s, fieldstack);
	   VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
	{
	  subvar_t sv;
	  HOST_WIDE_INT fosize;
	  tree currfotype;

	  fosize = TREE_INT_CST_LOW (fo->size);
	  currfotype = fo->type;

	  /* If this field isn't in the used portion,
	     or it has the exact same offset and size as the last
	     field, skip it.  */

	  if (((fo->offset <= up->minused
		&& fo->offset + fosize <= up->minused)
	       || fo->offset >= up->maxused)
	      || (fo->offset == lastfooffset
		  && fosize == lastfosize
		  && currfotype == lastfotype))
	    continue;
	  sv = GGC_NEW (struct subvar);
	  sv->offset = fo->offset;
	  sv->size = fosize;
	  sv->next = *subvars;
	  sv->var = create_sft (var, fo->type);

	  if (dump_file)
	    {
	      fprintf (dump_file, "structure field tag %s created for var %s",
		       get_name (sv->var), get_name (var));
	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
		       sv->offset);
	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
		       sv->size);
	      fprintf (dump_file, "\n");
	    }

	  lastfotype = currfotype;
	  lastfooffset = fo->offset;
	  lastfosize = fosize;
	  *subvars = sv;
	}

      /* Once we have created subvars, the original is no longer call
	 clobbered on its own.  Its call clobbered status depends
	 completely on the call clobbered status of the subvars.

	 add_referenced_var in the above loop will take care of
	 marking subvars of global variables as call clobbered for us
	 to start, since they are global as well.  */
      clear_call_clobbered (var);
    }

  VEC_free (fieldoff_s, heap, fieldstack);
}
2926
2927
/* Find the conservative answer to the question of what portions of what
   structures are used by this statement.  We assume that if we have a
   component ref with a known size + offset, that we only need that part
   of the structure.  For unknown cases, or cases where we do something
   to the whole structure, we assume we need to create fields for the
   entire structure.

   Callback for walk_tree: TP is the visited sub-tree and WALK_SUBTREES
   the usual walk control flag.  LHS_P is non-null when the sub-tree is
   on the LHS of a MODIFY_EXPR (set up by the manual recursion below);
   it is used to maintain the write_only flag.  Always returns
   NULL_TREE, so the walk never terminates early.  */

static tree
find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
  switch (TREE_CODE (*tp))
    {
    case MODIFY_EXPR:
      /* Recurse manually here to track whether the use is in the
	 LHS of an assignment.  */
      find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
      return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
      {
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitmaxsize;
	HOST_WIDE_INT bitpos;
	tree ref;
	ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
	if (DECL_P (ref)
	    && var_can_have_subvars (ref)
	    && bitmaxsize != -1)
	  {
	    size_t uid = DECL_UID (ref);
	    used_part_t up;

	    up = get_or_create_used_part_for (uid);

	    /* Widen the recorded range to include this access.  */
	    if (bitpos <= up->minused)
	      up->minused = bitpos;
	    if ((bitpos + bitmaxsize >= up->maxused))
	      up->maxused = bitpos + bitmaxsize;

	    /* An exact extent means we know precisely which part is
	       touched; otherwise record a conservative implicit use.  */
	    if (bitsize == bitmaxsize)
	      up->explicit_uses = true;
	    else
	      up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;
	    up_insert (uid, up);

	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
      /* This is here to make sure we mark the entire base variable as used
	 when you take its address.  Because our used portion analysis is
	 simple, we aren't looking at casts or pointer arithmetic to see what
	 happens when you take the address.  */
    case ADDR_EXPR:
      {
	tree var = get_base_address (TREE_OPERAND (*tp, 0));

	if (var
	    && DECL_P (var)
	    && DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    /* Mark the whole variable as implicitly used.  */
	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	/* A bare reference to a whole decl: mark all of it used.  */
	tree var = *tp;
	if (DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;

    default:
      break;

    }
  return NULL_TREE;
}
3042
3043 /* Create structure field variables for structures used in this function. */
3044
3045 static void
3046 create_structure_vars (void)
3047 {
3048 basic_block bb;
3049 safe_referenced_var_iterator rvi;
3050 VEC (tree, heap) *varvec = NULL;
3051 tree var;
3052
3053 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3054 free_used_part_map);
3055
3056 FOR_EACH_BB (bb)
3057 {
3058 block_stmt_iterator bsi;
3059 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3060 {
3061 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3062 find_used_portions,
3063 NULL);
3064 }
3065 }
3066 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3067 {
3068 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3069 if (var
3070 && DECL_SIZE (var)
3071 && var_can_have_subvars (var)
3072 && !MTAG_P (var)
3073 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3074 create_overlap_variables_for (var);
3075 }
3076 htab_delete (used_portions);
3077 VEC_free (tree, heap, varvec);
3078
3079 }
3080
/* Gate for the structure-variable ("salias") pass: run it only when
   -ftree-salias is enabled.  */

static bool
gate_structure_vars (void)
{
  return flag_tree_salias != 0;
}
3086
/* Pass descriptor for the structure-field tag creation pass.  Gated on
   -ftree-salias (gate_structure_vars); requires a CFG and dumps the
   function when done.  */

struct tree_opt_pass pass_create_structure_vars =
{
  "salias",		 /* name */
  gate_structure_vars,	 /* gate */
  create_structure_vars, /* execute */
  NULL,			 /* sub */
  NULL,			 /* next */
  0,			 /* static_pass_number */
  0,			 /* tv_id */
  PROP_cfg,		 /* properties_required */
  0,			 /* properties_provided */
  0,			 /* properties_destroyed */
  0,			 /* todo_flags_start */
  TODO_dump_func,	 /* todo_flags_finish */
  0			 /* letter */
};