cfgloop.c (get_loop_body_in_bfs_order): Avoid redundant call to bitmap_bit_p.
[gcc.git] / gcc / tree-ssa-live.c
1 /* Liveness for SSA trees.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Andrew MacLeod <amacleod@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-pretty-print.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "tree-ssa-live.h"
33 #include "diagnostic-core.h"
34 #include "toplev.h"
35 #include "debug.h"
36 #include "flags.h"
37 #include "gimple.h"
38
39 #ifdef ENABLE_CHECKING
40 static void verify_live_on_entry (tree_live_info_p);
41 #endif
42
43
44 /* VARMAP maintains a mapping from SSA version number to real variables.
45
46 All SSA_NAMES are divided into partitions. Initially each ssa_name is the
47 only member of it's own partition. Coalescing will attempt to group any
48 ssa_names which occur in a copy or in a PHI node into the same partition.
49
50 At the end of out-of-ssa, each partition becomes a "real" variable and is
51 rewritten as a compiler variable.
52
53 The var_map data structure is used to manage these partitions. It allows
54 partitions to be combined, and determines which partition belongs to what
55 ssa_name or variable, and vice versa. */
56
57
/* This routine will initialize the basevar fields of MAP.
   Each partition is mapped to the index of its underlying base variable
   (the SSA_NAME_VAR of the partition representative); the distinct base
   variables themselves are collected in MAP->basevars.  Uses the
   base_var_processed flag in each var_ann as a temporary "seen" marker,
   which is cleared again before returning.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part, num;
  tree var;
  var_ann_t ann;

  num = 0;
  num_part = num_var_partitions (map);

  /* If a base table already exists, clear it, otherwise create it.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      VEC_truncate (tree, map->basevars, 0);
    }
  else
    map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10)));

  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      var = partition_to_var (map, x);
      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);
      ann = var_ann (var);
      /* If base variable hasn't been seen, set it up.  */
      if (!ann->base_var_processed)
	{
	  ann->base_var_processed = 1;
	  VAR_ANN_BASE_INDEX (ann) = num++;
	  VEC_safe_push (tree, heap, map->basevars, var);
	}
      map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann);
    }

  map->num_basevars = num;

  /* Now clear the processed bit so the annotations are clean for any
     later call to this routine.  */
  for (x = 0; x < num; x++)
    {
      var = VEC_index (tree, map->basevars, x);
      var_ann (var)->base_var_processed = 0;
    }

#ifdef ENABLE_CHECKING
  /* Verify that each partition's base index maps back to its own
     underlying variable.  */
  for (x = 0; x < num_part; x++)
    {
      tree var2;
      var = SSA_NAME_VAR (partition_to_var (map, x));
      var2 = VEC_index (tree, map->basevars, basevar_index (map, x));
      gcc_assert (var == var2);
    }
#endif
}
117
118
119 /* Remove the base table in MAP. */
120
121 static void
122 var_map_base_fini (var_map map)
123 {
124 /* Free the basevar info if it is present. */
125 if (map->partition_to_base_index != NULL)
126 {
127 VEC_free (tree, heap, map->basevars);
128 free (map->partition_to_base_index);
129 map->partition_to_base_index = NULL;
130 map->num_basevars = 0;
131 }
132 }
133 /* Create a variable partition map of SIZE, initialize and return it. */
134
135 var_map
136 init_var_map (int size)
137 {
138 var_map map;
139
140 map = (var_map) xmalloc (sizeof (struct _var_map));
141 map->var_partition = partition_new (size);
142
143 map->partition_to_view = NULL;
144 map->view_to_partition = NULL;
145 map->num_partitions = size;
146 map->partition_size = size;
147 map->num_basevars = 0;
148 map->partition_to_base_index = NULL;
149 map->basevars = NULL;
150 return map;
151 }
152
153
154 /* Free memory associated with MAP. */
155
156 void
157 delete_var_map (var_map map)
158 {
159 var_map_base_fini (map);
160 partition_delete (map->var_partition);
161 if (map->partition_to_view)
162 free (map->partition_to_view);
163 if (map->view_to_partition)
164 free (map->view_to_partition);
165 free (map);
166 }
167
168
169 /* This function will combine the partitions in MAP for VAR1 and VAR2. It
170 Returns the partition which represents the new partition. If the two
171 partitions cannot be combined, NO_PARTITION is returned. */
172
173 int
174 var_union (var_map map, tree var1, tree var2)
175 {
176 int p1, p2, p3;
177
178 gcc_assert (TREE_CODE (var1) == SSA_NAME);
179 gcc_assert (TREE_CODE (var2) == SSA_NAME);
180
181 /* This is independent of partition_to_view. If partition_to_view is
182 on, then whichever one of these partitions is absorbed will never have a
183 dereference into the partition_to_view array any more. */
184
185 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
186 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
187
188 gcc_assert (p1 != NO_PARTITION);
189 gcc_assert (p2 != NO_PARTITION);
190
191 if (p1 == p2)
192 p3 = p1;
193 else
194 p3 = partition_union (map->var_partition, p1, p2);
195
196 if (map->partition_to_view)
197 p3 = map->partition_to_view[p3];
198
199 return p3;
200 }
201
202
203 /* Compress the partition numbers in MAP such that they fall in the range
204 0..(num_partitions-1) instead of wherever they turned out during
205 the partitioning exercise. This removes any references to unused
206 partitions, thereby allowing bitmaps and other vectors to be much
207 denser.
208
209 This is implemented such that compaction doesn't affect partitioning.
210 Ie., once partitions are created and possibly merged, running one
211 or more different kind of compaction will not affect the partitions
212 themselves. Their index might change, but all the same variables will
213 still be members of the same partition group. This allows work on reduced
214 sets, and no loss of information when a larger set is later desired.
215
216 In particular, coalescing can work on partitions which have 2 or more
217 definitions, and then 'recompact' later to include all the single
218 definitions for assignment to program variables. */
219
220
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  A partition counts as used when its representative ssa_name
   exists, is a gimple register, and is not an unused default definition.
   The caller owns (and must free) the returned bitmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && is_gimple_reg (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  /* With no view active, the view size is simply the full size.  */
  map->num_partitions = map->partition_size;
  return used;
}
259
260
261 /* This routine will finalize the view data for MAP based on the partitions
262 set in SELECTED. This is either the same bitmap returned from
263 partition_view_init, or a trimmed down version if some of those partitions
264 were not desired in this view. SELECTED is freed before returning. */
265
266 static void
267 partition_view_fini (var_map map, bitmap selected)
268 {
269 bitmap_iterator bi;
270 unsigned count, i, x, limit;
271
272 gcc_assert (selected);
273
274 count = bitmap_count_bits (selected);
275 limit = map->partition_size;
276
277 /* If its a one-to-one ratio, we don't need any view compaction. */
278 if (count < limit)
279 {
280 map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
281 memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
282 map->view_to_partition = (int *)xmalloc (count * sizeof (int));
283
284 i = 0;
285 /* Give each selected partition an index. */
286 EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
287 {
288 map->partition_to_view[x] = i;
289 map->view_to_partition[i] = x;
290 i++;
291 }
292 gcc_assert (i == count);
293 map->num_partitions = i;
294 }
295
296 BITMAP_FREE (selected);
297 }
298
299
300 /* Create a partition view which includes all the used partitions in MAP. If
301 WANT_BASES is true, create the base variable map as well. */
302
303 extern void
304 partition_view_normal (var_map map, bool want_bases)
305 {
306 bitmap used;
307
308 used = partition_view_init (map);
309 partition_view_fini (map, used);
310
311 if (want_bases)
312 var_map_base_init (map);
313 else
314 var_map_base_fini (map);
315 }
316
317
318 /* Create a partition view in MAP which includes just partitions which occur in
319 the bitmap ONLY. If WANT_BASES is true, create the base variable map
320 as well. */
321
322 extern void
323 partition_view_bitmap (var_map map, bitmap only, bool want_bases)
324 {
325 bitmap used;
326 bitmap new_partitions = BITMAP_ALLOC (NULL);
327 unsigned x, p;
328 bitmap_iterator bi;
329
330 used = partition_view_init (map);
331 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
332 {
333 p = partition_find (map->var_partition, x);
334 gcc_assert (bitmap_bit_p (used, p));
335 bitmap_set_bit (new_partitions, p);
336 }
337 partition_view_fini (map, new_partitions);
338
339 BITMAP_FREE (used);
340 if (want_bases)
341 var_map_base_init (map);
342 else
343 var_map_base_fini (map);
344 }
345
346
347 static inline void mark_all_vars_used (tree *, void *data);
348
/* Helper function for mark_all_vars_used, called via walk_tree.
   TP points at the current tree node; *WALK_SUBTREES is cleared to stop
   the walk below nodes whose children cannot contain vars.  DATA is
   either NULL or a bitmap of variable UIDs whose DECL_INITIAL should be
   walked the first time each such variable is seen.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  /* Look through SSA_NAMEs to their underlying variable.  */
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  /* An expression with a block keeps that scope block alive.  */
  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TREE_ORIGINAL for TARGET_MEM_REFS, as well as other
     fields that do not contain vars.  Walk only the var-bearing
     operands explicitly, then prune the subtree.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_SYMBOL (t), data);
      mark_all_vars_used (&TMR_BASE (t), data);
      mark_all_vars_used (&TMR_INDEX (t), data);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var in DATA is first seen used, its initializer
	 must also be walked (the bit is cleared so this happens once).  */
      if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t)))
	mark_all_vars_used (&DECL_INITIAL (t), data);
      set_is_used (t);
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  /* Types and decls have no var-bearing children worth walking.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
398
399 /* Mark the scope block SCOPE and its subblocks unused when they can be
400 possibly eliminated if dead. */
401
402 static void
403 mark_scope_block_unused (tree scope)
404 {
405 tree t;
406 TREE_USED (scope) = false;
407 if (!(*debug_hooks->ignore_block) (scope))
408 TREE_USED (scope) = true;
409 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
410 mark_scope_block_unused (t);
411 }
412
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) Declares no live variables
     3) All subblocks are dead
	or there is precisely one subblocks and the block
	has same abstract origin as outer block and declares
	no variables, so it is pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them to be recycled by GGC and to save copying work
   done by the inliner.

   Note: this walks both BLOCK_VARS and BLOCK_SUBBLOCKS through
   pointer-to-pointer links so that entries can be unlinked in place,
   and it splices the children of dead subblocks into this block's
   subblock chain.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  var_ann_t ann;
  int nsubblocks = 0;

  /* First pass: prune the variable list, deciding per-variable whether
     it keeps the block alive.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might stil call it even if all statements
	 of function it was nested into was elliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may get end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  /* Unlink *t; NEXT re-points at the same slot so the loop
	     examines the decl that just slid into it.  */
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if ((ann = var_ann (*t)) != NULL
	       && ann->used)
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs.  non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all variables of
	 all nested blocks are gone.  We track no information on whether given
	 type is used or not.  */

      else if (debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  /* Second pass: recurse into subblocks, unlinking dead ones and
     splicing their surviving children into this block's chain.  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    /* Replace the dead block by its children and reparent
	       them to this scope.  */
	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


  /* Finally decide whether this block itself is dead.  */
  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verfify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
585
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  DATA is either
   NULL or a bitmap of variable UIDs whose initializers should also be
   walked on first use; it is passed through to mark_all_vars_used_1.  */

static inline void
mark_all_vars_used (tree *expr_p, void *data)
{
  walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
}
594
595
/* Dump the scope block SCOPE (and, recursively, its subblocks) to FILE.
   INDENT is the indentation level and FLAGS is as in print_generic_expr.
   Unused blocks and variables are annotated as such in the output.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  /* Header line: block number plus (unused)/(abstract) annotations.  */
  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  /* Dump the block's own variables, flagging those never marked used.  */
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      bool used = false;
      var_ann_t ann;

      if ((ann = var_ann (var))
	  && ann->used)
	used = true;

      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "%s\n", used ? "" : " (unused)");
    }
  /* Dump variables nonlocalized into this block by the inliner.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
      			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
650
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  Convenience wrapper for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
659
660
/* Dump the tree of lexical scopes of current_function_decl to FILE,
   starting from the function's outermost block.  FLAGS is as in
   print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
669
670
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  Convenience wrapper for use from
   a debugger.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}
679
/* Remove local variables that are not referenced in the IL.
   Walks the whole CFG marking every referenced symbol and scope block,
   then compacts cfun->local_decls (twice: once for locals, once for
   globals whose liveness depends on used initializers), prunes
   REFERENCED_VARS, and finally eliminates dead scope blocks.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var, t;
  referenced_var_iterator rvi;
  var_ann_t ann;
  bitmap global_unused_vars = NULL;
  unsigned srcidx, dstidx, num;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  /* Assume all locals are unused.  */
  FOR_EACH_REFERENCED_VAR (t, rvi)
    var_ann (t)->used = false;

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* Debug stmts must not keep anything alive.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

          /* No point processing globals.  */
          if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def, NULL);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
	      tree arg = USE_FROM_PTR (arg_p);
	      mark_all_vars_used (&arg, NULL);
            }
        }

      /* Blocks targeted by computed gotos with a locus stay used.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus)
	  TREE_USED (e->goto_block) = true;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local vars from local_decls.  Unused globals are
     only recorded for now; they get a second chance below.  */
  num = VEC_length (tree, cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = VEC_index (tree, cfun->local_decls, srcidx);
      if (TREE_CODE (var) != FUNCTION_DECL
	  && (!(ann = var_ann (var))
	      || !ann->used))
	{
	  if (is_global_var (var))
	    {
	      if (global_unused_vars == NULL)
		global_unused_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_unused_vars, DECL_UID (var));
	    }
	  else
	    continue;
	}
      else if (TREE_CODE (var) == VAR_DECL
	       && DECL_HARD_REGISTER (var)
	       && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	VEC_replace (tree, cfun->local_decls, dstidx, var);
      dstidx++;
    }
  if (dstidx != num)
    VEC_truncate (tree, cfun->local_decls, dstidx);

  /* Remove unmarked global vars from local_decls.  A global becomes
     used again if the initializer of a used global references it.  */
  if (global_unused_vars != NULL)
    {
      tree var;
      unsigned ix;
      FOR_EACH_LOCAL_DECL (cfun, ix, var)
	if (TREE_CODE (var) == VAR_DECL
	    && is_global_var (var)
	    && (ann = var_ann (var)) != NULL
	    && ann->used)
	  mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);

      num = VEC_length (tree, cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  var = VEC_index (tree, cfun->local_decls, srcidx);
	  if (TREE_CODE (var) == VAR_DECL
	      && is_global_var (var)
	      && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
	    continue;

	  if (srcidx != dstidx)
	    VEC_replace (tree, cfun->local_decls, dstidx, var);
	  dstidx++;
	}
      if (dstidx != num)
	VEC_truncate (tree, cfun->local_decls, dstidx);
      BITMAP_FREE (global_unused_vars);
    }

  /* Remove unused variables from REFERENCED_VARs.  As a special
     exception keep the variables that are believed to be aliased.
     Those can't be easily removed from the alias sets and operand
     caches.  They will be removed shortly after the next may_alias
     pass is performed.  */
  FOR_EACH_REFERENCED_VAR (t, rvi)
    if (!is_global_var (t)
	&& TREE_CODE (t) != PARM_DECL
	&& TREE_CODE (t) != RESULT_DECL
	&& !(ann = var_ann (t))->used
	&& !ann->is_heapvar
	&& !TREE_ADDRESSABLE (t))
      remove_referenced_var (t);
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
}
836
837
838 /* Allocate and return a new live range information object base on MAP. */
839
840 static tree_live_info_p
841 new_tree_live_info (var_map map)
842 {
843 tree_live_info_p live;
844 unsigned x;
845
846 live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
847 live->map = map;
848 live->num_blocks = last_basic_block;
849
850 live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
851 for (x = 0; x < (unsigned)last_basic_block; x++)
852 live->livein[x] = BITMAP_ALLOC (NULL);
853
854 live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
855 for (x = 0; x < (unsigned)last_basic_block; x++)
856 live->liveout[x] = BITMAP_ALLOC (NULL);
857
858 live->work_stack = XNEWVEC (int, last_basic_block);
859 live->stack_top = live->work_stack;
860
861 live->global = BITMAP_ALLOC (NULL);
862 return live;
863 }
864
865
866 /* Free storage for live range info object LIVE. */
867
868 void
869 delete_tree_live_info (tree_live_info_p live)
870 {
871 int x;
872
873 BITMAP_FREE (live->global);
874 free (live->work_stack);
875
876 for (x = live->num_blocks - 1; x >= 0; x--)
877 BITMAP_FREE (live->liveout[x]);
878 free (live->liveout);
879
880 for (x = live->num_blocks - 1; x >= 0; x--)
881 BITMAP_FREE (live->livein[x]);
882 free (live->livein);
883
884 free (live);
885 }
886
887
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  Predecessors that change after already having been
   visited are pushed on LIVE's work stack for revisiting.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;
  /* A block must never be visited twice without being re-queued.  */
  gcc_assert (!TEST_BIT (visited, bb->index));

  SET_BIT (visited, bb->index);
  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
	continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.  */
      bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred. if there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (TEST_BIT (visited, pred_bb->index) && change)
	{
	  RESET_BIT (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
929
930
931 /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
932 of all the variables. */
933
934 static void
935 live_worklist (tree_live_info_p live)
936 {
937 unsigned b;
938 basic_block bb;
939 sbitmap visited = sbitmap_alloc (last_basic_block + 1);
940 bitmap tmp = BITMAP_ALLOC (NULL);
941
942 sbitmap_zero (visited);
943
944 /* Visit all the blocks in reverse order and propagate live on entry values
945 into the predecessors blocks. */
946 FOR_EACH_BB_REVERSE (bb)
947 loe_visit_block (live, bb, visited, tmp);
948
949 /* Process any blocks which require further iteration. */
950 while (live->stack_top != live->work_stack)
951 {
952 b = *--(live->stack_top);
953 loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
954 }
955
956 BITMAP_FREE (tmp);
957 sbitmap_free (visited);
958 }
959
960
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
   in the liveout vector (they are cleared again by
   calculate_live_on_exit).  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  /* Names outside the current view contribute nothing.  */
  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether its live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (use_stmt, index);
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	/* Debug uses must never extend live ranges.  */
	continue;
      else
	{
	  /* If its not defined in this block, its live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
1033
1034
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.
   Live-out of a block is the union of live-on-entry of its successors,
   plus any partitions used by PHI arguments on the outgoing edges.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      /* Constant PHI arguments need no live range.  */
	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR)
		bitmap_set_bit (liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successors live on entry to this bock live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR)
	  bitmap_ior_into (liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
1082
1083
1084 /* Given partition map MAP, calculate all the live on entry bitmaps for
1085 each partition. Return a new live info object. */
1086
1087 tree_live_info_p
1088 calculate_live_ranges (var_map map)
1089 {
1090 tree var;
1091 unsigned i;
1092 tree_live_info_p live;
1093
1094 live = new_tree_live_info (map);
1095 for (i = 0; i < num_var_partitions (map); i++)
1096 {
1097 var = partition_to_var (map, i);
1098 if (var != NULL_TREE)
1099 set_var_live_on_entry (var, live);
1100 }
1101
1102 live_worklist (live);
1103
1104 #ifdef ENABLE_CHECKING
1105 verify_live_on_entry (live);
1106 #endif
1107
1108 calculate_live_on_exit (live);
1109 return live;
1110 }
1111
1112
1113 /* Output partition map MAP to file F. */
1114
1115 void
1116 dump_var_map (FILE *f, var_map map)
1117 {
1118 int t;
1119 unsigned x, y;
1120 int p;
1121
1122 fprintf (f, "\nPartition map \n\n");
1123
1124 for (x = 0; x < map->num_partitions; x++)
1125 {
1126 if (map->view_to_partition != NULL)
1127 p = map->view_to_partition[x];
1128 else
1129 p = x;
1130
1131 if (ssa_name (p) == NULL_TREE)
1132 continue;
1133
1134 t = 0;
1135 for (y = 1; y < num_ssa_names; y++)
1136 {
1137 p = partition_find (map->var_partition, y);
1138 if (map->partition_to_view)
1139 p = map->partition_to_view[p];
1140 if (p == (int)x)
1141 {
1142 if (t++ == 0)
1143 {
1144 fprintf(f, "Partition %d (", x);
1145 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1146 fprintf (f, " - ");
1147 }
1148 fprintf (f, "%d ", y);
1149 }
1150 }
1151 if (t != 0)
1152 fprintf (f, ")\n");
1153 }
1154 fprintf (f, "\n");
1155 }
1156
1157
1158 /* Output live range info LIVE to file F, controlled by FLAG. */
1159
1160 void
1161 dump_live_info (FILE *f, tree_live_info_p live, int flag)
1162 {
1163 basic_block bb;
1164 unsigned i;
1165 var_map map = live->map;
1166 bitmap_iterator bi;
1167
1168 if ((flag & LIVEDUMP_ENTRY) && live->livein)
1169 {
1170 FOR_EACH_BB (bb)
1171 {
1172 fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1173 EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi)
1174 {
1175 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1176 fprintf (f, " ");
1177 }
1178 fprintf (f, "\n");
1179 }
1180 }
1181
1182 if ((flag & LIVEDUMP_EXIT) && live->liveout)
1183 {
1184 FOR_EACH_BB (bb)
1185 {
1186 fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1187 EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi)
1188 {
1189 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1190 fprintf (f, " ");
1191 }
1192 fprintf (f, "\n");
1193 }
1194 }
1195 }
1196
/* A declaration paired with the sequence number under which it was first
   encountered during the statement walk.  Used by dump_enumerated_decls so
   that TDF_NOUID dumps of anonymous declarations remain comparable.  */
struct GTY(()) numbered_tree_d
{
  tree t;	/* The declaration itself.  */
  int num;	/* Sequence number of the first encounter.  */
};
typedef struct numbered_tree_d numbered_tree;

/* Vector-of-object support for numbered_tree, heap allocated.  */
DEF_VEC_O (numbered_tree);
DEF_VEC_ALLOC_O (numbered_tree, heap);
1206
1207 /* Compare two declarations references by their DECL_UID / sequence number.
1208 Called via qsort. */
1209
1210 static int
1211 compare_decls_by_uid (const void *pa, const void *pb)
1212 {
1213 const numbered_tree *nt_a = ((const numbered_tree *)pa);
1214 const numbered_tree *nt_b = ((const numbered_tree *)pb);
1215
1216 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
1217 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
1218 return nt_a->num - nt_b->num;
1219 }
1220
/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.
   If *TP is a declaration, append it to the vector threaded through
   DATA's walk_stmt_info, tagged with the next sequence number, and do not
   descend into it.  Always returns NULL_TREE so the walk continues.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  /* Alias WI->info as the vector pointer; pushing through this alias
     keeps the caller's copy current if VEC_safe_push reallocates.  */
  VEC (numbered_tree, heap) **list = (VEC (numbered_tree, heap) **) &wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  /* Sequence number is the current length: 0, 1, 2, ... in discovery
     order.  */
  nt.num = VEC_length (numbered_tree, *list);
  VEC_safe_push (numbered_tree, heap, *list, &nt);
  /* Do not walk into the declaration's own subtrees.  */
  *walk_subtrees = 0;
  return NULL_TREE;
}
1237
1238 /* Find all the declarations used by the current function, sort them by uid,
1239 and emit the sorted list. Each declaration is tagged with a sequence
1240 number indicating when it was found during statement / tree walking,
1241 so that TDF_NOUID comparisons of anonymous declarations are still
1242 meaningful. Where a declaration was encountered more than once, we
1243 emit only the sequence number of the first encounter.
1244 FILE is the dump file where to output the list and FLAGS is as in
1245 print_generic_expr. */
1246 void
1247 dump_enumerated_decls (FILE *file, int flags)
1248 {
1249 basic_block bb;
1250 struct walk_stmt_info wi;
1251 VEC (numbered_tree, heap) *decl_list = VEC_alloc (numbered_tree, heap, 40);
1252
1253 wi.info = (void*) decl_list;
1254 wi.pset = NULL;
1255 FOR_EACH_BB (bb)
1256 {
1257 gimple_stmt_iterator gsi;
1258
1259 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1260 if (!is_gimple_debug (gsi_stmt (gsi)))
1261 walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
1262 }
1263 decl_list = (VEC (numbered_tree, heap) *) wi.info;
1264 qsort (VEC_address (numbered_tree, decl_list),
1265 VEC_length (numbered_tree, decl_list),
1266 sizeof (numbered_tree), compare_decls_by_uid);
1267 if (VEC_length (numbered_tree, decl_list))
1268 {
1269 unsigned ix;
1270 numbered_tree *ntp;
1271 tree last = NULL_TREE;
1272
1273 fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
1274 current_function_name ());
1275 for (ix = 0; VEC_iterate (numbered_tree, decl_list, ix, ntp); ix++)
1276 {
1277 if (ntp->t == last)
1278 continue;
1279 fprintf (file, "%d: ", ntp->num);
1280 print_generic_decl (file, ntp->t, flags);
1281 fprintf (file, "\n");
1282 last = ntp->t;
1283 }
1284 }
1285 VEC_free (numbered_tree, heap, decl_list);
1286 }
1287
1288 #ifdef ENABLE_CHECKING
1289 /* Verify that SSA_VAR is a non-virtual SSA_NAME. */
1290
1291 void
1292 register_ssa_partition_check (tree ssa_var)
1293 {
1294 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
1295 if (!is_gimple_reg (SSA_NAME_VAR (ssa_var)))
1296 {
1297 fprintf (stderr, "Illegally registering a virtual SSA name :");
1298 print_generic_expr (stderr, ssa_var, TDF_SLIM);
1299 fprintf (stderr, " in the SSA->Normal phase.\n");
1300 internal_error ("SSA corruption");
1301 }
1302 }
1303
1304
/* Verify that the info in LIVE matches the current cfg.  Walks every
   successor of the entry block and cross-checks each partition's
   live-on-entry bit against its defining statement and default def;
   diagnostics go to stderr and any mismatch trips the final assert.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;		/* Count of inconsistencies found.  */
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program. This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
        continue;
      /* Check every partition against this entry-successor block.  */
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  /* D is the default def of VAR's underlying variable, if any.  */
	  d = gimple_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Live on entry to an entry successor: the only legitimate
		 definition is an empty (nop) def statement, i.e. a
		 default definition.  */
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
	        {
		  /* A nop def is only consistent if VAR actually is the
		     default def of its variable.  */
	          if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
		        {
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
	        }
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		/* A default def not used in a PHI here must be marked
		   live on entry.  */
	        num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  /* NUM is never decremented, so this requires exactly zero problems.  */
  gcc_assert (num <= 0);
}
1406 #endif