gcc/tree-ssa-live.c
/* Liveness for SSA trees.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "tree-flow.h"
#include "timevar.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "debug.h"
#include "flags.h"
#include "gimple.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif


/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */

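/* A minimal lifecycle sketch (illustrative only; name1 and name2 stand
   for arbitrary non-virtual SSA_NAME trees, not identifiers defined in
   this file):

     var_map map = init_var_map (num_ssa_names);
     var_union (map, name1, name2);	     -- group two names together
     partition_view_normal (map, false);    -- compact to used partitions
     ...
     delete_var_map (map);

   Once a view exists, var_to_partition and partition_to_var translate
   between SSA names and the compacted partition indices.  */
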
/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  htab_t tree_to_index;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  tree_to_index = htab_create (num_part, tree_map_base_hash,
			       tree_int_map_eq, NULL);
  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var))
	m->base.from = SSA_NAME_VAR (var);
      else
	/* This restricts what anonymous SSA names we can coalesce
	   as it restricts the sets we compute conflicts for.
	   Using TREE_TYPE to generate sets is the easiest, as
	   type equivalency also holds for SSA names with the same
	   underlying decl.  */
	m->base.from = TREE_TYPE (var);
      /* If base variable hasn't been seen, set it up.  */
      slot = (struct tree_int_map **) htab_find_slot (tree_to_index,
						      m, INSERT);
      if (!*slot)
	{
	  baseindex = m - mapstorage;
	  m->to = baseindex;
	  *slot = m;
	  m++;
	}
      else
	baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
  htab_delete (tree_to_index);
}

/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}


/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}

/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}

/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */

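/* For illustration (hypothetical numbers): if only partitions 0, 3 and 7
   survive coalescing, partition_view_init returns the bitmap {0, 3, 7}
   and partition_view_fini then builds

     partition_to_view: 0 -> 0, 3 -> 1, 7 -> 2   (all others -1)
     view_to_partition: 0 -> 0, 1 -> 3, 2 -> 7

   so consumers can use the dense indices 0..2 while the underlying
   partition structure is unchanged.  */
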
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}

/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}

/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

extern void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}


/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

extern void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  BITMAP_FREE (used);
  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non-global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t))
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks as unused so that they can
   possibly be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and
	the block has the same abstract origin as the outer block and
	declares no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to
   save copying work done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of a nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if the function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 The exception is scope blocks not containing any instructions
	 at all, so the user can't get into the scopes in the first
	 place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below; however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing, saving quite
	 a lot of memory.

	 For the sake of -g3, we keep around those vars but we don't count
	 this as use of the block, so the innermost block with no used vars
	 and no instructions can be considered dead.  We only want to keep
	 around blocks the user can breakpoint into and ask about the value
	 of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether a given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always
     be eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of an inlined
     function.  Inlined functions are always represented by a block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "",
	   BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)" : "");
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}


/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}


/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (is_gimple_debug (stmt))
	    continue;

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus)
	  TREE_USED (e->goto_block) = true;
    }

  /* We take a two-pass approach to the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other reference.  So the
     first pass ignores them, and the second pass (if there were any)
     tries to remove them.  */
  if (have_local_clobbers)
    FOR_EACH_BB (bb)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		if (TREE_CODE (lhs) == VAR_DECL && !is_used_p (lhs))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = VEC_length (tree, cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = VEC_index (tree, cfun->local_decls, srcidx);
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	VEC_replace (tree, cfun->local_decls, dstidx, var);
      dstidx++;
    }
  if (dstidx != num)
    VEC_truncate (tree, cfun->local_decls, dstidx);

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  unsigned x;

  live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
  live->map = map;
  live->num_blocks = last_basic_block;

  live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
  for (x = 0; x < (unsigned)last_basic_block; x++)
    live->livein[x] = BITMAP_ALLOC (NULL);

  live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
  for (x = 0; x < (unsigned)last_basic_block; x++)
    live->liveout[x] = BITMAP_ALLOC (NULL);

  live->work_stack = XNEWVEC (int, last_basic_block);
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}

/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  int x;

  BITMAP_FREE (live->global);
  free (live->work_stack);

  for (x = live->num_blocks - 1; x >= 0; x--)
    BITMAP_FREE (live->liveout[x]);
  free (live->liveout);

  for (x = live->num_blocks - 1; x >= 0; x--)
    BITMAP_FREE (live->livein[x]);
  free (live->livein);

  free (live);
}

/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;
  gcc_assert (!TEST_BIT (visited, bb->index));

  SET_BIT (visited, bb->index);
  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
	continue;
      /* TMP is the set of variables live-on-entry to BB that aren't defined
	 in the predecessor block.  These should be the live on entry vars
	 to pred.  Note that liveout is the DEFs in a block while live on
	 entry is being calculated.  */
      bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred.  If there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (TEST_BIT (visited, pred_bb->index) && change)
	{
	  RESET_BIT (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}

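/* In dataflow terms, loe_visit_block applies the backward live-variable
   transfer across each incoming edge:

     LiveOnEntry (pred) |= LiveOnEntry (bb) - Def (pred)

   where Def (pred) is what set_var_live_on_entry temporarily stores in
   live->liveout.  Iterating to a fixed point (live_worklist below)
   yields the final live-on-entry sets.  */
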
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
  bitmap tmp = BITMAP_ALLOC (NULL);

  sbitmap_zero (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    loe_visit_block (live, bb, visited, tmp);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
    }

  BITMAP_FREE (tmp);
  sbitmap_free (visited);
}

/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHIs are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it
	     is defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (use_stmt, index);
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}

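/* A small worked example (hypothetical CFG): given a name x_1 defined in
   BB2 and used in BB5, set_var_live_on_entry marks x_1's partition in
   livein[5], marks it (temporarily) in liveout[2], and sets its bit in
   the "global" bitmap.  live_worklist then propagates the livein bit
   backward through every block on the BB2 -> BB5 paths, stopping at the
   def.  */
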
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used the liveout vectors for defs; clear
     them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR)
		bitmap_set_bit (liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR)
	  bitmap_ior_into (liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}

/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);
  return live;
}

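/* A minimal end-to-end sketch (illustrative only) of how a consumer such
   as the out-of-ssa coalescer drives this file:

     var_map map = init_var_map (num_ssa_names);
     partition_view_normal (map, false);
     tree_live_info_p live = calculate_live_ranges (map);
     if (dump_file)
       dump_live_info (dump_file, live, LIVEDUMP_ALL);
     delete_tree_live_info (live);
     delete_var_map (map);  */
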
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE)
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int)x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}

/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB (bb)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB (bb)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }
}

#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}

/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}
#endif