1 /* Liveness for SSA trees.
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Andrew MacLeod <amacleod@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "gimple-pretty-print.h"
27 #include "bitmap.h"
28 #include "tree-flow.h"
29 #include "timevar.h"
30 #include "dumpfile.h"
31 #include "tree-ssa-live.h"
32 #include "diagnostic-core.h"
33 #include "debug.h"
34 #include "flags.h"
35 #include "gimple.h"
36
37 #ifdef ENABLE_CHECKING
38 static void verify_live_on_entry (tree_live_info_p);
39 #endif
40
41
42 /* VARMAP maintains a mapping from SSA version number to real variables.
43
44 All SSA_NAMES are divided into partitions. Initially each ssa_name is the
45 only member of its own partition. Coalescing will attempt to group any
46 ssa_names which occur in a copy or in a PHI node into the same partition.
47
48 At the end of out-of-ssa, each partition becomes a "real" variable and is
49 rewritten as a compiler variable.
50
51 The var_map data structure is used to manage these partitions. It allows
52 partitions to be combined, and determines which partition belongs to what
53 ssa_name or variable, and vice versa. */
54
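/* A rough usage sketch (illustrative only, not code from this file): a
   client such as the out-of-ssa code would typically create the map over
   all SSA names, union the names it wants to share a partition, and then
   compact the view before querying:

     var_map map = init_var_map (num_ssa_names);
     var_union (map, name1, name2);        (name1/name2 are SSA_NAMEs
                                             chosen by the caller)
     partition_view_normal (map, true);    (compact and build base variables)
     int p = var_to_partition (map, name1);
     delete_var_map (map);

   var_to_partition comes from tree-ssa-live.h; the real driver for this
   sequence lives in the out-of-ssa pass, this is only meant to show how
   the functions below fit together.  */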
55
56 /* This routine will initialize the basevar fields of MAP. */
57
58 static void
59 var_map_base_init (var_map map)
60 {
61 int x, num_part;
62 tree var;
63 htab_t tree_to_index;
64 struct tree_int_map *m, *mapstorage;
65
66 num_part = num_var_partitions (map);
67 tree_to_index = htab_create (num_part, tree_map_base_hash,
68 tree_int_map_eq, NULL);
69 /* We can have at most num_part entries in the hash table, so it's
70 enough to allocate that many map elements once, saving some malloc
71 calls. */
72 mapstorage = m = XNEWVEC (struct tree_int_map, num_part);
73
74 /* If a base table already exists, clear it, otherwise create it. */
75 free (map->partition_to_base_index);
76 map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);
77
78 /* Build the base variable list, and point partitions at their bases. */
79 for (x = 0; x < num_part; x++)
80 {
81 struct tree_int_map **slot;
82 unsigned baseindex;
83 var = partition_to_var (map, x);
84 if (SSA_NAME_VAR (var))
85 m->base.from = SSA_NAME_VAR (var);
86 else
87 /* This restricts what anonymous SSA names we can coalesce
88 as it restricts the sets we compute conflicts for.
89 Using TREE_TYPE to generate sets is the easiest, as
90 type equivalency also holds for SSA names with the same
91 underlying decl. */
92 m->base.from = TREE_TYPE (var);
93 /* If base variable hasn't been seen, set it up. */
94 slot = (struct tree_int_map **) htab_find_slot (tree_to_index,
95 m, INSERT);
96 if (!*slot)
97 {
98 baseindex = m - mapstorage;
99 m->to = baseindex;
100 *slot = m;
101 m++;
102 }
103 else
104 baseindex = (*slot)->to;
105 map->partition_to_base_index[x] = baseindex;
106 }
107
108 map->num_basevars = m - mapstorage;
109
110 free (mapstorage);
111 htab_delete (tree_to_index);
112 }
113
114
115 /* Remove the base table in MAP. */
116
117 static void
118 var_map_base_fini (var_map map)
119 {
120 /* Free the basevar info if it is present. */
121 if (map->partition_to_base_index != NULL)
122 {
123 free (map->partition_to_base_index);
124 map->partition_to_base_index = NULL;
125 map->num_basevars = 0;
126 }
127 }
128 /* Create a variable partition map of SIZE, initialize and return it. */
129
130 var_map
131 init_var_map (int size)
132 {
133 var_map map;
134
135 map = (var_map) xmalloc (sizeof (struct _var_map));
136 map->var_partition = partition_new (size);
137
138 map->partition_to_view = NULL;
139 map->view_to_partition = NULL;
140 map->num_partitions = size;
141 map->partition_size = size;
142 map->num_basevars = 0;
143 map->partition_to_base_index = NULL;
144 return map;
145 }
146
147
148 /* Free memory associated with MAP. */
149
150 void
151 delete_var_map (var_map map)
152 {
153 var_map_base_fini (map);
154 partition_delete (map->var_partition);
155 free (map->partition_to_view);
156 free (map->view_to_partition);
157 free (map);
158 }
159
160
161 /* This function will combine the partitions in MAP for VAR1 and VAR2. It
162 returns the resulting partition. If the two
163 partitions cannot be combined, NO_PARTITION is returned. */
164
165 int
166 var_union (var_map map, tree var1, tree var2)
167 {
168 int p1, p2, p3;
169
170 gcc_assert (TREE_CODE (var1) == SSA_NAME);
171 gcc_assert (TREE_CODE (var2) == SSA_NAME);
172
173 /* This is independent of partition_to_view. If partition_to_view is
174 on, then whichever one of these partitions is absorbed will never have a
175 dereference into the partition_to_view array any more. */
176
177 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
178 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
179
180 gcc_assert (p1 != NO_PARTITION);
181 gcc_assert (p2 != NO_PARTITION);
182
183 if (p1 == p2)
184 p3 = p1;
185 else
186 p3 = partition_union (map->var_partition, p1, p2);
187
188 if (map->partition_to_view)
189 p3 = map->partition_to_view[p3];
190
191 return p3;
192 }
193
194
195 /* Compress the partition numbers in MAP such that they fall in the range
196 0..(num_partitions-1) instead of wherever they turned out during
197 the partitioning exercise. This removes any references to unused
198 partitions, thereby allowing bitmaps and other vectors to be much
199 denser.
200
201 This is implemented such that compaction doesn't affect partitioning.
202 I.e., once partitions are created and possibly merged, running one
203 or more different kinds of compaction will not affect the partitions
204 themselves. Their index might change, but all the same variables will
205 still be members of the same partition group. This allows work on reduced
206 sets, and no loss of information when a larger set is later desired.
207
208 In particular, coalescing can work on partitions which have 2 or more
209 definitions, and then 'recompact' later to include all the single
210 definitions for assignment to program variables. */
211
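/* For example (illustrative only): suppose that after coalescing only
   partitions 0, 3 and 7 are still referenced.  After compaction the view
   arrays built below would look roughly like

     partition_to_view = { 0, -1, -1, 1, -1, -1, -1, 2, ... }
     view_to_partition = { 0, 3, 7 }

   so view index 1 now stands for partition 3, while the partition
   membership of every SSA name is unchanged.  */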
212
213 /* Set MAP back to the initial state of having no partition view. Return a
214 bitmap which has a bit set for each partition number which is in use in the
215 varmap. */
216
217 static bitmap
218 partition_view_init (var_map map)
219 {
220 bitmap used;
221 int tmp;
222 unsigned int x;
223
224 used = BITMAP_ALLOC (NULL);
225
226 /* Already in a view? Abandon the old one. */
227 if (map->partition_to_view)
228 {
229 free (map->partition_to_view);
230 map->partition_to_view = NULL;
231 }
232 if (map->view_to_partition)
233 {
234 free (map->view_to_partition);
235 map->view_to_partition = NULL;
236 }
237
238 /* Find out which partitions are actually referenced. */
239 for (x = 0; x < map->partition_size; x++)
240 {
241 tmp = partition_find (map->var_partition, x);
242 if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
243 && (!has_zero_uses (ssa_name (tmp))
244 || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
245 bitmap_set_bit (used, tmp);
246 }
247
248 map->num_partitions = map->partition_size;
249 return used;
250 }
251
252
253 /* This routine will finalize the view data for MAP based on the partitions
254 set in SELECTED. This is either the same bitmap returned from
255 partition_view_init, or a trimmed down version if some of those partitions
256 were not desired in this view. SELECTED is freed before returning. */
257
258 static void
259 partition_view_fini (var_map map, bitmap selected)
260 {
261 bitmap_iterator bi;
262 unsigned count, i, x, limit;
263
264 gcc_assert (selected);
265
266 count = bitmap_count_bits (selected);
267 limit = map->partition_size;
268
269 /* If it's a one-to-one ratio, we don't need any view compaction. */
270 if (count < limit)
271 {
272 map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
273 memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
274 map->view_to_partition = (int *)xmalloc (count * sizeof (int));
275
276 i = 0;
277 /* Give each selected partition an index. */
278 EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
279 {
280 map->partition_to_view[x] = i;
281 map->view_to_partition[i] = x;
282 i++;
283 }
284 gcc_assert (i == count);
285 map->num_partitions = i;
286 }
287
288 BITMAP_FREE (selected);
289 }
290
291
292 /* Create a partition view which includes all the used partitions in MAP. If
293 WANT_BASES is true, create the base variable map as well. */
294
295 void
296 partition_view_normal (var_map map, bool want_bases)
297 {
298 bitmap used;
299
300 used = partition_view_init (map);
301 partition_view_fini (map, used);
302
303 if (want_bases)
304 var_map_base_init (map);
305 else
306 var_map_base_fini (map);
307 }
308
309
310 /* Create a partition view in MAP which includes just partitions which occur in
311 the bitmap ONLY. If WANT_BASES is true, create the base variable map
312 as well. */
313
314 void
315 partition_view_bitmap (var_map map, bitmap only, bool want_bases)
316 {
317 bitmap used;
318 bitmap new_partitions = BITMAP_ALLOC (NULL);
319 unsigned x, p;
320 bitmap_iterator bi;
321
322 used = partition_view_init (map);
323 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
324 {
325 p = partition_find (map->var_partition, x);
326 gcc_assert (bitmap_bit_p (used, p));
327 bitmap_set_bit (new_partitions, p);
328 }
329 partition_view_fini (map, new_partitions);
330
331 if (want_bases)
332 var_map_base_init (map);
333 else
334 var_map_base_fini (map);
335 }
336
337
338 static bitmap usedvars;
339
340 /* Mark VAR as used, so that it'll be preserved during rtl expansion.
341 Returns true if VAR wasn't marked before. */
342
343 static inline bool
344 set_is_used (tree var)
345 {
346 return bitmap_set_bit (usedvars, DECL_UID (var));
347 }
348
349 /* Return true if VAR is marked as used. */
350
351 static inline bool
352 is_used_p (tree var)
353 {
354 return bitmap_bit_p (usedvars, DECL_UID (var));
355 }
356
357 static inline void mark_all_vars_used (tree *);
358
359 /* Helper function for mark_all_vars_used, called via walk_tree. */
360
361 static tree
362 mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
363 {
364 tree t = *tp;
365 enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
366 tree b;
367
368 if (TREE_CODE (t) == SSA_NAME)
369 {
370 *walk_subtrees = 0;
371 t = SSA_NAME_VAR (t);
372 if (!t)
373 return NULL;
374 }
375
376 if (IS_EXPR_CODE_CLASS (c)
377 && (b = TREE_BLOCK (t)) != NULL)
378 TREE_USED (b) = true;
379
380 /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
381 fields do not contain vars. */
382 if (TREE_CODE (t) == TARGET_MEM_REF)
383 {
384 mark_all_vars_used (&TMR_BASE (t));
385 mark_all_vars_used (&TMR_INDEX (t));
386 mark_all_vars_used (&TMR_INDEX2 (t));
387 *walk_subtrees = 0;
388 return NULL;
389 }
390
391 /* Only need to mark VAR_DECLS; parameters and return results are not
392 eliminated as unused. */
393 if (TREE_CODE (t) == VAR_DECL)
394 {
395 /* When a global var becomes used for the first time, also walk its
396 initializer (non-global ones don't have any). */
397 if (set_is_used (t) && is_global_var (t))
398 mark_all_vars_used (&DECL_INITIAL (t));
399 }
400 /* remove_unused_scope_block_p requires information about labels
401 which are not DECL_IGNORED_P to tell if they might be used in the IL. */
402 else if (TREE_CODE (t) == LABEL_DECL)
403 /* Although the TREE_USED values that the frontend uses would be
404 acceptable (albeit slightly over-conservative) for our purposes,
405 init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
406 must re-compute it here. */
407 TREE_USED (t) = 1;
408
409 if (IS_TYPE_OR_DECL_P (t))
410 *walk_subtrees = 0;
411
412 return NULL;
413 }
414
415 /* Mark the scope block SCOPE and its subblocks as unused when they can
416 possibly be eliminated if dead. */
417
418 static void
419 mark_scope_block_unused (tree scope)
420 {
421 tree t;
422 TREE_USED (scope) = false;
423 if (!(*debug_hooks->ignore_block) (scope))
424 TREE_USED (scope) = true;
425 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
426 mark_scope_block_unused (t);
427 }
428
429 /* Check whether the block is dead (by possibly eliminating its dead subblocks)
430 and return true if so.
431 A block is declared dead if:
432 1) No statements are associated with it.
433 2) It declares no live variables.
434 3) All subblocks are dead,
435 or there is precisely one subblock and the block
436 has the same abstract origin as the outer block and declares
437 no variables, so it is a pure wrapper.
438 When we are not outputting full debug info, we also eliminate dead variables
439 out of scope blocks to let them be recycled by GGC and to save copying work
440 done by the inliner. */
441
442 static bool
443 remove_unused_scope_block_p (tree scope)
444 {
445 tree *t, *next;
446 bool unused = !TREE_USED (scope);
447 int nsubblocks = 0;
448
449 for (t = &BLOCK_VARS (scope); *t; t = next)
450 {
451 next = &DECL_CHAIN (*t);
452
453 /* Debug info of a nested function refers to the block of the
454 function. We might still call it even if all statements
455 of the function it was nested into were eliminated.
456
457 TODO: We can actually look into cgraph to see if function
458 will be output to file. */
459 if (TREE_CODE (*t) == FUNCTION_DECL)
460 unused = false;
461
462 /* If a decl has a value expr, we need to instantiate it
463 regardless of debug info generation, to avoid codegen
464 differences in memory overlap tests. update_equiv_regs() may
465 indirectly call validate_equiv_mem() to test whether a
466 SET_DEST overlaps with others, and if the value expr changes
467 by virtual register instantiation, we may end up with
468 different results. */
469 else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
470 unused = false;
471
472 /* Remove everything we don't generate debug info for. */
473 else if (DECL_IGNORED_P (*t))
474 {
475 *t = DECL_CHAIN (*t);
476 next = t;
477 }
478
479 /* When we are outputting debug info, we usually want to output
480 info about optimized-out variables in the scope blocks.
481 The exception is scope blocks not containing any instructions
482 at all, so the user can't get into those scopes in the first place. */
483 else if (is_used_p (*t))
484 unused = false;
485 else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
486 /* For labels that are still used in the IL, the decision to
487 preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
488 risk having different ordering in debug vs. non-debug builds
489 during inlining or versioning.
490 A label appearing here (we have already checked DECL_IGNORED_P)
491 should not be used in the IL unless it has been explicitly used
492 before, so we use TREE_USED as an approximation. */
493 /* In principle, we should do the same here as for the debug case
494 below, however, when debugging, there might be additional nested
495 levels that keep an upper level with a label live, so we have to
496 force this block to be considered used, too. */
497 unused = false;
498
499 /* However, when we are not emitting full debug info, we can keep around
500 only the used variables for cfgexpand's memory packing, saving quite
501 a lot of memory.
502
503 For the sake of -g3, we keep those vars around but we don't count this as
504 a use of the block, so an innermost block with no used vars and no
505 instructions can be considered dead. We only want to keep around blocks
506 the user can break into and ask about the value of optimized-out variables.
507
508 Similarly we need to keep around types at least until all
509 variables of all nested blocks are gone. We track no
510 information on whether a given type is used or not, so we have
511 to keep them even when not emitting debug information,
512 otherwise we may end up remapping variables and their (local)
513 types in different orders depending on whether debug
514 information is being generated. */
515
516 else if (TREE_CODE (*t) == TYPE_DECL
517 || debug_info_level == DINFO_LEVEL_NORMAL
518 || debug_info_level == DINFO_LEVEL_VERBOSE)
519 ;
520 else
521 {
522 *t = DECL_CHAIN (*t);
523 next = t;
524 }
525 }
526
527 for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
528 if (remove_unused_scope_block_p (*t))
529 {
530 if (BLOCK_SUBBLOCKS (*t))
531 {
532 tree next = BLOCK_CHAIN (*t);
533 tree supercontext = BLOCK_SUPERCONTEXT (*t);
534
535 *t = BLOCK_SUBBLOCKS (*t);
536 while (BLOCK_CHAIN (*t))
537 {
538 BLOCK_SUPERCONTEXT (*t) = supercontext;
539 t = &BLOCK_CHAIN (*t);
540 }
541 BLOCK_CHAIN (*t) = next;
542 BLOCK_SUPERCONTEXT (*t) = supercontext;
543 t = &BLOCK_CHAIN (*t);
544 nsubblocks ++;
545 }
546 else
547 *t = BLOCK_CHAIN (*t);
548 }
549 else
550 {
551 t = &BLOCK_CHAIN (*t);
552 nsubblocks ++;
553 }
554
555
556 if (!unused)
557 ;
558 /* Outer scope is always used. */
559 else if (!BLOCK_SUPERCONTEXT (scope)
560 || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
561 unused = false;
562 /* Innermost blocks with no live variables or statements can always be
563 eliminated. */
564 else if (!nsubblocks)
565 ;
566 /* For terse debug info we can eliminate info on unused variables. */
567 else if (debug_info_level == DINFO_LEVEL_NONE
568 || debug_info_level == DINFO_LEVEL_TERSE)
569 {
570 /* Even for -g0/-g1 don't prune outer scopes from artificial
571 functions, otherwise diagnostics using tree_nonartificial_location
572 will not be emitted properly. */
573 if (inlined_function_outer_scope_p (scope))
574 {
575 tree ao = scope;
576
577 while (ao
578 && TREE_CODE (ao) == BLOCK
579 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
580 ao = BLOCK_ABSTRACT_ORIGIN (ao);
581 if (ao
582 && TREE_CODE (ao) == FUNCTION_DECL
583 && DECL_DECLARED_INLINE_P (ao)
584 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
585 unused = false;
586 }
587 }
588 else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
589 unused = false;
590 /* See if this block is important for the representation of an inlined function.
591 Inlined functions are always represented by a block with
592 block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
593 set... */
594 else if (inlined_function_outer_scope_p (scope))
595 unused = false;
596 else
597 /* Verify that only blocks with source location set
598 are entry points to inlined functions. */
599 gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
600 == UNKNOWN_LOCATION);
601
602 TREE_USED (scope) = !unused;
603 return unused;
604 }
605
606 /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
607 eliminated during the tree->rtl conversion process. */
608
609 static inline void
610 mark_all_vars_used (tree *expr_p)
611 {
612 walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
613 }
614
615 /* Helper function for clear_unused_block_pointer, called via walk_tree. */
616
617 static tree
618 clear_unused_block_pointer_1 (tree *tp, int *, void *)
619 {
620 if (EXPR_P (*tp) && TREE_BLOCK (*tp)
621 && !TREE_USED (TREE_BLOCK (*tp)))
622 TREE_SET_BLOCK (*tp, NULL);
623 return NULL_TREE;
624 }
625
626 /* Set all block pointers in debug stmts to NULL if the block is unused,
627 so that they will not be streamed out. */
628
629 static void
630 clear_unused_block_pointer (void)
631 {
632 basic_block bb;
633 gimple_stmt_iterator gsi;
634
635 FOR_EACH_BB (bb)
636 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
637 {
638 unsigned i;
639 tree b;
640 gimple stmt = gsi_stmt (gsi);
641
642 if (!is_gimple_debug (stmt))
643 continue;
644 b = gimple_block (stmt);
645 if (b && !TREE_USED (b))
646 gimple_set_block (stmt, NULL);
647 for (i = 0; i < gimple_num_ops (stmt); i++)
648 walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
649 NULL, NULL);
650 }
651 }
652
653 /* Dump scope blocks starting at SCOPE to FILE. INDENT is the
654 indentation level and FLAGS is as in print_generic_expr. */
655
656 static void
657 dump_scope_block (FILE *file, int indent, tree scope, int flags)
658 {
659 tree var, t;
660 unsigned int i;
661
662 fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
663 TREE_USED (scope) ? "" : " (unused)",
664 BLOCK_ABSTRACT (scope) ? " (abstract)": "");
665 if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
666 {
667 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
668 fprintf (file, " %s:%i", s.file, s.line);
669 }
670 if (BLOCK_ABSTRACT_ORIGIN (scope))
671 {
672 tree origin = block_ultimate_origin (scope);
673 if (origin)
674 {
675 fprintf (file, " Originating from :");
676 if (DECL_P (origin))
677 print_generic_decl (file, origin, flags);
678 else
679 fprintf (file, "#%i", BLOCK_NUMBER (origin));
680 }
681 }
682 fprintf (file, " \n");
683 for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
684 {
685 fprintf (file, "%*s", indent, "");
686 print_generic_decl (file, var, flags);
687 fprintf (file, "\n");
688 }
689 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
690 {
691 fprintf (file, "%*s",indent, "");
692 print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
693 flags);
694 fprintf (file, " (nonlocalized)\n");
695 }
696 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
697 dump_scope_block (file, indent + 2, t, flags);
698 fprintf (file, "\n%*s}\n",indent, "");
699 }
700
701 /* Dump the tree of lexical scopes starting at SCOPE to stderr. FLAGS
702 is as in print_generic_expr. */
703
704 DEBUG_FUNCTION void
705 debug_scope_block (tree scope, int flags)
706 {
707 dump_scope_block (stderr, 0, scope, flags);
708 }
709
710
711 /* Dump the tree of lexical scopes of current_function_decl to FILE.
712 FLAGS is as in print_generic_expr. */
713
714 void
715 dump_scope_blocks (FILE *file, int flags)
716 {
717 dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
718 }
719
720
721 /* Dump the tree of lexical scopes of current_function_decl to stderr.
722 FLAGS is as in print_generic_expr. */
723
724 DEBUG_FUNCTION void
725 debug_scope_blocks (int flags)
726 {
727 dump_scope_blocks (stderr, flags);
728 }
729
730 /* Remove local variables that are not referenced in the IL. */
731
732 void
733 remove_unused_locals (void)
734 {
735 basic_block bb;
736 tree var;
737 unsigned srcidx, dstidx, num;
738 bool have_local_clobbers = false;
739
740 /* Removing declarations from lexical blocks when not optimizing is
741 not only a waste of time, it actually causes differences in stack
742 layout. */
743 if (!optimize)
744 return;
745
746 timevar_push (TV_REMOVE_UNUSED);
747
748 mark_scope_block_unused (DECL_INITIAL (current_function_decl));
749
750 usedvars = BITMAP_ALLOC (NULL);
751
752 /* Walk the CFG marking all referenced symbols. */
753 FOR_EACH_BB (bb)
754 {
755 gimple_stmt_iterator gsi;
756 size_t i;
757 edge_iterator ei;
758 edge e;
759
760 /* Walk the statements. */
761 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
762 {
763 gimple stmt = gsi_stmt (gsi);
764 tree b = gimple_block (stmt);
765
766 if (is_gimple_debug (stmt))
767 continue;
768
769 if (gimple_clobber_p (stmt))
770 {
771 have_local_clobbers = true;
772 continue;
773 }
774
775 if (b)
776 TREE_USED (b) = true;
777
778 for (i = 0; i < gimple_num_ops (stmt); i++)
779 mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
780 }
781
782 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
783 {
784 use_operand_p arg_p;
785 ssa_op_iter i;
786 tree def;
787 gimple phi = gsi_stmt (gsi);
788
789 if (virtual_operand_p (gimple_phi_result (phi)))
790 continue;
791
792 def = gimple_phi_result (phi);
793 mark_all_vars_used (&def);
794
795 FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
796 {
797 tree arg = USE_FROM_PTR (arg_p);
798 int index = PHI_ARG_INDEX_FROM_USE (arg_p);
799 tree block =
800 LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
801 if (block != NULL)
802 TREE_USED (block) = true;
803 mark_all_vars_used (&arg);
804 }
805 }
806
807 FOR_EACH_EDGE (e, ei, bb->succs)
808 if (LOCATION_BLOCK (e->goto_locus) != NULL)
809 TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
810 }
811
812 /* We take a two-pass approach to the out-of-scope clobbers. We want
813 to remove them if they are the only references to a local variable,
814 but we want to retain them when there is any other reference. So the first pass
815 ignores them, and the second pass (if there were any) tries to remove
816 them. */
817 if (have_local_clobbers)
818 FOR_EACH_BB (bb)
819 {
820 gimple_stmt_iterator gsi;
821
822 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
823 {
824 gimple stmt = gsi_stmt (gsi);
825 tree b = gimple_block (stmt);
826
827 if (gimple_clobber_p (stmt))
828 {
829 tree lhs = gimple_assign_lhs (stmt);
830 if (TREE_CODE (lhs) == VAR_DECL && !is_used_p (lhs))
831 {
832 unlink_stmt_vdef (stmt);
833 gsi_remove (&gsi, true);
834 release_defs (stmt);
835 continue;
836 }
837 if (b)
838 TREE_USED (b) = true;
839 }
840 gsi_next (&gsi);
841 }
842 }
843
844 cfun->has_local_explicit_reg_vars = false;
845
846 /* Remove unmarked local and global vars from local_decls. */
847 num = vec_safe_length (cfun->local_decls);
848 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
849 {
850 var = (*cfun->local_decls)[srcidx];
851 if (TREE_CODE (var) == VAR_DECL)
852 {
853 if (!is_used_p (var))
854 {
855 tree def;
856 if (cfun->nonlocal_goto_save_area
857 && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
858 cfun->nonlocal_goto_save_area = NULL;
859 /* Release any default def associated with var. */
860 if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
861 {
862 set_ssa_default_def (cfun, var, NULL_TREE);
863 release_ssa_name (def);
864 }
865 continue;
866 }
867 }
868 if (TREE_CODE (var) == VAR_DECL
869 && DECL_HARD_REGISTER (var)
870 && !is_global_var (var))
871 cfun->has_local_explicit_reg_vars = true;
872
873 if (srcidx != dstidx)
874 (*cfun->local_decls)[dstidx] = var;
875 dstidx++;
876 }
877 if (dstidx != num)
878 {
879 statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
880 cfun->local_decls->truncate (dstidx);
881 }
882
883 remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
884 clear_unused_block_pointer ();
885
886 BITMAP_FREE (usedvars);
887
888 if (dump_file && (dump_flags & TDF_DETAILS))
889 {
890 fprintf (dump_file, "Scope blocks after cleanups:\n");
891 dump_scope_blocks (dump_file, dump_flags);
892 }
893
894 timevar_pop (TV_REMOVE_UNUSED);
895 }
896
897 /* Obstack for global liveness info bitmaps. We don't want to put these
898 on the default obstack because these bitmaps can grow quite large and
899 we'll hold on to all that memory until the end of the compiler run.
900 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
901 releasing the whole obstack. */
902 static bitmap_obstack liveness_bitmap_obstack;
903
904 /* Allocate and return a new live range information object based on MAP. */
905
906 static tree_live_info_p
907 new_tree_live_info (var_map map)
908 {
909 tree_live_info_p live;
910 basic_block bb;
911
912 live = XNEW (struct tree_live_info_d);
913 live->map = map;
914 live->num_blocks = last_basic_block;
915
916 live->livein = XNEWVEC (bitmap_head, last_basic_block);
917 FOR_EACH_BB (bb)
918 bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack);
919
920 live->liveout = XNEWVEC (bitmap_head, last_basic_block);
921 FOR_EACH_BB (bb)
922 bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack);
923
924 live->work_stack = XNEWVEC (int, last_basic_block);
925 live->stack_top = live->work_stack;
926
927 live->global = BITMAP_ALLOC (&liveness_bitmap_obstack);
928 return live;
929 }
930
931
932 /* Free storage for live range info object LIVE. */
933
934 void
935 delete_tree_live_info (tree_live_info_p live)
936 {
937 bitmap_obstack_release (&liveness_bitmap_obstack);
938 free (live->work_stack);
939 free (live->liveout);
940 free (live->livein);
941 free (live);
942 }
943
944
945 /* Visit basic block BB and propagate any required live on entry bits from
946 LIVE into the predecessors. VISITED is the bitmap of visited blocks.
947 TMP is a temporary work bitmap which is passed in to avoid reallocating
948 it each time. */
949
950 static void
951 loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
952 bitmap tmp)
953 {
954 edge e;
955 bool change;
956 edge_iterator ei;
957 basic_block pred_bb;
958 bitmap loe;
959 gcc_assert (!bitmap_bit_p (visited, bb->index));
960
961 bitmap_set_bit (visited, bb->index);
962 loe = live_on_entry (live, bb);
963
964 FOR_EACH_EDGE (e, ei, bb->preds)
965 {
966 pred_bb = e->src;
967 if (pred_bb == ENTRY_BLOCK_PTR)
968 continue;
969 /* TMP is the set of variables live on entry to BB that aren't defined in the
970 predecessor block. These must then also be live on entry to pred.
971 Note that liveout is the DEFs in a block while live on entry is
972 being calculated. */
973 bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);
974
975 /* Add these bits to live-on-entry for the pred. If there are any
976 changes, and pred_bb has been visited already, add it to the
977 revisit stack. */
978 change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
979 if (bitmap_bit_p (visited, pred_bb->index) && change)
980 {
981 bitmap_clear_bit (visited, pred_bb->index);
982 *(live->stack_top)++ = pred_bb->index;
983 }
984 }
985 }
986
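/* The propagation done by loe_visit_block above amounts to the usual
   backward dataflow step; as a sketch, using the sets maintained in LIVE:

     for each predecessor PRED of BB, other than the entry block:
       live_on_entry (PRED) |= live_on_entry (BB) & ~liveout (PRED)

   where the liveout vector temporarily holds the defs of each block (see
   set_var_live_on_entry below).  A block is pushed back onto the work
   stack whenever its live-on-entry set grows after it has already been
   visited.  */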
987
988 /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
989 of all the variables. */
990
991 static void
992 live_worklist (tree_live_info_p live)
993 {
994 unsigned b;
995 basic_block bb;
996 sbitmap visited = sbitmap_alloc (last_basic_block + 1);
997 bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);
998
999 bitmap_clear (visited);
1000
1001 /* Visit all the blocks in reverse order and propagate live on entry values
1002 into the predecessor blocks. */
1003 FOR_EACH_BB_REVERSE (bb)
1004 loe_visit_block (live, bb, visited, tmp);
1005
1006 /* Process any blocks which require further iteration. */
1007 while (live->stack_top != live->work_stack)
1008 {
1009 b = *--(live->stack_top);
1010 loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
1011 }
1012
1013 BITMAP_FREE (tmp);
1014 sbitmap_free (visited);
1015 }
1016
1017
1018 /* Calculate the initial live on entry vector for SSA_NAME using immediate_use
1019 links. Set the live on entry fields in LIVE. Defs are marked temporarily
1020 in the liveout vector. */
1021
1022 static void
1023 set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
1024 {
1025 int p;
1026 gimple stmt;
1027 use_operand_p use;
1028 basic_block def_bb = NULL;
1029 imm_use_iterator imm_iter;
1030 bool global = false;
1031
1032 p = var_to_partition (live->map, ssa_name);
1033 if (p == NO_PARTITION)
1034 return;
1035
1036 stmt = SSA_NAME_DEF_STMT (ssa_name);
1037 if (stmt)
1038 {
1039 def_bb = gimple_bb (stmt);
1040 /* Mark defs in liveout bitmap temporarily. */
1041 if (def_bb)
1042 bitmap_set_bit (&live->liveout[def_bb->index], p);
1043 }
1044 else
1045 def_bb = ENTRY_BLOCK_PTR;
1046
1047 /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
1048 add it to the list of live on entry blocks. */
1049 FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
1050 {
1051 gimple use_stmt = USE_STMT (use);
1052 basic_block add_block = NULL;
1053
1054 if (gimple_code (use_stmt) == GIMPLE_PHI)
1055 {
1056 /* Uses in PHIs are considered to be live at exit of the SRC block
1057 as this is where a copy would be inserted. Check to see if it is
1058 defined in that block, or whether it's live on entry. */
1059 int index = PHI_ARG_INDEX_FROM_USE (use);
1060 edge e = gimple_phi_arg_edge (use_stmt, index);
1061 if (e->src != ENTRY_BLOCK_PTR)
1062 {
1063 if (e->src != def_bb)
1064 add_block = e->src;
1065 }
1066 }
1067 else if (is_gimple_debug (use_stmt))
1068 continue;
1069 else
1070 {
1071 /* If it's not defined in this block, it's live on entry. */
1072 basic_block use_bb = gimple_bb (use_stmt);
1073 if (use_bb != def_bb)
1074 add_block = use_bb;
1075 }
1076
1077 /* If there was a live on entry use, set the bit. */
1078 if (add_block)
1079 {
1080 global = true;
1081 bitmap_set_bit (&live->livein[add_block->index], p);
1082 }
1083 }
1084
1085 /* If SSA_NAME is live on entry to at least one block, mark it as a global
1086 partition, i.e. one that is live across a block boundary. */
1087 if (global)
1088 bitmap_set_bit (live->global, p);
1089 }
1090
1091
1092 /* Calculate the live on exit vectors based on the entry info in LIVEINFO. */
1093
1094 void
1095 calculate_live_on_exit (tree_live_info_p liveinfo)
1096 {
1097 basic_block bb;
1098 edge e;
1099 edge_iterator ei;
1100
1101 /* Live-on-entry calculations used the liveout vectors for defs; clear them. */
1102 FOR_EACH_BB (bb)
1103 bitmap_clear (&liveinfo->liveout[bb->index]);
1104
1105 /* Set all the live-on-exit bits for uses in PHIs. */
1106 FOR_EACH_BB (bb)
1107 {
1108 gimple_stmt_iterator gsi;
1109 size_t i;
1110
1111 /* Mark the PHI arguments which are live on exit to the pred block. */
1112 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1113 {
1114 gimple phi = gsi_stmt (gsi);
1115 for (i = 0; i < gimple_phi_num_args (phi); i++)
1116 {
1117 tree t = PHI_ARG_DEF (phi, i);
1118 int p;
1119
1120 if (TREE_CODE (t) != SSA_NAME)
1121 continue;
1122
1123 p = var_to_partition (liveinfo->map, t);
1124 if (p == NO_PARTITION)
1125 continue;
1126 e = gimple_phi_arg_edge (phi, i);
1127 if (e->src != ENTRY_BLOCK_PTR)
1128 bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
1129 }
1130 }
1131
1132 /* Add each successor's live-on-entry set to this block's live-on-exit set. */
1133 FOR_EACH_EDGE (e, ei, bb->succs)
1134 if (e->dest != EXIT_BLOCK_PTR)
1135 bitmap_ior_into (&liveinfo->liveout[bb->index],
1136 live_on_entry (liveinfo, e->dest));
1137 }
1138 }
1139
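/* In equation form, calculate_live_on_exit computes (sketch):

     liveout (BB) = union over non-exit successors S of live_on_entry (S)
                    plus the partitions of PHI arguments flowing out of BB

   matching the convention above that a PHI use is considered live at the
   exit of the edge's source block.  */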
1140
1141 /* Given partition map MAP, calculate all the live on entry bitmaps for
1142 each partition. Return a new live info object. */
1143
1144 tree_live_info_p
1145 calculate_live_ranges (var_map map)
1146 {
1147 tree var;
1148 unsigned i;
1149 tree_live_info_p live;
1150
1151 bitmap_obstack_initialize (&liveness_bitmap_obstack);
1152 live = new_tree_live_info (map);
1153 for (i = 0; i < num_var_partitions (map); i++)
1154 {
1155 var = partition_to_var (map, i);
1156 if (var != NULL_TREE)
1157 set_var_live_on_entry (var, live);
1158 }
1159
1160 live_worklist (live);
1161
1162 #ifdef ENABLE_CHECKING
1163 verify_live_on_entry (live);
1164 #endif
1165
1166 calculate_live_on_exit (live);
1167 return live;
1168 }
1169
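/* A rough usage sketch for the liveness API above (illustrative only):

     tree_live_info_p live = calculate_live_ranges (map);
     bitmap in = live_on_entry (live, bb);
     bitmap out = live_on_exit (live, bb);
     ...
     delete_tree_live_info (live);

   live_on_entry and live_on_exit are the per-block accessors from
   tree-ssa-live.h; MAP is usually compacted first (see
   partition_view_normal above) so that the per-block bitmaps, which are
   indexed by partition number, stay dense.  */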
1170
1171 /* Output partition map MAP to file F. */
1172
1173 void
1174 dump_var_map (FILE *f, var_map map)
1175 {
1176 int t;
1177 unsigned x, y;
1178 int p;
1179
1180 fprintf (f, "\nPartition map \n\n");
1181
1182 for (x = 0; x < map->num_partitions; x++)
1183 {
1184 if (map->view_to_partition != NULL)
1185 p = map->view_to_partition[x];
1186 else
1187 p = x;
1188
1189 if (ssa_name (p) == NULL_TREE
1190 || virtual_operand_p (ssa_name (p)))
1191 continue;
1192
1193 t = 0;
1194 for (y = 1; y < num_ssa_names; y++)
1195 {
1196 p = partition_find (map->var_partition, y);
1197 if (map->partition_to_view)
1198 p = map->partition_to_view[p];
1199 if (p == (int)x)
1200 {
1201 if (t++ == 0)
1202 {
1203 fprintf(f, "Partition %d (", x);
1204 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1205 fprintf (f, " - ");
1206 }
1207 fprintf (f, "%d ", y);
1208 }
1209 }
1210 if (t != 0)
1211 fprintf (f, ")\n");
1212 }
1213 fprintf (f, "\n");
1214 }
1215
1216
1217 /* Generic dump for the above. */
1218
1219 DEBUG_FUNCTION void
1220 debug (_var_map &ref)
1221 {
1222 dump_var_map (stderr, &ref);
1223 }
1224
1225 DEBUG_FUNCTION void
1226 debug (_var_map *ptr)
1227 {
1228 if (ptr)
1229 debug (*ptr);
1230 else
1231 fprintf (stderr, "<nil>\n");
1232 }
1233
1234
1235 /* Output live range info LIVE to file F, controlled by FLAG. */
1236
1237 void
1238 dump_live_info (FILE *f, tree_live_info_p live, int flag)
1239 {
1240 basic_block bb;
1241 unsigned i;
1242 var_map map = live->map;
1243 bitmap_iterator bi;
1244
1245 if ((flag & LIVEDUMP_ENTRY) && live->livein)
1246 {
1247 FOR_EACH_BB (bb)
1248 {
1249 fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1250 EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
1251 {
1252 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1253 fprintf (f, " ");
1254 }
1255 fprintf (f, "\n");
1256 }
1257 }
1258
1259 if ((flag & LIVEDUMP_EXIT) && live->liveout)
1260 {
1261 FOR_EACH_BB (bb)
1262 {
1263 fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1264 EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
1265 {
1266 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1267 fprintf (f, " ");
1268 }
1269 fprintf (f, "\n");
1270 }
1271 }
1272 }
1273
1274
1275 /* Generic dump for the above. */
1276
1277 DEBUG_FUNCTION void
1278 debug (tree_live_info_d &ref)
1279 {
1280 dump_live_info (stderr, &ref, 0);
1281 }
1282
1283 DEBUG_FUNCTION void
1284 debug (tree_live_info_d *ptr)
1285 {
1286 if (ptr)
1287 debug (*ptr);
1288 else
1289 fprintf (stderr, "<nil>\n");
1290 }
1291
1292
1293 #ifdef ENABLE_CHECKING
1294 /* Verify that SSA_VAR is a non-virtual SSA_NAME. */
1295
1296 void
1297 register_ssa_partition_check (tree ssa_var)
1298 {
1299 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
1300 if (virtual_operand_p (ssa_var))
1301 {
1302 fprintf (stderr, "Illegally registering a virtual SSA name :");
1303 print_generic_expr (stderr, ssa_var, TDF_SLIM);
1304 fprintf (stderr, " in the SSA->Normal phase.\n");
1305 internal_error ("SSA corruption");
1306 }
1307 }
1308
1309
1310 /* Verify that the info in LIVE matches the current cfg. */
1311
1312 static void
1313 verify_live_on_entry (tree_live_info_p live)
1314 {
1315 unsigned i;
1316 tree var;
1317 gimple stmt;
1318 basic_block bb;
1319 edge e;
1320 int num;
1321 edge_iterator ei;
1322 var_map map = live->map;
1323
1324 /* Check for live on entry partitions and report those with a DEF in
1325 the program. This will typically mean an optimization has done
1326 something wrong. */
1327 bb = ENTRY_BLOCK_PTR;
1328 num = 0;
1329 FOR_EACH_EDGE (e, ei, bb->succs)
1330 {
1331 int entry_block = e->dest->index;
1332 if (e->dest == EXIT_BLOCK_PTR)
1333 continue;
1334 for (i = 0; i < (unsigned)num_var_partitions (map); i++)
1335 {
1336 basic_block tmp;
1337 tree d = NULL_TREE;
1338 bitmap loe;
1339 var = partition_to_var (map, i);
1340 stmt = SSA_NAME_DEF_STMT (var);
1341 tmp = gimple_bb (stmt);
1342 if (SSA_NAME_VAR (var))
1343 d = ssa_default_def (cfun, SSA_NAME_VAR (var));
1344
1345 loe = live_on_entry (live, e->dest);
1346 if (loe && bitmap_bit_p (loe, i))
1347 {
1348 if (!gimple_nop_p (stmt))
1349 {
1350 num++;
1351 print_generic_expr (stderr, var, TDF_SLIM);
1352 fprintf (stderr, " is defined ");
1353 if (tmp)
1354 fprintf (stderr, " in BB%d, ", tmp->index);
1355 fprintf (stderr, "by:\n");
1356 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
1357 fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
1358 entry_block);
1359 fprintf (stderr, " So it appears to have multiple defs.\n");
1360 }
1361 else
1362 {
1363 if (d != var)
1364 {
1365 num++;
1366 print_generic_expr (stderr, var, TDF_SLIM);
1367 fprintf (stderr, " is live-on-entry to BB%d ",
1368 entry_block);
1369 if (d)
1370 {
1371 fprintf (stderr, " but is not the default def of ");
1372 print_generic_expr (stderr, d, TDF_SLIM);
1373 fprintf (stderr, "\n");
1374 }
1375 else
1376 fprintf (stderr, " and there is no default def.\n");
1377 }
1378 }
1379 }
1380 else
1381 if (d == var)
1382 {
1383 /* The only way this var shouldn't be marked live on entry is
1384 if it occurs in a PHI argument of the block. */
1385 size_t z;
1386 bool ok = false;
1387 gimple_stmt_iterator gsi;
1388 for (gsi = gsi_start_phis (e->dest);
1389 !gsi_end_p (gsi) && !ok;
1390 gsi_next (&gsi))
1391 {
1392 gimple phi = gsi_stmt (gsi);
1393 for (z = 0; z < gimple_phi_num_args (phi); z++)
1394 if (var == gimple_phi_arg_def (phi, z))
1395 {
1396 ok = true;
1397 break;
1398 }
1399 }
1400 if (ok)
1401 continue;
1402 num++;
1403 print_generic_expr (stderr, var, TDF_SLIM);
1404 fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
1405 entry_block);
1406 fprintf (stderr, "but it is a default def so it should be.\n");
1407 }
1408 }
1409 }
1410 gcc_assert (num <= 0);
1411 }
1412 #endif