gcc/tree-ssa-live.c
/* Liveness for SSA trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "timevar.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "debug.h"
#include "tree-ssa.h"
#include "lto-streamer.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif


/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
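
/* Illustrative example (added note, not part of the original sources):
   given GIMPLE like

     a_1 = b_2;
     # c_3 = PHI <a_1(2), b_2(3)>

   coalescing may union a_1, b_2 and c_3 into a single partition, which
   out-of-ssa then rewrites as one compiler variable.  The SSA names and
   block numbers above are hypothetical.  */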


/* Hashtable helpers.  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map *value_type;
  typedef tree_int_map *compare_type;
  static inline hashval_t hash (const tree_int_map *);
  static inline bool equal (const tree_int_map *, const tree_int_map *);
};

inline hashval_t
tree_int_map_hasher::hash (const tree_int_map *v)
{
  return tree_map_base_hash (v);
}

inline bool
tree_int_map_hasher::equal (const tree_int_map *v, const tree_int_map *c)
{
  return tree_int_map_eq (v, c);
}


/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  hash_table<tree_int_map_hasher> tree_to_index (num_part);
  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var)
          && (!VAR_P (SSA_NAME_VAR (var))
              || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
        m->base.from = SSA_NAME_VAR (var);
      else
        /* This restricts what anonymous SSA names we can coalesce
           as it restricts the sets we compute conflicts for.
           Using TREE_TYPE to generate sets is the easiest, as
           type equivalency also holds for SSA names with the same
           underlying decl.

           Check gimple_can_coalesce_p when changing this code.  */
        m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
                        ? TYPE_CANONICAL (TREE_TYPE (var))
                        : TREE_TYPE (var));
      /* If the base variable hasn't been seen, set it up.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
        {
          baseindex = m - mapstorage;
          m->to = baseindex;
          *slot = m;
          m++;
        }
      else
        baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
}


/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}


/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}


/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
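
/* Usage sketch (added, illustrative only): the typical lifecycle of a
   var_map using the functions above.  VAR1 and VAR2 stand in for arbitrary
   non-virtual SSA_NAMEs.

     var_map map = init_var_map (num_ssa_names);
     if (var_union (map, var1, var2) != NO_PARTITION)
       ... var1 and var2 now share a partition ...
     partition_view_normal (map, false);
     delete_var_map (map);
*/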


/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */


/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}


/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}


/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}


/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);
  /* USED is not needed any more; free it to avoid leaking it.  */
  BITMAP_FREE (used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}
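
/* Added note (not from the original sources): after partition_view_normal
   or partition_view_bitmap, partition numbers are dense in the range
   0 .. num_var_partitions (map) - 1, so per-partition data can be kept in
   flat vectors of that length and translated back with partition_to_var,
   e.g.

     partition_view_normal (map, true);
     unsigned n = num_var_partitions (map);
     ... allocate an n-element table indexed by partition number ...
*/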


static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
        return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFs, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLs; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non-global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl)
        mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can possibly
   be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Check whether the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead,
        or there is precisely one subblock and the block
        has the same abstract origin as the outer block and declares
        no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   from scope blocks, to let them be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block
           && BLOCK_ABSTRACT_ORIGIN (scope)
           && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
    {
      in_ctor_dtor_block = false;
      unused = false;
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of a nested function refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if the function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         The exception is scope blocks not containing any instructions
         at all, so the user can't get into the scopes in the first place.  */
      else if (is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */
        /* In principle, we should do the same here as for the debug case
           below; however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we however can keep around
         only the used variables for cfgexpand's memory packing, saving quite
         a lot of memory.

         For the sake of -g3, we keep around those vars but we don't count
         this as use of the block, so an innermost block with no used vars and
         no instructions can be considered dead.  We only want to keep around
         blocks the user can breakpoint into and ask about the value of
         optimized out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether a given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks++;
      }


  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
    {
      /* Even for -g0 don't prune outer scopes from artificial
         functions, otherwise diagnostics using tree_nonartificial_location
         will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          tree ao = scope;

          while (ao
                 && TREE_CODE (ao) == BLOCK
                 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
            ao = BLOCK_ABSTRACT_ORIGIN (ao);
          if (ao
              && TREE_CODE (ao) == FUNCTION_DECL
              && DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of the inlined
     function.  Inlined functions are always represented by a block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
                == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLs under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        unsigned i;
        tree b;
        gimple stmt = gsi_stmt (gsi);

        if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
          continue;
        b = gimple_block (stmt);
        if (b && !TREE_USED (b))
          gimple_set_block (stmt, NULL);
        for (i = 0; i < gimple_num_ops (stmt); i++)
          walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
                     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)",
           BLOCK_ABSTRACT (scope) ? " (abstract)" : "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}


/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}


/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          if (is_gimple_debug (stmt))
            continue;

          if (gimple_clobber_p (stmt))
            {
              have_local_clobbers = true;
              continue;
            }

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
        }

      for (gphi_iterator gpi = gsi_start_phis (bb);
           !gsi_end_p (gpi);
           gsi_next (&gpi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gphi *phi = gpi.phi ();

          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              int index = PHI_ARG_INDEX_FROM_USE (arg_p);
              tree block =
                LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
              if (block != NULL)
                TREE_USED (block) = true;
              mark_all_vars_used (&arg);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (LOCATION_BLOCK (e->goto_locus) != NULL)
          TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach for the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);
            tree b = gimple_block (stmt);

            if (gimple_clobber_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree base = get_base_address (lhs);
                /* Remove clobbers referencing unused vars, or clobbers
                   with MEM_REF lhs referencing uninitialized pointers.  */
                if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
                    || (TREE_CODE (lhs) == MEM_REF
                        && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
                        && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
                        && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
                            != PARM_DECL)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            gsi_next (&gsi);
          }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
        {
          if (!is_used_p (var))
            {
              tree def;
              if (cfun->nonlocal_goto_save_area
                  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
                cfun->nonlocal_goto_save_area = NULL;
              /* Release any default def associated with var.  */
              if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
                {
                  set_ssa_default_def (cfun, var, NULL_TREE);
                  release_ssa_name (def);
                }
              continue;
            }
        }
      if (TREE_CODE (var) == VAR_DECL
          && DECL_HARD_REGISTER (var)
          && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        (*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);
  live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}


/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}


/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
      /* Variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live on entry vars to pred.
         Note that liveout is the DEFs in a block while live on entry is
         being calculated.
         Add these bits to live-on-entry for the pred.  If there are any
         changes, and pred_bb has been visited already, add it to the
         revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
                                          loe, &live->liveout[pred_bb->index]);
      if (change
          && bitmap_bit_p (visited, pred_bb->index))
        {
          bitmap_clear_bit (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}
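
/* Added note (not from the original sources): in dataflow terms the loop
   above implements, for each predecessor P of BB other than the entry
   block:

     live_on_entry (P) |= live_on_entry (BB) & ~liveout (P)

   where liveout (P) temporarily holds the partitions defined in P while
   live on entry is being computed (see set_var_live_on_entry).  */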


/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }

  sbitmap_free (visited);
}


/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
        bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHIs are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
          if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            {
              if (e->src != def_bb)
                add_block = e->src;
            }
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb)
            add_block = use_bb;
        }

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (&live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}


/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}
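
/* Added note (not from the original sources): the result above is the usual
   backward dataflow equation, restricted to the partitions tracked in
   LIVEINFO->map:

     liveout (BB) = (union of live_on_entry (S) over each successor S)
                    | (partitions of PHI arguments in S flowing in
                       across the edge BB->S)  */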


/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
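
/* Usage sketch (added, illustrative only): how a consumer such as the
   out-of-ssa pass might drive this file end to end.  SOME_BB stands for an
   arbitrary basic block; live_on_entry and live_on_exit are the accessors
   declared in tree-ssa-live.h.

     var_map map = init_var_map (num_ssa_names);
     ... coalesce copies and PHI arguments with var_union ...
     partition_view_normal (map, false);
     tree_live_info_p live = calculate_live_ranges (map, true);
     bitmap entry = live_on_entry (live, some_bb);
     bitmap exit = live_on_exit (live, some_bb);
     ... query the bitmaps by partition number ...
     delete_tree_live_info (live);
     delete_var_map (map);
*/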


/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE
          || virtual_operand_p (ssa_name (p)))
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, " ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, " ");
            }
          fprintf (f, "\n");
        }
    }
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}


/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d = NULL_TREE;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          if (SSA_NAME_VAR (var))
            d = ssa_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else
                {
                  if (d != var)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
                      fprintf (stderr, " is live-on-entry to BB%d ",
                               entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
                          print_generic_expr (stderr, d, TDF_SLIM);
                          fprintf (stderr, "\n");
                        }
                      else
                        fprintf (stderr, " and there is no default def.\n");
                    }
                }
            }
          else
            if (d == var)
              {
                /* An undefined local variable does not need to be very
                   alive.  */
                if (ssa_undefined_value_p (var, false))
                  continue;

                /* The only way this var shouldn't be marked live on entry is
                   if it occurs in a PHI argument of the block.  */
                size_t z;
                bool ok = false;
                gphi_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi) && !ok;
                     gsi_next (&gsi))
                  {
                    gphi *phi = gsi.phi ();
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
                      if (var == gimple_phi_arg_def (phi, z))
                        {
                          ok = true;
                          break;
                        }
                  }
                if (ok)
                  continue;
                num++;
                print_generic_expr (stderr, var, TDF_SLIM);
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                         entry_block);
                fprintf (stderr, "but it is a default def so it should be.\n");
              }
        }
    }
  gcc_assert (num <= 0);
}
#endif