/* tree-dfa.c (add_referenced_var): Do not walk DECL_INITIAL for more
   referenced vars.
   [gcc.git] / gcc / tree-ssa-live.c  */
1 /* Liveness for SSA trees.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Andrew MacLeod <amacleod@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-pretty-print.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "tree-ssa-live.h"
33 #include "diagnostic-core.h"
34 #include "debug.h"
35 #include "flags.h"
36 #include "gimple.h"
37
38 #ifdef ENABLE_CHECKING
39 static void verify_live_on_entry (tree_live_info_p);
40 #endif
41
42
43 /* VARMAP maintains a mapping from SSA version number to real variables.
44
45 All SSA_NAMES are divided into partitions. Initially each ssa_name is the
46 only member of it's own partition. Coalescing will attempt to group any
47 ssa_names which occur in a copy or in a PHI node into the same partition.
48
49 At the end of out-of-ssa, each partition becomes a "real" variable and is
50 rewritten as a compiler variable.
51
52 The var_map data structure is used to manage these partitions. It allows
53 partitions to be combined, and determines which partition belongs to what
54 ssa_name or variable, and vice versa. */
55
56
/* This routine will initialize the basevar fields of MAP.

   Every partition is mapped to an index into MAP->basevars naming its
   underlying base variable (for an SSA name, its SSA_NAME_VAR).
   Partitions sharing a base variable get the same index, which is later
   used to decide which partitions may be coalesced.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part, num;
  tree var;
  var_ann_t ann;

  num = 0;
  num_part = num_var_partitions (map);

  /* If a base table already exists, clear it, otherwise create it.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      VEC_truncate (tree, map->basevars, 0);
    }
  else
    map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10)));

  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.
     The base_var_processed flag in the var annotation marks base vars
     already assigned an index; it is cleared again below.  */
  for (x = 0; x < num_part; x++)
    {
      var = partition_to_var (map, x);
      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);
      ann = var_ann (var);
      /* If base variable hasn't been seen, set it up.  */
      if (!ann->base_var_processed)
	{
	  ann->base_var_processed = 1;
	  VAR_ANN_BASE_INDEX (ann) = num++;
	  VEC_safe_push (tree, heap, map->basevars, var);
	}
      map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann);
    }

  map->num_basevars = num;

  /* Now clear the processed bit so the annotations are reusable.  */
  for (x = 0; x < num; x++)
    {
      var = VEC_index (tree, map->basevars, x);
      var_ann (var)->base_var_processed = 0;
    }

#ifdef ENABLE_CHECKING
  /* Verify that each partition's recorded base really is the base
     variable of the partition's representative.  */
  for (x = 0; x < num_part; x++)
    {
      tree var2;
      var = SSA_NAME_VAR (partition_to_var (map, x));
      var2 = VEC_index (tree, map->basevars, basevar_index (map, x));
      gcc_assert (var == var2);
    }
#endif
}
116
117
/* Remove the base table in MAP.  Safe to call when no base table has
   been built; it then does nothing.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      VEC_free (tree, heap, map->basevars);
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}
132 /* Create a variable partition map of SIZE, initialize and return it. */
133
134 var_map
135 init_var_map (int size)
136 {
137 var_map map;
138
139 map = (var_map) xmalloc (sizeof (struct _var_map));
140 map->var_partition = partition_new (size);
141
142 map->partition_to_view = NULL;
143 map->view_to_partition = NULL;
144 map->num_partitions = size;
145 map->partition_size = size;
146 map->num_basevars = 0;
147 map->partition_to_base_index = NULL;
148 map->basevars = NULL;
149 return map;
150 }
151
152
153 /* Free memory associated with MAP. */
154
155 void
156 delete_var_map (var_map map)
157 {
158 var_map_base_fini (map);
159 partition_delete (map->var_partition);
160 free (map->partition_to_view);
161 free (map->view_to_partition);
162 free (map);
163 }
164
165
166 /* This function will combine the partitions in MAP for VAR1 and VAR2. It
167 Returns the partition which represents the new partition. If the two
168 partitions cannot be combined, NO_PARTITION is returned. */
169
170 int
171 var_union (var_map map, tree var1, tree var2)
172 {
173 int p1, p2, p3;
174
175 gcc_assert (TREE_CODE (var1) == SSA_NAME);
176 gcc_assert (TREE_CODE (var2) == SSA_NAME);
177
178 /* This is independent of partition_to_view. If partition_to_view is
179 on, then whichever one of these partitions is absorbed will never have a
180 dereference into the partition_to_view array any more. */
181
182 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
183 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
184
185 gcc_assert (p1 != NO_PARTITION);
186 gcc_assert (p2 != NO_PARTITION);
187
188 if (p1 == p2)
189 p3 = p1;
190 else
191 p3 = partition_union (map->var_partition, p1, p2);
192
193 if (map->partition_to_view)
194 p3 = map->partition_to_view[p3];
195
196 return p3;
197 }
198
199
200 /* Compress the partition numbers in MAP such that they fall in the range
201 0..(num_partitions-1) instead of wherever they turned out during
202 the partitioning exercise. This removes any references to unused
203 partitions, thereby allowing bitmaps and other vectors to be much
204 denser.
205
206 This is implemented such that compaction doesn't affect partitioning.
207 Ie., once partitions are created and possibly merged, running one
208 or more different kind of compaction will not affect the partitions
209 themselves. Their index might change, but all the same variables will
210 still be members of the same partition group. This allows work on reduced
211 sets, and no loss of information when a larger set is later desired.
212
213 In particular, coalescing can work on partitions which have 2 or more
214 definitions, and then 'recompact' later to include all the single
215 definitions for assignment to program variables. */
216
217
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  The caller owns the returned bitmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  A partition is
     kept if its representative ssa_name exists, is a gimple register,
     and is not a use-less default definition.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && is_gimple_reg (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
256
257
/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If its a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      /* 0xff fills each entry with -1, marking "not in this view".  */
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
295
296
297 /* Create a partition view which includes all the used partitions in MAP. If
298 WANT_BASES is true, create the base variable map as well. */
299
300 extern void
301 partition_view_normal (var_map map, bool want_bases)
302 {
303 bitmap used;
304
305 used = partition_view_init (map);
306 partition_view_fini (map, used);
307
308 if (want_bases)
309 var_map_base_init (map);
310 else
311 var_map_base_fini (map);
312 }
313
314
315 /* Create a partition view in MAP which includes just partitions which occur in
316 the bitmap ONLY. If WANT_BASES is true, create the base variable map
317 as well. */
318
319 extern void
320 partition_view_bitmap (var_map map, bitmap only, bool want_bases)
321 {
322 bitmap used;
323 bitmap new_partitions = BITMAP_ALLOC (NULL);
324 unsigned x, p;
325 bitmap_iterator bi;
326
327 used = partition_view_init (map);
328 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
329 {
330 p = partition_find (map->var_partition, x);
331 gcc_assert (bitmap_bit_p (used, p));
332 bitmap_set_bit (new_partitions, p);
333 }
334 partition_view_fini (map, new_partitions);
335
336 BITMAP_FREE (used);
337 if (want_bases)
338 var_map_base_init (map);
339 else
340 var_map_base_fini (map);
341 }
342
343
344 static inline void mark_all_vars_used (tree *, void *data);
345
/* Helper function for mark_all_vars_used, called via walk_tree.
   TP points at the current tree; DATA, when non-NULL, is a bitmap of
   global variable DECL_UIDs still considered unused (see
   remove_unused_locals).  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  /* Class is taken from the original node, before SSA names are
     replaced by their underlying variable below.  */
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  /* An expression with a block keeps that scope block alive.  */
  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t), data);
      mark_all_vars_used (&TMR_INDEX (t), data);
      mark_all_vars_used (&TMR_INDEX2 (t), data);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global in DATA is seen for the first time
	 (bitmap_clear_bit returns true only if the bit was set), also
	 walk its initializer for more referenced vars.  */
      if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t))
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t), data);
      if (var_ann (t) != NULL)
	set_is_used (t);
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  /* Do not descend into types or declarations.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
397
398 /* Mark the scope block SCOPE and its subblocks unused when they can be
399 possibly eliminated if dead. */
400
401 static void
402 mark_scope_block_unused (tree scope)
403 {
404 tree t;
405 TREE_USED (scope) = false;
406 if (!(*debug_hooks->ignore_block) (scope))
407 TREE_USED (scope) = true;
408 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
409 mark_scope_block_unused (t);
410 }
411
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) Declares no live variables
     3) All subblocks are dead
	or there is precisely one subblocks and the block
	has same abstract origin as outer block and declares
	no variables, so it is pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them to be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* First pass: prune BLOCK_VARS in place.  T is a pointer into the
     chain so removals splice without a separate "previous" pointer.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may get end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.
	 Don't remove larger vars though, because BLOCK_VARS are
	 used also during expansion to determine which variables
	 might share stack space.  */
      else if (DECL_IGNORED_P (*t) && is_gimple_reg (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (var_ann (*t) != NULL && is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  /* Second pass: recurse into subblocks.  A dead subblock with its own
     subblocks is replaced by that (live) subblock chain, re-parented to
     SCOPE; a dead leaf subblock is simply unlinked.  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
590
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  DATA is passed
   through to mark_all_vars_used_1 (a bitmap of unused global UIDs, or
   NULL).  */

static inline void
mark_all_vars_used (tree *expr_p, void *data)
{
  walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
}
599
600
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.
   Recurses into subblocks with INDENT + 2.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  /* Block-local variables, annotated with their liveness.  */
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      bool used = false;

      if (var_ann (var))
	used = is_used_p (var);

      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "%s\n", used ? "" : " (unused)");
    }
  /* Variables localized into an enclosing scope by the optimizers.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
653
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  Intended for use from the debugger.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
662
663
/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  /* DECL_INITIAL of a function decl is its outermost scope block.  */
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
672
673
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  Intended for use from the
   debugger.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}
682
/* Remove local variables that are not referenced in the IL.

   Works in several passes: mark every referenced var unused, walk the
   CFG marking what is actually used, handle out-of-scope clobber
   statements, then prune cfun->local_decls, REFERENCED_VARS and the
   scope-block tree accordingly.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var, t;
  referenced_var_iterator rvi;
  bitmap global_unused_vars = NULL;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  /* Assume all locals are unused.  */
  FOR_EACH_REFERENCED_VAR (cfun, t, rvi)
    clear_is_used (t);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* Debug stmts must not keep variables alive.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Clobbers are handled in the second pass below.  */
	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

	  /* No point processing globals.  */
	  if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def, NULL);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
	      tree arg = USE_FROM_PTR (arg_p);
	      mark_all_vars_used (&arg, NULL);
            }
        }

      /* A goto locus on an edge keeps its target block alive.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus)
	  TREE_USED (e->goto_block) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB (bb)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		lhs = get_base_address (lhs);
		if (TREE_CODE (lhs) == SSA_NAME)
		  lhs = SSA_NAME_VAR (lhs);
		/* A clobber of an otherwise-unused variable is itself
		   useless; remove it.  */
		if (DECL_P (lhs) && (!var_ann (lhs) || !is_used_p (lhs)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local vars from local_decls, compacting the vector
     in place.  Unused globals are only recorded here; they are removed
     in the extra pass below, after their initializers were walked.  */
  num = VEC_length (tree, cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = VEC_index (tree, cfun->local_decls, srcidx);
      if (TREE_CODE (var) != FUNCTION_DECL
	  && (!var_ann (var)
	      || !is_used_p (var)))
	{
	  if (is_global_var (var))
	    {
	      if (global_unused_vars == NULL)
		global_unused_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_unused_vars, DECL_UID (var));
	    }
	  else
	    continue;
	}
      else if (TREE_CODE (var) == VAR_DECL
	       && DECL_HARD_REGISTER (var)
	       && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	VEC_replace (tree, cfun->local_decls, dstidx, var);
      dstidx++;
    }
  if (dstidx != num)
    VEC_truncate (tree, cfun->local_decls, dstidx);

  /* Remove unmarked global vars from local_decls.  */
  if (global_unused_vars != NULL)
    {
      tree var;
      unsigned ix;
      /* Initializers of used globals may reference other globals;
	 walking them clears those from GLOBAL_UNUSED_VARS.  */
      FOR_EACH_LOCAL_DECL (cfun, ix, var)
	if (TREE_CODE (var) == VAR_DECL
	    && is_global_var (var)
	    && var_ann (var) != NULL
	    && is_used_p (var)
	    && DECL_CONTEXT (var) == current_function_decl)
	  mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);

      num = VEC_length (tree, cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  var = VEC_index (tree, cfun->local_decls, srcidx);
	  if (TREE_CODE (var) == VAR_DECL
	      && is_global_var (var)
	      && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
	    continue;

	  if (srcidx != dstidx)
	    VEC_replace (tree, cfun->local_decls, dstidx, var);
	  dstidx++;
	}
      if (dstidx != num)
	VEC_truncate (tree, cfun->local_decls, dstidx);
      BITMAP_FREE (global_unused_vars);
    }

  /* Remove unused variables from REFERENCED_VARs.  */
  FOR_EACH_REFERENCED_VAR (cfun, t, rvi)
    if (!is_global_var (t)
	&& TREE_CODE (t) != PARM_DECL
	&& TREE_CODE (t) != RESULT_DECL
	&& !is_used_p (t))
      remove_referenced_var (t);
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
879
880
881 /* Allocate and return a new live range information object base on MAP. */
882
883 static tree_live_info_p
884 new_tree_live_info (var_map map)
885 {
886 tree_live_info_p live;
887 unsigned x;
888
889 live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
890 live->map = map;
891 live->num_blocks = last_basic_block;
892
893 live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
894 for (x = 0; x < (unsigned)last_basic_block; x++)
895 live->livein[x] = BITMAP_ALLOC (NULL);
896
897 live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
898 for (x = 0; x < (unsigned)last_basic_block; x++)
899 live->liveout[x] = BITMAP_ALLOC (NULL);
900
901 live->work_stack = XNEWVEC (int, last_basic_block);
902 live->stack_top = live->work_stack;
903
904 live->global = BITMAP_ALLOC (NULL);
905 return live;
906 }
907
908
909 /* Free storage for live range info object LIVE. */
910
911 void
912 delete_tree_live_info (tree_live_info_p live)
913 {
914 int x;
915
916 BITMAP_FREE (live->global);
917 free (live->work_stack);
918
919 for (x = live->num_blocks - 1; x >= 0; x--)
920 BITMAP_FREE (live->liveout[x]);
921 free (live->liveout);
922
923 for (x = live->num_blocks - 1; x >= 0; x--)
924 BITMAP_FREE (live->livein[x]);
925 free (live->livein);
926
927 free (live);
928 }
929
930
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  Predecessors whose live-on-entry set grows after being
   visited are pushed on LIVE's work stack for re-processing.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;
  gcc_assert (!TEST_BIT (visited, bb->index));

  SET_BIT (visited, bb->index);
  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
	continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.  */
      bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred. if there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (TEST_BIT (visited, pred_bb->index) && change)
	{
	  RESET_BIT (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
972
973
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  Iterates until the live-on-entry sets reach a
   fixed point, using LIVE's work stack for blocks needing revisiting.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  /* +1 so the highest block index is still addressable.  */
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
  bitmap tmp = BITMAP_ALLOC (NULL);

  sbitmap_zero (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessors blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    loe_visit_block (live, bb, visited, tmp);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
    }

  BITMAP_FREE (tmp);
  sbitmap_free (visited);
}
1002
1003
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
   in the liveout vector (calculate_live_on_exit clears them again).  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (live->liveout[def_bb->index], p);
    }
  else
    /* An ssa_name with no defining stmt is live from function entry.  */
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether its live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (use_stmt, index);
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	/* Debug uses must never affect liveness.  */
	continue;
      else
        {
	  /* If its not defined in this block, its live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
	  global = true;
	  bitmap_set_bit (live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
1076
1077
1078 /* Calculate the live on exit vectors based on the entry info in LIVEINFO. */
1079
1080 void
1081 calculate_live_on_exit (tree_live_info_p liveinfo)
1082 {
1083 basic_block bb;
1084 edge e;
1085 edge_iterator ei;
1086
1087 /* live on entry calculations used liveout vectors for defs, clear them. */
1088 FOR_EACH_BB (bb)
1089 bitmap_clear (liveinfo->liveout[bb->index]);
1090
1091 /* Set all the live-on-exit bits for uses in PHIs. */
1092 FOR_EACH_BB (bb)
1093 {
1094 gimple_stmt_iterator gsi;
1095 size_t i;
1096
1097 /* Mark the PHI arguments which are live on exit to the pred block. */
1098 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1099 {
1100 gimple phi = gsi_stmt (gsi);
1101 for (i = 0; i < gimple_phi_num_args (phi); i++)
1102 {
1103 tree t = PHI_ARG_DEF (phi, i);
1104 int p;
1105
1106 if (TREE_CODE (t) != SSA_NAME)
1107 continue;
1108
1109 p = var_to_partition (liveinfo->map, t);
1110 if (p == NO_PARTITION)
1111 continue;
1112 e = gimple_phi_arg_edge (phi, i);
1113 if (e->src != ENTRY_BLOCK_PTR)
1114 bitmap_set_bit (liveinfo->liveout[e->src->index], p);
1115 }
1116 }
1117
1118 /* Add each successors live on entry to this bock live on exit. */
1119 FOR_EACH_EDGE (e, ei, bb->succs)
1120 if (e->dest != EXIT_BLOCK_PTR)
1121 bitmap_ior_into (liveinfo->liveout[bb->index],
1122 live_on_entry (liveinfo, e->dest));
1123 }
1124 }
1125
1126
1127 /* Given partition map MAP, calculate all the live on entry bitmaps for
1128 each partition. Return a new live info object. */
1129
1130 tree_live_info_p
1131 calculate_live_ranges (var_map map)
1132 {
1133 tree var;
1134 unsigned i;
1135 tree_live_info_p live;
1136
1137 live = new_tree_live_info (map);
1138 for (i = 0; i < num_var_partitions (map); i++)
1139 {
1140 var = partition_to_var (map, i);
1141 if (var != NULL_TREE)
1142 set_var_live_on_entry (var, live);
1143 }
1144
1145 live_worklist (live);
1146
1147 #ifdef ENABLE_CHECKING
1148 verify_live_on_entry (live);
1149 #endif
1150
1151 calculate_live_on_exit (live);
1152 return live;
1153 }
1154
1155
1156 /* Output partition map MAP to file F. */
1157
1158 void
1159 dump_var_map (FILE *f, var_map map)
1160 {
1161 int t;
1162 unsigned x, y;
1163 int p;
1164
1165 fprintf (f, "\nPartition map \n\n");
1166
1167 for (x = 0; x < map->num_partitions; x++)
1168 {
1169 if (map->view_to_partition != NULL)
1170 p = map->view_to_partition[x];
1171 else
1172 p = x;
1173
1174 if (ssa_name (p) == NULL_TREE)
1175 continue;
1176
1177 t = 0;
1178 for (y = 1; y < num_ssa_names; y++)
1179 {
1180 p = partition_find (map->var_partition, y);
1181 if (map->partition_to_view)
1182 p = map->partition_to_view[p];
1183 if (p == (int)x)
1184 {
1185 if (t++ == 0)
1186 {
1187 fprintf(f, "Partition %d (", x);
1188 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1189 fprintf (f, " - ");
1190 }
1191 fprintf (f, "%d ", y);
1192 }
1193 }
1194 if (t != 0)
1195 fprintf (f, ")\n");
1196 }
1197 fprintf (f, "\n");
1198 }
1199
1200
1201 /* Output live range info LIVE to file F, controlled by FLAG. */
1202
1203 void
1204 dump_live_info (FILE *f, tree_live_info_p live, int flag)
1205 {
1206 basic_block bb;
1207 unsigned i;
1208 var_map map = live->map;
1209 bitmap_iterator bi;
1210
1211 if ((flag & LIVEDUMP_ENTRY) && live->livein)
1212 {
1213 FOR_EACH_BB (bb)
1214 {
1215 fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1216 EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi)
1217 {
1218 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1219 fprintf (f, " ");
1220 }
1221 fprintf (f, "\n");
1222 }
1223 }
1224
1225 if ((flag & LIVEDUMP_EXIT) && live->liveout)
1226 {
1227 FOR_EACH_BB (bb)
1228 {
1229 fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1230 EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi)
1231 {
1232 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1233 fprintf (f, " ");
1234 }
1235 fprintf (f, "\n");
1236 }
1237 }
1238 }
1239
/* A declaration paired with the sequence number of its first encounter
   during a statement walk; lets dump_enumerated_decls emit a stable,
   UID-sorted list whose entries are still identifiable under TDF_NOUID.  */
struct GTY(()) numbered_tree_d
{
  tree t;    /* The declaration.  */
  int num;   /* Order in which it was first discovered.  */
};
typedef struct numbered_tree_d numbered_tree;

/* Vector of numbered_tree objects, heap-allocated.  */
DEF_VEC_O (numbered_tree);
DEF_VEC_ALLOC_O (numbered_tree, heap);
1249
1250 /* Compare two declarations references by their DECL_UID / sequence number.
1251 Called via qsort. */
1252
1253 static int
1254 compare_decls_by_uid (const void *pa, const void *pb)
1255 {
1256 const numbered_tree *nt_a = ((const numbered_tree *)pa);
1257 const numbered_tree *nt_b = ((const numbered_tree *)pb);
1258
1259 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
1260 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
1261 return nt_a->num - nt_b->num;
1262 }
1263
1264 /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
1265 static tree
1266 dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
1267 {
1268 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1269 VEC (numbered_tree, heap) **list = (VEC (numbered_tree, heap) **) &wi->info;
1270 numbered_tree nt;
1271
1272 if (!DECL_P (*tp))
1273 return NULL_TREE;
1274 nt.t = *tp;
1275 nt.num = VEC_length (numbered_tree, *list);
1276 VEC_safe_push (numbered_tree, heap, *list, &nt);
1277 *walk_subtrees = 0;
1278 return NULL_TREE;
1279 }
1280
1281 /* Find all the declarations used by the current function, sort them by uid,
1282 and emit the sorted list. Each declaration is tagged with a sequence
1283 number indicating when it was found during statement / tree walking,
1284 so that TDF_NOUID comparisons of anonymous declarations are still
1285 meaningful. Where a declaration was encountered more than once, we
1286 emit only the sequence number of the first encounter.
1287 FILE is the dump file where to output the list and FLAGS is as in
1288 print_generic_expr. */
1289 void
1290 dump_enumerated_decls (FILE *file, int flags)
1291 {
1292 basic_block bb;
1293 struct walk_stmt_info wi;
1294 VEC (numbered_tree, heap) *decl_list = VEC_alloc (numbered_tree, heap, 40);
1295
1296 memset (&wi, '\0', sizeof (wi));
1297 wi.info = (void*) decl_list;
1298 FOR_EACH_BB (bb)
1299 {
1300 gimple_stmt_iterator gsi;
1301
1302 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1303 if (!is_gimple_debug (gsi_stmt (gsi)))
1304 walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
1305 }
1306 decl_list = (VEC (numbered_tree, heap) *) wi.info;
1307 VEC_qsort (numbered_tree, decl_list, compare_decls_by_uid);
1308 if (VEC_length (numbered_tree, decl_list))
1309 {
1310 unsigned ix;
1311 numbered_tree *ntp;
1312 tree last = NULL_TREE;
1313
1314 fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
1315 current_function_name ());
1316 FOR_EACH_VEC_ELT (numbered_tree, decl_list, ix, ntp)
1317 {
1318 if (ntp->t == last)
1319 continue;
1320 fprintf (file, "%d: ", ntp->num);
1321 print_generic_decl (file, ntp->t, flags);
1322 fprintf (file, "\n");
1323 last = ntp->t;
1324 }
1325 }
1326 VEC_free (numbered_tree, heap, decl_list);
1327 }
1328
1329 #ifdef ENABLE_CHECKING
1330 /* Verify that SSA_VAR is a non-virtual SSA_NAME. */
1331
1332 void
1333 register_ssa_partition_check (tree ssa_var)
1334 {
1335 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
1336 if (!is_gimple_reg (SSA_NAME_VAR (ssa_var)))
1337 {
1338 fprintf (stderr, "Illegally registering a virtual SSA name :");
1339 print_generic_expr (stderr, ssa_var, TDF_SLIM);
1340 fprintf (stderr, " in the SSA->Normal phase.\n");
1341 internal_error ("SSA corruption");
1342 }
1343 }
1344
1345
/* Verify that the info in LIVE matches the current cfg.  For every edge
   out of the entry block, cross-check each partition's live-on-entry bit
   against its actual definition: a live-on-entry name must be a default
   def (or at least defined by a GIMPLE_NOP), and a default def must be
   marked live on entry unless its only appearance is as a PHI argument
   in that block.  Any inconsistency is reported and triggers an
   assertion failure at the end.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;         /* Count of inconsistencies found; must end up 0.  */
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program. This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  /* D is the default definition of VAR's underlying variable,
	     if any; VAR should equal D when live on entry here.  */
	  d = gimple_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Live on entry to the function but defined by a real
		 statement: it appears to have multiple definitions.  */
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
	        {
		  /* Defined by a nop, so it should be the variable's
		     default definition.  */
	          if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
		        {
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
	        }
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		/* A default def not used in a PHI here must be live on
		   entry; report the missing bit.  */
	        num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}
1447 #endif