/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" contribute
   nothing to the program's output, while "necessary statements" may
   affect it.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
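
/* A minimal invented example of what the three phases accomplish:

     int f (int x)
     {
       int tmp = x * 2;
       return x + 1;
     }

   Phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the computation of x + 1, and phase 3 deletes the
   assignment to tmp because no necessary statement uses its value.  */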

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "basic-block.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;
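
/* For instance (an invented example): in

     if (p)
       A;
     B;

   block A runs only when the true edge out of the condition is taken,
   so A is control dependent on that edge.  B postdominates the
   condition and is control dependent on neither outgoing edge.  */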

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)	 \
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,	 \
			    (EDGE_NUMBER), (BI))


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR;
      return bb;
    }
}


/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list EL, a la Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
	 dependent because instructions that throw are always necessary
	 anyway.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}

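/* An invented illustration of the walk above: for an edge B->S whose
   source B has immediate postdominator C, the loop starts at S and
   climbs the postdominator tree (S, pdom(S), ...) until it reaches C,
   marking each visited block as control dependent on B->S.  Only
   blocks whose execution depends on which way B branches are marked.  */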

/* Record all blocks' control dependences on all edges in the edge
   list EL, a la Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (cfun->can_throw_non_call_exceptions && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG cleanup can then remove the
     block and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls that
	   produce no result and have no side effects (i.e. const and pure
	   functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  SET_BIT (last_stmt_necessary, bb->index);
  SET_BIT (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
					bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    SET_BIT (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, el != NULL);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB (bb)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, el, false);
		}
	  }

      FOR_EACH_LOOP (li, loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, el, false);
	  }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}

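/* As an invented illustration: after stripping handled components, an
   access such as a.b.c has base A.  If A is a local whose address is
   never taken, no indirect store can touch it and we return false;
   a dereference like *p, or a variable whose address escapes, may be
   aliased and we return true.  */
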
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

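/* An invented illustration of the kill test above, assuming S is a
   local aggregate whose address is not taken:

     s.f = 1;
     s.f = 2;
     x = s.f;

   walking the virtual definitions reaching the load of s.f marks
   "s.f = 2" necessary and stops there, because that store kills the
   reference; "s.f = 1" is only marked if something else needs it.  */
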
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one or identical arguments; these
   can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

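/* E.g. (an invented example) the PHI node

     a_3 = PHI <a_1(2), a_1(3)>

   is degenerate: every argument is a_1, so all uses of a_3 can simply
   use a_1 and the PHI can go away.  */
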
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VEC_length (gimple, worklist) > 0)
    {
      /* Take STMT from worklist.  */
      stmt = VEC_pop (gimple, worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR
	      && !TEST_BIT (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, el, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && is_gimple_reg (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where control flow arrives
	     at the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark the control dependence of the empty basic
	     blocks, since they contain the computation of PHI operands.

	     Doing so is too restrictive in the case the predecessor block
	     is in a loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i<1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is a PHI for J in the BB containing the return statement.
	     In this case the control dependence of the predecessor block
	     (that is within the empty loop) also contains the block
	     determining the number of iterations of the loop, which would
	     prevent removing the empty loop in this case.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look like if the edge was split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases for what
	     the children of C' can look like:

	     1) C' is a leaf

		In this case the only basic block C' is control dependent
		on is B.

	     2) C' has a single child that is B

		In this case the control dependence of C' is the same as the
		control dependence of B in the original CFG except for block
		B itself (since C' postdominates B in the modified CFG).

	     Now how to decide which case happens?  There are two basic
	     options:

	     a) C postdominates B.  Then C immediately postdominates B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is another way from B to C iff there is a successor of B
		that is not postdominated by B.  Testing this condition is
		somewhat expensive, because we need to iterate all successors
		of B.  We are safe to assume that this does not happen: we
		will mark B as needed when processing the other path from B
		to C that is control dependent on B and marking control
		dependencies of B itself is harmless because they will be
		processed anyway after processing the control statement in B.

	     b) C does not postdominate B.  Then case 1 always happens since
		there is a path from C to exit that does not go through B and
		thus also not through C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!TEST_BIT (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR
			   && !TEST_BIT (visited_control_parents,
					 arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, el, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function, do not mark the allocation necessary
	     through processing the argument.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gimple def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
		continue;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, except the non-aliased ones, which we
	     handle in 1).  By keeping a global visited bitmap for the
	     references we walk for 2) we avoid quadratic behavior there.  */

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
		      || (DECL_FUNCTION_CODE (callee)
			  == BUILT_IN_ALLOCA_WITH_ALIGN)
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      bool rhs_aliased = false;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs))
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    rhs_aliased = true;
		}
	      if (rhs_aliased)
		mark_all_reaching_defs_necessary (stmt);
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs))
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget, drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back
	     to super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}

/* Replace all uses of NAME by its underlying variable and mark the
   variable for renaming.  */

void
mark_virtual_operand_for_renaming (tree name)
{
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;
  tree name_var;

  name_var = SSA_NAME_VAR (name);
  FOR_EACH_IMM_USE_STMT (stmt, iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, name_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
    mark_sym_for_renaming (name_var);
}

/* Replace all uses of the result of PHI by its underlying variable and
   mark the variable for renaming.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  mark_virtual_operand_for_renaming (gimple_phi_result (phi));
}


/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to the respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply a control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI, if not dead, can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
	 to update PHI nodes.  Also watch for cases where the post dominator
	 does not exist or is the exit block.  These can happen for infinite
	 loops as we create fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is an allocation
	     function and that is not necessary itself.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree callee2;
	      gimple def_stmt;
	      if (TREE_CODE (ptr) != SSA_NAME)
		continue;
	      def_stmt = SSA_NAME_DEF_STMT (ptr);
	      if (!is_gimple_call (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY))
		continue;
	      callee2 = gimple_call_fndecl (def_stmt);
	      if (callee2 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL
		  || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC
		      && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC))
		continue;
	      gimple_set_plf (stmt, STMT_NECESSARY, false);
	    }

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      call = gimple_call_fndecl (stmt);
	      if (call)
		{
		  tree name;

		  /* When the LHS of var = call (); is dead, simplify it into
		     call ();, saving one operand.  */
		  name = gimple_call_lhs (stmt);
		  if (name && TREE_CODE (name) == SSA_NAME
		      && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
		    {
		      something_changed = true;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Deleting LHS of call: ");
			  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
			  fprintf (dump_file, "\n");
			}

		      gimple_call_set_lhs (stmt, NULL_TREE);
		      maybe_clean_or_replace_eh_stmt (stmt, stmt);
		      update_stmt (stmt);
		      release_ssa_name (name);
		    }
		  notice_special_calls (stmt);
		}
	    }
	}
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!TEST_BIT (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (VEC_length (basic_block, h))
			{
			  bb = VEC_pop (basic_block, h);
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominator tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      VEC_free (basic_block, heap, h);
		    }
		}
	    }
	}
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
	control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      sbitmap_zero (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  worklist = VEC_alloc (gimple, heap, 64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
	BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  VEC_free (gimple, heap, worklist);
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that a loop will be
     proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
	    | TODO_remove_unused_locals);
  else
    return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce_loop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_cd_dce,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CD_DCE,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
    | TODO_verify_flow			/* todo_flags_finish */
 }
};