/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
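
/* For illustration only (a hypothetical input, not part of the pass): given

     int f (int a, int b)
     {
       int t = a * b;   <- dead: t is never used
       return a + b;    <- necessary: produces the function's result
     }

   phase 1 marks the return statement necessary, phase 2 marks the
   statements feeding its operands necessary, and phase 3 removes the
   unused computation of t.  */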

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "diagnostic.h"
#include "basic-block.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)	\
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,	\
			    (EDGE_NUMBER), (BI))
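
/* A minimal usage sketch of the macro above, mirroring its use in
   mark_control_dependent_edges_necessary below (process_edge is a
   hypothetical placeholder for the caller's per-edge work):

     bitmap_iterator bi;
     unsigned edge_number;
     EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
       process_edge (edge_number);  */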


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR;
      return bb;
    }
}


/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
	 dependent because instructions that throw are always necessary
	 anyway.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}
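
/* For illustration only (a hypothetical CFG, not part of the pass):
   consider a diamond

	  A
	 / \
	B   C
	 \ /
	  D

   where D postdominates A.  Walking from the successor of edge A->B up
   the postdominator chain until reaching find_pdom (A) == D marks only B
   as control dependent on A->B; D is reached on both paths out of A and
   so depends on neither of A's outgoing edges.  */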


/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  tree lhs = NULL_TREE;
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (flag_non_call_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      /* Most, but not all function calls are required.  Function calls that
	 produce no result and have no side effects (i.e. const and pure
	 functions) are unnecessary.  */
      if (gimple_has_side_effects (stmt))
	{
	  mark_stmt_necessary (stmt, true);
	  return;
	}
      if (!gimple_call_lhs (stmt))
	return;
      lhs = gimple_call_lhs (stmt);
      /* Fall through */

    case GIMPLE_ASSIGN:
      if (!lhs)
	lhs = gimple_assign_lhs (stmt);
      break;

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
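
/* For illustration only (hypothetical GIMPLE, not part of the pass):
   among the statements

     x_1 = a_2 + b_3;		not obviously necessary - a plain assignment
     memcpy (p_4, q_5, 16);	necessary - the call has side effects
     return x_1;		necessary - GIMPLE_RETURN is always required

   only the last two are marked here; the definition of x_1 becomes
   necessary later, when propagate_necessity follows the operands of
   the return statement.  */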

/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control
   dependences.  */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
					bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      gimple stmt;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
	continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);
      SET_BIT (bb_contains_live_stmts, cd_bb->index);

      stmt = last_stmt (cd_bb);
      if (stmt && is_ctrl_stmt (stmt))
	mark_stmt_necessary (stmt, true);
    }
  if (!skipped)
    SET_BIT (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, el != NULL);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  if ((TREE_READONLY (current_function_decl)
       || DECL_PURE_P (current_function_decl))
      && !DECL_LOOPING_CONST_OR_PURE_P (current_function_decl))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB (bb)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, el, false);
		}
	  }

      FOR_EACH_LOOP (li, loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, el, false);
	  }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}
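
/* For illustration only (hypothetical declarations, not part of the pass):

     int local;		a local whose address is never taken
     int global;	a file-scope variable visible to other functions

   a reference based on local is not aliased, so its stores can be tracked
   exactly, while a reference based on global may be aliased and its
   may-defs must be followed conservatively.  A component reference such
   as s.f is stripped down to its base decl s before the test.  */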

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME)
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
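
/* For illustration only (hypothetical GIMPLE, not part of the pass):
   for a load from the non-aliased variable a in

     a = 1;		kill: fully covers a, so the walk stops here
     *p_1 = 0;		may-def: marked necessary, walk continues upward
     x_2 = a;		the load whose reaching definitions are walked

   both stores are marked necessary, but no definition above "a = 1"
   needs to be visited, because the kill hides them from the load.  */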

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI is a PHI node with one argument, or with all
   arguments identical, so that it can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
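
/* For illustration only (hypothetical GIMPLE, not part of the pass):

     x_3 = PHI <a_1(2), a_1(3)>	  degenerate: every argument is a_1
     y_4 = PHI <a_1(2), b_2(3)>	  not degenerate: arguments differ

   uses of x_3 can simply be replaced by a_1.  */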

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VEC_length (gimple, worklist) > 0)
    {
      /* Take STMT from worklist.  */
      stmt = VEC_pop (gimple, worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statements of the basic blocks that the block
	     containing STMT is control dependent on, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR
	      && ! TEST_BIT (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, el, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && is_gimple_reg (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark control dependence of the empty basic blocks,
	     since they contain computations of the PHI operands.

	     Doing so is too restrictive in the case the predecessor block
	     is in a loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i < 1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is a PHI for j in the BB containing the return statement.
	     In this case the control dependence of the predecessor block
	     (that is within the empty loop) also contains the block
	     determining the number of iterations of the loop, which would
	     prevent removing the empty loop.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look if the edge were split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases for what
	     the children of C' can look like:

	     1) C' is a leaf

		In this case the only basic block C' is control dependent
		on is B.

	     2) C' has a single child that is B

		In this case the control dependence of C' is the same as the
		control dependence of B in the original CFG except for block
		B itself (since C' postdominates B in the modified CFG).

	     Now how to decide which case happens?  There are two basic
	     options:

	     a) C postdominates B.  Then C immediately postdominates B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is another way from B to C iff there is a successor of
		B that is not postdominated by B.  Testing this condition is
		somewhat expensive, because we need to iterate over all
		successors of B.  We are safe to assume that this does not
		happen: we will mark B as needed when processing the other
		path from B to C that is control dependent on B, and marking
		control dependencies of B itself is harmless because they
		will be processed anyway after processing the control
		statement in B.

	     b) C does not postdominate B.  Always case 1 happens since
		there is a path from C to the exit that does not go through
		B and thus also C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!TEST_BIT (last_stmt_necessary, arg_bb->index))
			{
			  gimple stmt2;
			  SET_BIT (last_stmt_necessary, arg_bb->index);
			  SET_BIT (bb_contains_live_stmts, arg_bb->index);

			  stmt2 = last_stmt (arg_bb);
			  if (stmt2 && is_ctrl_stmt (stmt2))
			    mark_stmt_necessary (stmt2, true);
			}
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR
			   && ! TEST_BIT (visited_control_parents,
					  arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, el, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but non-aliased ones which we handle
	     in 1).  By keeping a global visited bitmap for references
	     we walk for 2) we avoid quadratic behavior for those.  */

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE))
		continue;

	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      bool rhs_aliased = false;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs))
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    rhs_aliased = true;
		}
	      if (rhs_aliased)
		mark_all_reaching_defs_necessary (stmt);
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs))
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back
	     to super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}

/* Replace all uses of the result of PHI by the underlying variable and
   mark it for renaming.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;
  tree result_ssa, result_var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  result_ssa = gimple_phi_result (phi);
  result_var = SSA_NAME_VAR (result_ssa);
  FOR_EACH_IMM_USE_STMT (stmt, iter, result_ssa)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, result_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
    mark_sym_for_renaming (result_var);
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI, if not dead, can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
	 to update PHI nodes.  Also watch for cases where the post dominator
	 does not exist or is the exit block.  These can happen for infinite
	 loops as we create fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
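
/* For illustration only (a hypothetical CFG, not part of the pass):
   when the condition in

	bb2:  if (flag_5 != 0)	dead control statement
	      /	        \
	    bb3		bb4	both empty
	      \	        /
	         bb5		immediate post-dominator of bb2

   is found dead, bb2 is given a single fallthru edge to bb5 and the
   edges through bb3 and bb4 are removed; cleanup_tree_cfg later deletes
   the now unreachable blocks.  */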

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      call = gimple_call_fndecl (stmt);
	      if (call)
		{
		  tree name;

		  /* When the LHS of "var = call ();" is dead, simplify
		     it into "call ();", saving one operand.  */
		  name = gimple_call_lhs (stmt);
		  if (name && TREE_CODE (name) == SSA_NAME
		      && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
		    {
		      something_changed = true;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Deleting LHS of call: ");
			  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
			  fprintf (dump_file, "\n");
			}

		      gimple_call_set_lhs (stmt, NULL_TREE);
		      maybe_clean_or_replace_eh_stmt (stmt, stmt);
		      update_stmt (stmt);
		      release_ssa_name (name);
		    }
		  notice_special_calls (stmt);
		}
	    }
	}
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!TEST_BIT (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (VEC_length (basic_block, h))
			{
			  bb = VEC_pop (basic_block, h);
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      VEC_free (basic_block, heap, h);
		    }
		}
	    }
	}
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
	control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      sbitmap_zero (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  worklist = VEC_alloc (gimple, heap, 64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
	BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  VEC_free (gimple, heap, worklist);
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = 0;

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that loops will
     be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
	    | TODO_remove_unused_locals);
  else
    return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce_loop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_cd_dce,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CD_DCE,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_flow			/* todo_flags_finish */
 }
};