/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   such an impact.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
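
/* As an illustrative example (not from any testcase), in the GIMPLE
   fragment

     x_1 = a_2 + b_3;
     return a_2;

   phase 1 marks the return statement as necessary, phase 2 then marks
   the statement defining a_2 (if any), and phase 3 deletes the
   assignment to x_1, assuming x_1 has no other necessary uses.  */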

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "gimple-pretty-print.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
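
/* As an illustrative example, in the diamond CFG

       B1: if (cond) goto B2; else goto B3;
       B2: ...            B3: ...
       B4: join of B2 and B3

   B2 is control dependent on the edge B1->B2 and B3 on the edge
   B1->B3, while B4, which postdominates B1, is control dependent on
   neither of them.  */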

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const and
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
          && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence information is used to find
   further necessary statements.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB (bb)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (li, loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
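
/* For example (illustrative): a reference to a local variable whose
   address is never taken is based on a non-aliased decl, while an
   indirect reference through a pointer or a reference to a global
   may be aliased.  */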

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */
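
/* As an illustrative example, when walking the may-defs reaching a
   load from a non-aliased decl 'a', a dominating store such as

     a = ...;   <-- covers all of 'a', so it is a kill for the reference

   lets the walk stop: no earlier may-defs of 'a' can matter for this
   load.  */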

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one or identical arguments; such
   degenerate PHIs can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
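
/* For instance (illustrative), PHI <a_1(2), a_1(3)> is degenerate and
   can be replaced by a_1, whereas PHI <a_1(2), b_4(3)> is not.  */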

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, AGGRESSIVE is false.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow
             arrives at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain the computations of the PHI
             operands.

             Doing so is too restrictive in the case the predecessor
             block is inside a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return
             statement.  In this case the control dependence of the
             predecessor block (which is within the empty loop) also
             contains the block determining the number of iterations,
             which would prevent removal of the empty loop.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how
             the control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the
             modified CFG, C' is always a child of C.  There are two
             cases for what the children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control
                dependent on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same
                as the control dependence of B in the original CFG,
                except for block B itself (since C' postdominates B in
                the modified CFG).

             Now how to decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B
                and case 2 happens iff there is no other way from B to
                C except the edge B->C.

                There is another way from B to C iff there is a
                successor of B that is not postdominated by B.  Testing
                this condition is somewhat expensive, because we need
                to iterate over all successors of B.  We are safe to
                assume that this does not happen: we will mark B as
                needed when processing the other path from B to C that
                is control dependent on B, and marking the control
                dependencies of B itself is harmless because they will
                be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Then case 1 always happens,
                since there is a path from C to the exit that does not
                go through B and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                continue;
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, except for the non-aliased ones, which
             we handle in 1).  By keeping a global visited bitmap for
             references we walk for 2) we avoid quadratic behavior for
             those.  */
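
          /* For example (illustrative): a load 'tmp_1 = a;' from a
             non-aliased local 'a' falls under 1) and only needs the
             may-defs reaching it up to a dominating store to 'a',
             whereas 'tmp_2 = *p_3;' falls under 2) and conservatively
             needs every reaching may-def that might store through
             p_3.  */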

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              tree rhs = gimple_return_retval (stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_ASM)
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls
             back to super-linear complexity.  */
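          /* With illustrative numbers: 500 walks (uses) with a longest
             chain of 64 and a total chain of 20000 overflow the budget,
             since 20000 > 128 * 128, 20000 > 32 * 64 and
             20000 > 500 * 32.  */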
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple phi;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gimple phi = gsi_stmt (gsi);
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply a control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
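  /* For instance (illustrative), removing a dead 'if (cond_1)' whose
     arms compute nothing necessary leaves BB with a single fallthru
     edge to its immediate post-dominator, where both arms would have
     joined anyway.  */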
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gimple note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is an allocation
             function and that is not necessary itself.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              tree callee2;
              gimple def_stmt;
              if (TREE_CODE (ptr) != SSA_NAME)
                continue;
              def_stmt = SSA_NAME_DEF_STMT (ptr);
              if (!is_gimple_call (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY))
                continue;
              callee2 = gimple_call_fndecl (def_stmt);
              if (callee2 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL
                  || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC
                      && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC))
                continue;
              gimple_set_plf (stmt, STMT_NECESSARY, false);
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (stmt);

              /* When LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);
                }
            }
        }
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi_stmt (gsi)))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that the loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents = sbitmap_alloc (last_basic_block);
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    return TODO_update_ssa | TODO_cleanup_cfg;
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_dce_loop =
{
  GIMPLE_PASS, /* type */
  "dceloop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dce_loop : public gimple_opt_pass
{
public:
  pass_dce_loop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce_loop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce_loop (m_ctxt); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_dce_loop (); }

}; // class pass_dce_loop

} // anon namespace

gimple_opt_pass *
make_pass_dce_loop (gcc::context *ctxt)
{
  return new pass_dce_loop (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_ssa | TODO_verify_flow ), /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}