1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* Dead code elimination.
24
25 References:
26
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no such
35 impact; "necessary statements" may affect the output and must be
36 preserved.
37
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
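
/* Illustrative sketch (an editor's addition, not part of the original
   source; the function and variable names are hypothetical): for input
   such as

       int f (int a, int b)
       {
         int t = a * b;
         return a + b;
       }

   phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the statements computing a + b, and phase 3 deletes
   "t = a * b", which has no uses and no side effects.  */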
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49
50 #include "tree.h"
51 #include "calls.h"
52 #include "gimple-pretty-print.h"
53 #include "basic-block.h"
54 #include "gimple.h"
55 #include "gimplify.h"
56 #include "gimple-iterator.h"
57 #include "gimple-ssa.h"
58 #include "tree-cfg.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "expr.h"
66 #include "tree-dfa.h"
67 #include "tree-pass.h"
68 #include "flags.h"
69 #include "cfgloop.h"
70 #include "tree-scalar-evolution.h"
71
72 static struct stmt_stats
73 {
74 int total;
75 int total_phis;
76 int removed;
77 int removed_phis;
78 } stats;
79
80 #define STMT_NECESSARY GF_PLF_1
81
82 static vec<gimple> worklist;
83
84 /* Vector indicating an SSA name has already been processed and marked
85 as necessary. */
86 static sbitmap processed;
87
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary;
91
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts;
94
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
97
98 We expect each block to be control dependent on very few edges, so we
99 use a bitmap for each block recording those edges. An array holds one
100 bitmap per block. The Ith bit in a block's bitmap is set if that block
101 is control dependent on the Ith edge. */
102 static control_dependences *cd;
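
/* Illustrative sketch (editor's addition; block numbers are made up):
   in a CFG fragment such as

       bb2:  if (x_1 != 0) goto bb3; else goto bb4;
       bb3:  a_2 = 1;
       bb4:  ...

   bb3 executes only when the edge bb2->bb3 is taken, so bb3 is control
   dependent on that edge, and the bit for that edge's index in the
   edge list is set in bb3's bitmap.  */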
103
104 /* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106 static sbitmap visited_control_parents;
107
108 /* TRUE if this pass alters the CFG (by removing control statements).
109 FALSE otherwise.
110
111 If this pass alters the CFG, then it will arrange for the dominators
112 to be recomputed. */
113 static bool cfg_altered;
114
115
116 /* If STMT is not already marked necessary, mark it, and add it to the
117 worklist if ADD_TO_WORKLIST is true. */
118
119 static inline void
120 mark_stmt_necessary (gimple stmt, bool add_to_worklist)
121 {
122 gcc_assert (stmt);
123
124 if (gimple_plf (stmt, STMT_NECESSARY))
125 return;
126
127 if (dump_file && (dump_flags & TDF_DETAILS))
128 {
129 fprintf (dump_file, "Marking useful stmt: ");
130 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
131 fprintf (dump_file, "\n");
132 }
133
134 gimple_set_plf (stmt, STMT_NECESSARY, true);
135 if (add_to_worklist)
136 worklist.safe_push (stmt);
137 if (bb_contains_live_stmts && !is_gimple_debug (stmt))
138 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
139 }
140
141
142 /* Mark the statement defining operand OP as necessary. */
143
144 static inline void
145 mark_operand_necessary (tree op)
146 {
147 gimple stmt;
148 int ver;
149
150 gcc_assert (op);
151
152 ver = SSA_NAME_VERSION (op);
153 if (bitmap_bit_p (processed, ver))
154 {
155 stmt = SSA_NAME_DEF_STMT (op);
156 gcc_assert (gimple_nop_p (stmt)
157 || gimple_plf (stmt, STMT_NECESSARY));
158 return;
159 }
160 bitmap_set_bit (processed, ver);
161
162 stmt = SSA_NAME_DEF_STMT (op);
163 gcc_assert (stmt);
164
165 if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
166 return;
167
168 if (dump_file && (dump_flags & TDF_DETAILS))
169 {
170 fprintf (dump_file, "marking necessary through ");
171 print_generic_expr (dump_file, op, 0);
172 fprintf (dump_file, " stmt ");
173 print_gimple_stmt (dump_file, stmt, 0, 0);
174 }
175
176 gimple_set_plf (stmt, STMT_NECESSARY, true);
177 if (bb_contains_live_stmts)
178 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
179 worklist.safe_push (stmt);
180 }
181
182
183 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
184 it can make other statements necessary.
185
186 If AGGRESSIVE is false, control statements are conservatively marked as
187 necessary. */
188
189 static void
190 mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
191 {
192 /* With non-call exceptions, we have to assume that all statements could
193 throw. If a statement could throw, it is deemed necessary. */
194 if (cfun->can_throw_non_call_exceptions
195 && !cfun->can_delete_dead_exceptions
196 && stmt_could_throw_p (stmt))
197 {
198 mark_stmt_necessary (stmt, true);
199 return;
200 }
201
202 /* Statements that are implicitly live. Most function calls, asm
203 and return statements are required. Labels and GIMPLE_BIND nodes
204 are kept because they are control flow, and we have no way of
205 knowing whether they can be removed. DCE can eliminate all the
206 other statements in a block, and CFG cleanup can then remove the
207 block and its labels. */
208 switch (gimple_code (stmt))
209 {
210 case GIMPLE_PREDICT:
211 case GIMPLE_LABEL:
212 mark_stmt_necessary (stmt, false);
213 return;
214
215 case GIMPLE_ASM:
216 case GIMPLE_RESX:
217 case GIMPLE_RETURN:
218 mark_stmt_necessary (stmt, true);
219 return;
220
221 case GIMPLE_CALL:
222 {
223 tree callee = gimple_call_fndecl (stmt);
224 if (callee != NULL_TREE
225 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
226 switch (DECL_FUNCTION_CODE (callee))
227 {
228 case BUILT_IN_MALLOC:
229 case BUILT_IN_CALLOC:
230 case BUILT_IN_ALLOCA:
231 case BUILT_IN_ALLOCA_WITH_ALIGN:
232 return;
233
234 default:;
235 }
236 /* Most, but not all, function calls are required. Function calls that
237 produce no result and have no side effects (i.e. calls to const and
238 pure functions) are unnecessary. */
239 if (gimple_has_side_effects (stmt))
240 {
241 mark_stmt_necessary (stmt, true);
242 return;
243 }
244 if (!gimple_call_lhs (stmt))
245 return;
246 break;
247 }
248
249 case GIMPLE_DEBUG:
250 /* Debug temps without a value are not useful. ??? If we could
251 easily locate the debug temp bind stmt for a use thereof,
252 we could refrain from marking all debug temps here, and
253 mark them only if they're used. */
254 if (!gimple_debug_bind_p (stmt)
255 || gimple_debug_bind_has_value_p (stmt)
256 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
257 mark_stmt_necessary (stmt, false);
258 return;
259
260 case GIMPLE_GOTO:
261 gcc_assert (!simple_goto_p (stmt));
262 mark_stmt_necessary (stmt, true);
263 return;
264
265 case GIMPLE_COND:
266 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
267 /* Fall through. */
268
269 case GIMPLE_SWITCH:
270 if (! aggressive)
271 mark_stmt_necessary (stmt, true);
272 break;
273
274 case GIMPLE_ASSIGN:
275 if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
276 && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
277 return;
278 break;
279
280 default:
281 break;
282 }
283
284 /* If the statement has volatile operands, it needs to be preserved.
285 Same for statements that can alter control flow in unpredictable
286 ways. */
287 if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
288 {
289 mark_stmt_necessary (stmt, true);
290 return;
291 }
292
293 if (stmt_may_clobber_global_p (stmt))
294 {
295 mark_stmt_necessary (stmt, true);
296 return;
297 }
298
299 return;
300 }
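
/* Illustrative note (editor's addition; GIMPLE spelled loosely, names
   hypothetical): a call such as

       x_1 = pure_fn (y_2);

   to a pure function, with no volatile operands, is left unmarked here
   because it has a result and no side effects; it only becomes
   necessary if propagation later finds a use of x_1. A call like
   "exit_fn ()" that has side effects is marked necessary immediately
   via the gimple_has_side_effects check.  */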
301
302
303 /* Mark the last statement of BB as necessary. */
304
305 static void
306 mark_last_stmt_necessary (basic_block bb)
307 {
308 gimple stmt = last_stmt (bb);
309
310 bitmap_set_bit (last_stmt_necessary, bb->index);
311 bitmap_set_bit (bb_contains_live_stmts, bb->index);
312
313 /* We actually mark the statement only if it is a control statement. */
314 if (stmt && is_ctrl_stmt (stmt))
315 mark_stmt_necessary (stmt, true);
316 }
317
318
319 /* Mark control dependent edges of BB as necessary. We have to do this only
320 once for each basic block so we set the appropriate bit after we're done.
321
322 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
323
324 static void
325 mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
326 {
327 bitmap_iterator bi;
328 unsigned edge_number;
329 bool skipped = false;
330
331 gcc_assert (bb != EXIT_BLOCK_PTR);
332
333 if (bb == ENTRY_BLOCK_PTR)
334 return;
335
336 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
337 0, edge_number, bi)
338 {
339 basic_block cd_bb = cd->get_edge (edge_number)->src;
340
341 if (ignore_self && cd_bb == bb)
342 {
343 skipped = true;
344 continue;
345 }
346
347 if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
348 mark_last_stmt_necessary (cd_bb);
349 }
350
351 if (!skipped)
352 bitmap_set_bit (visited_control_parents, bb->index);
353 }
354
355
356 /* Find obviously necessary statements. These are things like most function
357 calls, and stores to file level variables.
358
359 If AGGRESSIVE is false, control statements are conservatively marked
360 as necessary; otherwise they are left to be handled by control
361 dependence analysis. */
362
363 static void
364 find_obviously_necessary_stmts (bool aggressive)
365 {
366 basic_block bb;
367 gimple_stmt_iterator gsi;
368 edge e;
369 gimple phi, stmt;
370 int flags;
371
372 FOR_EACH_BB (bb)
373 {
374 /* PHI nodes are never inherently necessary. */
375 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
376 {
377 phi = gsi_stmt (gsi);
378 gimple_set_plf (phi, STMT_NECESSARY, false);
379 }
380
381 /* Check all statements in the block. */
382 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
383 {
384 stmt = gsi_stmt (gsi);
385 gimple_set_plf (stmt, STMT_NECESSARY, false);
386 mark_stmt_if_obviously_necessary (stmt, aggressive);
387 }
388 }
389
390 /* Pure and const functions are finite and thus have no infinite loops in
391 them. */
392 flags = flags_from_decl_or_type (current_function_decl);
393 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
394 return;
395
396 /* Prevent empty, possibly infinite loops from being removed. */
397 if (aggressive)
398 {
399 loop_iterator li;
400 struct loop *loop;
401 scev_initialize ();
402 if (mark_irreducible_loops ())
403 FOR_EACH_BB (bb)
404 {
405 edge_iterator ei;
406 FOR_EACH_EDGE (e, ei, bb->succs)
407 if ((e->flags & EDGE_DFS_BACK)
408 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
409 {
410 if (dump_file)
411 fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
412 e->src->index, e->dest->index);
413 mark_control_dependent_edges_necessary (e->dest, false);
414 }
415 }
416
417 FOR_EACH_LOOP (li, loop, 0)
418 if (!finite_loop_p (loop))
419 {
420 if (dump_file)
421 fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
422 mark_control_dependent_edges_necessary (loop->latch, false);
423 }
424 scev_finalize ();
425 }
426 }
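
/* Illustrative note (editor's addition): for a loop like
   "while (n_1) ;" whose exit condition never changes, finite_loop_p
   cannot prove termination, so the latch's control dependences are
   marked necessary above and the possibly infinite loop is preserved
   even though it computes nothing.  */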
427
428
429 /* Return true if REF is based on an aliased base, otherwise false. */
430
431 static bool
432 ref_may_be_aliased (tree ref)
433 {
434 gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
435 while (handled_component_p (ref))
436 ref = TREE_OPERAND (ref, 0);
437 if (TREE_CODE (ref) == MEM_REF
438 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
439 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
440 return !(DECL_P (ref)
441 && !may_be_aliased (ref));
442 }
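
/* Illustrative note (editor's addition): for a local "int a" whose
   address is never taken, a reference to "a" has a non-aliased base
   and this returns false; for "*p_1" or a reference to a global it
   returns true, since stores through aliases may reach it.  */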
443
444 static bitmap visited = NULL;
445 static unsigned int longest_chain = 0;
446 static unsigned int total_chain = 0;
447 static unsigned int nr_walks = 0;
448 static bool chain_ovfl = false;
449
450 /* Worker for the walker that marks reaching definitions of REF,
451 which is based on a non-aliased decl, necessary. It returns
452 true whenever the defining statement of the current VDEF is
453 a kill for REF, as no dominating may-defs are necessary for REF
454 anymore. DATA points to the basic-block that contains the
455 stmt that refers to REF. */
456
457 static bool
458 mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
459 {
460 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
461
462 /* All stmts we visit are necessary. */
463 mark_operand_necessary (vdef);
464
465 /* If the stmt lhs kills ref, then we can stop walking. */
466 if (gimple_has_lhs (def_stmt)
467 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
468 /* The assignment is not necessarily carried out if it can throw
469 and we can catch it in the current function where we could inspect
470 the previous value.
471 ??? We only need to care about the RHS throwing. For aggregate
472 assignments or similar calls and non-call exceptions the LHS
473 might throw as well. */
474 && !stmt_can_throw_internal (def_stmt))
475 {
476 tree base, lhs = gimple_get_lhs (def_stmt);
477 HOST_WIDE_INT size, offset, max_size;
478 ao_ref_base (ref);
479 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
480 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
481 so base == ref->base does not always hold. */
482 if (base == ref->base)
483 {
484 /* For a must-alias check we need to be able to constrain
485 the accesses properly. */
486 if (size != -1 && size == max_size
487 && ref->max_size != -1)
488 {
489 if (offset <= ref->offset
490 && offset + size >= ref->offset + ref->max_size)
491 return true;
492 }
493 /* Or they need to be exactly the same. */
494 else if (ref->ref
495 /* Make sure there is no induction variable involved
496 in the references (gcc.c-torture/execute/pr42142.c).
497 The simplest way is to check if the kill dominates
498 the use. */
499 /* But when both are in the same block we cannot
500 easily tell whether we came from a backedge
501 unless we decide to compute stmt UIDs
502 (see PR58246). */
503 && (basic_block) data != gimple_bb (def_stmt)
504 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
505 gimple_bb (def_stmt))
506 && operand_equal_p (ref->ref, lhs, 0))
507 return true;
508 }
509 }
510
511 /* Otherwise keep walking. */
512 return false;
513 }
514
515 static void
516 mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
517 {
518 unsigned int chain;
519 ao_ref refd;
520 gcc_assert (!chain_ovfl);
521 ao_ref_init (&refd, ref);
522 chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
523 mark_aliased_reaching_defs_necessary_1,
524 gimple_bb (stmt), NULL);
525 if (chain > longest_chain)
526 longest_chain = chain;
527 total_chain += chain;
528 nr_walks++;
529 }
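
/* Illustrative sketch (editor's addition) of the kill logic above:

       a = 1;
       a = 2;
       tmp_3 = a;

   walking the may-defs reached from tmp_3's VUSE marks "a = 2"
   necessary, and the callback returns true there because that store
   covers the whole of "a"; the walk therefore stops before ever
   marking "a = 1".  */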
530
531 /* Worker for the walker that marks reaching definitions of REF, which
532 is not based on a non-aliased decl. For simplicity we need to end
533 up marking all may-defs necessary that are not based on a non-aliased
534 decl. The only job of this walker is to skip may-defs based on
535 a non-aliased decl. */
536
537 static bool
538 mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
539 tree vdef, void *data ATTRIBUTE_UNUSED)
540 {
541 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
542
543 /* We have to skip already visited (and thus necessary) statements
544 to make the chaining work after we dropped back to simple mode. */
545 if (chain_ovfl
546 && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
547 {
548 gcc_assert (gimple_nop_p (def_stmt)
549 || gimple_plf (def_stmt, STMT_NECESSARY));
550 return false;
551 }
552
553 /* We want to skip stores to non-aliased variables. */
554 if (!chain_ovfl
555 && gimple_assign_single_p (def_stmt))
556 {
557 tree lhs = gimple_assign_lhs (def_stmt);
558 if (!ref_may_be_aliased (lhs))
559 return false;
560 }
561
562 /* We want to skip statements that do not constitute stores but have
563 a virtual definition. */
564 if (is_gimple_call (def_stmt))
565 {
566 tree callee = gimple_call_fndecl (def_stmt);
567 if (callee != NULL_TREE
568 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
569 switch (DECL_FUNCTION_CODE (callee))
570 {
571 case BUILT_IN_MALLOC:
572 case BUILT_IN_CALLOC:
573 case BUILT_IN_ALLOCA:
574 case BUILT_IN_ALLOCA_WITH_ALIGN:
575 case BUILT_IN_FREE:
576 return false;
577
578 default:;
579 }
580 }
581
582 mark_operand_necessary (vdef);
583
584 return false;
585 }
586
587 static void
588 mark_all_reaching_defs_necessary (gimple stmt)
589 {
590 walk_aliased_vdefs (NULL, gimple_vuse (stmt),
591 mark_all_reaching_defs_necessary_1, NULL, &visited);
592 }
593
594 /* Return true for PHI nodes with one or identical arguments;
595 such PHI nodes can be removed. */
596 static bool
597 degenerate_phi_p (gimple phi)
598 {
599 unsigned int i;
600 tree op = gimple_phi_arg_def (phi, 0);
601 for (i = 1; i < gimple_phi_num_args (phi); i++)
602 if (gimple_phi_arg_def (phi, i) != op)
603 return false;
604 return true;
605 }
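
/* Illustrative note (editor's addition): PHI <a_1(2), a_1(3)> and the
   single-argument PHI <a_1(2)> are degenerate; PHI <a_1(2), b_4(3)>
   is not, because its value depends on which edge is taken.  */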
606
607 /* Propagate necessity using the operands of necessary statements.
608 Process the uses on each statement in the worklist, and add all
609 feeding statements which contribute to the calculation of this
610 value to the worklist.
611
612 In conservative mode, AGGRESSIVE is false. */
613
614 static void
615 propagate_necessity (bool aggressive)
616 {
617 gimple stmt;
618
619 if (dump_file && (dump_flags & TDF_DETAILS))
620 fprintf (dump_file, "\nProcessing worklist:\n");
621
622 while (worklist.length () > 0)
623 {
624 /* Take STMT from worklist. */
625 stmt = worklist.pop ();
626
627 if (dump_file && (dump_flags & TDF_DETAILS))
628 {
629 fprintf (dump_file, "processing: ");
630 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
631 fprintf (dump_file, "\n");
632 }
633
634 if (aggressive)
635 {
636 /* Mark the last statement of the basic blocks on which the block
637 containing STMT is control dependent, but only if we haven't
638 already done so. */
639 basic_block bb = gimple_bb (stmt);
640 if (bb != ENTRY_BLOCK_PTR
641 && !bitmap_bit_p (visited_control_parents, bb->index))
642 mark_control_dependent_edges_necessary (bb, false);
643 }
644
645 if (gimple_code (stmt) == GIMPLE_PHI
646 /* We do not process virtual PHI nodes nor do we track their
647 necessity. */
648 && !virtual_operand_p (gimple_phi_result (stmt)))
649 {
650 /* PHI nodes are somewhat special in that each PHI alternative has
651 data and control dependencies. All the statements feeding the
652 PHI node's arguments are always necessary. In aggressive mode,
653 we also consider the control dependent edges leading to the
654 predecessor block associated with each PHI alternative as
655 necessary. */
656 size_t k;
657
658 for (k = 0; k < gimple_phi_num_args (stmt); k++)
659 {
660 tree arg = PHI_ARG_DEF (stmt, k);
661 if (TREE_CODE (arg) == SSA_NAME)
662 mark_operand_necessary (arg);
663 }
664
665 /* For PHI operands it matters from which edge the control flow
666 arrives at the BB. Consider the following example:
667
668 a=exp1;
669 b=exp2;
670 if (test)
671 ;
672 else
673 ;
674 c=PHI(a,b)
675
676 We need to mark the control dependences of the empty basic blocks,
677 since they contain the computation of PHI operands.
678
679 Doing so is too restrictive when the predecessor block is inside
680 a loop. Consider:
681
682 if (b)
683 {
684 int i;
685 for (i = 0; i<1000; ++i)
686 ;
687 j = 0;
688 }
689 return j;
690
691 There is a PHI for J in the BB containing the return statement.
692 In this case the control dependence of the predecessor block (which
693 is within the empty loop) also contains the block determining the
694 number of iterations of the loop, which would prevent the empty
695 loop from being removed.
696
697 This scenario can be avoided by splitting critical edges.
698 To save a critical edge splitting pass we instead compute how the
699 control dependences would look if the edge were split.
700
701 Consider the modified CFG created from the current CFG by splitting
702 edge B->C. In the postdominance tree of the modified CFG, C' is
703 always a child of C. There are two ways the children of C' can
704 look:
705
706 1) C' is leaf
707
708 In this case the only basic block C' is control dependent on is B.
709
710 2) C' has a single child, which is B
711
712 In this case the control dependence of C' is the same as the control
713 dependence of B in the original CFG, except for block B itself
714 (since C' postdominates B in the modified CFG).
715
716 Now, how do we decide which case happens? There are two basic options:
717
718 a) C postdominates B. Then C immediately postdominates B, and
719 case 2 happens iff there is no other way from B to C except
720 the edge B->C.
721
722 There is another way from B to C iff there is a successor of B
723 that is not postdominated by B. Testing this condition is somewhat
724 expensive, because we would need to iterate over all successors
725 of B. We are safe to assume that this does not happen: we will
726 mark B as needed when processing the other path from B to C that
727 is control dependent on B, and marking the control dependencies
728 of B itself is harmless because they will be processed anyway
729 after processing the control statement in B.
730
731 b) C does not postdominate B. Case 1 always happens, since there is
732 a path from C to exit that does not go through B, and thus not through C'. */
733
734 if (aggressive && !degenerate_phi_p (stmt))
735 {
736 for (k = 0; k < gimple_phi_num_args (stmt); k++)
737 {
738 basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
739
740 if (gimple_bb (stmt)
741 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
742 {
743 if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
744 mark_last_stmt_necessary (arg_bb);
745 }
746 else if (arg_bb != ENTRY_BLOCK_PTR
747 && !bitmap_bit_p (visited_control_parents,
748 arg_bb->index))
749 mark_control_dependent_edges_necessary (arg_bb, true);
750 }
751 }
752 }
753 else
754 {
755 /* Propagate through the operands. Examine all the USE, VUSE and
756 VDEF operands in this statement. Mark all the statements
757 which feed this statement's uses as necessary. */
758 ssa_op_iter iter;
759 tree use;
760
761 /* If this is a call to free whose argument is directly fed by
762 an allocation function, do not mark the allocation necessary
763 merely through processing the argument. */
764 if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
765 {
766 tree ptr = gimple_call_arg (stmt, 0);
767 gimple def_stmt;
768 tree def_callee;
769 /* If the pointer we free is defined by an allocation
770 function do not add the call to the worklist. */
771 if (TREE_CODE (ptr) == SSA_NAME
772 && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
773 && (def_callee = gimple_call_fndecl (def_stmt))
774 && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
775 && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
776 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
777 continue;
778 }
779
780 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
781 mark_operand_necessary (use);
782
783 use = gimple_vuse (stmt);
784 if (!use)
785 continue;
786
787 /* If we dropped to simple mode, make all immediately
788 reachable definitions necessary. */
789 if (chain_ovfl)
790 {
791 mark_all_reaching_defs_necessary (stmt);
792 continue;
793 }
794
795 /* For statements that may load from memory (have a VUSE) we
796 have to mark all reaching (may-)definitions as necessary.
797 We partition this task into two cases:
798 1) explicit loads based on decls that are not aliased
799 2) implicit loads (like calls) and explicit loads not
800 based on decls that are not aliased (like indirect
801 references or loads from globals)
802 For 1) we mark all reaching may-defs as necessary, stopping
803 at dominating kills. For 2) we want to mark all dominating
804 references necessary, except the non-aliased ones, which are
805 handled in 1). By keeping a global visited bitmap for the
806 references we walk for 2) we avoid quadratic behavior. */
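
/* Illustrative note (editor's addition): "tmp_1 = a" with a
   non-address-taken local "a" is case 1 and uses the kill-stopping
   walk; "tmp_1 = *p_2" or a load from a global is case 2 and marks
   all reaching may-defs except stores to non-aliased decls.  */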
807
808 if (is_gimple_call (stmt))
809 {
810 tree callee = gimple_call_fndecl (stmt);
811 unsigned i;
812
813 /* Calls to functions that are merely acting as barriers
814 or that only store to memory do not make any previous
815 stores necessary. */
816 if (callee != NULL_TREE
817 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
818 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
819 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
820 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
821 || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
822 || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
823 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
824 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
825 || (DECL_FUNCTION_CODE (callee)
826 == BUILT_IN_ALLOCA_WITH_ALIGN)
827 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
828 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
829 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
830 continue;
831
832 /* Calls implicitly load from memory; in addition, their
833 arguments may explicitly perform memory loads. */
834 mark_all_reaching_defs_necessary (stmt);
835 for (i = 0; i < gimple_call_num_args (stmt); ++i)
836 {
837 tree arg = gimple_call_arg (stmt, i);
838 if (TREE_CODE (arg) == SSA_NAME
839 || is_gimple_min_invariant (arg))
840 continue;
841 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
842 arg = TREE_OPERAND (arg, 0);
843 if (!ref_may_be_aliased (arg))
844 mark_aliased_reaching_defs_necessary (stmt, arg);
845 }
846 }
847 else if (gimple_assign_single_p (stmt))
848 {
849 tree rhs;
850 /* If this is a load, mark things necessary. */
851 rhs = gimple_assign_rhs1 (stmt);
852 if (TREE_CODE (rhs) != SSA_NAME
853 && !is_gimple_min_invariant (rhs)
854 && TREE_CODE (rhs) != CONSTRUCTOR)
855 {
856 if (!ref_may_be_aliased (rhs))
857 mark_aliased_reaching_defs_necessary (stmt, rhs);
858 else
859 mark_all_reaching_defs_necessary (stmt);
860 }
861 }
862 else if (gimple_code (stmt) == GIMPLE_RETURN)
863 {
864 tree rhs = gimple_return_retval (stmt);
865 /* A return statement may perform a load. */
866 if (rhs
867 && TREE_CODE (rhs) != SSA_NAME
868 && !is_gimple_min_invariant (rhs)
869 && TREE_CODE (rhs) != CONSTRUCTOR)
870 {
871 if (!ref_may_be_aliased (rhs))
872 mark_aliased_reaching_defs_necessary (stmt, rhs);
873 else
874 mark_all_reaching_defs_necessary (stmt);
875 }
876 }
877 else if (gimple_code (stmt) == GIMPLE_ASM)
878 {
879 unsigned i;
880 mark_all_reaching_defs_necessary (stmt);
881 /* Inputs may perform loads. */
882 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
883 {
884 tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
885 if (TREE_CODE (op) != SSA_NAME
886 && !is_gimple_min_invariant (op)
887 && TREE_CODE (op) != CONSTRUCTOR
888 && !ref_may_be_aliased (op))
889 mark_aliased_reaching_defs_necessary (stmt, op);
890 }
891 }
892 else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
893 {
894 /* The beginning of a transaction is a memory barrier. */
895 /* ??? If we were really cool, we'd only be a barrier
896 for the memories touched within the transaction. */
897 mark_all_reaching_defs_necessary (stmt);
898 }
899 else
900 gcc_unreachable ();
901
902 /* If we over-used our alias oracle budget, drop to simple
903 mode. The cost metric allows quadratic behavior
904 (number of uses times number of may-def queries) up to
905 a constant maximal number of queries, and after that falls
906 back to super-linear complexity. */
907 if (/* Constant but quadratic for small functions. */
908 total_chain > 128 * 128
909 /* Linear in the number of may-defs. */
910 && total_chain > 32 * longest_chain
911 /* Linear in the number of uses. */
912 && total_chain > nr_walks * 32)
913 {
914 chain_ovfl = true;
915 if (visited)
916 bitmap_clear (visited);
917 }
918 }
919 }
920 }
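
/* Editor's note on the budget above (arithmetic only): simple mode is
   entered once total_chain exceeds 128 * 128 = 16384 queries and also
   exceeds both 32 * longest_chain and 32 * nr_walks, i.e. after a
   constant amount of quadratic work, only walks within a linear
   factor (32) of the number of uses and of the longest chain remain
   in budget.  */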
921
922 /* Remove dead PHI nodes from block BB. */
923
924 static bool
925 remove_dead_phis (basic_block bb)
926 {
927 bool something_changed = false;
928 gimple phi;
929 gimple_stmt_iterator gsi;
930
931 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
932 {
933 stats.total_phis++;
934 phi = gsi_stmt (gsi);
935
936 /* We do not track necessity of virtual PHI nodes. Instead do
937 very simple dead PHI removal here. */
938 if (virtual_operand_p (gimple_phi_result (phi)))
939 {
940 /* Virtual PHI nodes with one or identical arguments
941 can be removed. */
942 if (degenerate_phi_p (phi))
943 {
944 tree vdef = gimple_phi_result (phi);
945 tree vuse = gimple_phi_arg_def (phi, 0);
946
947 use_operand_p use_p;
948 imm_use_iterator iter;
949 gimple use_stmt;
950 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
951 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
952 SET_USE (use_p, vuse);
953 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
954 && TREE_CODE (vuse) == SSA_NAME)
955 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
956 }
957 else
958 gimple_set_plf (phi, STMT_NECESSARY, true);
959 }
960
961 if (!gimple_plf (phi, STMT_NECESSARY))
962 {
963 something_changed = true;
964 if (dump_file && (dump_flags & TDF_DETAILS))
965 {
966 fprintf (dump_file, "Deleting : ");
967 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
968 fprintf (dump_file, "\n");
969 }
970
971 remove_phi_node (&gsi, true);
972 stats.removed_phis++;
973 continue;
974 }
975
976 gsi_next (&gsi);
977 }
978 return something_changed;
979 }
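
/* Illustrative note (editor's addition): a degenerate virtual PHI such
   as .MEM_3 = PHI <.MEM_1(2), .MEM_1(3)> is handled above by rewriting
   every use of .MEM_3 to .MEM_1 before the PHI is deleted.  */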
980
981 /* Forward edge E to respective POST_DOM_BB and update PHIs. */
982
983 static edge
984 forward_edge_to_pdom (edge e, basic_block post_dom_bb)
985 {
986 gimple_stmt_iterator gsi;
987 edge e2 = NULL;
988 edge_iterator ei;
989
990 if (dump_file && (dump_flags & TDF_DETAILS))
991 fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
992 e->dest->index, post_dom_bb->index);
993
994 e2 = redirect_edge_and_branch (e, post_dom_bb);
995 cfg_altered = true;
996
997 /* If the edge was already there, no updating is necessary. */
998 if (e2 != e)
999 return e2;
1000
1001 if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
1002 {
1003 /* We are sure that for every live PHI we are seeing a control dependent BB.
1004 This means that we can pick any edge to duplicate the PHI args from. */
1005 FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
1006 if (e2 != e)
1007 break;
1008 for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
1009 {
1010 gimple phi = gsi_stmt (gsi);
1011 tree op;
1012 source_location locus;
1013
1014 /* PHIs for virtuals have no control dependency relation on them.
1015 We are lost here and must force renaming of the symbol. */
1016 if (virtual_operand_p (gimple_phi_result (phi)))
1017 {
1018 mark_virtual_phi_result_for_renaming (phi);
1019 remove_phi_node (&gsi, true);
1020 continue;
1021 }
1022
1023 /* Dead PHIs do not imply a control dependency. */
1024 if (!gimple_plf (phi, STMT_NECESSARY))
1025 {
1026 gsi_next (&gsi);
1027 continue;
1028 }
1029
1030 op = gimple_phi_arg_def (phi, e2->dest_idx);
1031 locus = gimple_phi_arg_location (phi, e2->dest_idx);
1032 add_phi_arg (phi, op, e, locus);
1033 /* The resulting PHI, if not dead, can only be degenerate. */
1034 gcc_assert (degenerate_phi_p (phi));
1035 gsi_next (&gsi);
1036 }
1037 }
1038 return e;
1039 }
1040
1041 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1042 containing I so that we don't have to look it up. */
1043
1044 static void
1045 remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
1046 {
1047 gimple stmt = gsi_stmt (*i);
1048
1049 if (dump_file && (dump_flags & TDF_DETAILS))
1050 {
1051 fprintf (dump_file, "Deleting : ");
1052 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1053 fprintf (dump_file, "\n");
1054 }
1055
1056 stats.removed++;
1057
1058 /* If we have determined that a conditional branch statement contributes
1059 nothing to the program, then we not only remove it, but we also change
1060 the flow graph so that the current block will simply fall-thru to its
1061 immediate post-dominator. The blocks we are circumventing will be
1062 removed by cleanup_tree_cfg if this change in the flow graph makes them
1063 unreachable. */
1064 if (is_ctrl_stmt (stmt))
1065 {
1066 basic_block post_dom_bb;
1067 edge e, e2;
1068 edge_iterator ei;
1069
1070 post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
1071
1072 e = find_edge (bb, post_dom_bb);
1073
1074 /* If the edge is already there, try to use it. This avoids the need to
1075 update PHI nodes. Also watch for cases where the post dominator does
1076 not exist or is the exit block. These can happen for infinite loops as
1077 we create fake edges in the dominator tree. */
1078 if (e)
1079 ;
1080 else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
1081 e = EDGE_SUCC (bb, 0);
1082 else
1083 e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
1084 gcc_assert (e);
1085 e->probability = REG_BR_PROB_BASE;
1086 e->count = bb->count;
1087
1088 /* The edge is no longer associated with a conditional, so it does
1089 not have TRUE/FALSE flags. */
1090 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
1091
1092 /* The lone outgoing edge from BB will be a fallthru edge. */
1093 e->flags |= EDGE_FALLTHRU;
1094
1095 /* Remove the remaining outgoing edges. */
1096 for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
1097 if (e != e2)
1098 {
1099 cfg_altered = true;
1100 remove_edge (e2);
1101 }
1102 else
1103 ei_next (&ei);
1104 }
1105
1106 /* If this is a store into a variable that is being optimized away,
1107 add a debug bind stmt if possible. */
1108 if (MAY_HAVE_DEBUG_STMTS
1109 && gimple_assign_single_p (stmt)
1110 && is_gimple_val (gimple_assign_rhs1 (stmt)))
1111 {
1112 tree lhs = gimple_assign_lhs (stmt);
1113 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1114 && !DECL_IGNORED_P (lhs)
1115 && is_gimple_reg_type (TREE_TYPE (lhs))
1116 && !is_global_var (lhs)
1117 && !DECL_HAS_VALUE_EXPR_P (lhs))
1118 {
1119 tree rhs = gimple_assign_rhs1 (stmt);
1120 gimple note
1121 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1122 gsi_insert_after (i, note, GSI_SAME_STMT);
1123 }
1124 }
1125
1126 unlink_stmt_vdef (stmt);
1127 gsi_remove (i, true);
1128 release_defs (stmt);
1129 }
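
/* Illustrative sketch (editor's addition; block numbers made up): if
   bb2 ends in a dead "if (cond_1) goto bb3; else goto bb4;" and bb5
   is bb2's immediate post-dominator, one outgoing edge of bb2 is
   redirected to bb5 and turned into a fallthru edge; bb3 and bb4 are
   left for cleanup_tree_cfg to delete once they become unreachable.  */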
1130
1131 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1132 contributes nothing to the program, and can be deleted. */
1133
1134 static bool
1135 eliminate_unnecessary_stmts (void)
1136 {
1137 bool something_changed = false;
1138 basic_block bb;
1139 gimple_stmt_iterator gsi, psi;
1140 gimple stmt;
1141 tree call;
1142 vec<basic_block> h;
1143
1144 if (dump_file && (dump_flags & TDF_DETAILS))
1145 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1146
1147 clear_special_calls ();
1148
1149 /* Walking basic blocks and statements in reverse order avoids
1150 releasing SSA names before any other DEFs that refer to them are
1151 released. This helps avoid loss of debug information, as we get
1152 a chance to propagate all RHSs of removed SSAs into debug uses,
1153 rather than only the latest ones. E.g., consider:
1154
1155 x_3 = y_1 + z_2;
1156 a_5 = x_3 - b_4;
1157 # DEBUG a => a_5
1158
1159 If we were to release x_3 before a_5, when we reached a_5 and
1160 tried to substitute it into the debug stmt, we'd see x_3 there,
1161 but x_3's DEF, type, etc would have already been disconnected.
1162 By going backwards, the debug stmt first changes to:
1163
1164 # DEBUG a => x_3 - b_4
1165
1166 and then to:
1167
1168 # DEBUG a => y_1 + z_2 - b_4
1169
1170 as desired. */
1171 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1172 h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));
1173
1174 while (h.length ())
1175 {
1176 bb = h.pop ();
1177
1178 /* Remove dead statements. */
1179 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1180 {
1181 stmt = gsi_stmt (gsi);
1182
1183 psi = gsi;
1184 gsi_prev (&psi);
1185
1186 stats.total++;
1187
1188 /* We can mark a call to free as not necessary if the
1189 defining statement of its argument is an allocation
1190 function and that is not necessary itself. */
1191 if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
1192 {
1193 tree ptr = gimple_call_arg (stmt, 0);
1194 tree callee2;
1195 gimple def_stmt;
1196 if (TREE_CODE (ptr) != SSA_NAME)
1197 continue;
1198 def_stmt = SSA_NAME_DEF_STMT (ptr);
1199 if (!is_gimple_call (def_stmt)
1200 || gimple_plf (def_stmt, STMT_NECESSARY))
1201 continue;
1202 callee2 = gimple_call_fndecl (def_stmt);
1203 if (callee2 == NULL_TREE
1204 || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL
1205 || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC
1206 && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC))
1207 continue;
1208 gimple_set_plf (stmt, STMT_NECESSARY, false);
1209 }
1210
1211 /* If GSI is not necessary then remove it. */
1212 if (!gimple_plf (stmt, STMT_NECESSARY))
1213 {
1214 if (!is_gimple_debug (stmt))
1215 something_changed = true;
1216 remove_dead_stmt (&gsi, bb);
1217 }
1218 else if (is_gimple_call (stmt))
1219 {
1220 tree name = gimple_call_lhs (stmt);
1221
1222 notice_special_calls (stmt);
1223
1224 /* When the LHS of "var = call ();" is dead, simplify it into
1225 "call ();", saving one operand. */
1226 if (name
1227 && TREE_CODE (name) == SSA_NAME
1228 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1229 /* Avoid doing so for allocation calls which we
1230 did not mark as necessary; it would confuse the
1231 special logic we apply to malloc/free pair removal. */
1232 && (!(call = gimple_call_fndecl (stmt))
1233 || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1234 || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1235 && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1236 && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
1237 && (DECL_FUNCTION_CODE (call)
1238 != BUILT_IN_ALLOCA_WITH_ALIGN))))
1239 {
1240 something_changed = true;
1241 if (dump_file && (dump_flags & TDF_DETAILS))
1242 {
1243 fprintf (dump_file, "Deleting LHS of call: ");
1244 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1245 fprintf (dump_file, "\n");
1246 }
1247
1248 gimple_call_set_lhs (stmt, NULL_TREE);
1249 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1250 update_stmt (stmt);
1251 release_ssa_name (name);
1252 }
1253 }
1254 }
1255 }
1256
1257 h.release ();
1258
1259 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1260 rendered some PHI nodes unreachable while they are still in use.
1261 Mark them for renaming. */
1262 if (cfg_altered)
1263 {
1264 basic_block prev_bb;
1265
1266 find_unreachable_blocks ();
1267
1268 /* Delete all unreachable basic blocks in reverse dominator order. */
1269 for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
1270 {
1271 prev_bb = bb->prev_bb;
1272
1273 if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1274 || !(bb->flags & BB_REACHABLE))
1275 {
1276 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1277 if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
1278 {
1279 bool found = false;
1280 imm_use_iterator iter;
1281
1282 FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
1283 {
1284 if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1285 continue;
1286 if (gimple_code (stmt) == GIMPLE_PHI
1287 || gimple_plf (stmt, STMT_NECESSARY))
1288 {
1289 found = true;
1290 BREAK_FROM_IMM_USE_STMT (iter);
1291 }
1292 }
1293 if (found)
1294 mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
1295 }
1296
1297 if (!(bb->flags & BB_REACHABLE))
1298 {
1299 /* Speed up the removal of blocks that don't
1300 dominate others. Walking backwards, this should
1301 be the common case. ??? Do we need to recompute
1302 dominators because of cfg_altered? */
1303 if (!MAY_HAVE_DEBUG_STMTS
1304 || !first_dom_son (CDI_DOMINATORS, bb))
1305 delete_basic_block (bb);
1306 else
1307 {
1308 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1309
1310 while (h.length ())
1311 {
1312 bb = h.pop ();
1313 prev_bb = bb->prev_bb;
1314 /* Rearrangements to the CFG may have failed
1315 to update the dominators tree, so that
1316 formerly-dominated blocks are now
1317 otherwise reachable. */
1318 if (!!(bb->flags & BB_REACHABLE))
1319 continue;
1320 delete_basic_block (bb);
1321 }
1322
1323 h.release ();
1324 }
1325 }
1326 }
1327 }
1328 }
1329 FOR_EACH_BB (bb)
1330 {
1331 /* Remove dead PHI nodes. */
1332 something_changed |= remove_dead_phis (bb);
1333 }
1334
1335 return something_changed;
1336 }
1337
1338
1339 /* Print out removed statement statistics. */
1340
1341 static void
1342 print_stats (void)
1343 {
1344 float percg;
1345
1346 percg = ((float) stats.removed / (float) stats.total) * 100;
1347 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1348 stats.removed, stats.total, (int) percg);
1349
1350 if (stats.total_phis == 0)
1351 percg = 0;
1352 else
1353 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1354
1355 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1356 stats.removed_phis, stats.total_phis, (int) percg);
1357 }
1358
1359 /* Initialization for this pass. Set up the used data structures. */
1360
1361 static void
1362 tree_dce_init (bool aggressive)
1363 {
1364 memset ((void *) &stats, 0, sizeof (stats));
1365
1366 if (aggressive)
1367 {
1368 last_stmt_necessary = sbitmap_alloc (last_basic_block);
1369 bitmap_clear (last_stmt_necessary);
1370 bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
1371 bitmap_clear (bb_contains_live_stmts);
1372 }
1373
1374 processed = sbitmap_alloc (num_ssa_names + 1);
1375 bitmap_clear (processed);
1376
1377 worklist.create (64);
1378 cfg_altered = false;
1379 }
1380
1381 /* Cleanup after this pass. */
1382
1383 static void
1384 tree_dce_done (bool aggressive)
1385 {
1386 if (aggressive)
1387 {
1388 delete cd;
1389 sbitmap_free (visited_control_parents);
1390 sbitmap_free (last_stmt_necessary);
1391 sbitmap_free (bb_contains_live_stmts);
1392 bb_contains_live_stmts = NULL;
1393 }
1394
1395 sbitmap_free (processed);
1396
1397 worklist.release ();
1398 }
1399
1400 /* Main routine to eliminate dead code.
1401
1402 AGGRESSIVE controls the aggressiveness of the algorithm.
1403 In conservative mode, we ignore control dependence and simply declare
1404 all but the most trivially dead branches necessary. This mode is fast.
1405 In aggressive mode, control dependences are taken into account, which
1406 results in more dead code elimination, but at the cost of some time.
1407
1408 FIXME: Aggressive mode before PRE doesn't work currently because
1409 the dominance info is not invalidated after DCE1. This is
1410 not an issue right now because we only run aggressive DCE
1411 as the last tree SSA pass, but keep this in mind when you
1412 start experimenting with pass ordering. */
1413
1414 static unsigned int
1415 perform_tree_ssa_dce (bool aggressive)
1416 {
1417 bool something_changed = false;
1418
1419 calculate_dominance_info (CDI_DOMINATORS);
1420
1421 /* Preheaders are needed for SCEV to work.
1422 Simple latches and recorded exits improve the chances that a loop will
1423 be proved to be finite in testcases such as loop-15.c and loop-24.c. */
1424 if (aggressive)
1425 loop_optimizer_init (LOOPS_NORMAL
1426 | LOOPS_HAVE_RECORDED_EXITS);
1427
1428 tree_dce_init (aggressive);
1429
1430 if (aggressive)
1431 {
1432 /* Compute control dependence. */
1433 calculate_dominance_info (CDI_POST_DOMINATORS);
1434 cd = new control_dependences (create_edge_list ());
1435
1436 visited_control_parents = sbitmap_alloc (last_basic_block);
1437 bitmap_clear (visited_control_parents);
1438
1439 mark_dfs_back_edges ();
1440 }
1441
1442 find_obviously_necessary_stmts (aggressive);
1443
1444 if (aggressive)
1445 loop_optimizer_finalize ();
1446
1447 longest_chain = 0;
1448 total_chain = 0;
1449 nr_walks = 0;
1450 chain_ovfl = false;
1451 visited = BITMAP_ALLOC (NULL);
1452 propagate_necessity (aggressive);
1453 BITMAP_FREE (visited);
1454
1455 something_changed |= eliminate_unnecessary_stmts ();
1456 something_changed |= cfg_altered;
1457
1458 /* We do not update postdominators, so free them unconditionally. */
1459 free_dominance_info (CDI_POST_DOMINATORS);
1460
1461 /* If we removed paths in the CFG, then we need to update
1462 dominators as well. I haven't investigated the possibility
1463 of incrementally updating dominators. */
1464 if (cfg_altered)
1465 free_dominance_info (CDI_DOMINATORS);
1466
1467 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1468 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1469
1470 /* Debugging dumps. */
1471 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1472 print_stats ();
1473
1474 tree_dce_done (aggressive);
1475
1476 if (something_changed)
1477 return TODO_update_ssa | TODO_cleanup_cfg;
1478 return 0;
1479 }
1480
1481 /* Pass entry points. */
1482 static unsigned int
1483 tree_ssa_dce (void)
1484 {
1485 return perform_tree_ssa_dce (/*aggressive=*/false);
1486 }
1487
1488 static unsigned int
1489 tree_ssa_dce_loop (void)
1490 {
1491 unsigned int todo;
1492 todo = perform_tree_ssa_dce (/*aggressive=*/false);
1493 if (todo)
1494 {
1495 free_numbers_of_iterations_estimates ();
1496 scev_reset ();
1497 }
1498 return todo;
1499 }
1500
1501 static unsigned int
1502 tree_ssa_cd_dce (void)
1503 {
1504 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1505 }
1506
1507 static bool
1508 gate_dce (void)
1509 {
1510 return flag_tree_dce != 0;
1511 }
1512
1513 namespace {
1514
1515 const pass_data pass_data_dce =
1516 {
1517 GIMPLE_PASS, /* type */
1518 "dce", /* name */
1519 OPTGROUP_NONE, /* optinfo_flags */
1520 true, /* has_gate */
1521 true, /* has_execute */
1522 TV_TREE_DCE, /* tv_id */
1523 ( PROP_cfg | PROP_ssa ), /* properties_required */
1524 0, /* properties_provided */
1525 0, /* properties_destroyed */
1526 0, /* todo_flags_start */
1527 TODO_verify_ssa, /* todo_flags_finish */
1528 };
1529
1530 class pass_dce : public gimple_opt_pass
1531 {
1532 public:
1533 pass_dce (gcc::context *ctxt)
1534 : gimple_opt_pass (pass_data_dce, ctxt)
1535 {}
1536
1537 /* opt_pass methods: */
1538 opt_pass * clone () { return new pass_dce (m_ctxt); }
1539 bool gate () { return gate_dce (); }
1540 unsigned int execute () { return tree_ssa_dce (); }
1541
1542 }; // class pass_dce
1543
1544 } // anon namespace
1545
1546 gimple_opt_pass *
1547 make_pass_dce (gcc::context *ctxt)
1548 {
1549 return new pass_dce (ctxt);
1550 }
1551
1552 namespace {
1553
1554 const pass_data pass_data_dce_loop =
1555 {
1556 GIMPLE_PASS, /* type */
1557 "dceloop", /* name */
1558 OPTGROUP_NONE, /* optinfo_flags */
1559 true, /* has_gate */
1560 true, /* has_execute */
1561 TV_TREE_DCE, /* tv_id */
1562 ( PROP_cfg | PROP_ssa ), /* properties_required */
1563 0, /* properties_provided */
1564 0, /* properties_destroyed */
1565 0, /* todo_flags_start */
1566 TODO_verify_ssa, /* todo_flags_finish */
1567 };
1568
1569 class pass_dce_loop : public gimple_opt_pass
1570 {
1571 public:
1572 pass_dce_loop (gcc::context *ctxt)
1573 : gimple_opt_pass (pass_data_dce_loop, ctxt)
1574 {}
1575
1576 /* opt_pass methods: */
1577 opt_pass * clone () { return new pass_dce_loop (m_ctxt); }
1578 bool gate () { return gate_dce (); }
1579 unsigned int execute () { return tree_ssa_dce_loop (); }
1580
1581 }; // class pass_dce_loop
1582
1583 } // anon namespace
1584
1585 gimple_opt_pass *
1586 make_pass_dce_loop (gcc::context *ctxt)
1587 {
1588 return new pass_dce_loop (ctxt);
1589 }
1590
1591 namespace {
1592
1593 const pass_data pass_data_cd_dce =
1594 {
1595 GIMPLE_PASS, /* type */
1596 "cddce", /* name */
1597 OPTGROUP_NONE, /* optinfo_flags */
1598 true, /* has_gate */
1599 true, /* has_execute */
1600 TV_TREE_CD_DCE, /* tv_id */
1601 ( PROP_cfg | PROP_ssa ), /* properties_required */
1602 0, /* properties_provided */
1603 0, /* properties_destroyed */
1604 0, /* todo_flags_start */
1605 ( TODO_verify_ssa | TODO_verify_flow ), /* todo_flags_finish */
1606 };
1607
1608 class pass_cd_dce : public gimple_opt_pass
1609 {
1610 public:
1611 pass_cd_dce (gcc::context *ctxt)
1612 : gimple_opt_pass (pass_data_cd_dce, ctxt)
1613 {}
1614
1615 /* opt_pass methods: */
1616 opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1617 bool gate () { return gate_dce (); }
1618 unsigned int execute () { return tree_ssa_cd_dce (); }
1619
1620 }; // class pass_cd_dce
1621
1622 } // anon namespace
1623
1624 gimple_opt_pass *
1625 make_pass_cd_dce (gcc::context *ctxt)
1626 {
1627 return new pass_cd_dce (ctxt);
1628 }