/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no such
   impact and can be removed, while "necessary statements" may affect
   the output and must be preserved.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */

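/* As a small illustration (not taken from any testcase), consider

     int f (int b)
     {
       int a = 1;        <- dead: 'a' is overwritten before any use
       a = 2;
       int c = a + b;
       return c;         <- marked in phase 1 (obviously necessary)
     }

   Phase 2 propagates necessity from the return statement to the
   definitions of 'c', 'a = 2' and 'b'; phase 3 then removes the
   remaining dead store 'a = 1'.  */
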
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "calls.h"
#include "gimple-pretty-print.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
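/* For instance, in 'if (p) A; else B; C;' block A is control dependent
   on the true edge out of the block testing 'p' and B on the false edge,
   while C, which executes either way, is control dependent on neither.  */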
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. calls to
           const and pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise control dependence analysis will be used to decide
   which control statements are needed.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with all arguments
   identical; such PHI nodes can be removed.  */
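/* E.g. x_4 = PHI <y_2(3), y_2(4)> is degenerate: every argument is y_2,
   so the PHI can be replaced by y_2 itself.  */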
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, AGGRESSIVE is false.  */

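/* E.g. once 'return c_5;' has been marked necessary, processing it marks
   its feeding definition 'c_5 = a_1 + b_2;' necessary, which in turn marks
   the definitions of a_1 and b_2, and so on transitively.  */
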
static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow
             arrives at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is inside a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (which is within the empty loop) also contains the block that
             determines the number of iterations, and that would prevent
             the empty loop from being removed.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify what the
             control dependence would look like if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases for what
             the children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG, except for
                block B itself (since C' postdominates B in the modified
                CFG).

             Now, how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this condition
                is somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                the control dependencies of B itself is harmless because
                they will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Case 1 always happens, since
                there is a path from C to the exit that does not go through
                B and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check that the
                     bounds used are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

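          /* E.g. for case 1, a load 'tmp_1 = a.f;' from a non-aliased
             local 'a' only needs the closest dominating store to 'a.f';
             for case 2, a load 'tmp_2 = *p_3;' conservatively makes every
             reaching may-definition necessary.  */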
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-def queries) up to
             a constant maximal number of queries and after that falls
             back to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gphi_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gphi *phi = gsi.phi ();
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply a control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop then this
               transform alters the set of BBs in the loop.  Schedule
               a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
   uses of DATA (an SSA_NAME) other than through a REALPART_EXPR referencing
   it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
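/* For example, when only the real part of the result is used:

     _1 = ADD_OVERFLOW (x_2, y_3);
     sum_4 = REALPART_EXPR <_1>;

   the internal call is replaced by a plain unsigned addition whose result
   is wrapped in a COMPLEX_EXPR with a zero overflow part, so sum_4 keeps
   its value.  */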

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed by an
                 allocation function, to allow unnecessary alloc-free
                 sequence elimination.  For instrumented calls it also
                 means we did not mark the bounds producer as necessary,
                 and it is time to do that in case the free call is not
                 removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep live the clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of 'var = call ();' is dead, simplify it into
                 'call ();', saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN)))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE without lhs is not needed.  */
                  if (gimple_call_internal_p (stmt)
                      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
                    remove_dead_stmt (&gsi, bb);
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominator tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that the loop
     will be proved to be finite in testcases such as loop-15.c and
     loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates ();
      if (scev_initialized_p ())
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}