/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2021 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output: "dead statements" can never affect
   the output, while "necessary statements" may affect it.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
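
/* As a small illustration of the three phases, consider

     int f (int a, int b)
     {
       int x = a + b;
       return a * b;
     }

   Phase 1 marks the return statement necessary; phase 2 then marks
   the computation of a * b necessary because the return uses its
   value; phase 3 removes the computation of x, which feeds nothing
   necessary.  */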

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
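
/* For instance, given

     if (p_1 != 0)    <- bb 2 ends in this GIMPLE_COND
       x_2 = 1;       <- bb 3

   bb 3 is control dependent on the true edge out of bb 2, since that
   edge decides whether bb 3 executes at all.  */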

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL, holds a map from basic block index into the postorder.  */
static int *bb_postorder;


/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (stmt_unremovable_because_of_non_call_eh_p (cfun, stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
            case BUILT_IN_GOMP_ALLOC:
              return;

            default:;
            }

        if (callee != NULL_TREE
            && flag_allocation_dce
            && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
          return;

        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. calls to
           const or pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        /* IFN_GOACC_LOOP calls are necessary in that they are used to
           represent the parameters (i.e. step, bound) of a lowered OpenACC
           partitioned loop.  But this kind of partitioned loop might not
           survive aggressive loop removal because it has a loop exit and
           is assumed to be finite.  Therefore, we need to explicitly mark
           these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c)  */
        if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (gimple_vdef (stmt) && keep_all_vdefs_p ())
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence analysis will later decide which
   of them are needed.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      class loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
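
/* For example, at -O2 the aggressive pass would otherwise happily delete

     void f (void) { for (;;); }

   since the loop computes nothing.  Because finite_loop_p cannot prove
   the loop finite, the control dependences of its latch are marked
   necessary above and the (possibly deliberate) infinite loop is
   preserved.  */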


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
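
/* E.g. a store to a local variable whose address is never taken is
   based on a non-aliased decl, whereas a store through an arbitrary
   pointer dereference, or to a global, may be aliased.  This
   distinction drives the two walkers below.  */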

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());

  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
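
/* E.g. when walking the virtual use-def chain upwards from the load in

     a = 1;
     a = 2;
     ... = a;

   the walker first visits the store "a = 2", marks it necessary, and
   reports it as a kill of a, which stops the walk; "a = 1" is never
   visited and can still be removed as dead.  */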

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (gcall *call = dyn_cast <gcall *> (def_stmt))
    {
      tree callee = gimple_call_fndecl (call);
      if (callee != NULL_TREE
          && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
          case BUILT_IN_GOMP_ALLOC:
          case BUILT_IN_GOMP_FREE:
            return false;

          default:;
          }

      if (callee != NULL_TREE
          && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
              || DECL_IS_OPERATOR_DELETE_P (callee))
          && gimple_call_from_new_or_delete (call))
        return false;
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one or identical arguments; such
   PHIs can be removed.  */
static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
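
/* E.g. x_3 = PHI <x_1, x_1> is degenerate: every argument is x_1,
   so x_3 can simply be replaced by x_1.  */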

/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */

static bool
valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
{
  tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
  tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
  return valid_new_delete_pair_p (new_asm, delete_asm);
}
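
/* For example, a call to the replaceable global operator new
   (assembler name _Znwm on LP64 targets) pairs with the matching
   operator delete (_ZdlPv), but not with the array form operator
   delete[] (_ZdaPv).  */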

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, control dependences are not taken into
   account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependences of the empty basic
             blocks, since they contain computations of PHI operands.

             Doing so is too restrictive in the case the predecessor
             block is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (which is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent the empty loop from being removed.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases for what
             the children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child, which is B

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG, except for
                block B itself (since C' postdominates B in the modified
                CFG).

             Now how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B, and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this condition
                is somewhat expensive, because we would need to iterate over
                all successors of B.  We can safely assume that this does
                not happen: we will mark B as needed when processing the
                other path from B to C that is control dependent on B, and
                marking the control dependencies of B itself is harmless
                because they will be processed anyway after processing the
                control statement in B.

             b) C does not postdominate B.  Case 1 always happens, since
                there is a path from C to the exit that does not go through
                B, and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free that is directly fed by an
             allocation function, do not mark the allocation necessary
             through processing the argument.  */
          bool is_delete_operator
            = (is_gimple_call (stmt)
               && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
               && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
          if (is_delete_operator
              || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
              || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gcall *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
                      || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
                          && gimple_call_from_new_or_delete (def_stmt))))
                {
                  if (is_delete_operator
                      && !valid_new_delete_pair_p (def_stmt, stmt))
                    mark_operand_necessary (gimple_call_arg (stmt, 0));

                  /* Delete operators can have alignment and/or size as
                     further arguments.  When such an argument is an
                     SSA_NAME, it must be marked as necessary.  Similarly
                     for GOMP_free.  */
                  if (gimple_call_num_args (stmt) >= 2)
                    for (unsigned i = 1; i < gimple_call_num_args (stmt);
                         i++)
                      {
                        tree arg = gimple_call_arg (stmt, i);
                        if (TREE_CODE (arg) == SSA_NAME)
                          mark_operand_necessary (arg);
                      }

                  continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* No need to search for vdefs if we intrinsically keep them all.  */
          if (keep_all_vdefs_p ())
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but exclude non-aliased ones, which we
             handle in 1).  By keeping a global visited bitmap for the
             references we walk for 2) we avoid quadratic behavior for
             those.  */

          if (gcall *call = dyn_cast <gcall *> (stmt))
            {
              tree callee = gimple_call_fndecl (call);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              if (callee != NULL_TREE
                  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
                      || DECL_IS_OPERATOR_DELETE_P (callee))
                  && gimple_call_from_new_or_delete (call))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (call);
              for (i = 0; i < gimple_call_num_args (call); ++i)
                {
                  tree arg = gimple_call_arg (call, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (call, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back
             to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}


/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
                  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to
     update the CFG.  We can choose any of the edges out of BB as long as
     we are sure not to close infinite loops.  This is done by always
     choosing the edge closer to exit in inverted_post_order_compute
     order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement
         in it.  To be able to choose one, we compute an inverted post order
         starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program
         execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
        if (e != e2)
          {
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            to_remove_edges.safe_push (e2);
          }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
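
/* For instance, when the GIMPLE_COND ending a block is found dead,
   neither of its arms containing anything necessary, the condition
   itself is deleted here, one outgoing edge is kept and turned into a
   plain fallthru, and the other is queued in TO_REMOVE_EDGES for later
   removal.  */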

/* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
   uses of DATA (an SSA_NAME) other than REALPART_EXPRs referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
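
/* E.g. when only the sum is used,

     _1 = ADD_OVERFLOW (a_2, b_3);
     sum_4 = REALPART_EXPR <_1>;

   and no IMAGPART_EXPR <_1> (the overflow flag) exists, the call is
   rewritten above into a plain unsigned addition wrapped in a
   COMPLEX_EXPR with a zero overflow part.  */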

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;
  auto_vec<edge> to_remove_edges;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      auto_bitmap debug_seen;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
                  || (is_gimple_call (stmt)
                      && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
                      && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep those clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    {
                      bitmap_clear (debug_seen);
                      continue;
                    }
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (), saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                           || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                               && !ALLOCA_FUNCTION_CODE_P
                                     (DECL_FUNCTION_CODE (call))))
                          && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE (unless it has three arguments) or
                     ASAN_POISON without a lhs is not needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                        if (gimple_call_num_args (stmt) >= 3
                            && !integer_nonzerop (gimple_call_arg (stmt, 2)))
                          break;
                        /* FALLTHRU */
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb, to_remove_edges);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
          else if (gimple_debug_bind_p (stmt))
            {
              /* We are only keeping the last debug-bind of a
                 non-DEBUG_EXPR_DECL variable in a series of
                 debug-bind stmts.  */
              tree var = gimple_debug_bind_get_var (stmt);
              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
                remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          bitmap_clear (debug_seen);
        }

      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible
     that we rendered some PHI nodes unreachable while they are still in
     use.  Mark them for renaming.  */
  if (!to_remove_edges.is_empty ())
    {
      basic_block prev_bb;

      /* Remove edges.  We've delayed this to not get bogus debug stmts
         during PHI node removal.  */
      for (unsigned i = 0; i < to_remove_edges.length (); ++i)
        remove_edge (to_remove_edges[i]);
      cfg_altered = true;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            break;
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  void set_pass_param (unsigned n, bool param)
    {
      gcc_assert (n == 0);
      update_address_taken_p = param;
    }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *)
    {
      return (tree_ssa_cd_dce ()
              | (update_address_taken_p ? TODO_update_address_taken : 0));
    }

private:
  bool update_address_taken_p;
}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}


/* A cheap DCE interface.  WORKLIST is a bitmap of SSA name versions of
   possibly dead definitions and is consumed by this function.  The
   function has linear complexity in the number of dead statements with
   a constant factor proportional to the average number of SSA use
   operands per statement.  */

void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}
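
/* A minimal usage sketch (the bitmap name is illustrative): a caller
   that just deleted the last real use of some SSA names can queue
   their versions and let this function do the cleanup:

     auto_bitmap maybe_dead;
     bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (name));
     ...
     simple_dce_from_worklist (maybe_dead);  */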