/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case
   we do not want to run df_analyze again.  */
static bool df_in_progress = false;

/* True if we deleted at least one instruction.  */
static bool something_changed;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static sbitmap marked = NULL;

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      if (volatile_insn_p (body))
        return false;

      if (flag_non_call_exceptions && may_trap_p (body))
        return false;

      return true;
    }
}
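
/* For illustration, a rough sketch of how the predicate above
   classifies patterns (the register numbers are made up):

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
        falls through to the default case; if it is not volatile and
        cannot trap, it may be deleted once (reg:SI 100) is dead.

     (trap_if ...), (prefetch ...), (unspec ...)
        are matched by the cases above and are never deleted, even if
        any value they produce is unused.  */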

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast)
{
  rtx body, x;
  int i;

  if (!NONJUMP_INSN_P (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
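
/* For illustration (the pseudo register number is made up): in the
   fast pass

     (clobber (reg:SI 260))

   is considered a deletion candidate when reg 260 is a pseudo (or any
   register once reload has completed), while a bare (use ...) is
   always kept, since the USE exists only to keep a value alive.  */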


/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  if (insn)
    return TEST_BIT (marked, INSN_UID (insn));
  else
    /* Artificial defs are always needed and they do not have an
       insn.  */
    return true;
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}
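
/* For illustration (register numbers made up): for an insn whose
   pattern is

     (set (mem:SI (reg/f:SI 100)) (reg:SI 101))

   note_stores hands the MEM destination to the callback; since it is
   not a REG, the insn is marked as needed up front, because this pass
   only tracks the liveness of registers.  */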


/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
  bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}


/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
        {
          struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
          if (!note)
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */
          gcc_assert (note);
          remove_note (noted_insn, note);
        }
    }
}
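
/* For illustration (insn uids and register numbers made up): if the
   insn

     (insn 10 ... (set (reg:SI 100) ...))

   is about to be deleted, then a REG_EQUAL or REG_EQUIV note on some
   other insn whose expression mentions (reg:SI 100), e.g.

     (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (const_int 1)) ...)

   would no longer be backed by any definition of reg 100, so the
   routine above removes such notes first.  */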


/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;

  something_changed = false;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          if (noop_move_p (insn))
            {
              /* Note that this code does not handle the case where
                 the last insn of libcall is deleted.  As it turns out
                 this case is excluded in the call to noop_move_p.  */
              rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
              if (note && (XEXP (note, 0) != insn))
                {
                  rtx new_libcall_insn = next_real_insn (insn);
                  rtx retval_note = find_reg_note (XEXP (note, 0),
                                                   REG_RETVAL, NULL_RTX);
                  REG_NOTES (new_libcall_insn)
                    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
                                         REG_NOTES (new_libcall_insn));
                  XEXP (retval_note, 0) = new_libcall_insn;
                }
            }
          else if (marked_insn_p (insn))
            continue;

          /* WARNING: this debug counter can itself cause problems if
             its cutoff falls inside a libcall, so that part of the
             libcall is deleted but not all of it.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn, we have to remove the
             REG_EQUAL notes that mention the destination regs of the
             deleted insn, to prevent them from dangling.  */
          delete_corresponding_reg_eq_notes (insn);

          delete_insn_and_edges (insn);
          something_changed = true;
        }
}
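
/* For illustration: by convention the first insn of a libcall
   sequence carries a REG_LIBCALL note pointing at the last insn, and
   the last insn carries a REG_RETVAL note pointing back at the first.
   When the loop above deletes a first insn that is a no-op move, it
   attaches a fresh REG_LIBCALL note to the following real insn and
   redirects the REG_RETVAL note to it, so the bracketing of the
   libcall stays intact.  */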


/* Mark all insns using DELETE_PARM in the libcall that contains
   START_INSN.  */
static void
mark_libcall (rtx start_insn, bool delete_parm)
{
  rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
  int id = INTVAL (XEXP (note, 0));
  rtx insn;

  mark_insn (start_insn, delete_parm);
  insn = NEXT_INSN (start_insn);

  /* There are tales, long ago and far away, of the mystical nested
     libcall.  No one alive has actually seen one, but other parts of
     the compiler support them so we will here.  */
  for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          /* Stay in the loop as long as we are in any libcall.  */
          if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
            {
              if (id == INTVAL (XEXP (note, 0)))
                {
                  mark_insn (insn, delete_parm);
                  if (dump_file)
                    fprintf (dump_file, "matching forward libcall %d[%d]\n",
                             INSN_UID (insn), id);
                }
            }
          else
            break;
        }
    }

  for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
    {
      if (INSN_P (insn))
        {
          /* Stay in the loop as long as we are in any libcall.  */
          if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
            {
              if (id == INTVAL (XEXP (note, 0)))
                {
                  mark_insn (insn, delete_parm);
                  if (dump_file)
                    fprintf (dump_file, "matching backward libcall %d[%d]\n",
                             INSN_UID (insn), id);
                }
            }
          else
            break;
        }
    }
}
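
/* For illustration (uids, registers and the id value made up): every
   insn emitted for one libcall carries a REG_LIBCALL_ID note with the
   same integer, e.g.

     (insn 20 ...)       REG_LIBCALL_ID: 3
     (call_insn 21 ...)  REG_LIBCALL_ID: 3
     (insn 22 ...)       REG_LIBCALL_ID: 3

   so marking any one member via mark_libcall marks the whole group,
   and the sequence is kept or deleted as a unit.  */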


/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
          if (note)
            mark_libcall (insn, fast);
          else if (deletable_insn_p (insn, fast))
            mark_nonreg_stores (PATTERN (insn), insn, fast);
          else
            mark_insn (insn, fast);
        }

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}


/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  struct df_ref **use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  struct df_ref **use_rec;

  /* If this is part of a libcall, mark the entire libcall.  */
  if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
    mark_libcall (insn, false);

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
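
/* For illustration (uids made up): if insn 14 is needed and uses
   (reg:SI 100), and the use-def chain for that use records a def in
   insn 9, then insn 9 is marked and pushed on the worklist, and its
   own uses are processed later; necessity thus propagates backwards
   through the UD chains until a fixed point is reached.  */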


static void
end_ud_dce (void)
{
  sbitmap_free (marked);
  gcc_assert (VEC_empty (rtx, worklist));
}


/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  df_in_progress = false;
  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  end_ud_dce ();
  return 0;
}


static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce;
}

struct tree_opt_pass pass_ud_rtl_dce =
{
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */


/* Free the data allocated by init_dce.  */

static void
fini_dce (void)
{
  sbitmap_free (marked);
  bitmap_obstack_release (&dce_blocks_bitmap_obstack);
  bitmap_obstack_release (&dce_tmp_bitmap_obstack);
  df_in_progress = false;
}


/* Process basic block BB.  Return true if the live_in set has
   changed.  */

static bool
dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec, **use_rec;
  unsigned int bb_index = bb->index;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         successors of this block has had a change in its live in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  /* Process the artificial defs and uses at the bottom of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        /* If this is a recursive call, the libcall will have already
           been marked.  */
        if (!marked_insn_p (insn))
          {
            bool needed = false;

            /* The insn is needed if there is someone who uses the output.  */
            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec)))
                {
                  needed = true;
                  break;
                }

            if (needed)
              {
                rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);

                /* If we need to mark an insn in the middle of a
                   libcall, we need to back up to mark the entire
                   libcall.  Given that libcalls are rare, rescanning
                   the block should be a reasonable solution to trying
                   to figure out how to back up.  */
                if (note)
                  {
                    if (dump_file)
                      fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
                    mark_libcall (insn, true);
                    BITMAP_FREE (local_live);
                    return dce_process_block (bb, false);
                  }
                else
                  mark_insn (insn, true);
              }
          }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);
      }

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }
#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      /* Add use to set of uses in this BB.  */
      struct df_ref *use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }
#endif

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
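
/* For illustration (register number made up): walking backwards from
   DF_LR_OUT, an insn such as

     (set (reg:SI 100) ...)

   whose destination is not in LOCAL_LIVE at that point is left
   unmarked (dead); because its uses are then not added to LOCAL_LIVE,
   insns that only feed it can in turn become dead on the same or a
   later pass over the block.  */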

static void
fast_dce (void)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  int i;
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  int loop_count = 0;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;
      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          local_changed
            = dce_process_block (bb, bitmap_bit_p (redo_out, index));
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);
          prescan_insns_for_dce (true);
        }
      loop_count++;
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Callback for running pass_rtl_dce.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce ();
  fini_dce ();
  df_in_progress = false;
  return 0;
}


/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      enum df_changeable_flags old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_set_flags (old_flags);
    }
}
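
/* For illustration, a sketch of the intended use from another pass
   (based on the DF_LR_RUN_DCE handling in fast_dce above):

     df_set_flags (DF_LR_RUN_DCE);
     df_analyze ();

   makes the LR problem invoke run_fast_df_dce while the liveness
   information is being built, so dead insns are deleted as a side
   effect.  Callers that only want the deletions can call
   run_fast_dce () directly instead.  */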

static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce;
}


/* Run a fast DCE pass and return true if any instructions were
   deleted.  */

bool
run_fast_dce (void)
{
  return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);
}


struct tree_opt_pass pass_fast_rtl_dce =
{
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};