revert: dce.c (deletable_insn_p_1): New function, split out from...
[gcc.git] / gcc / dce.c
1 /* RTL dead code elimination.
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hashtab.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "flags.h"
31 #include "df.h"
32 #include "cselib.h"
33 #include "dce.h"
34 #include "timevar.h"
35 #include "tree-pass.h"
36 #include "dbgcnt.h"
37
DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True when this pass runs as a subroutine of the df live-register
   problem (see run_fast_df_dce); init_dce then skips re-running
   df_analyze.  */
static bool df_in_progress = false;

/* True if we deleted at least one instruction.  */
static bool something_changed;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Obstacks for the per-pass block bitmaps and the per-block temporary
   bitmaps used by the fast pass.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* One bit per insn uid; set when the insn is known to be needed.  */
static sbitmap marked = NULL;
60
/* Return true if INSN with pattern BODY is a normal instruction that
   can be deleted by the DCE pass.  FAST is true when running the
   variant of the pass that has no use-def chains available.  */

static bool
deletable_insn_p (rtx insn, rtx body, bool fast)
{
  rtx x;
  switch (GET_CODE (body))
    {
    case USE:
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      {
        /* A PARALLEL is deletable only if every element of it is.  */
        int i;
        for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
          if (!deletable_insn_p (insn, XVECEXP (body, 0, i), fast))
            return false;
        return true;
      }

    default:
      /* Only ordinary (non-jump, non-call) insns are candidates.  */
      if (!NONJUMP_INSN_P (insn))
        return false;

      /* Never delete insns containing volatile references.  */
      if (volatile_insn_p (body))
        return false;

      /* With -fnon-call-exceptions a trapping insn has EH side
         effects and must be kept.  */
      if (flag_non_call_exceptions && may_trap_p (body))
        return false;

      return true;
    }
}
119
120
121 /* Return true if INSN has not been marked as needed. */
122
123 static inline int
124 marked_insn_p (rtx insn)
125 {
126 if (insn)
127 return TEST_BIT (marked, INSN_UID (insn));
128 else
129 /* Artificial defs are always needed and they do not have an
130 insn. */
131 return true;
132 }
133
134
135 /* If INSN has not yet been marked as needed, mark it now, and add it to
136 the worklist. */
137
138 static void
139 mark_insn (rtx insn, bool fast)
140 {
141 if (!marked_insn_p (insn))
142 {
143 if (!fast)
144 VEC_safe_push (rtx, heap, worklist, insn);
145 SET_BIT (marked, INSN_UID (insn));
146 if (dump_file)
147 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
148 }
149 }
150
151
152 /* A note_stores callback used by mark_nonreg_stores. DATA is the
153 instruction containing DEST. */
154
155 static void
156 mark_nonreg_stores_1 (rtx dest, rtx pattern, void *data)
157 {
158 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
159 mark_insn ((rtx) data, true);
160 }
161
162
163 /* A note_stores callback used by mark_nonreg_stores. DATA is the
164 instruction containing DEST. */
165
166 static void
167 mark_nonreg_stores_2 (rtx dest, rtx pattern, void *data)
168 {
169 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
170 mark_insn ((rtx) data, false);
171 }
172
173
174 /* Mark INSN if BODY stores to a non-register destination. */
175
176 static void
177 mark_nonreg_stores (rtx body, rtx insn, bool fast)
178 {
179 if (fast)
180 note_stores (body, mark_nonreg_stores_1, insn);
181 else
182 note_stores (body, mark_nonreg_stores_2, insn);
183 }
184
185
/* Initialize global variables for a new DCE pass.  For the slow
   variant (FAST false) this also adds the use-def chain problem to
   the data-flow framework.  When the df framework is already being
   driven by the caller (df_in_progress), the re-analysis is
   skipped.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  /* Obstacks backing the per-pass and per-block temporary bitmaps.  */
  bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
  bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
  /* One bit per insn uid, cleared: no insn is marked needed yet.  */
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}
206
207
/* Delete all REG_EQUAL and REG_EQUIV notes that mention registers
   INSN writes, to prevent bad dangling notes after INSN itself is
   deleted.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;
  /* For every register defined by INSN, drain the chain of equiv-note
     uses of that register.  */
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
        {
          struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
          if (!note)
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */
          gcc_assert (note);
          remove_note (noted_insn, note);
        }
    }
}
239
240
/* Delete every instruction that hasn't been marked as needed,
   recording in SOMETHING_CHANGED whether anything was deleted.
   No-op moves are deleted even if marked; their libcall notes are
   transferred to the following real insn so the libcall bracketing
   stays intact.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;

  something_changed = false;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          if (noop_move_p (insn))
            {
              /* Note that this code does not handle the case where
                 the last insn of libcall is deleted.  As it turns out
                 this case is excluded in the call to noop_move_p.  */
              rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
              if (note && (XEXP (note, 0) != insn))
                {
                  /* INSN heads a libcall: move its REG_LIBCALL note
                     to the next real insn and retarget the matching
                     REG_RETVAL note at that insn.  */
                  rtx new_libcall_insn = next_real_insn (insn);
                  rtx retval_note = find_reg_note (XEXP (note, 0),
                                                   REG_RETVAL, NULL_RTX);
                  REG_NOTES (new_libcall_insn)
                    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
                                         REG_NOTES (new_libcall_insn));
                  XEXP (retval_note, 0) = new_libcall_insn;
                }
            }
          else if (marked_insn_p (insn))
            continue;

          /* WARNING, this debugging can itself cause problems if the
             edge of the counter causes part of a libcall to be
             deleted but not all of it.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn, we have to delete
             REG_EQUAL of the destination regs of the deleted insn
             to prevent dangling REG_EQUAL.  */
          delete_corresponding_reg_eq_notes (insn);

          delete_insn_and_edges (insn);
          something_changed = true;
        }
}
293
294
295 /* Mark all insns using DELETE_PARM in the libcall that contains
296 START_INSN. */
297 static void
298 mark_libcall (rtx start_insn, bool delete_parm)
299 {
300 rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
301 int id = INTVAL (XEXP (note, 0));
302 rtx insn;
303
304 mark_insn (start_insn, delete_parm);
305 insn = NEXT_INSN (start_insn);
306
307 /* There are tales, long ago and far away, of the mystical nested
308 libcall. No one alive has actually seen one, but other parts of
309 the compiler support them so we will here. */
310 for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
311 {
312 if (INSN_P (insn))
313 {
314 /* Stay in the loop as long as we are in any libcall. */
315 if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
316 {
317 if (id == INTVAL (XEXP (note, 0)))
318 {
319 mark_insn (insn, delete_parm);
320 if (dump_file)
321 fprintf (dump_file, "matching forward libcall %d[%d]\n",
322 INSN_UID (insn), id);
323 }
324 }
325 else
326 break;
327 }
328 }
329
330 for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
331 {
332 if (INSN_P (insn))
333 {
334 /* Stay in the loop as long as we are in any libcall. */
335 if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
336 {
337 if (id == INTVAL (XEXP (note, 0)))
338 {
339 mark_insn (insn, delete_parm);
340 if (dump_file)
341 fprintf (dump_file, "matching backward libcall %d[%d]\n",
342 INSN_UID (insn), id);
343 }
344 }
345 else
346 break;
347 }
348 }
349 }
350
351
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  FAST selects the marking mode passed
   down to mark_libcall / mark_insn.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          /* Insns inside a libcall are marked (or not) as a unit.  */
          rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
          if (note)
            mark_libcall (insn, fast);
          else if (deletable_insn_p (insn, PATTERN (insn), fast))
            /* Even a deletable insn is needed if it stores to a
               non-register destination such as memory.  */
            mark_nonreg_stores (PATTERN (insn), insn, fast);
          else
            mark_insn (insn, fast);
        }

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
381
382
/* UD-based DCE routines.  */

/* Mark the instructions that define registers which are artificially
   used, such as the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  struct df_ref **use_rec;

  FOR_ALL_BB (bb)
    {
      /* Walk each artificial use of the block and mark every insn
         reached through the use's use-def chain.  */
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
403
/* Mark every instruction that defines a register value that INSN
   uses, by walking INSN's use-def chains.  If INSN belongs to a
   libcall, the entire libcall is marked as well.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  struct df_ref **use_rec;

  /* If this is part of a libcall, mark the entire libcall.  */
  if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
    mark_libcall (insn, false);

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      /* Every def that reaches this use must be preserved.  */
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
429
430
431 static void
432 end_ud_dce (void)
433 {
434 sbitmap_free (marked);
435 gcc_assert (VEC_empty (rtx, worklist));
436 }
437
438
/* UD-chain based DCE.  Main entry point of the slow pass: mark the
   trivially needed insns, then propagate neededness backward through
   use-def chains, and finally delete everything still unmarked.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  df_in_progress = false;
  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  /* Drain the worklist; marking an insn's dependencies may push more
     insns onto it.  */
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  end_ud_dce ();
  return 0;
}
464
465
466 static bool
467 gate_ud_dce (void)
468 {
469 return optimize > 1 && flag_dce;
470 }
471
/* Pass descriptor for the use-def chain based RTL DCE pass.  */
struct tree_opt_pass pass_ud_rtl_dce =
{
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
490
491 /* -------------------------------------------------------------------------
492 Fast DCE functions
493 ------------------------------------------------------------------------- */
494
495
496 /* Free the data allocated by init_dce. */
497
498 static void
499 fini_dce (void)
500 {
501 sbitmap_free (marked);
502 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
503 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
504 df_in_progress = false;
505 }
506
507
/* Process basic block BB: simulate liveness backward from the
   block's live-out set, marking any unmarked insn whose defs are
   still live at that point.  If REDO_OUT, first recompute the
   live-out set from the successors.  Return true if the live_in set
   of BB has changed.  */

static bool
dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec, **use_rec;
  unsigned int bb_index = bb->index;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
         the succs of this block has had a change in it live in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  /* Process the artificial defs and uses at the bottom of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      /* Partial or conditional defs do not fully kill the register.  */
      if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }

  /* Walk the insns bottom-up, updating local_live as we go.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        /* If this is a recursive call, the libcall will have already
           been marked.  */
        if (!marked_insn_p (insn))
          {
            bool needed = false;

            /* The insn is needed if there is someone who uses the output.  */
            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec)))
                {
                  needed = true;
                  break;
                }

            if (needed)
              {
                rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);

                /* If we need to mark an insn in the middle of a
                   libcall, we need to back up to mark the entire
                   libcall.  Given that libcalls are rare, rescanning
                   the block should be a reasonable solution to trying
                   to figure out how to back up.  */
                if (note)
                  {
                    if (dump_file)
                      fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
                    mark_libcall (insn, true);
                    BITMAP_FREE (local_live);
                    return dce_process_block (bb, false);
                  }
                else
                  mark_insn (insn, true);
              }
          }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);
      }

  /* Process the artificial defs at the top of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }
#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      /* Add use to set of uses in this BB.  */
      struct df_ref *use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }
#endif

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
631
632 static void
633 fast_dce (void)
634 {
635 int *postorder = df_get_postorder (DF_BACKWARD);
636 int n_blocks = df_get_n_blocks (DF_BACKWARD);
637 int i;
638 /* The set of blocks that have been seen on this iteration. */
639 bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
640 /* The set of blocks that need to have the out vectors reset because
641 the in of one of their successors has changed. */
642 bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
643 bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
644 bool global_changed = true;
645
646 int loop_count = 0;
647
648 prescan_insns_for_dce (true);
649
650 for (i = 0; i < n_blocks; i++)
651 bitmap_set_bit (all_blocks, postorder[i]);
652
653 while (global_changed)
654 {
655 global_changed = false;
656 for (i = 0; i < n_blocks; i++)
657 {
658 int index = postorder[i];
659 basic_block bb = BASIC_BLOCK (index);
660 bool local_changed;
661
662 if (index < NUM_FIXED_BLOCKS)
663 {
664 bitmap_set_bit (processed, index);
665 continue;
666 }
667
668 local_changed
669 = dce_process_block (bb, bitmap_bit_p (redo_out, index));
670 bitmap_set_bit (processed, index);
671
672 if (local_changed)
673 {
674 edge e;
675 edge_iterator ei;
676 FOR_EACH_EDGE (e, ei, bb->preds)
677 if (bitmap_bit_p (processed, e->src->index))
678 /* Be tricky about when we need to iterate the
679 analysis. We only have redo the analysis if the
680 bitmaps change at the top of a block that is the
681 entry to a loop. */
682 global_changed = true;
683 else
684 bitmap_set_bit (redo_out, e->src->index);
685 }
686 }
687
688 if (global_changed)
689 {
690 /* Turn off the RUN_DCE flag to prevent recursive calls to
691 dce. */
692 int old_flag = df_clear_flags (DF_LR_RUN_DCE);
693
694 /* So something was deleted that requires a redo. Do it on
695 the cheap. */
696 delete_unmarked_insns ();
697 sbitmap_zero (marked);
698 bitmap_clear (processed);
699 bitmap_clear (redo_out);
700
701 /* We do not need to rescan any instructions. We only need
702 to redo the dataflow equations for the blocks that had a
703 change at the top of the block. Then we need to redo the
704 iteration. */
705 df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
706
707 if (old_flag & DF_LR_RUN_DCE)
708 df_set_flags (DF_LR_RUN_DCE);
709 prescan_insns_for_dce (true);
710 }
711 loop_count++;
712 }
713
714 delete_unmarked_insns ();
715
716 BITMAP_FREE (processed);
717 BITMAP_FREE (redo_out);
718 BITMAP_FREE (all_blocks);
719 }
720
721
722 /* Callback for running pass_rtl_dce. */
723
724 static unsigned int
725 rest_of_handle_fast_dce (void)
726 {
727 init_dce (true);
728 fast_dce ();
729 fini_dce ();
730 df_in_progress = false;
731 return 0;
732 }
733
734
/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      enum df_changeable_flags old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      /* Tell init_dce the df info is already up to date; fini_dce
         (called via rest_of_handle_fast_dce) resets this flag.  */
      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_set_flags (old_flags);
    }
}
761
762 static bool
763 gate_fast_dce (void)
764 {
765 return optimize > 0 && flag_dce;
766 }
767
768
769 /* Run a fast DCE pass and return true if any instructions were
770 deleted. */
771
772 bool
773 run_fast_dce (void)
774 {
775 return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);
776 }
777
778
/* Pass descriptor for the fast RTL DCE pass.  */
struct tree_opt_pass pass_fast_rtl_dce =
{
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
797