Merge dataflow branch into mainline
[gcc.git] / gcc / dce.c
1 /* RTL dead code elimination.
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hashtab.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "flags.h"
31 #include "df.h"
32 #include "cselib.h"
33 #include "dce.h"
34 #include "timevar.h"
35 #include "tree-pass.h"
36 #include "dbgcnt.h"
37
DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True when DCE has been invoked from within the df engine itself
   (see run_fast_df_dce); in that case init_dce must not call
   df_analyze again.  */
static bool df_in_progress = false;

/* True if we deleted at least one instruction.  */
static bool something_changed;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Obstacks for the bitmaps used by the fast pass: one for block sets
   that live for the whole pass, one for short-lived per-block
   scratch bitmaps.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* One bit per INSN_UID; a set bit means the insn has been marked as
   needed.  Allocated in init_dce, freed in end_ud_dce/fini_dce.  */
static sbitmap marked = NULL;
61 /* Return true if INSN a normal instruction that can be deleted by the
62 DCE pass. */
63
64 static bool
65 deletable_insn_p (rtx insn, bool fast)
66 {
67 rtx x;
68
69 switch (GET_CODE (PATTERN (insn)))
70 {
71 case USE:
72 case PREFETCH:
73 case TRAP_IF:
74 /* The UNSPEC case was added here because the ia-64 claims that
75 USEs do not work after reload and generates UNSPECS rather
76 than USEs. Since dce is run after reload we need to avoid
77 deleting these even if they are dead. If it turns out that
78 USEs really do work after reload, the ia-64 should be
79 changed, and the UNSPEC case can be removed. */
80 case UNSPEC:
81 return false;
82
83 case CLOBBER:
84 if (fast)
85 {
86 /* A CLOBBER of a dead pseudo register serves no purpose.
87 That is not necessarily true for hard registers until
88 after reload. */
89 x = XEXP (PATTERN (insn), 0);
90 return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
91 }
92 else
93 /* Because of the way that use-def chains are built, it is not
94 possible to tell if the clobber is dead because it can
95 never be the target of a use-def chain. */
96 return false;
97
98 default:
99 if (!NONJUMP_INSN_P (insn))
100 return false;
101
102 if (volatile_insn_p (PATTERN (insn)))
103 return false;
104
105 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
106 return false;
107
108 return true;
109 }
110 }
111
112
/* Return true if INSN has been marked as needed (or if INSN is NULL,
   which stands for an artificial def).  Note: the original comment
   here said "has not been marked", which was inverted.  */

static inline int
marked_insn_p (rtx insn)
{
  if (insn)
    return TEST_BIT (marked, INSN_UID (insn));
  else
    /* Artificial defs are always needed and they do not have an
       insn.  */
    return true;
}
125
126
127 /* If INSN has not yet been marked as needed, mark it now, and add it to
128 the worklist. */
129
130 static void
131 mark_insn (rtx insn, bool fast)
132 {
133 if (!marked_insn_p (insn))
134 {
135 if (!fast)
136 VEC_safe_push (rtx, heap, worklist, insn);
137 SET_BIT (marked, INSN_UID (insn));
138 if (dump_file)
139 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
140 }
141 }
142
143
144 /* A note_stores callback used by mark_nonreg_stores. DATA is the
145 instruction containing DEST. */
146
147 static void
148 mark_nonreg_stores_1 (rtx dest, rtx pattern, void *data)
149 {
150 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
151 mark_insn ((rtx) data, true);
152 }
153
154
155 /* A note_stores callback used by mark_nonreg_stores. DATA is the
156 instruction containing DEST. */
157
158 static void
159 mark_nonreg_stores_2 (rtx dest, rtx pattern, void *data)
160 {
161 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
162 mark_insn ((rtx) data, false);
163 }
164
165
166 /* Mark INSN if BODY stores to a non-register destination. */
167
168 static void
169 mark_nonreg_stores (rtx body, rtx insn, bool fast)
170 {
171 if (fast)
172 note_stores (body, mark_nonreg_stores_1, insn);
173 else
174 note_stores (body, mark_nonreg_stores_2, insn);
175 }
176
177
/* Initialize global variables for a new DCE pass.  When DCE is being
   run from inside the df engine (df_in_progress), the dataflow info
   is already up to date and df_analyze must not be re-entered.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
	/* The UD-chain problem must be registered before df_analyze
	   so that the chains are computed by that call.  */
	df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
  bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
  /* One bit per insn uid; everything starts out unmarked.  */
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}
198
199
/* Delete all REG_EQUAL/REG_EQUIV notes that mention any register
   INSN writes, to prevent those notes from dangling once INSN is
   deleted.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;

  /* For each register defined by INSN...  */
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);

      /* This loop is a little tricky.  We cannot just go down the
	 chain because it is being modified by the actions in the
	 loop.  So we just get the head.  We plan to drain the list
	 anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
	{
	  struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
	  rtx noted_insn = DF_REF_INSN (eq_use);
	  rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
	  if (!note)
	    note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

	  /* This assert is generally triggered when someone deletes a
	     REG_EQUAL or REG_EQUIV note by hacking the list manually
	     rather than calling remove_note.  */
	  gcc_assert (note);
	  /* remove_note also unlinks eq_use from the chain, making
	     progress for the while loop above.  */
	  remove_note (noted_insn, note);
	}
    }
}
231
232
/* Delete every instruction that has not been marked as needed,
   recording in SOMETHING_CHANGED whether anything was deleted.
   No-op moves are deleted even when marked.  (The old comment
   referred to DCE_DF/DF_DELETE parameters that no longer exist.)  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;

  something_changed = false;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
	{
	  if (noop_move_p (insn))
	    {
	      /* Note that this code does not handle the case where
		 the last insn of libcall is deleted.  As it turns out
		 this case is excluded in the call to noop_move_p.  */
	      rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
	      if (note && (XEXP (note, 0) != insn))
		{
		  /* INSN heads a libcall sequence; move the
		     REG_LIBCALL note onto the next real insn and
		     repoint the matching REG_RETVAL note so the
		     pairing stays intact after deletion.  */
		  rtx new_libcall_insn = next_real_insn (insn);
		  rtx retval_note = find_reg_note (XEXP (note, 0),
						   REG_RETVAL, NULL_RTX);
		  REG_NOTES (new_libcall_insn)
		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
					 REG_NOTES (new_libcall_insn));
		  XEXP (retval_note, 0) = new_libcall_insn;
		}
	    }
	  else if (marked_insn_p (insn))
	    continue;

	  /* WARNING, this debugging can itself cause problems if the
	     edge of the counter causes part of a libcall to be
	     deleted but not all of it.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn, we have to delete
	     REG_EQUAL of the destination regs of the deleted insn
	     to prevent dangling REG_EQUAL.  */
	  delete_corresponding_reg_eq_notes (insn);

	  delete_insn_and_edges (insn);
	  something_changed = true;
	}
}
285
286
287 /* Mark all insns using DELETE_PARM in the libcall that contains
288 START_INSN. */
289 static void
290 mark_libcall (rtx start_insn, bool delete_parm)
291 {
292 rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
293 int id = INTVAL (XEXP (note, 0));
294 rtx insn;
295
296 mark_insn (start_insn, delete_parm);
297 insn = NEXT_INSN (start_insn);
298
299 /* There are tales, long ago and far away, of the mystical nested
300 libcall. No one alive has actually seen one, but other parts of
301 the compiler support them so we will here. */
302 for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
303 {
304 if (INSN_P (insn))
305 {
306 /* Stay in the loop as long as we are in any libcall. */
307 if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
308 {
309 if (id == INTVAL (XEXP (note, 0)))
310 {
311 mark_insn (insn, delete_parm);
312 if (dump_file)
313 fprintf (dump_file, "matching forward libcall %d[%d]\n",
314 INSN_UID (insn), id);
315 }
316 }
317 else
318 break;
319 }
320 }
321
322 for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
323 {
324 if (INSN_P (insn))
325 {
326 /* Stay in the loop as long as we are in any libcall. */
327 if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
328 {
329 if (id == INTVAL (XEXP (note, 0)))
330 {
331 mark_insn (insn, delete_parm);
332 if (dump_file)
333 fprintf (dump_file, "matching backward libcall %d[%d]\n",
334 INSN_UID (insn), id);
335 }
336 }
337 else
338 break;
339 }
340 }
341 }
342
343
344 /* Go through the instructions and mark those whose necessity is not
345 dependent on inter-instruction information. Make sure all other
346 instructions are not marked. */
347
348 static void
349 prescan_insns_for_dce (bool fast)
350 {
351 basic_block bb;
352 rtx insn;
353
354 if (dump_file)
355 fprintf (dump_file, "Finding needed instructions:\n");
356
357 FOR_EACH_BB (bb)
358 FOR_BB_INSNS (bb, insn)
359 if (INSN_P (insn))
360 {
361 rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
362 if (note)
363 mark_libcall (insn, fast);
364 else if (deletable_insn_p (insn, fast))
365 mark_nonreg_stores (PATTERN (insn), insn, fast);
366 else
367 mark_insn (insn, fast);
368 }
369
370 if (dump_file)
371 fprintf (dump_file, "Finished finding needed instructions:\n");
372 }
373
374
375 /* UD-based DSE routines. */
376
377 /* Mark instructions that define artifically-used registers, such as
378 the frame pointer and the stack pointer. */
379
380 static void
381 mark_artificial_uses (void)
382 {
383 basic_block bb;
384 struct df_link *defs;
385 struct df_ref **use_rec;
386
387 FOR_ALL_BB (bb)
388 {
389 for (use_rec = df_get_artificial_uses (bb->index);
390 *use_rec; use_rec++)
391 for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
392 mark_insn (DF_REF_INSN (defs->ref), false);
393 }
394 }
395
396 /* Mark every instruction that defines a register value that INSN uses. */
397
398 static void
399 mark_reg_dependencies (rtx insn)
400 {
401 struct df_link *defs;
402 struct df_ref **use_rec;
403
404 /* If this is part of a libcall, mark the entire libcall. */
405 if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
406 mark_libcall (insn, false);
407
408 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
409 {
410 struct df_ref *use = *use_rec;
411 if (dump_file)
412 {
413 fprintf (dump_file, "Processing use of ");
414 print_simple_rtl (dump_file, DF_REF_REG (use));
415 fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
416 }
417 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
418 mark_insn (DF_REF_INSN (defs->ref), false);
419 }
420 }
421
422
423 static void
424 end_ud_dce (void)
425 {
426 sbitmap_free (marked);
427 gcc_assert (VEC_empty (rtx, worklist));
428 }
429
430
431 /* UD-chain based DCE. */
432
433 static unsigned int
434 rest_of_handle_ud_dce (void)
435 {
436 rtx insn;
437
438 df_in_progress = false;
439 init_dce (false);
440
441 prescan_insns_for_dce (false);
442 mark_artificial_uses ();
443 while (VEC_length (rtx, worklist) > 0)
444 {
445 insn = VEC_pop (rtx, worklist);
446 mark_reg_dependencies (insn);
447 }
448 /* Before any insns are deleted, we must remove the chains since
449 they are not bidirectional. */
450 df_remove_problem (df_chain);
451 delete_unmarked_insns ();
452
453 end_ud_dce ();
454 return 0;
455 }
456
457
458 static bool
459 gate_ud_dce (void)
460 {
461 return optimize > 1 && flag_dce;
462 }
463
/* Pass descriptor for the UD-chain based DCE pass.  */
struct tree_opt_pass pass_ud_rtl_dce =
{
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
482
483 /* -------------------------------------------------------------------------
484 Fast DCE functions
485 ------------------------------------------------------------------------- */
486
487
488 /* Free the data allocated by init_dce. */
489
490 static void
491 fini_dce (void)
492 {
493 sbitmap_free (marked);
494 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
495 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
496 df_in_progress = false;
497 }
498
499
/* Process basic block BB by simulating liveness backward from
   DF_LR_OUT, marking insns whose defs are live.  Return true if the
   live_in set has changed.  If REDO_OUT, first recompute DF_LR_OUT
   from the successors' live_in sets.  */

static bool
dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec, **use_rec;
  unsigned int bb_index = bb->index;

  if (redo_out)
    {
      /* Recompute the live_out set of this block, because the live_in
	 set of one of its successors has changed.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  /* Process the artificial defs and uses at the bottom of the block
     (those without DF_REF_AT_TOP).  Partial or conditional defs do
     not kill liveness, so they are skipped.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	  && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
	bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
	bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }

  /* Walk the insns bottom-up, marking those whose defs are live.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
	/* If this is a recursive call, the libcall will have already
	   been marked.  */
	if (!marked_insn_p (insn))
	  {
	    bool needed = false;

	    /* The insn is needed if there is someone who uses the output.  */
	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	      if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec)))
		{
		  needed = true;
		  break;
		}

	    if (needed)
	      {
		rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);

		/* If we need to mark an insn in the middle of a
		   libcall, we need to back up to mark the entire
		   libcall.  Given that libcalls are rare, rescanning
		   the block should be a reasonable solution to trying
		   to figure out how to back up.  */
		if (note)
		  {
		    if (dump_file)
		      fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
		    mark_libcall (insn, true);
		    BITMAP_FREE (local_live);
		    /* Restart the block scan; the recursion terminates
		       because the libcall is now marked.  */
		    return dce_process_block (bb, false);
		  }
		else
		  mark_insn (insn, true);
	      }
	  }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_simulate_uses (insn, local_live);
      }

  /* Finally apply the artificial defs and uses at the top of the
     block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	  && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
	bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }
#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      /* Add use to set of uses in this BB.  */
      struct df_ref *use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
	bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }
#endif

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
623
624 static void
625 fast_dce (void)
626 {
627 int *postorder = df_get_postorder (DF_BACKWARD);
628 int n_blocks = df_get_n_blocks (DF_BACKWARD);
629 int i;
630 /* The set of blocks that have been seen on this iteration. */
631 bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
632 /* The set of blocks that need to have the out vectors reset because
633 the in of one of their successors has changed. */
634 bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
635 bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
636 bool global_changed = true;
637
638 int loop_count = 0;
639
640 prescan_insns_for_dce (true);
641
642 for (i = 0; i < n_blocks; i++)
643 bitmap_set_bit (all_blocks, postorder[i]);
644
645 while (global_changed)
646 {
647 global_changed = false;
648 for (i = 0; i < n_blocks; i++)
649 {
650 int index = postorder[i];
651 basic_block bb = BASIC_BLOCK (index);
652 bool local_changed;
653
654 if (index < NUM_FIXED_BLOCKS)
655 {
656 bitmap_set_bit (processed, index);
657 continue;
658 }
659
660 local_changed
661 = dce_process_block (bb, bitmap_bit_p (redo_out, index));
662 bitmap_set_bit (processed, index);
663
664 if (local_changed)
665 {
666 edge e;
667 edge_iterator ei;
668 FOR_EACH_EDGE (e, ei, bb->preds)
669 if (bitmap_bit_p (processed, e->src->index))
670 /* Be tricky about when we need to iterate the
671 analysis. We only have redo the analysis if the
672 bitmaps change at the top of a block that is the
673 entry to a loop. */
674 global_changed = true;
675 else
676 bitmap_set_bit (redo_out, e->src->index);
677 }
678 }
679
680 if (global_changed)
681 {
682 /* Turn off the RUN_DCE flag to prevent recursive calls to
683 dce. */
684 int old_flag = df_clear_flags (DF_LR_RUN_DCE);
685
686 /* So something was deleted that requires a redo. Do it on
687 the cheap. */
688 delete_unmarked_insns ();
689 sbitmap_zero (marked);
690 bitmap_clear (processed);
691 bitmap_clear (redo_out);
692
693 /* We do not need to rescan any instructions. We only need
694 to redo the dataflow equations for the blocks that had a
695 change at the top of the block. Then we need to redo the
696 iteration. */
697 df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
698
699 if (old_flag & DF_LR_RUN_DCE)
700 df_set_flags (DF_LR_RUN_DCE);
701 prescan_insns_for_dce (true);
702 }
703 loop_count++;
704 }
705
706 delete_unmarked_insns ();
707
708 BITMAP_FREE (processed);
709 BITMAP_FREE (redo_out);
710 BITMAP_FREE (all_blocks);
711 }
712
713
714 /* Callback for running pass_rtl_dce. */
715
716 static unsigned int
717 rest_of_handle_fast_dce (void)
718 {
719 init_dce (true);
720 fast_dce ();
721 fini_dce ();
722 df_in_progress = false;
723 return 0;
724 }
725
726
727 /* This is an internal call that is used by the df live register
728 problem to run fast dce as a side effect of creating the live
729 information. The stack is organized so that the lr problem is run,
730 this pass is run, which updates the live info and the df scanning
731 info, and then returns to allow the rest of the problems to be run.
732
733 This can be called by elsewhere but it will not update the bit
734 vectors for any other problems than LR.
735 */
736
737 void
738 run_fast_df_dce (void)
739 {
740 if (flag_dce)
741 {
742 /* If dce is able to delete something, it has to happen
743 immediately. Otherwise there will be problems handling the
744 eq_notes. */
745 enum df_changeable_flags old_flags
746 = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
747
748 df_in_progress = true;
749 rest_of_handle_fast_dce ();
750 df_set_flags (old_flags);
751 }
752 }
753
754 static bool
755 gate_fast_dce (void)
756 {
757 return optimize > 0 && flag_dce;
758 }
759
760
761 /* Run a fast DCE pass and return true if any instructions were
762 deleted. */
763
764 bool
765 run_fast_dce (void)
766 {
767 return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);
768 }
769
770
/* Pass descriptor for the fast (LR-based) DCE pass.  */
struct tree_opt_pass pass_fast_rtl_dce =
{
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
789