dce.c (init_dce): Only initialize the obstacks for the fast pass.
1 /* RTL dead code elimination.
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hashtab.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "df.h"
31 #include "cselib.h"
32 #include "dce.h"
33 #include "timevar.h"
34 #include "tree-pass.h"
35 #include "dbgcnt.h"
36
37 DEF_VEC_I(int);
38 DEF_VEC_ALLOC_I(int,heap);
39
40
41 /* -------------------------------------------------------------------------
42 Core mark/delete routines
43 ------------------------------------------------------------------------- */
44
45 /* True if we are invoked while the df engine is running; in this case,
46 we don't want to reenter it. */
47 static bool df_in_progress = false;
48
49 /* True if we deleted at least one instruction. */
50 static bool something_changed;
51
52 /* Instructions that have been marked but whose dependencies have not
53 yet been processed. */
54 static VEC(rtx,heap) *worklist;
55
56 /* Bitmap of instructions marked as needed indexed by INSN_UID. */
57 static sbitmap marked;
58
59 /* Bitmap obstacks used for block processing by the fast algorithm. */
60 static bitmap_obstack dce_blocks_bitmap_obstack;
61 static bitmap_obstack dce_tmp_bitmap_obstack;
62
63
64 /* A subroutine of deletable_insn_p; BODY is part of the instruction
65 being tested, either the top-level pattern or an element of a
66 PARALLEL. The instruction is known not to be a bare USE or CLOBBER. */
67
68 static bool
69 deletable_insn_p_1 (rtx body)
70 {
71 switch (GET_CODE (body))
72 {
73 case PREFETCH:
74 case TRAP_IF:
75 /* The UNSPEC case was added here because the ia-64 claims that
76 USEs do not work after reload and generates UNSPECS rather
77 than USEs. Since dce is run after reload we need to avoid
78 deleting these even if they are dead. If it turns out that
79 USEs really do work after reload, the ia-64 should be
80 changed, and the UNSPEC case can be removed. */
81 case UNSPEC:
82 return false;
83
84 default:
85 if (volatile_refs_p (body))
86 return false;
87
88 if (flag_non_call_exceptions && may_trap_p (body))
89 return false;
90
91 return true;
92 }
93 }
94
95
96 /* Return true if INSN is a normal instruction that can be deleted by
97 the DCE pass. */
98
99 static bool
100 deletable_insn_p (rtx insn, bool fast)
101 {
102 rtx body, x;
103 int i;
104
105 if (!NONJUMP_INSN_P (insn))
106 return false;
107
108 body = PATTERN (insn);
109 switch (GET_CODE (body))
110 {
111 case USE:
112 return false;
113
114 case CLOBBER:
115 if (fast)
116 {
117 /* A CLOBBER of a dead pseudo register serves no purpose.
118 That is not necessarily true for hard registers until
119 after reload. */
120 x = XEXP (body, 0);
121 return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
122 }
123 else
124 /* Because of the way that use-def chains are built, it is not
125 possible to tell if the clobber is dead because it can
126 never be the target of a use-def chain. */
127 return false;
128
129 case PARALLEL:
130 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
131 if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
132 return false;
133 return true;
134
135 default:
136 return deletable_insn_p_1 (body);
137 }
138 }
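/* For example, under the tests above a plain SET of a pseudo register
   with no volatile references (and, when -fnon-call-exceptions is in
   effect, no trapping operation) is deletable, whereas a bare USE, a
   PREFETCH, a TRAP_IF, an UNSPEC, or a CLOBBER seen by the UD-chain
   pass is always kept.  */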
139
140
141 /* Return true if INSN has been marked as needed. */
142
143 static inline int
144 marked_insn_p (rtx insn)
145 {
146 if (insn)
147 return TEST_BIT (marked, INSN_UID (insn));
148 else
149 /* Artificial defs are always needed and they do not have an
150 insn. */
151 return true;
152 }
153
154
155 /* If INSN has not yet been marked as needed, mark it now, and add it to
156 the worklist. */
157
158 static void
159 mark_insn (rtx insn, bool fast)
160 {
161 if (!marked_insn_p (insn))
162 {
163 if (!fast)
164 VEC_safe_push (rtx, heap, worklist, insn);
165 SET_BIT (marked, INSN_UID (insn));
166 if (dump_file)
167 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
168 }
169 }
170
171
172 /* A note_stores callback used by mark_nonreg_stores for the fast pass.
173    DATA is the instruction containing DEST.  */
174
175 static void
176 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
177 {
178 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
179 mark_insn ((rtx) data, true);
180 }
181
182
183 /* A note_stores callback used by mark_nonreg_stores for the UD-chain
184    based pass.  DATA is the instruction containing DEST.  */
185
186 static void
187 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
188 {
189 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
190 mark_insn ((rtx) data, false);
191 }
192
193
194 /* Mark INSN if BODY stores to a non-register destination. */
195
196 static void
197 mark_nonreg_stores (rtx body, rtx insn, bool fast)
198 {
199 if (fast)
200 note_stores (body, mark_nonreg_stores_1, insn);
201 else
202 note_stores (body, mark_nonreg_stores_2, insn);
203 }
204
205
206 /* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
207 bad dangling REG_EQUAL notes. */
208
209 static void
210 delete_corresponding_reg_eq_notes (rtx insn)
211 {
212 struct df_ref **def_rec;
213 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
214 {
215 struct df_ref *def = *def_rec;
216 unsigned int regno = DF_REF_REGNO (def);
217 /* This loop is a little tricky. We cannot just go down the
218 chain because it is being modified by the actions in the
219 loop. So we just get the head. We plan to drain the list
220 anyway. */
221 while (DF_REG_EQ_USE_CHAIN (regno))
222 {
223 struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
224 rtx noted_insn = DF_REF_INSN (eq_use);
225 rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
226 if (!note)
227 note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);
228
229 /* This assert is generally triggered when someone deletes a
230 REG_EQUAL or REG_EQUIV note by hacking the list manually
231 rather than calling remove_note. */
232 gcc_assert (note);
233 remove_note (noted_insn, note);
234 }
235 }
236 }
237
238
239 /* Delete every instruction that has not been marked as needed, as well
240    as any no-op moves.  */
241
242 static void
243 delete_unmarked_insns (void)
244 {
245 basic_block bb;
246 rtx insn, next;
247
248 something_changed = false;
249
250 FOR_EACH_BB (bb)
251 FOR_BB_INSNS_SAFE (bb, insn, next)
252 if (INSN_P (insn))
253 {
254 if (noop_move_p (insn))
255 {
256 /* Note that this code does not handle the case where
257    the last insn of a libcall is deleted.  As it turns out,
258    that case is already excluded by noop_move_p.  */
259 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
260 if (note && (XEXP (note, 0) != insn))
261 {
262 rtx new_libcall_insn = next_real_insn (insn);
263 rtx retval_note = find_reg_note (XEXP (note, 0),
264 REG_RETVAL, NULL_RTX);
265 REG_NOTES (new_libcall_insn)
266 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
267 REG_NOTES (new_libcall_insn));
268 XEXP (retval_note, 0) = new_libcall_insn;
269 }
270 }
271 else if (marked_insn_p (insn))
272 continue;
273
274 /* WARNING: this debug counter can itself cause problems if its
275    threshold falls inside a libcall, so that part of the libcall
276    is deleted but not all of it.  */
277 if (!dbg_cnt (dce))
278 continue;
279
280 if (dump_file)
281 fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));
282
283 /* Before we delete the insn, we have to delete
284 REG_EQUAL of the destination regs of the deleted insn
285 to prevent dangling REG_EQUAL. */
286 delete_corresponding_reg_eq_notes (insn);
287
288 delete_insn_and_edges (insn);
289 something_changed = true;
290 }
291 }
292
293
294 /* Mark every insn in the libcall that contains START_INSN, passing
295    DELETE_PARM through to mark_insn.  */
296
297 static void
298 mark_libcall (rtx start_insn, bool delete_parm)
299 {
300 rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
301 int id = INTVAL (XEXP (note, 0));
302 rtx insn;
303
304 mark_insn (start_insn, delete_parm);
305 insn = NEXT_INSN (start_insn);
306
307 /* There are tales, long ago and far away, of the mystical nested
308 libcall. No one alive has actually seen one, but other parts of
309 the compiler support them so we will here. */
310 for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
311 {
312 if (INSN_P (insn))
313 {
314 /* Stay in the loop as long as we are in any libcall. */
315 if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
316 {
317 if (id == INTVAL (XEXP (note, 0)))
318 {
319 mark_insn (insn, delete_parm);
320 if (dump_file)
321 fprintf (dump_file, "matching forward libcall %d[%d]\n",
322 INSN_UID (insn), id);
323 }
324 }
325 else
326 break;
327 }
328 }
329
330 for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
331 {
332 if (INSN_P (insn))
333 {
334 /* Stay in the loop as long as we are in any libcall. */
335 if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
336 {
337 if (id == INTVAL (XEXP (note, 0)))
338 {
339 mark_insn (insn, delete_parm);
340 if (dump_file)
341 fprintf (dump_file, "matching backward libcall %d[%d]\n",
342 INSN_UID (insn), id);
343 }
344 }
345 else
346 break;
347 }
348 }
349 }
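/* For illustration, a libcall region is a maximal run of consecutive
   insns whose REG_LIBCALL_ID notes carry the same id, e.g.

       insn 10   REG_LIBCALL_ID 3
       insn 11   REG_LIBCALL_ID 3   <- START_INSN may be any of these
       insn 12   REG_LIBCALL_ID 3

   (the uids and the id above are made up).  mark_libcall scans forward
   and backward from START_INSN, marking every insn whose note matches
   the id and stopping as soon as it reaches a real insn that carries no
   REG_LIBCALL_ID note at all.  */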
350
351
352 /* Go through the instructions and mark those whose necessity is not
353 dependent on inter-instruction information. Make sure all other
354 instructions are not marked. */
355
356 static void
357 prescan_insns_for_dce (bool fast)
358 {
359 basic_block bb;
360 rtx insn;
361
362 if (dump_file)
363 fprintf (dump_file, "Finding needed instructions:\n");
364
365 FOR_EACH_BB (bb)
366 FOR_BB_INSNS (bb, insn)
367 if (INSN_P (insn))
368 {
369 rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
370 if (note)
371 mark_libcall (insn, fast);
372 else if (deletable_insn_p (insn, fast))
373 mark_nonreg_stores (PATTERN (insn), insn, fast);
374 else
375 mark_insn (insn, fast);
376 }
377
378 if (dump_file)
379 fprintf (dump_file, "Finished finding needed instructions:\n");
380 }
381
382
383 /* UD-chain based DCE routines. */
384
385 /* Mark instructions that define artificially-used registers, such as
386 the frame pointer and the stack pointer. */
387
388 static void
389 mark_artificial_uses (void)
390 {
391 basic_block bb;
392 struct df_link *defs;
393 struct df_ref **use_rec;
394
395 FOR_ALL_BB (bb)
396 {
397 for (use_rec = df_get_artificial_uses (bb->index);
398 *use_rec; use_rec++)
399 for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
400 mark_insn (DF_REF_INSN (defs->ref), false);
401 }
402 }
403
404
405 /* Mark every instruction that defines a register value that INSN uses. */
406
407 static void
408 mark_reg_dependencies (rtx insn)
409 {
410 struct df_link *defs;
411 struct df_ref **use_rec;
412
413 /* If this is part of a libcall, mark the entire libcall. */
414 if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
415 mark_libcall (insn, false);
416
417 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
418 {
419 struct df_ref *use = *use_rec;
420 if (dump_file)
421 {
422 fprintf (dump_file, "Processing use of ");
423 print_simple_rtl (dump_file, DF_REF_REG (use));
424 fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
425 }
426 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
427 mark_insn (DF_REF_INSN (defs->ref), false);
428 }
429 }
430
431
432 /* Initialize global variables for a new DCE pass. */
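/* Both DCE flavors come through here: the UD-chain pass adds the
   DF_UD_CHAIN problem and relies on df_analyze, while the bitmap
   obstacks below are used, and therefore initialized, only by the
   fast pass.  */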
433
434 static void
435 init_dce (bool fast)
436 {
437 if (!df_in_progress)
438 {
439 if (!fast)
440 df_chain_add_problem (DF_UD_CHAIN);
441 df_analyze ();
442 }
443
444 if (dump_file)
445 df_dump (dump_file);
446
447 if (fast)
448 {
449 bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
450 bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
451 }
452
453 marked = sbitmap_alloc (get_max_uid () + 1);
454 sbitmap_zero (marked);
455 }
456
457
458 /* Free the data allocated by init_dce. */
459
460 static void
461 fini_dce (bool fast)
462 {
463 sbitmap_free (marked);
464
465 if (fast)
466 {
467 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
468 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
469 }
470 }
471
472
473 /* UD-chain based DCE. */
474
475 static unsigned int
476 rest_of_handle_ud_dce (void)
477 {
478 rtx insn;
479
480 init_dce (false);
481
482 prescan_insns_for_dce (false);
483 mark_artificial_uses ();
484 while (VEC_length (rtx, worklist) > 0)
485 {
486 insn = VEC_pop (rtx, worklist);
487 mark_reg_dependencies (insn);
488 }
489
490 /* Before any insns are deleted, we must remove the chains since
491 they are not bidirectional. */
492 df_remove_problem (df_chain);
493 delete_unmarked_insns ();
494
495 fini_dce (false);
496 return 0;
497 }
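/* Roughly, the marking scheme above works as follows: prescan seeds the
   worklist with insns that are needed for reasons local to the insn
   itself (not deletable, part of a libcall, or storing to a non-register
   destination); mark_artificial_uses adds the insns that define
   artificially-used registers; the worklist loop then closes the set
   over the use-def chains, so every insn that directly or indirectly
   feeds a needed insn ends up marked.  Everything left unmarked is
   deleted.  */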
498
499
500 static bool
501 gate_ud_dce (void)
502 {
503 return optimize > 1 && flag_dce;
504 }
505
506 struct tree_opt_pass pass_ud_rtl_dce =
507 {
508 "dce", /* name */
509 gate_ud_dce, /* gate */
510 rest_of_handle_ud_dce, /* execute */
511 NULL, /* sub */
512 NULL, /* next */
513 0, /* static_pass_number */
514 TV_DCE, /* tv_id */
515 0, /* properties_required */
516 0, /* properties_provided */
517 0, /* properties_destroyed */
518 0, /* todo_flags_start */
519 TODO_dump_func |
520 TODO_df_finish | TODO_verify_rtl_sharing |
521 TODO_ggc_collect, /* todo_flags_finish */
522 'w' /* letter */
523 };
524
525
526 /* -------------------------------------------------------------------------
527 Fast DCE functions
528 ------------------------------------------------------------------------- */
529
530 /* Process basic block BB. Return true if the live_in set has changed. */
531
532 static bool
533 dce_process_block (basic_block bb, bool redo_out)
534 {
535 bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
536 bitmap au;
537 rtx insn;
538 bool block_changed;
539 struct df_ref **def_rec, **use_rec;
540 unsigned int bb_index = bb->index;
541
542 if (redo_out)
543 {
544 /* We need to redo the live_out set of this block if one of the
545    successors of this block has had a change in its live_in
546    set.  */
547 edge e;
548 edge_iterator ei;
549 df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
550 bitmap_clear (DF_LR_OUT (bb));
551 FOR_EACH_EDGE (e, ei, bb->succs)
552 (*con_fun_n) (e);
553 }
554
555 if (dump_file)
556 {
557 fprintf (dump_file, "processing block %d live out = ", bb->index);
558 df_print_regset (dump_file, DF_LR_OUT (bb));
559 }
560
561 bitmap_copy (local_live, DF_LR_OUT (bb));
562
563 /* Process the artificial defs and uses at the bottom of the block. */
564 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
565 {
566 struct df_ref *def = *def_rec;
567 if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
568 && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
569 bitmap_clear_bit (local_live, DF_REF_REGNO (def));
570 }
571
572 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
573 {
574 struct df_ref *use = *use_rec;
575 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
576 bitmap_set_bit (local_live, DF_REF_REGNO (use));
577 }
578
579 /* These regs are considered always live, so if they end up dying
580    because of some def, we need to bring them back again.
581    Calling df_simulate_fixup_sets has the disadvantage of calling
582    bb_has_eh_pred once per insn, so we cache the information here.  */
583 if (bb_has_eh_pred (bb))
584 au = df->eh_block_artificial_uses;
585 else
586 au = df->regular_block_artificial_uses;
587
588 FOR_BB_INSNS_REVERSE (bb, insn)
589 if (INSN_P (insn))
590 {
591 /* If this is a recursive call, the libcall will have already
592 been marked. */
593 if (!marked_insn_p (insn))
594 {
595 bool needed = false;
596
597 /* The insn is needed if a register it sets is live or artificially used. */
598 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
599 if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
600 || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
601 {
602 needed = true;
603 break;
604 }
605
606 if (needed)
607 {
608 rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
609
610 /* If we need to mark an insn in the middle of a
611    libcall, we have to back up and mark the entire
612    libcall.  Given that libcalls are rare, rescanning
613    the block is a reasonable alternative to trying to
614    figure out how far to back up.  */
615 if (note)
616 {
617 if (dump_file)
618 fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
619 mark_libcall (insn, true);
620 BITMAP_FREE (local_live);
621 return dce_process_block (bb, false);
622 }
623 else
624 mark_insn (insn, true);
625 }
626 }
627
628 /* Whether or not the instruction is needed, we remove every
629    regno it defines from the live set.  */
630 df_simulate_defs (insn, local_live);
631
632 /* On the other hand, we do not allow the dead uses to set
633 anything in local_live. */
634 if (marked_insn_p (insn))
635 df_simulate_uses (insn, local_live);
636 }
637
638 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
639 {
640 struct df_ref *def = *def_rec;
641 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
642 && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
643 bitmap_clear_bit (local_live, DF_REF_REGNO (def));
644 }
645 #ifdef EH_USES
646 /* Process the uses that are live into an exception handler. */
647 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
648 {
649 /* Add use to set of uses in this BB. */
650 struct df_ref *use = *use_rec;
651 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
652 bitmap_set_bit (local_live, DF_REF_REGNO (use));
653 }
654 #endif
655
656 block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
657 if (block_changed)
658 bitmap_copy (DF_LR_IN (bb), local_live);
659
660 BITMAP_FREE (local_live);
661 return block_changed;
662 }
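/* Roughly, the backward scan above updates the local live set for each
   insn as

       live = (live - defs (insn)) U (marked_insn_p (insn) ? uses (insn) : {})

   and marks an insn as needed as soon as one of the registers it defines
   is found live (or in the artificial-use set AU).  The return value
   tells fast_dce whether the recomputed live-in set differs from
   DF_LR_IN (bb), i.e. whether predecessors may have to be revisited.  */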
663
664
665 /* Perform fast DCE once initialization is done. */
666
667 static void
668 fast_dce (void)
669 {
670 int *postorder = df_get_postorder (DF_BACKWARD);
671 int n_blocks = df_get_n_blocks (DF_BACKWARD);
672 /* The set of blocks that have been seen on this iteration. */
673 bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
674 /* The set of blocks that need to have the out vectors reset because
675 the in of one of their successors has changed. */
676 bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
677 bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
678 bool global_changed = true;
679 int i, loop_count = 0;
680
681 prescan_insns_for_dce (true);
682
683 for (i = 0; i < n_blocks; i++)
684 bitmap_set_bit (all_blocks, postorder[i]);
685
686 while (global_changed)
687 {
688 global_changed = false;
689 for (i = 0; i < n_blocks; i++)
690 {
691 int index = postorder[i];
692 basic_block bb = BASIC_BLOCK (index);
693 bool local_changed;
694
695 if (index < NUM_FIXED_BLOCKS)
696 {
697 bitmap_set_bit (processed, index);
698 continue;
699 }
700
701 local_changed
702 = dce_process_block (bb, bitmap_bit_p (redo_out, index));
703 bitmap_set_bit (processed, index);
704
705 if (local_changed)
706 {
707 edge e;
708 edge_iterator ei;
709 FOR_EACH_EDGE (e, ei, bb->preds)
710 if (bitmap_bit_p (processed, e->src->index))
711 /* Be tricky about when we need to iterate the
712    analysis.  We only have to redo the analysis if the
713    bitmaps change at the top of a block that is the
714    entry to a loop.  */
715 global_changed = true;
716 else
717 bitmap_set_bit (redo_out, e->src->index);
718 }
719 }
720
721 if (global_changed)
722 {
723 /* Turn off the DF_LR_RUN_DCE flag to prevent recursive calls to
724    dce.  */
725 int old_flag = df_clear_flags (DF_LR_RUN_DCE);
726
727 /* So something was deleted that requires a redo. Do it on
728 the cheap. */
729 delete_unmarked_insns ();
730 sbitmap_zero (marked);
731 bitmap_clear (processed);
732 bitmap_clear (redo_out);
733
734 /* We do not need to rescan any instructions. We only need
735 to redo the dataflow equations for the blocks that had a
736 change at the top of the block. Then we need to redo the
737 iteration. */
738 df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
739
740 if (old_flag & DF_LR_RUN_DCE)
741 df_set_flags (DF_LR_RUN_DCE);
742 prescan_insns_for_dce (true);
743 }
744 loop_count++;
745 }
746
747 delete_unmarked_insns ();
748
749 BITMAP_FREE (processed);
750 BITMAP_FREE (redo_out);
751 BITMAP_FREE (all_blocks);
752 }
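/* Summary of the iteration scheme: blocks are visited in the postorder
   of the backward problem; when a block's live-in set changes after one
   of its already-processed predecessors was visited, the pass deletes
   what is already known to be dead, re-runs the LR dataflow equations
   over all blocks, rescans for locally needed insns, and iterates until
   nothing changes.  */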
753
754
755 /* Fast DCE. */
756
757 static unsigned int
758 rest_of_handle_fast_dce (void)
759 {
760 init_dce (true);
761 fast_dce ();
762 fini_dce (true);
763 return 0;
764 }
765
766
767 /* This is an internal entry point used by the df live register
768    problem to run fast dce as a side effect of creating the live
769    information.  The problem stack is organized so that the lr
770    problem is run, then this pass is run (updating the live info
771    and the df scanning info), and then the remaining problems run.
772
773    This can also be called from elsewhere, but it will not update
774    the bit vectors for any problems other than LR.  */
775
776 void
777 run_fast_df_dce (void)
778 {
779 if (flag_dce)
780 {
781 /* If dce is able to delete something, the deletion has to happen
782    immediately.  Otherwise there will be problems handling the
783    REG_EQUAL notes.  */
784 enum df_changeable_flags old_flags
785 = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
786
787 df_in_progress = true;
788 rest_of_handle_fast_dce ();
789 df_in_progress = false;
790
791 df_set_flags (old_flags);
792 }
793 }
794
795
796 static bool
797 gate_fast_dce (void)
798 {
799 return optimize > 0 && flag_dce;
800 }
801
802
803 /* Run a fast DCE pass and return true if any instructions were deleted. */
804
805 bool
806 run_fast_dce (void)
807 {
808 return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);
809 }
810
811
812 struct tree_opt_pass pass_fast_rtl_dce =
813 {
814 "dce", /* name */
815 gate_fast_dce, /* gate */
816 rest_of_handle_fast_dce, /* execute */
817 NULL, /* sub */
818 NULL, /* next */
819 0, /* static_pass_number */
820 TV_DCE, /* tv_id */
821 0, /* properties_required */
822 0, /* properties_provided */
823 0, /* properties_destroyed */
824 0, /* todo_flags_start */
825 TODO_dump_func |
826 TODO_df_finish | TODO_verify_rtl_sharing |
827 TODO_ggc_collect, /* todo_flags_finish */
828 'w' /* letter */
829 };