gfortran.h (gfc_option_t): Remove warn_aliasing,
[gcc.git] / gcc / dce.c
1 /* RTL dead code elimination.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hashtab.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "except.h"
31 #include "dominance.h"
32 #include "cfg.h"
33 #include "cfgrtl.h"
34 #include "cfgbuild.h"
35 #include "cfgcleanup.h"
36 #include "predict.h"
37 #include "basic-block.h"
38 #include "df.h"
39 #include "cselib.h"
40 #include "dce.h"
41 #include "valtrack.h"
42 #include "tree-pass.h"
43 #include "dbgcnt.h"
44 #include "tm_p.h"
45 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
46
47
/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  Set around the call in run_fast_df_dce.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  Only the
   UD-chain based (slow) DCE may do so; see init_dce.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  Used only by the UD-chain based DCE.  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* Forward declaration; defined below, but used by deletable_insn_p and
   mark_insn.  */
static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);
71
72 /* A subroutine for which BODY is part of the instruction being tested;
73 either the top-level pattern, or an element of a PARALLEL. The
74 instruction is known not to be a bare USE or CLOBBER. */
75
76 static bool
77 deletable_insn_p_1 (rtx body)
78 {
79 switch (GET_CODE (body))
80 {
81 case PREFETCH:
82 case TRAP_IF:
83 /* The UNSPEC case was added here because the ia-64 claims that
84 USEs do not work after reload and generates UNSPECS rather
85 than USEs. Since dce is run after reload we need to avoid
86 deleting these even if they are dead. If it turns out that
87 USEs really do work after reload, the ia-64 should be
88 changed, and the UNSPEC case can be removed. */
89 case UNSPEC:
90 return false;
91
92 default:
93 return !volatile_refs_p (body);
94 }
95 }
96
97
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST is true for the fast DCE variant that cannot use
   ud chains; ARG_STORES, if non-NULL, collects UIDs of stack stores
   that feed arguments of a deletable const/pure call.  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    /* Such a call is only deletable if all the stack stores of its
       arguments can be found (and later deleted along with it).  */
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
				 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
	&& global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
	     && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      /* Every element of the PARALLEL must itself be deletable.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
174
175
176 /* Return true if INSN has been marked as needed. */
177
178 static inline int
179 marked_insn_p (rtx_insn *insn)
180 {
181 /* Artificial defs are always needed and they do not have an insn.
182 We should never see them here. */
183 gcc_assert (insn);
184 return bitmap_bit_p (marked, INSN_UID (insn));
185 }

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  FAST is true for the fast DCE algorithm, which does
   not use the worklist.  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
	worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
	fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
      /* A const/pure call that becomes needed must also have its
	 argument stack stores marked; find_call_stack_args with
	 DO_MARK true performs that marking.  */
      if (CALL_P (insn)
	  && !df_in_progress
	  && !SIBLING_CALL_P (insn)
	  && (RTL_CONST_OR_PURE_CALL_P (insn)
	      && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
	find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}
209
210
211 /* A note_stores callback used by mark_nonreg_stores. DATA is the
212 instruction containing DEST. */
213
214 static void
215 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
216 {
217 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
218 mark_insn ((rtx_insn *) data, true);
219 }
220
221
222 /* A note_stores callback used by mark_nonreg_stores. DATA is the
223 instruction containing DEST. */
224
225 static void
226 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
227 {
228 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
229 mark_insn ((rtx_insn *) data, false);
230 }
231
232
233 /* Mark INSN if BODY stores to a non-register destination. */
234
235 static void
236 mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
237 {
238 if (fast)
239 note_stores (body, mark_nonreg_stores_1, insn);
240 else
241 note_stores (body, mark_nonreg_stores_2, insn);
242 }
243
244
245 /* Return true if store to MEM, starting OFF bytes from stack pointer,
246 is a call argument store, and clear corresponding bits from SP_BYTES
247 bitmap if it is. */
248
249 static bool
250 check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
251 HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
252 {
253 HOST_WIDE_INT byte;
254 for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
255 {
256 if (byte < min_sp_off
257 || byte >= max_sp_off
258 || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
259 return false;
260 }
261 return true;
262 }
263
264
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.

   FAST selects whether ud chains may be consulted to resolve
   addresses that are not literally the stack pointer.  ARG_STORES,
   required when DO_MARK is false, receives the UIDs of the located
   argument stores.  */

static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  Argument MEMs appear as USEs in
     CALL_INSN_FUNCTION_USAGE.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, size;
	if (!MEM_SIZE_KNOWN_P (mem))
	  return false;
	size = MEM_SIZE (mem);
	addr = XEXP (mem, 0);
	/* Strip a (plus sp const_int) address down to its base.  */
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref use;
		struct df_link *defs;
		rtx set;

		FOR_EACH_INSN_USE (use, call_insn)
		  if (rtx_equal_p (addr, DF_REF_REG (use)))
		    break;

		if (use == NULL)
		  return false;

		/* Skip artificial defs; we need a real defining insn.  */
		for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		/* Only accept a definition of the form sp + const.  */
		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    /* The first loop already validated these chains, so the
	       lookups below cannot fail here.  */
	    df_ref use;
	    struct df_link *defs;
	    rtx set;

	    FOR_EACH_INSN_USE (use, call_insn)
	      if (rtx_equal_p (addr, DF_REF_REG (use)))
		break;

	    for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	for (byte = off; byte < off + MEM_SIZE (mem); byte++)
	  {
	    /* Overlapping argument areas would be a consistency bug.  */
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      /* Never walk out of the call's own basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  /* As above: resolve a register base to sp + const via ud
	     chains if we have them.  */
	  if (!fast)
	    {
	      df_ref use;
	      struct df_link *defs;
	      rtx set;

	      FOR_EACH_INSN_USE (use, insn)
		if (rtx_equal_p (addr, DF_REF_REG (use)))
		  break;

	      if (use == NULL)
		break;

	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
	  || !check_argument_store (mem, off, min_sp_off,
				    max_sp_off, sp_bytes))
	break;

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* All argument bytes accounted for; the call is deletable.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
505
506
/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  Called before deleting INSN to avoid dangling notes.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}
518
/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  This must run before the unmarked insns
   are deleted, while their ud chains are still valid.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
	{
	  df_ref use;

	  FOR_EACH_INSN_USE (use, insn)
	    {
	      struct df_link *defs;
	      /* Look for a reaching def that is about to be deleted.  */
	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		{
		  rtx_insn *ref_insn;
		  if (DF_REF_IS_ARTIFICIAL (defs->ref))
		    continue;
		  ref_insn = DF_REF_INSN (defs->ref);
		  if (!marked_insn_p (ref_insn))
		    break;
		}
	      /* DEFS is non-NULL iff the inner loop found an unmarked
		 (dead) def for this use.  */
	      if (!defs)
		continue;
	      /* ??? FIXME could we propagate the values assigned to
		 each of the DEFs?  */
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      df_insn_rescan_debug_internal (insn);
	      /* The location is reset; no need to look at further uses.  */
	      break;
	    }
	}
}
556
/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
	{
	  /* Always delete no-op moves.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     these insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We remember this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
620
621
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  FAST is passed through to the helpers
   to select the fast or ud-chain algorithm.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  /* Argument-store tracking is only meaningful when calls are
     deletable (not during recursive dce) and args go on the stack.  */
  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (NONDEBUG_INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (PATTERN (insn), insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
665
666
/* UD-based DSE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  Such defs must always be
   kept even though no explicit insn uses them.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (!DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
}
685
686
/* Mark every instruction that defines a register value that INSN uses.
   Debug insns are skipped so they never keep other insns alive.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "Processing use of ");
	  print_simple_rtl (dump_file, DF_REF_REG (use));
	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
	}
      /* Walk the ud chain; artificial defs have no insn to mark.  */
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (! DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
711
712
/* Initialize global variables for a new DCE pass.  FAST is true for the
   fast algorithm (no ud chains, no CFG alteration).  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      /* The ud-chain based algorithm needs fully built chains.  */
      if (!fast)
	{
	  df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
	  df_chain_add_problem (DF_UD_CHAIN);
	}
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}
743
744
745 /* Free the data allocated by init_dce. */
746
747 static void
748 fini_dce (bool fast)
749 {
750 sbitmap_free (marked);
751
752 if (fast)
753 {
754 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
755 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
756 }
757 }
758
759
/* UD-chain based DCE.  Returns 0 (no additional TODO flags).  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx_insn *insn;

  init_dce (false);

  /* Seed the worklist with inherently needed insns, then propagate
     neededness backwards through the ud chains.  */
  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}
789
790
namespace {

/* Pass descriptor for the ud-chain based DCE pass.  */
const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only at -O2 and above, when DCE is enabled and the debug
     counter allows it.  */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace

/* Factory for the ud-chain based DCE pass.  */
rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}
833
834
/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
			struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
	 the succs of this block has had a change in its live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of double-word pseudos both of whose
	   words are dead, so they can later be substituted.  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use)))
		  == 2 * UNITS_PER_WORD)
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool any_changed;

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	any_changed = df_word_lr_simulate_defs (insn, local_live);
	if (any_changed)
	  mark_insn (insn, true);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_word_lr_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  {
	    df_ref def;

	    FOR_EACH_INSN_DEF (def, insn)
	      dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				      marked_insn_p (insn)
				      && !control_flow_insn_p (insn)
				      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				      : DEBUG_TEMP_BEFORE_WITH_VALUE);
	  }

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_word_regset (dump_file, local_live);
	  }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}
935
936
/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
		   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
	 the succs of this block has had a change in its live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of regs that are dead here, for later
	   substitution with debug temps.  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
	      && !bitmap_bit_p (au, DF_REF_REGNO (use)))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool needed = marked_insn_p (insn);

	/* The insn is needed if there is someone who uses the output.  */
	if (!needed)
	  FOR_EACH_INSN_DEF (def, insn)
	    if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
		|| bitmap_bit_p (au, DF_REF_REGNO (def)))
	      {
		needed = true;
		mark_insn (insn, true);
		break;
	      }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (needed)
	  df_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  FOR_EACH_INSN_DEF (def, insn)
	    dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				    needed && !control_flow_insn_p (insn)
				    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
1032
1033
/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  Iterates the backward dataflow until the per-block live-in
   sets stabilize, deleting unmarked insns between iterations.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
	  bool local_changed;

	  /* Entry/exit blocks carry no insns to process.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					&global_debug);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au,
				   &global_debug);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have to redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  bitmap_clear (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
1148
1149
/* Fast register level DCE.  Returns 0 (no additional TODO flags).  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}
1160
1161
/* Fast word level DCE.  External entry point; safe to call from other
   passes.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  /* Deletions must be reflected in the df info immediately, so
     temporarily disable deferred/suppressed rescanning.  */
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}
1181
1182
/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
	 immediately.  Otherwise there will be problems handling the
	 eq_notes.  */
      int old_flags =
	df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      /* Flag that we are inside df so DCE does not try to reenter it
	 (see deletable_insn_p and init_dce).  */
      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}
1210
1211
1212 /* Run a fast DCE pass. */
1213
1214 void
1215 run_fast_dce (void)
1216 {
1217 if (flag_dce)
1218 rest_of_handle_fast_dce ();
1219 }
1220
1221
namespace {

/* Pass descriptor for the fast RTL DCE pass.  */
const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* Run at any optimization level above -O0, when DCE is enabled and
     the debug counter allows it.  */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace

/* Factory for the fast RTL DCE pass.  */
rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}