Update copyright years.
[gcc.git] / gcc / dce.c
1 /* RTL dead code elimination.
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "predict.h"
27 #include "df.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
31 #include "cfgrtl.h"
32 #include "cfgbuild.h"
33 #include "cfgcleanup.h"
34 #include "dce.h"
35 #include "valtrack.h"
36 #include "tree-pass.h"
37 #include "dbgcnt.h"
38
39
/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass (only the
   UD-chain based pass may; the fast pass must not).  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed (used only by the UD-chain based algorithm).  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* Forward declaration; defined below after its helpers.  */
static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);
63
64 /* A subroutine for which BODY is part of the instruction being tested;
65 either the top-level pattern, or an element of a PARALLEL. The
66 instruction is known not to be a bare USE or CLOBBER. */
67
68 static bool
69 deletable_insn_p_1 (rtx body)
70 {
71 switch (GET_CODE (body))
72 {
73 case PREFETCH:
74 case TRAP_IF:
75 /* The UNSPEC case was added here because the ia-64 claims that
76 USEs do not work after reload and generates UNSPECS rather
77 than USEs. Since dce is run after reload we need to avoid
78 deleting these even if they are dead. If it turns out that
79 USEs really do work after reload, the ia-64 should be
80 changed, and the UNSPEC case can be removed. */
81 case UNSPEC:
82 return false;
83
84 default:
85 return !volatile_refs_p (body);
86 }
87 }
88
89
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST is true for the fast (LR based) algorithm.
   ARG_STORES, when non-NULL, collects UIDs of insns that store call
   arguments (see find_call_stack_args).  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
	 infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    /* A const/pure call is only deletable if all the stores of its
       stack arguments can be found (and deleted with it).  */
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
				 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
	&& global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
	     && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      /* Deletable only if every element of the PARALLEL is.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
166
167
/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx_insn *insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return bitmap_bit_p (marked, INSN_UID (insn));
}
178
179
180 /* If INSN has not yet been marked as needed, mark it now, and add it to
181 the worklist. */
182
183 static void
184 mark_insn (rtx_insn *insn, bool fast)
185 {
186 if (!marked_insn_p (insn))
187 {
188 if (!fast)
189 worklist.safe_push (insn);
190 bitmap_set_bit (marked, INSN_UID (insn));
191 if (dump_file)
192 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
193 if (CALL_P (insn)
194 && !df_in_progress
195 && !SIBLING_CALL_P (insn)
196 && (RTL_CONST_OR_PURE_CALL_P (insn)
197 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
198 find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
199 }
200 }
201
202
203 /* A note_stores callback used by mark_nonreg_stores. DATA is the
204 instruction containing DEST. */
205
206 static void
207 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
208 {
209 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
210 mark_insn ((rtx_insn *) data, true);
211 }
212
213
214 /* A note_stores callback used by mark_nonreg_stores. DATA is the
215 instruction containing DEST. */
216
217 static void
218 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
219 {
220 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
221 mark_insn ((rtx_insn *) data, false);
222 }
223
224
225 /* Mark INSN if BODY stores to a non-register destination. */
226
227 static void
228 mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
229 {
230 if (fast)
231 note_stores (body, mark_nonreg_stores_1, insn);
232 else
233 note_stores (body, mark_nonreg_stores_2, insn);
234 }
235
236
237 /* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer,
238 is a call argument store, and clear corresponding bits from SP_BYTES
239 bitmap if it is. */
240
241 static bool
242 check_argument_store (HOST_WIDE_INT size, HOST_WIDE_INT off,
243 HOST_WIDE_INT min_sp_off, HOST_WIDE_INT max_sp_off,
244 bitmap sp_bytes)
245 {
246 HOST_WIDE_INT byte;
247 for (byte = off; byte < off + size; byte++)
248 {
249 if (byte < min_sp_off
250 || byte >= max_sp_off
251 || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
252 return false;
253 }
254 return true;
255 }
256
257
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.

   FAST selects the algorithm variant: when false, UD chains are
   available and are used to trace addresses of the form sp + offset
   through a single register copy.  ARG_STORES, when non-NULL
   (DO_MARK false case), receives the UIDs of the discovered argument
   stores.  */

static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  Each argument MEM is listed as a USE in
     CALL_INSN_FUNCTION_USAGE.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, size;
	/* Give up if the size of any argument slot is not a known
	   constant.  */
	if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size))
	  return false;
	addr = XEXP (mem, 0);
	/* Peel off a constant displacement: (plus (reg) (const_int)).  */
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref use;
		struct df_link *defs;
		rtx set;

		FOR_EACH_INSN_USE (use, call_insn)
		  if (rtx_equal_p (addr, DF_REF_REG (use)))
		    break;

		if (use == NULL)
		  return false;

		/* Find the (unique, thanks to single_set below) real
		   def that reaches this use.  */
		for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		/* Only sp + constant is traceable.  */
		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + size);
      }

  /* No stack arguments at all: trivially safe.  */
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte, size;
	/* Checked in the previous iteration.  */
	size = MEM_SIZE (mem).to_constant ();
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    /* The first loop has already validated this chain walk;
	       repeat it without the error checks to recover OFF.  */
	    df_ref use;
	    struct df_link *defs;
	    rtx set;

	    FOR_EACH_INSN_USE (use, call_insn)
	      if (rtx_equal_p (addr, DF_REF_REG (use)))
		break;

	    for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	for (byte = off; byte < off + size; byte++)
	  {
	    /* Argument slots must not overlap.  */
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      /* Never walk past the head of the call's basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  /* As in the first loop: trace ADDR back to sp + constant
	     via UD chains when they are available.  */
	  if (!fast)
	    {
	      df_ref use;
	      struct df_link *defs;
	      rtx set;

	      FOR_EACH_INSN_USE (use, insn)
		if (rtx_equal_p (addr, DF_REF_REG (use)))
		  break;

	      if (use == NULL)
		break;

	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      HOST_WIDE_INT size;
      if (!MEM_SIZE_KNOWN_P (mem)
	  || !MEM_SIZE (mem).is_constant (&size)
	  || !check_argument_store (size, off, min_sp_off,
				    max_sp_off, sp_bytes))
	break;

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      /* Record or mark the argument store, depending on the phase.  */
      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* All argument bytes accounted for: success.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
501
502
/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  Called before deleting INSN so that notes elsewhere do not
   end up referring to a deleted definition.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}
514
/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns (i.e. insns about to be deleted), so that
   no debug insn is left pointing at a vanished definition.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
	{
	  df_ref use;

	  FOR_EACH_INSN_USE (use, insn)
	    {
	      struct df_link *defs;
	      /* Look for a reaching def whose insn is NOT marked;
		 such a def will disappear.  */
	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		{
		  rtx_insn *ref_insn;
		  if (DF_REF_IS_ARTIFICIAL (defs->ref))
		    continue;
		  ref_insn = DF_REF_INSN (defs->ref);
		  if (!marked_insn_p (ref_insn))
		    break;
		}
	      if (!defs)
		continue;
	      /* ??? FIXME could we propagate the values assigned to
		 each of the DEFs?  */
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      df_insn_rescan_debug_internal (insn);
	      /* One reset suffices for the whole debug insn.  */
	      break;
	    }
	}
}
552
/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
	{
	  /* Always delete no-op moves.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     these insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  /* Separately shrink-wrapped restores must stay, even if the
	     restored register appears dead here.  */
	  if (crtl->shrink_wrapped_separate
	      && find_reg_note (insn, REG_CFA_RESTORE, NULL))
	    {
	      if (dump_file)
		fprintf (dump_file, "DCE: NOT deleting insn %d, it's a "
			 "callee-save restore\n", INSN_UID (insn));
	      continue;
	    }

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We remember this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
625
626
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  FAST selects the algorithm variant
   passed down to deletable_insn_p / mark_insn.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  /* Argument stores are only tracked when calls may be deleted, which
     requires not being inside the df engine and accumulating outgoing
     args.  */
  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (NONDEBUG_INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (PATTERN (insn), insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
670
671
/* UD-based DCE routines.  */
673
674 /* Mark instructions that define artificially-used registers, such as
675 the frame pointer and the stack pointer. */
676
677 static void
678 mark_artificial_uses (void)
679 {
680 basic_block bb;
681 struct df_link *defs;
682 df_ref use;
683
684 FOR_ALL_BB_FN (bb, cfun)
685 FOR_EACH_ARTIFICIAL_USE (use, bb->index)
686 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
687 if (!DF_REF_IS_ARTIFICIAL (defs->ref))
688 mark_insn (DF_REF_INSN (defs->ref), false);
689 }
690
691
692 /* Mark every instruction that defines a register value that INSN uses. */
693
694 static void
695 mark_reg_dependencies (rtx_insn *insn)
696 {
697 struct df_link *defs;
698 df_ref use;
699
700 if (DEBUG_INSN_P (insn))
701 return;
702
703 FOR_EACH_INSN_USE (use, insn)
704 {
705 if (dump_file)
706 {
707 fprintf (dump_file, "Processing use of ");
708 print_simple_rtl (dump_file, DF_REF_REG (use));
709 fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
710 }
711 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
712 if (! DF_REF_IS_ARTIFICIAL (defs->ref))
713 mark_insn (DF_REF_INSN (defs->ref), false);
714 }
715 }
716
717
718 /* Initialize global variables for a new DCE pass. */
719
720 static void
721 init_dce (bool fast)
722 {
723 if (!df_in_progress)
724 {
725 if (!fast)
726 {
727 df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
728 df_chain_add_problem (DF_UD_CHAIN);
729 }
730 df_analyze ();
731 }
732
733 if (dump_file)
734 df_dump (dump_file);
735
736 if (fast)
737 {
738 bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
739 bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
740 can_alter_cfg = false;
741 }
742 else
743 can_alter_cfg = true;
744
745 marked = sbitmap_alloc (get_max_uid () + 1);
746 bitmap_clear (marked);
747 }
748
749
750 /* Free the data allocated by init_dce. */
751
752 static void
753 fini_dce (bool fast)
754 {
755 sbitmap_free (marked);
756
757 if (fast)
758 {
759 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
760 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
761 }
762 }
763
764
765 /* UD-chain based DCE. */
766
767 static unsigned int
768 rest_of_handle_ud_dce (void)
769 {
770 rtx_insn *insn;
771
772 init_dce (false);
773
774 prescan_insns_for_dce (false);
775 mark_artificial_uses ();
776 while (worklist.length () > 0)
777 {
778 insn = worklist.pop ();
779 mark_reg_dependencies (insn);
780 }
781 worklist.release ();
782
783 if (MAY_HAVE_DEBUG_BIND_INSNS)
784 reset_unmarked_insns_debug_uses ();
785
786 /* Before any insns are deleted, we must remove the chains since
787 they are not bidirectional. */
788 df_remove_problem (df_chain);
789 delete_unmarked_insns ();
790
791 fini_dce (false);
792 return 0;
793 }
794
795
namespace {

/* Pass registration data for the UD-chain based RTL DCE pass.  */
const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only at -O2 and above, when -fdce is enabled, subject to the
     dce_ud debug counter.  */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace
832
/* Factory for the UD-chain based RTL DCE pass.  */
rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}
838
839
840 /* -------------------------------------------------------------------------
841 Fast DCE functions
842 ------------------------------------------------------------------------- */
843
/* Process basic block BB using word-level liveness.  Return true if
   the live_in set has changed.  REDO_OUT is true if the info at the
   bottom of the block needs to be recalculated before starting.
   Track global substitution of uses of dead pseudos in debug insns
   using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
			struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of double-word pseudos both of whose
	   words are dead, so a debug temp can be substituted later.  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use)))
		  == 2 * UNITS_PER_WORD)
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool any_changed;

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	any_changed = df_word_lr_simulate_defs (insn, local_live);
	if (any_changed)
	  mark_insn (insn, true);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_word_lr_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  {
	    df_ref def;

	    FOR_EACH_INSN_DEF (def, insn)
	      dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				      marked_insn_p (insn)
				      && !control_flow_insn_p (insn)
				      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				      : DEBUG_TEMP_BEFORE_WITH_VALUE);
	  }

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_word_regset (dump_file, local_live);
	  }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}
940
941
/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
		   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of registers that are dead here (not live
	   locally nor artificially used).  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
	      && !bitmap_bit_p (au, DF_REF_REGNO (use)))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool needed = marked_insn_p (insn);

	/* The insn is needed if there is someone who uses the output.  */
	if (!needed)
	  FOR_EACH_INSN_DEF (def, insn)
	    if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
		|| bitmap_bit_p (au, DF_REF_REGNO (def)))
	      {
		needed = true;
		mark_insn (insn, true);
		break;
	      }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (needed)
	  df_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  FOR_EACH_INSN_DEF (def, insn)
	    dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				    needed && !control_flow_insn_p (insn)
				    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
1037
1038
/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  Iterates backward dataflow until the per-block live-in
   sets stabilize, deleting unmarked insns between iterations.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
	  bool local_changed;

	  /* Entry/exit blocks carry no insns to process.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					&global_debug);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au,
				   &global_debug);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  bitmap_clear (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
1153
1154
1155 /* Fast register level DCE. */
1156
1157 static unsigned int
1158 rest_of_handle_fast_dce (void)
1159 {
1160 init_dce (true);
1161 fast_dce (false);
1162 fini_dce (true);
1163 return 0;
1164 }
1165
1166
1167 /* Fast byte level DCE. */
1168
1169 void
1170 run_word_dce (void)
1171 {
1172 int old_flags;
1173
1174 if (!flag_dce)
1175 return;
1176
1177 timevar_push (TV_DCE);
1178 old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1179 df_word_lr_add_problem ();
1180 init_dce (true);
1181 fast_dce (true);
1182 fini_dce (true);
1183 df_set_flags (old_flags);
1184 timevar_pop (TV_DCE);
1185 }
1186
1187
1188 /* This is an internal call that is used by the df live register
1189 problem to run fast dce as a side effect of creating the live
1190 information. The stack is organized so that the lr problem is run,
1191 this pass is run, which updates the live info and the df scanning
1192 info, and then returns to allow the rest of the problems to be run.
1193
1194 This can be called by elsewhere but it will not update the bit
1195 vectors for any other problems than LR. */
1196
1197 void
1198 run_fast_df_dce (void)
1199 {
1200 if (flag_dce)
1201 {
1202 /* If dce is able to delete something, it has to happen
1203 immediately. Otherwise there will be problems handling the
1204 eq_notes. */
1205 int old_flags =
1206 df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1207
1208 df_in_progress = true;
1209 rest_of_handle_fast_dce ();
1210 df_in_progress = false;
1211
1212 df_set_flags (old_flags);
1213 }
1214 }
1215
1216
1217 /* Run a fast DCE pass. */
1218
1219 void
1220 run_fast_dce (void)
1221 {
1222 if (flag_dce)
1223 rest_of_handle_fast_dce ();
1224 }
1225
1226
namespace {

/* Pass registration data for the fast (LR based) RTL DCE pass.  */
const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* Run at any optimization level above zero, when -fdce is enabled,
     subject to the dce_fast debug counter.  */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace
1263
/* Factory for the fast RTL DCE pass.  */
rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}