bind_c_array_params_2.f90: Add "-mno-explicit-relocs" for alpha*-*-* targets.
[gcc.git] / gcc / dce.c
1 /* RTL dead code elimination.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hashtab.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "flags.h"
31 #include "except.h"
32 #include "df.h"
33 #include "cselib.h"
34 #include "dce.h"
35 #include "tree-pass.h"
36 #include "dbgcnt.h"
37 #include "tm_p.h"
38 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
39
40
/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  Used only by the slow (UD-chain) variant.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* Forward declaration; needed by both deletable_insn_p and mark_insn.  */
static bool find_call_stack_args (rtx, bool, bool, bitmap);
64
65 /* A subroutine for which BODY is part of the instruction being tested;
66 either the top-level pattern, or an element of a PARALLEL. The
67 instruction is known not to be a bare USE or CLOBBER. */
68
69 static bool
70 deletable_insn_p_1 (rtx body)
71 {
72 switch (GET_CODE (body))
73 {
74 case PREFETCH:
75 case TRAP_IF:
76 /* The UNSPEC case was added here because the ia-64 claims that
77 USEs do not work after reload and generates UNSPECS rather
78 than USEs. Since dce is run after reload we need to avoid
79 deleting these even if they are dead. If it turns out that
80 USEs really do work after reload, the ia-64 should be
81 changed, and the UNSPEC case can be removed. */
82 case UNSPEC:
83 return false;
84
85 default:
86 return !volatile_refs_p (body);
87 }
88 }
89
90
91 /* Return true if INSN is a normal instruction that can be deleted by
92 the DCE pass. */
93
94 static bool
95 deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
96 {
97 rtx body, x;
98 int i;
99
100 if (CALL_P (insn)
101 /* We cannot delete calls inside of the recursive dce because
102 this may cause basic blocks to be deleted and this messes up
103 the rest of the stack of optimization passes. */
104 && (!df_in_progress)
105 /* We cannot delete pure or const sibling calls because it is
106 hard to see the result. */
107 && (!SIBLING_CALL_P (insn))
108 /* We can delete dead const or pure calls as long as they do not
109 infinite loop. */
110 && (RTL_CONST_OR_PURE_CALL_P (insn)
111 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
112 return find_call_stack_args (insn, false, fast, arg_stores);
113
114 /* Don't delete jumps, notes and the like. */
115 if (!NONJUMP_INSN_P (insn))
116 return false;
117
118 /* Don't delete insns that may throw if we cannot do so. */
119 if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
120 && !insn_nothrow_p (insn))
121 return false;
122
123 body = PATTERN (insn);
124 switch (GET_CODE (body))
125 {
126 case USE:
127 case VAR_LOCATION:
128 return false;
129
130 case CLOBBER:
131 if (fast)
132 {
133 /* A CLOBBER of a dead pseudo register serves no purpose.
134 That is not necessarily true for hard registers until
135 after reload. */
136 x = XEXP (body, 0);
137 return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
138 }
139 else
140 /* Because of the way that use-def chains are built, it is not
141 possible to tell if the clobber is dead because it can
142 never be the target of a use-def chain. */
143 return false;
144
145 case PARALLEL:
146 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
147 if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
148 return false;
149 return true;
150
151 default:
152 return deletable_insn_p_1 (body);
153 }
154 }
155
156
157 /* Return true if INSN has been marked as needed. */
158
159 static inline int
160 marked_insn_p (rtx insn)
161 {
162 /* Artificial defs are always needed and they do not have an insn.
163 We should never see them here. */
164 gcc_assert (insn);
165 return TEST_BIT (marked, INSN_UID (insn));
166 }
167
168
169 /* If INSN has not yet been marked as needed, mark it now, and add it to
170 the worklist. */
171
172 static void
173 mark_insn (rtx insn, bool fast)
174 {
175 if (!marked_insn_p (insn))
176 {
177 if (!fast)
178 VEC_safe_push (rtx, heap, worklist, insn);
179 SET_BIT (marked, INSN_UID (insn));
180 if (dump_file)
181 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
182 if (CALL_P (insn)
183 && !df_in_progress
184 && !SIBLING_CALL_P (insn)
185 && (RTL_CONST_OR_PURE_CALL_P (insn)
186 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
187 find_call_stack_args (insn, true, fast, NULL);
188 }
189 }
190
191
192 /* A note_stores callback used by mark_nonreg_stores. DATA is the
193 instruction containing DEST. */
194
195 static void
196 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
197 {
198 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
199 mark_insn ((rtx) data, true);
200 }
201
202
203 /* A note_stores callback used by mark_nonreg_stores. DATA is the
204 instruction containing DEST. */
205
206 static void
207 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
208 {
209 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
210 mark_insn ((rtx) data, false);
211 }
212
213
214 /* Mark INSN if BODY stores to a non-register destination. */
215
216 static void
217 mark_nonreg_stores (rtx body, rtx insn, bool fast)
218 {
219 if (fast)
220 note_stores (body, mark_nonreg_stores_1, insn);
221 else
222 note_stores (body, mark_nonreg_stores_2, insn);
223 }
224
225
226 /* Return true if store to MEM, starting OFF bytes from stack pointer,
227 is a call argument store, and clear corresponding bits from SP_BYTES
228 bitmap if it is. */
229
230 static bool
231 check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
232 HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
233 {
234 HOST_WIDE_INT byte;
235 for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
236 {
237 if (byte < min_sp_off
238 || byte >= max_sp_off
239 || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
240 return false;
241 }
242 return true;
243 }
244
245
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.

   FAST is true for the fast dce variant, in which no use-def chains
   are available.  ARG_STORES is used only when DO_MARK is false and
   collects the INSN_UIDs of the discovered argument stores.  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  /* Without ACCUMULATE_OUTGOING_ARGS there are no argument stores to
     find, so the call is trivially removable.  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, size;
	if (!MEM_SIZE_KNOWN_P (mem))
	  return false;
	size = MEM_SIZE (mem);
	addr = XEXP (mem, 0);
	/* Strip a constant displacement from the address.  */
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref *use_rec;
		struct df_link *defs;
		rtx set;

		for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
		  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		    break;

		if (*use_rec == NULL)
		  return false;

		for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		/* The defining insn must be sp + constant, so the
		   store address still resolves to a stack offset.  */
		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + size);
      }

  /* No stack-stored arguments at all: nothing further to check.  */
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop, so no validation is repeated here.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    df_ref *use_rec;
	    struct df_link *defs;
	    rtx set;

	    for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
	      if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		break;

	    for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	/* A set_bit failure means two argument MEMs overlapped, which
	   must not happen after the first loop accepted them.  */
	for (byte = off; byte < off + MEM_SIZE (mem); byte++)
	  {
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      /* The walk never leaves the call's basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL_RTX;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  /* As in the first loop, resolve a register address to
	     sp + constant via the use-def chains when available.  */
	  if (!fast)
	    {
	      df_ref *use_rec;
	      struct df_link *defs;
	      rtx set;

	      for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
		if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		  break;

	      if (*use_rec == NULL)
		break;

	      for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
	  || !check_argument_store (mem, off, min_sp_off,
				    max_sp_off, sp_bytes))
	break;

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* All argument bytes have been accounted for; every argument
	 store was found.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
485
486
487 /* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
488 writes to. */
489
490 static void
491 remove_reg_equal_equiv_notes_for_defs (rtx insn)
492 {
493 df_ref *def_rec;
494
495 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
496 remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
497 }
498
/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns, since those values are about to be
   deleted.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx insn, next;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
	{
	  df_ref *use_rec;

	  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	    {
	      df_ref use = *use_rec;
	      struct df_link *defs;
	      /* Look for any reaching def that is not marked (i.e.
		 will be deleted).  */
	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		{
		  rtx ref_insn;
		  if (DF_REF_IS_ARTIFICIAL (defs->ref))
		    continue;
		  ref_insn = DF_REF_INSN (defs->ref);
		  if (!marked_insn_p (ref_insn))
		    break;
		}
	      /* All defs of this use survive; check the next use.  */
	      if (!defs)
		continue;
	      /* ??? FIXME could we propagate the values assigned to
		 each of the DEFs?  */
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      df_insn_rescan_debug_internal (insn);
	      /* The whole location is reset, so no need to look at
		 the remaining uses of this debug insn.  */
	      break;
	    }
	}
}
537
/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
	{
	  /* Always delete no-op moves.  Note the empty statement:
	     a no-op move falls through to the deletion code below
	     even if it was marked as needed.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     this insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We remember this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
601
602
603 /* Go through the instructions and mark those whose necessity is not
604 dependent on inter-instruction information. Make sure all other
605 instructions are not marked. */
606
607 static void
608 prescan_insns_for_dce (bool fast)
609 {
610 basic_block bb;
611 rtx insn, prev;
612 bitmap arg_stores = NULL;
613
614 if (dump_file)
615 fprintf (dump_file, "Finding needed instructions:\n");
616
617 if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
618 arg_stores = BITMAP_ALLOC (NULL);
619
620 FOR_EACH_BB (bb)
621 {
622 FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
623 if (NONDEBUG_INSN_P (insn))
624 {
625 /* Don't mark argument stores now. They will be marked
626 if needed when the associated CALL is marked. */
627 if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
628 continue;
629 if (deletable_insn_p (insn, fast, arg_stores))
630 mark_nonreg_stores (PATTERN (insn), insn, fast);
631 else
632 mark_insn (insn, fast);
633 }
634 /* find_call_stack_args only looks at argument stores in the
635 same bb. */
636 if (arg_stores)
637 bitmap_clear (arg_stores);
638 }
639
640 if (arg_stores)
641 BITMAP_FREE (arg_stores);
642
643 if (dump_file)
644 fprintf (dump_file, "Finished finding needed instructions:\n");
645 }
646
647
648 /* UD-based DSE routines. */
649
650 /* Mark instructions that define artificially-used registers, such as
651 the frame pointer and the stack pointer. */
652
653 static void
654 mark_artificial_uses (void)
655 {
656 basic_block bb;
657 struct df_link *defs;
658 df_ref *use_rec;
659
660 FOR_ALL_BB (bb)
661 {
662 for (use_rec = df_get_artificial_uses (bb->index);
663 *use_rec; use_rec++)
664 for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
665 if (! DF_REF_IS_ARTIFICIAL (defs->ref))
666 mark_insn (DF_REF_INSN (defs->ref), false);
667 }
668 }
669
670
671 /* Mark every instruction that defines a register value that INSN uses. */
672
673 static void
674 mark_reg_dependencies (rtx insn)
675 {
676 struct df_link *defs;
677 df_ref *use_rec;
678
679 if (DEBUG_INSN_P (insn))
680 return;
681
682 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
683 {
684 df_ref use = *use_rec;
685 if (dump_file)
686 {
687 fprintf (dump_file, "Processing use of ");
688 print_simple_rtl (dump_file, DF_REF_REG (use));
689 fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
690 }
691 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
692 if (! DF_REF_IS_ARTIFICIAL (defs->ref))
693 mark_insn (DF_REF_INSN (defs->ref), false);
694 }
695 }
696
697
698 /* Initialize global variables for a new DCE pass. */
699
700 static void
701 init_dce (bool fast)
702 {
703 if (!df_in_progress)
704 {
705 if (!fast)
706 df_chain_add_problem (DF_UD_CHAIN);
707 df_analyze ();
708 }
709
710 if (dump_file)
711 df_dump (dump_file);
712
713 if (fast)
714 {
715 bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
716 bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
717 can_alter_cfg = false;
718 }
719 else
720 can_alter_cfg = true;
721
722 marked = sbitmap_alloc (get_max_uid () + 1);
723 sbitmap_zero (marked);
724 }
725
726
727 /* Free the data allocated by init_dce. */
728
729 static void
730 fini_dce (bool fast)
731 {
732 sbitmap_free (marked);
733
734 if (fast)
735 {
736 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
737 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
738 }
739 }
740
741
742 /* UD-chain based DCE. */
743
744 static unsigned int
745 rest_of_handle_ud_dce (void)
746 {
747 rtx insn;
748
749 init_dce (false);
750
751 prescan_insns_for_dce (false);
752 mark_artificial_uses ();
753 while (VEC_length (rtx, worklist) > 0)
754 {
755 insn = VEC_pop (rtx, worklist);
756 mark_reg_dependencies (insn);
757 }
758 VEC_free (rtx, heap, worklist);
759
760 if (MAY_HAVE_DEBUG_INSNS)
761 reset_unmarked_insns_debug_uses ();
762
763 /* Before any insns are deleted, we must remove the chains since
764 they are not bidirectional. */
765 df_remove_problem (df_chain);
766 delete_unmarked_insns ();
767
768 fini_dce (false);
769 return 0;
770 }
771
772
773 static bool
774 gate_ud_dce (void)
775 {
776 return optimize > 1 && flag_dce
777 && dbg_cnt (dce_ud);
778 }
779
/* Pass descriptor for the UD-chain based DCE pass.  */

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud_dce", /* name */
  gate_ud_dce, /* gate */
  rest_of_handle_ud_dce, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect /* todo_flags_finish */
 }
};
799
800
801 /* -------------------------------------------------------------------------
802 Fast DCE functions
803 ------------------------------------------------------------------------- */
804
/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  This is the word-level
   variant: each double-word pseudo is tracked as two word-sized
   halves (bits 2*REGNO and 2*REGNO + 1 of the live sets).  */

static bool
word_dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct dead_debug debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_init (&debug, NULL);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of double-word pseudos both of whose
	   halves are dead at this point, so a debug temp can be
	   emitted for them later if needed.  */
	df_ref *use_rec;
	for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	  if (DF_REF_REGNO (*use_rec) >= FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (*use_rec)))
		  == 2 * UNITS_PER_WORD)
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec))
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
	    dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
      }
    else if (INSN_P (insn))
      {
	bool any_changed;

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	any_changed = df_word_lr_simulate_defs (insn, local_live);
	if (any_changed)
	  mark_insn (insn, true);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_word_lr_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  {
	    df_ref *def_rec;

	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	      dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
				      DEBUG_TEMP_BEFORE_WITH_VALUE);
	  }

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_word_regset (dump_file, local_live);
	  }
      }

  /* The computed live set at the top of the block becomes the new
     live_in; report whether it changed so the caller can iterate.  */
  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}
896
897
/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  This is the register-level variant.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;
  struct dead_debug debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_init (&debug, NULL);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of registers dead at this point (neither
	   live locally nor artificially used).  */
	df_ref *use_rec;
	for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (*use_rec))
	      && !bitmap_bit_p (au, DF_REF_REGNO (*use_rec)))
	    dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
      }
    else if (INSN_P (insn))
      {
	bool needed = marked_insn_p (insn);

	/* The insn is needed if there is someone who uses the output.  */
	if (!needed)
	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
		|| bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
	      {
		needed = true;
		mark_insn (insn, true);
		break;
	      }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (needed)
	  df_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
				    DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  /* The computed live set at the top of the block becomes the new
     live_in; report whether it changed so the caller can iterate.  */
  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
989
990
/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  Iterates a backwards liveness simulation over the blocks
   until the per-block live_in sets reach a fixed point, deleting
   unmarked insns whenever a re-iteration is required and once more
   at the end.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK (index);
	  bool local_changed;

	  /* Entry/exit blocks are only marked processed, never
	     simulated.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index));
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  sbitmap_zero (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
1098
1099
1100 /* Fast register level DCE. */
1101
1102 static unsigned int
1103 rest_of_handle_fast_dce (void)
1104 {
1105 init_dce (true);
1106 fast_dce (false);
1107 fini_dce (true);
1108 return 0;
1109 }
1110
1111
1112 /* Fast byte level DCE. */
1113
1114 void
1115 run_word_dce (void)
1116 {
1117 int old_flags;
1118
1119 if (!flag_dce)
1120 return;
1121
1122 timevar_push (TV_DCE);
1123 old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1124 df_word_lr_add_problem ();
1125 init_dce (true);
1126 fast_dce (true);
1127 fini_dce (true);
1128 df_set_flags (old_flags);
1129 timevar_pop (TV_DCE);
1130 }
1131
1132
1133 /* This is an internal call that is used by the df live register
1134 problem to run fast dce as a side effect of creating the live
1135 information. The stack is organized so that the lr problem is run,
1136 this pass is run, which updates the live info and the df scanning
1137 info, and then returns to allow the rest of the problems to be run.
1138
1139 This can be called by elsewhere but it will not update the bit
1140 vectors for any other problems than LR. */
1141
1142 void
1143 run_fast_df_dce (void)
1144 {
1145 if (flag_dce)
1146 {
1147 /* If dce is able to delete something, it has to happen
1148 immediately. Otherwise there will be problems handling the
1149 eq_notes. */
1150 int old_flags =
1151 df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1152
1153 df_in_progress = true;
1154 rest_of_handle_fast_dce ();
1155 df_in_progress = false;
1156
1157 df_set_flags (old_flags);
1158 }
1159 }
1160
1161
1162 /* Run a fast DCE pass. */
1163
1164 void
1165 run_fast_dce (void)
1166 {
1167 if (flag_dce)
1168 rest_of_handle_fast_dce ();
1169 }
1170
1171
1172 static bool
1173 gate_fast_dce (void)
1174 {
1175 return optimize > 0 && flag_dce
1176 && dbg_cnt (dce_fast);
1177 }
1178
/* Pass descriptor for the fast register level DCE pass.  */

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl_dce", /* name */
  gate_fast_dce, /* gate */
  rest_of_handle_fast_dce, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect /* todo_flags_finish */
 }
};