flow.c (delete_noop_moves): Add missing EOL to dump message.
[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED, REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "tm.h"
125 #include "tree.h"
126 #include "rtl.h"
127 #include "tm_p.h"
128 #include "hard-reg-set.h"
129 #include "basic-block.h"
130 #include "insn-config.h"
131 #include "regs.h"
132 #include "flags.h"
133 #include "output.h"
134 #include "function.h"
135 #include "except.h"
136 #include "toplev.h"
137 #include "recog.h"
138 #include "expr.h"
139 #include "timevar.h"
140
141 #include "obstack.h"
142 #include "splay-tree.h"
143 #include "tree-pass.h"
144 #include "params.h"
145
146 #ifndef HAVE_epilogue
147 #define HAVE_epilogue 0
148 #endif
149 #ifndef HAVE_prologue
150 #define HAVE_prologue 0
151 #endif
152 #ifndef HAVE_sibcall_epilogue
153 #define HAVE_sibcall_epilogue 0
154 #endif
155
156 #ifndef EPILOGUE_USES
157 #define EPILOGUE_USES(REGNO) 0
158 #endif
159 #ifndef EH_USES
160 #define EH_USES(REGNO) 0
161 #endif
162
163 #ifdef HAVE_conditional_execution
164 #ifndef REVERSE_CONDEXEC_PREDICATES_P
165 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) \
166 (GET_CODE ((x)) == reversed_comparison_code ((y), NULL))
167 #endif
168 #endif
169
170 /* This is the maximum number of times we process any given block if the
171 latest loop depth count is smaller than this number. Only used for the
172 failure strategy to avoid infinite loops in calculate_global_regs_live. */
173 #define MAX_LIVENESS_ROUNDS 20
174
175 /* Nonzero if the second flow pass has completed. */
176 int flow2_completed;
177
178 /* Maximum register number used in this function, plus one. */
179
180 int max_regno;
181
182 /* Indexed by n, giving various register information */
183
184 varray_type reg_n_info;
185
186 /* Regset of regs live when calls to `setjmp'-like functions happen. */
187 /* ??? Does this exist only for the setjmp-clobbered warning message? */
188
189 static regset regs_live_at_setjmp;
190
191 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
192 that have to go in the same hard reg.
193 The first two regs in the list are a pair, and the next two
194 are another pair, etc. */
195 rtx regs_may_share;
196
197 /* Set of registers that may be eliminable. These are handled specially
198 in updating regs_ever_live. */
199
200 static HARD_REG_SET elim_reg_set;
201
202 /* Holds information for tracking conditional register life information. */
203 struct reg_cond_life_info
204 {
205 /* A boolean expression of conditions under which a register is dead. */
206 rtx condition;
207 /* Conditions under which a register is dead at the basic block end. */
208 rtx orig_condition;
209
210 /* A boolean expression of conditions under which a register has been
211 stored into. */
212 rtx stores;
213
214 /* ??? Could store mask of bytes that are dead, so that we could finally
215 track lifetimes of multi-word registers accessed via subregs. */
216 };
217
218 /* For use in communicating between propagate_block and its subroutines.
219 Holds all information needed to compute life and def-use information. */
220
221 struct propagate_block_info
222 {
223 /* The basic block we're considering. */
224 basic_block bb;
225
226 /* Bit N is set if register N is conditionally or unconditionally live. */
227 regset reg_live;
228
229 /* Bit N is set if register N is set this insn. */
230 regset new_set;
231
232 /* Element N is the next insn that uses (hard or pseudo) register N
233 within the current basic block; or zero, if there is no such insn. */
234 rtx *reg_next_use;
235
236 /* Contains a list of all the MEMs we are tracking for dead store
237 elimination. */
238 rtx mem_set_list;
239
240 /* If non-null, record the set of registers set unconditionally in the
241 basic block. */
242 regset local_set;
243
244 /* If non-null, record the set of registers set conditionally in the
245 basic block. */
246 regset cond_local_set;
247
248 #ifdef HAVE_conditional_execution
249 /* Indexed by register number, holds a reg_cond_life_info for each
250 register that is not unconditionally live or dead. */
251 splay_tree reg_cond_dead;
252
253 /* Bit N is set if register N is in an expression in reg_cond_dead. */
254 regset reg_cond_reg;
255 #endif
256
257 /* The length of mem_set_list. */
258 int mem_set_list_len;
259
260 /* Nonzero if the value of CC0 is live. */
261 int cc0_live;
262
263 /* Flags controlling the set of information propagate_block collects. */
264 int flags;
265 /* Index of instruction being processed. */
266 int insn_num;
267 };
268
269 /* Number of dead insns removed. */
270 static int ndead;
271
272 /* When PROP_REG_INFO set, array contains pbi->insn_num of instruction
273 where given register died. When the register is marked alive, we use the
274 information to compute amount of instructions life range cross.
275 (remember, we are walking backward). This can be computed as current
276 pbi->insn_num - reg_deaths[regno].
277 At the end of processing each basic block, the remaining live registers
278 are inspected and live ranges are increased same way so liverange of global
279 registers are computed correctly.
280
281 The array is maintained clear for dead registers, so it can be safely reused
282 for next basic block without expensive memset of the whole array after
283 reseting pbi->insn_num to 0. */
284
285 static int *reg_deaths;
286
287 /* Forward declarations */
288 static int verify_wide_reg_1 (rtx *, void *);
289 static void verify_wide_reg (int, basic_block);
290 static void verify_local_live_at_start (regset, basic_block);
291 static void notice_stack_pointer_modification_1 (rtx, rtx, void *);
292 static void notice_stack_pointer_modification (void);
293 static void mark_reg (rtx, void *);
294 static void mark_regs_live_at_end (regset);
295 static void calculate_global_regs_live (sbitmap, sbitmap, int);
296 static void propagate_block_delete_insn (rtx);
297 static rtx propagate_block_delete_libcall (rtx, rtx);
298 static int insn_dead_p (struct propagate_block_info *, rtx, int, rtx);
299 static int libcall_dead_p (struct propagate_block_info *, rtx, rtx);
300 static void mark_set_regs (struct propagate_block_info *, rtx, rtx);
301 static void mark_set_1 (struct propagate_block_info *, enum rtx_code, rtx,
302 rtx, rtx, int);
303 static int find_regno_partial (rtx *, void *);
304
305 #ifdef HAVE_conditional_execution
306 static int mark_regno_cond_dead (struct propagate_block_info *, int, rtx);
307 static void free_reg_cond_life_info (splay_tree_value);
308 static int flush_reg_cond_reg_1 (splay_tree_node, void *);
309 static void flush_reg_cond_reg (struct propagate_block_info *, int);
310 static rtx elim_reg_cond (rtx, unsigned int);
311 static rtx ior_reg_cond (rtx, rtx, int);
312 static rtx not_reg_cond (rtx);
313 static rtx and_reg_cond (rtx, rtx, int);
314 #endif
315 #ifdef AUTO_INC_DEC
316 static void attempt_auto_inc (struct propagate_block_info *, rtx, rtx, rtx,
317 rtx, rtx);
318 static void find_auto_inc (struct propagate_block_info *, rtx, rtx);
319 static int try_pre_increment_1 (struct propagate_block_info *, rtx);
320 static int try_pre_increment (rtx, rtx, HOST_WIDE_INT);
321 #endif
322 static void mark_used_reg (struct propagate_block_info *, rtx, rtx, rtx);
323 static void mark_used_regs (struct propagate_block_info *, rtx, rtx, rtx);
324 void debug_flow_info (void);
325 static void add_to_mem_set_list (struct propagate_block_info *, rtx);
326 static int invalidate_mems_from_autoinc (rtx *, void *);
327 static void invalidate_mems_from_set (struct propagate_block_info *, rtx);
328 static void clear_log_links (sbitmap);
329 static int count_or_remove_death_notes_bb (basic_block, int);
330 static void allocate_bb_life_data (void);
331 \f
332 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
333 note associated with the BLOCK. */
334
335 rtx
336 first_insn_after_basic_block_note (basic_block block)
337 {
338 rtx insn;
339
340 /* Get the first instruction in the block. */
341 insn = BB_HEAD (block);
342
343 if (insn == NULL_RTX)
344 return NULL_RTX;
345 if (LABEL_P (insn))
346 insn = NEXT_INSN (insn);
347 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
348
349 return NEXT_INSN (insn);
350 }
351 \f
352 /* Perform data flow analysis for the whole control flow graph.
353 FLAGS is a set of PROP_* flags to be used in accumulating flow info. */
354
355 void
356 life_analysis (FILE *file, int flags)
357 {
358 #ifdef ELIMINABLE_REGS
359 int i;
360 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
361 #endif
362
363 /* Record which registers will be eliminated. We use this in
364 mark_used_regs. */
365
366 CLEAR_HARD_REG_SET (elim_reg_set);
367
368 #ifdef ELIMINABLE_REGS
369 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
370 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
371 #else
372 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
373 #endif
374
375
376 #ifdef CANNOT_CHANGE_MODE_CLASS
377 if (flags & PROP_REG_INFO)
378 init_subregs_of_mode ();
379 #endif
380
381 if (! optimize)
382 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
383
384 /* The post-reload life analysis have (on a global basis) the same
385 registers live as was computed by reload itself. elimination
386 Otherwise offsets and such may be incorrect.
387
388 Reload will make some registers as live even though they do not
389 appear in the rtl.
390
391 We don't want to create new auto-incs after reload, since they
392 are unlikely to be useful and can cause problems with shared
393 stack slots. */
394 if (reload_completed)
395 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
396
397 /* We want alias analysis information for local dead store elimination. */
398 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
399 init_alias_analysis ();
400
401 /* Always remove no-op moves. Do this before other processing so
402 that we don't have to keep re-scanning them. */
403 delete_noop_moves ();
404
405 /* Some targets can emit simpler epilogues if they know that sp was
406 not ever modified during the function. After reload, of course,
407 we've already emitted the epilogue so there's no sense searching. */
408 if (! reload_completed)
409 notice_stack_pointer_modification ();
410
411 /* Allocate and zero out data structures that will record the
412 data from lifetime analysis. */
413 allocate_reg_life_data ();
414 allocate_bb_life_data ();
415
416 /* Find the set of registers live on function exit. */
417 mark_regs_live_at_end (EXIT_BLOCK_PTR->il.rtl->global_live_at_start);
418
419 /* "Update" life info from zero. It'd be nice to begin the
420 relaxation with just the exit and noreturn blocks, but that set
421 is not immediately handy. */
422
423 if (flags & PROP_REG_INFO)
424 {
425 memset (regs_ever_live, 0, sizeof (regs_ever_live));
426 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
427 }
428 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
429 if (reg_deaths)
430 {
431 free (reg_deaths);
432 reg_deaths = NULL;
433 }
434
435 /* Clean up. */
436 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
437 end_alias_analysis ();
438
439 if (file)
440 dump_flow_info (file);
441
442 /* Removing dead insns should have made jumptables really dead. */
443 delete_dead_jumptables ();
444 }
445
446 /* A subroutine of verify_wide_reg, called through for_each_rtx.
447 Search for REGNO. If found, return 2 if it is not wider than
448 word_mode. */
449
450 static int
451 verify_wide_reg_1 (rtx *px, void *pregno)
452 {
453 rtx x = *px;
454 unsigned int regno = *(int *) pregno;
455
456 if (REG_P (x) && REGNO (x) == regno)
457 {
458 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
459 return 2;
460 return 1;
461 }
462 return 0;
463 }
464
465 /* A subroutine of verify_local_live_at_start. Search through insns
466 of BB looking for register REGNO. */
467
468 static void
469 verify_wide_reg (int regno, basic_block bb)
470 {
471 rtx head = BB_HEAD (bb), end = BB_END (bb);
472
473 while (1)
474 {
475 if (INSN_P (head))
476 {
477 int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
478 if (r == 1)
479 return;
480 if (r == 2)
481 break;
482 }
483 if (head == end)
484 break;
485 head = NEXT_INSN (head);
486 }
487 if (dump_file)
488 {
489 fprintf (dump_file, "Register %d died unexpectedly.\n", regno);
490 dump_bb (bb, dump_file, 0);
491 }
492 fatal_error ("internal consistency failure");
493 }
494
495 /* A subroutine of update_life_info. Verify that there are no untoward
496 changes in live_at_start during a local update. */
497
498 static void
499 verify_local_live_at_start (regset new_live_at_start, basic_block bb)
500 {
501 if (reload_completed)
502 {
503 /* After reload, there are no pseudos, nor subregs of multi-word
504 registers. The regsets should exactly match. */
505 if (! REG_SET_EQUAL_P (new_live_at_start,
506 bb->il.rtl->global_live_at_start))
507 {
508 if (dump_file)
509 {
510 fprintf (dump_file,
511 "live_at_start mismatch in bb %d, aborting\nNew:\n",
512 bb->index);
513 debug_bitmap_file (dump_file, new_live_at_start);
514 fputs ("Old:\n", dump_file);
515 dump_bb (bb, dump_file, 0);
516 }
517 fatal_error ("internal consistency failure");
518 }
519 }
520 else
521 {
522 unsigned i;
523 reg_set_iterator rsi;
524
525 /* Find the set of changed registers. */
526 XOR_REG_SET (new_live_at_start, bb->il.rtl->global_live_at_start);
527
528 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i, rsi)
529 {
530 /* No registers should die. */
531 if (REGNO_REG_SET_P (bb->il.rtl->global_live_at_start, i))
532 {
533 if (dump_file)
534 {
535 fprintf (dump_file,
536 "Register %d died unexpectedly.\n", i);
537 dump_bb (bb, dump_file, 0);
538 }
539 fatal_error ("internal consistency failure");
540 }
541 /* Verify that the now-live register is wider than word_mode. */
542 verify_wide_reg (i, bb);
543 }
544 }
545 }
546
547 /* Updates life information starting with the basic blocks set in BLOCKS.
548 If BLOCKS is null, consider it to be the universal set.
549
550 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
551 we are only expecting local modifications to basic blocks. If we find
552 extra registers live at the beginning of a block, then we either killed
553 useful data, or we have a broken split that wants data not provided.
554 If we find registers removed from live_at_start, that means we have
555 a broken peephole that is killing a register it shouldn't.
556
557 ??? This is not true in one situation -- when a pre-reload splitter
558 generates subregs of a multi-word pseudo, current life analysis will
559 lose the kill. So we _can_ have a pseudo go live. How irritating.
560
561 It is also not true when a peephole decides that it doesn't need one
562 or more of the inputs.
563
564 Including PROP_REG_INFO does not properly refresh regs_ever_live
565 unless the caller resets it to zero. */
566
567 int
568 update_life_info (sbitmap blocks, enum update_life_extent extent,
569 int prop_flags)
570 {
571 regset tmp;
572 unsigned i = 0;
573 int stabilized_prop_flags = prop_flags;
574 basic_block bb;
575
576 tmp = ALLOC_REG_SET (&reg_obstack);
577 ndead = 0;
578
579 if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
580 reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
581
582 timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
583 ? TV_LIFE_UPDATE : TV_LIFE);
584
585 /* Changes to the CFG are only allowed when
586 doing a global update for the entire CFG. */
587 gcc_assert (!(prop_flags & PROP_ALLOW_CFG_CHANGES)
588 || (extent != UPDATE_LIFE_LOCAL && !blocks));
589
590 /* For a global update, we go through the relaxation process again. */
591 if (extent != UPDATE_LIFE_LOCAL)
592 {
593 for ( ; ; )
594 {
595 int changed = 0;
596
597 calculate_global_regs_live (blocks, blocks,
598 prop_flags & (PROP_SCAN_DEAD_CODE
599 | PROP_SCAN_DEAD_STORES
600 | PROP_ALLOW_CFG_CHANGES));
601
602 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
603 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
604 break;
605
606 /* Removing dead code may allow the CFG to be simplified which
607 in turn may allow for further dead code detection / removal. */
608 FOR_EACH_BB_REVERSE (bb)
609 {
610 COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
611 changed |= propagate_block (bb, tmp, NULL, NULL,
612 prop_flags & (PROP_SCAN_DEAD_CODE
613 | PROP_SCAN_DEAD_STORES
614 | PROP_KILL_DEAD_CODE));
615 }
616
617 /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
618 subsequent propagate_block calls, since removing or acting as
619 removing dead code can affect global register liveness, which
620 is supposed to be finalized for this call after this loop. */
621 stabilized_prop_flags
622 &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
623 | PROP_KILL_DEAD_CODE);
624
625 if (! changed)
626 break;
627
628 /* We repeat regardless of what cleanup_cfg says. If there were
629 instructions deleted above, that might have been only a
630 partial improvement (see PARAM_MAX_FLOW_MEMORY_LOCATIONS usage).
631 Further improvement may be possible. */
632 cleanup_cfg (CLEANUP_EXPENSIVE);
633
634 /* Zap the life information from the last round. If we don't
635 do this, we can wind up with registers that no longer appear
636 in the code being marked live at entry. */
637 FOR_EACH_BB (bb)
638 {
639 CLEAR_REG_SET (bb->il.rtl->global_live_at_start);
640 CLEAR_REG_SET (bb->il.rtl->global_live_at_end);
641 }
642 }
643
644 /* If asked, remove notes from the blocks we'll update. */
645 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
646 count_or_remove_death_notes (blocks,
647 prop_flags & PROP_POST_REGSTACK ? -1 : 1);
648 }
649
650 /* Clear log links in case we are asked to (re)compute them. */
651 if (prop_flags & PROP_LOG_LINKS)
652 clear_log_links (blocks);
653
654 if (blocks)
655 {
656 sbitmap_iterator sbi;
657
658 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
659 {
660 bb = BASIC_BLOCK (i);
661 if (bb)
662 {
663 /* The bitmap may be flawed in that one of the basic
664 blocks may have been deleted before you get here. */
665 COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
666 propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
667
668 if (extent == UPDATE_LIFE_LOCAL)
669 verify_local_live_at_start (tmp, bb);
670 }
671 };
672 }
673 else
674 {
675 FOR_EACH_BB_REVERSE (bb)
676 {
677 COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
678
679 propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
680
681 if (extent == UPDATE_LIFE_LOCAL)
682 verify_local_live_at_start (tmp, bb);
683 }
684 }
685
686 FREE_REG_SET (tmp);
687
688 if (prop_flags & PROP_REG_INFO)
689 {
690 reg_set_iterator rsi;
691
692 /* The only pseudos that are live at the beginning of the function
693 are those that were not set anywhere in the function. local-alloc
694 doesn't know how to handle these correctly, so mark them as not
695 local to any one basic block. */
696 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end,
697 FIRST_PSEUDO_REGISTER, i, rsi)
698 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
699
700 /* We have a problem with any pseudoreg that lives across the setjmp.
701 ANSI says that if a user variable does not change in value between
702 the setjmp and the longjmp, then the longjmp preserves it. This
703 includes longjmp from a place where the pseudo appears dead.
704 (In principle, the value still exists if it is in scope.)
705 If the pseudo goes in a hard reg, some other value may occupy
706 that hard reg where this pseudo is dead, thus clobbering the pseudo.
707 Conclusion: such a pseudo must not go in a hard reg. */
708 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
709 FIRST_PSEUDO_REGISTER, i, rsi)
710 {
711 if (regno_reg_rtx[i] != 0)
712 {
713 REG_LIVE_LENGTH (i) = -1;
714 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
715 }
716 }
717 }
718 if (reg_deaths)
719 {
720 free (reg_deaths);
721 reg_deaths = NULL;
722 }
723 timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
724 ? TV_LIFE_UPDATE : TV_LIFE);
725 if (ndead && dump_file)
726 fprintf (dump_file, "deleted %i dead insns\n", ndead);
727 return ndead;
728 }
729
730 /* Update life information in all blocks where BB_DIRTY is set. */
731
732 int
733 update_life_info_in_dirty_blocks (enum update_life_extent extent, int prop_flags)
734 {
735 sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
736 int n = 0;
737 basic_block bb;
738 int retval = 0;
739
740 sbitmap_zero (update_life_blocks);
741 FOR_EACH_BB (bb)
742 {
743 if (bb->flags & BB_DIRTY)
744 {
745 SET_BIT (update_life_blocks, bb->index);
746 n++;
747 }
748 }
749
750 if (n)
751 retval = update_life_info (update_life_blocks, extent, prop_flags);
752
753 sbitmap_free (update_life_blocks);
754 return retval;
755 }
756
757 /* Free the variables allocated by find_basic_blocks. */
758
759 void
760 free_basic_block_vars (void)
761 {
762 if (basic_block_info)
763 {
764 clear_edges ();
765 basic_block_info = NULL;
766 }
767 n_basic_blocks = 0;
768 last_basic_block = 0;
769 n_edges = 0;
770
771 label_to_block_map = NULL;
772
773 ENTRY_BLOCK_PTR->aux = NULL;
774 ENTRY_BLOCK_PTR->il.rtl->global_live_at_end = NULL;
775 EXIT_BLOCK_PTR->aux = NULL;
776 EXIT_BLOCK_PTR->il.rtl->global_live_at_start = NULL;
777 }
778
779 /* Delete any insns that copy a register to itself. */
780
781 int
782 delete_noop_moves (void)
783 {
784 rtx insn, next;
785 basic_block bb;
786 int nnoops = 0;
787
788 FOR_EACH_BB (bb)
789 {
790 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
791 {
792 next = NEXT_INSN (insn);
793 if (INSN_P (insn) && noop_move_p (insn))
794 {
795 rtx note;
796
797 /* If we're about to remove the first insn of a libcall
798 then move the libcall note to the next real insn and
799 update the retval note. */
800 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
801 && XEXP (note, 0) != insn)
802 {
803 rtx new_libcall_insn = next_real_insn (insn);
804 rtx retval_note = find_reg_note (XEXP (note, 0),
805 REG_RETVAL, NULL_RTX);
806 REG_NOTES (new_libcall_insn)
807 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
808 REG_NOTES (new_libcall_insn));
809 XEXP (retval_note, 0) = new_libcall_insn;
810 }
811
812 delete_insn_and_edges (insn);
813 nnoops++;
814 }
815 }
816 }
817
818 if (nnoops && dump_file)
819 fprintf (dump_file, "deleted %i noop moves\n", nnoops);
820
821 return nnoops;
822 }
823
824 /* Delete any jump tables never referenced. We can't delete them at the
825 time of removing tablejump insn as they are referenced by the preceding
826 insns computing the destination, so we delay deleting and garbagecollect
827 them once life information is computed. */
828 void
829 delete_dead_jumptables (void)
830 {
831 basic_block bb;
832
833 /* A dead jump table does not belong to any basic block. Scan insns
834 between two adjacent basic blocks. */
835 FOR_EACH_BB (bb)
836 {
837 rtx insn, next;
838
839 for (insn = NEXT_INSN (BB_END (bb));
840 insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
841 insn = next)
842 {
843 next = NEXT_INSN (insn);
844 if (LABEL_P (insn)
845 && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
846 && JUMP_P (next)
847 && (GET_CODE (PATTERN (next)) == ADDR_VEC
848 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
849 {
850 rtx label = insn, jump = next;
851
852 if (dump_file)
853 fprintf (dump_file, "Dead jumptable %i removed\n",
854 INSN_UID (insn));
855
856 next = NEXT_INSN (next);
857 delete_insn (jump);
858 delete_insn (label);
859 }
860 }
861 }
862 }
863
864 /* Determine if the stack pointer is constant over the life of the function.
865 Only useful before prologues have been emitted. */
866
867 static void
868 notice_stack_pointer_modification_1 (rtx x, rtx pat ATTRIBUTE_UNUSED,
869 void *data ATTRIBUTE_UNUSED)
870 {
871 if (x == stack_pointer_rtx
872 /* The stack pointer is only modified indirectly as the result
873 of a push until later in flow. See the comments in rtl.texi
874 regarding Embedded Side-Effects on Addresses. */
875 || (MEM_P (x)
876 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
877 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
878 current_function_sp_is_unchanging = 0;
879 }
880
881 static void
882 notice_stack_pointer_modification (void)
883 {
884 basic_block bb;
885 rtx insn;
886
887 /* Assume that the stack pointer is unchanging if alloca hasn't
888 been used. */
889 current_function_sp_is_unchanging = !current_function_calls_alloca;
890 if (! current_function_sp_is_unchanging)
891 return;
892
893 FOR_EACH_BB (bb)
894 FOR_BB_INSNS (bb, insn)
895 {
896 if (INSN_P (insn))
897 {
898 /* Check if insn modifies the stack pointer. */
899 note_stores (PATTERN (insn),
900 notice_stack_pointer_modification_1,
901 NULL);
902 if (! current_function_sp_is_unchanging)
903 return;
904 }
905 }
906 }
907
908 /* Mark a register in SET. Hard registers in large modes get all
909 of their component registers set as well. */
910
911 static void
912 mark_reg (rtx reg, void *xset)
913 {
914 regset set = (regset) xset;
915 int regno = REGNO (reg);
916
917 gcc_assert (GET_MODE (reg) != BLKmode);
918
919 SET_REGNO_REG_SET (set, regno);
920 if (regno < FIRST_PSEUDO_REGISTER)
921 {
922 int n = hard_regno_nregs[regno][GET_MODE (reg)];
923 while (--n > 0)
924 SET_REGNO_REG_SET (set, regno + n);
925 }
926 }
927
928 /* Mark those regs which are needed at the end of the function as live
929 at the end of the last basic block. */
930
931 static void
932 mark_regs_live_at_end (regset set)
933 {
934 unsigned int i;
935
936 /* If exiting needs the right stack value, consider the stack pointer
937 live at the end of the function. */
938 if ((HAVE_epilogue && epilogue_completed)
939 || ! EXIT_IGNORE_STACK
940 || (! FRAME_POINTER_REQUIRED
941 && ! current_function_calls_alloca
942 && flag_omit_frame_pointer)
943 || current_function_sp_is_unchanging)
944 {
945 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
946 }
947
948 /* Mark the frame pointer if needed at the end of the function. If
949 we end up eliminating it, it will be removed from the live list
950 of each basic block by reload. */
951
952 if (! reload_completed || frame_pointer_needed)
953 {
954 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
955 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
956 /* If they are different, also mark the hard frame pointer as live. */
957 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
958 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
959 #endif
960 }
961
962 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
963 /* Many architectures have a GP register even without flag_pic.
964 Assume the pic register is not in use, or will be handled by
965 other means, if it is not fixed. */
966 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
967 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
968 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
969 #endif
970
971 /* Mark all global registers, and all registers used by the epilogue
972 as being live at the end of the function since they may be
973 referenced by our caller. */
974 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
975 if (global_regs[i] || EPILOGUE_USES (i))
976 SET_REGNO_REG_SET (set, i);
977
978 if (HAVE_epilogue && epilogue_completed)
979 {
980 /* Mark all call-saved registers that we actually used. */
981 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
982 if (regs_ever_live[i] && ! LOCAL_REGNO (i)
983 && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
984 SET_REGNO_REG_SET (set, i);
985 }
986
987 #ifdef EH_RETURN_DATA_REGNO
988 /* Mark the registers that will contain data for the handler. */
989 if (reload_completed && current_function_calls_eh_return)
990 for (i = 0; ; ++i)
991 {
992 unsigned regno = EH_RETURN_DATA_REGNO(i);
993 if (regno == INVALID_REGNUM)
994 break;
995 SET_REGNO_REG_SET (set, regno);
996 }
997 #endif
998 #ifdef EH_RETURN_STACKADJ_RTX
999 if ((! HAVE_epilogue || ! epilogue_completed)
1000 && current_function_calls_eh_return)
1001 {
1002 rtx tmp = EH_RETURN_STACKADJ_RTX;
1003 if (tmp && REG_P (tmp))
1004 mark_reg (tmp, set);
1005 }
1006 #endif
1007 #ifdef EH_RETURN_HANDLER_RTX
1008 if ((! HAVE_epilogue || ! epilogue_completed)
1009 && current_function_calls_eh_return)
1010 {
1011 rtx tmp = EH_RETURN_HANDLER_RTX;
1012 if (tmp && REG_P (tmp))
1013 mark_reg (tmp, set);
1014 }
1015 #endif
1016
1017 /* Mark function return value. */
1018 diddle_return_value (mark_reg, set);
1019 }
1020
1021 /* Propagate global life info around the graph of basic blocks. Begin
1022 considering blocks with their corresponding bit set in BLOCKS_IN.
1023 If BLOCKS_IN is null, consider it the universal set.
1024
1025 BLOCKS_OUT is set for every block that was changed. */
1026
1027 static void
1028 calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags)
1029 {
1030 basic_block *queue, *qhead, *qtail, *qend, bb;
1031 regset tmp, new_live_at_end, invalidated_by_call;
1032 regset registers_made_dead;
1033 bool failure_strategy_required = false;
1034 int *block_accesses;
1035
1036 /* The registers that are modified within this in block. */
1037 regset *local_sets;
1038
1039 /* The registers that are conditionally modified within this block.
1040 In other words, regs that are set only as part of a COND_EXEC. */
1041 regset *cond_local_sets;
1042
1043 unsigned int i;
1044
1045 /* Some passes used to forget clear aux field of basic block causing
1046 sick behavior here. */
1047 #ifdef ENABLE_CHECKING
1048 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1049 gcc_assert (!bb->aux);
1050 #endif
1051
1052 tmp = ALLOC_REG_SET (&reg_obstack);
1053 new_live_at_end = ALLOC_REG_SET (&reg_obstack);
1054 invalidated_by_call = ALLOC_REG_SET (&reg_obstack);
1055 registers_made_dead = ALLOC_REG_SET (&reg_obstack);
1056
1057 /* Inconveniently, this is only readily available in hard reg set form. */
1058 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1059 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1060 SET_REGNO_REG_SET (invalidated_by_call, i);
1061
1062 /* Allocate space for the sets of local properties. */
1063 local_sets = xcalloc (last_basic_block, sizeof (regset));
1064 cond_local_sets = xcalloc (last_basic_block, sizeof (regset));
1065
1066 /* Create a worklist. Allocate an extra slot for the `head == tail'
1067 style test for an empty queue doesn't work with a full queue. */
1068 queue = xmalloc ((n_basic_blocks + 1) * sizeof (*queue));
1069 qtail = queue;
1070 qhead = qend = queue + n_basic_blocks;
1071
1072 /* Queue the blocks set in the initial mask. Do this in reverse block
1073 number order so that we are more likely for the first round to do
1074 useful work. We use AUX non-null to flag that the block is queued. */
1075 if (blocks_in)
1076 {
1077 FOR_EACH_BB (bb)
1078 if (TEST_BIT (blocks_in, bb->index))
1079 {
1080 *--qhead = bb;
1081 bb->aux = bb;
1082 }
1083 }
1084 else
1085 {
1086 FOR_EACH_BB (bb)
1087 {
1088 *--qhead = bb;
1089 bb->aux = bb;
1090 }
1091 }
1092
1093 block_accesses = xcalloc (last_basic_block, sizeof (int));
1094
1095 /* We clean aux when we remove the initially-enqueued bbs, but we
1096 don't enqueue ENTRY and EXIT initially, so clean them upfront and
1097 unconditionally. */
1098 ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;
1099
1100 if (blocks_out)
1101 sbitmap_zero (blocks_out);
1102
1103 /* We work through the queue until there are no more blocks. What
1104 is live at the end of this block is precisely the union of what
1105 is live at the beginning of all its successors. So, we set its
1106 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
1107 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
1108 this block by walking through the instructions in this block in
1109 reverse order and updating as we go. If that changed
1110 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
1111 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
1112
1113 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
1114 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
1115 must either be live at the end of the block, or used within the
1116 block. In the latter case, it will certainly never disappear
1117 from GLOBAL_LIVE_AT_START. In the former case, the register
1118 could go away only if it disappeared from GLOBAL_LIVE_AT_START
1119 for one of the successor blocks. By induction, that cannot
1120 occur.
1121
1122 ??? This reasoning doesn't work if we start from non-empty initial
1123 GLOBAL_LIVE_AT_START sets. And there are actually two problems:
1124 1) Updating may not terminate (endless oscillation).
1125 2) Even if it does (and it usually does), the resulting information
1126 may be inaccurate. Consider for example the following case:
1127
1128 a = ...;
1129 while (...) {...} -- 'a' not mentioned at all
1130 ... = a;
1131
1132 If the use of 'a' is deleted between two calculations of liveness
1133 information and the initial sets are not cleared, the information
1134 about a's liveness will get stuck inside the loop and the set will
1135 appear not to be dead.
1136
1137 We do not attempt to solve 2) -- the information is conservatively
1138 correct (i.e. we never claim that something live is dead) and the
1139 amount of optimization opportunities missed due to this problem is
1140 not significant.
1141
1142 1) is more serious. In order to fix it, we monitor the number of times
1143 each block is processed. Once one of the blocks has been processed more
1144 times than the maximum number of rounds, we use the following strategy:
1145 When a register disappears from one of the sets, we add it to a MAKE_DEAD
1146 set, remove all registers in this set from all GLOBAL_LIVE_AT_* sets and
1147 add the blocks with changed sets into the queue. Thus we are guaranteed
1148 to terminate (the worst case corresponds to all registers in MADE_DEAD,
1149 in which case the original reasoning above is valid), but in general we
1150 only fix up a few offending registers.
1151
1152 The maximum number of rounds for computing liveness is the largest of
1153 MAX_LIVENESS_ROUNDS and the latest loop depth count for this function. */
1154
1155 while (qhead != qtail)
1156 {
1157 int rescan, changed;
1158 basic_block bb;
1159 edge e;
1160 edge_iterator ei;
1161
1162 bb = *qhead++;
1163 if (qhead == qend)
1164 qhead = queue;
1165 bb->aux = NULL;
1166
1167 /* Should we start using the failure strategy? */
1168 if (bb != ENTRY_BLOCK_PTR)
1169 {
1170 int max_liveness_rounds =
1171 MAX (MAX_LIVENESS_ROUNDS, cfun->max_loop_depth);
1172
1173 block_accesses[bb->index]++;
1174 if (block_accesses[bb->index] > max_liveness_rounds)
1175 failure_strategy_required = true;
1176 }
1177
1178 /* Begin by propagating live_at_start from the successor blocks. */
1179 CLEAR_REG_SET (new_live_at_end);
1180
1181 if (EDGE_COUNT (bb->succs) > 0)
1182 FOR_EACH_EDGE (e, ei, bb->succs)
1183 {
1184 basic_block sb = e->dest;
1185
1186 /* Call-clobbered registers die across exception and
1187 call edges. */
1188 /* ??? Abnormal call edges ignored for the moment, as this gets
1189 confused by sibling call edges, which crashes reg-stack. */
1190 if (e->flags & EDGE_EH)
1191 bitmap_ior_and_compl_into (new_live_at_end,
1192 sb->il.rtl->global_live_at_start,
1193 invalidated_by_call);
1194 else
1195 IOR_REG_SET (new_live_at_end, sb->il.rtl->global_live_at_start);
1196
1197 /* If a target saves one register in another (instead of on
1198 the stack) the save register will need to be live for EH. */
1199 if (e->flags & EDGE_EH)
1200 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1201 if (EH_USES (i))
1202 SET_REGNO_REG_SET (new_live_at_end, i);
1203 }
1204 else
1205 {
1206 /* This might be a noreturn function that throws. And
1207 even if it isn't, getting the unwind info right helps
1208 debugging. */
1209 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1210 if (EH_USES (i))
1211 SET_REGNO_REG_SET (new_live_at_end, i);
1212 }
1213
1214 /* The all-important stack pointer must always be live. */
1215 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
1216
1217 /* Before reload, there are a few registers that must be forced
1218 live everywhere -- which might not already be the case for
1219 blocks within infinite loops. */
1220 if (! reload_completed)
1221 {
1222 /* Any reference to any pseudo before reload is a potential
1223 reference of the frame pointer. */
1224 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
1225
1226 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1227 /* Pseudos with argument area equivalences may require
1228 reloading via the argument pointer. */
1229 if (fixed_regs[ARG_POINTER_REGNUM])
1230 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
1231 #endif
1232
1233 /* Any constant, or pseudo with constant equivalences, may
1234 require reloading from memory using the pic register. */
1235 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1236 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1237 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
1238 }
1239
1240 if (bb == ENTRY_BLOCK_PTR)
1241 {
1242 COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
1243 continue;
1244 }
1245
1246 /* On our first pass through this block, we'll go ahead and continue.
1247 Recognize first pass by checking if local_set is NULL for this
1248 basic block. On subsequent passes, we get to skip out early if
1249 live_at_end wouldn't have changed. */
1250
1251 if (local_sets[bb->index] == NULL)
1252 {
1253 local_sets[bb->index] = ALLOC_REG_SET (&reg_obstack);
1254 cond_local_sets[bb->index] = ALLOC_REG_SET (&reg_obstack);
1255 rescan = 1;
1256 }
1257 else
1258 {
1259 /* If any bits were removed from live_at_end, we'll have to
1260 rescan the block. This wouldn't be necessary if we had
1261 precalculated local_live, however with PROP_SCAN_DEAD_CODE
1262 local_live is really dependent on live_at_end. */
1263 rescan = bitmap_intersect_compl_p (bb->il.rtl->global_live_at_end,
1264 new_live_at_end);
1265
1266 if (!rescan)
1267 {
1268 regset cond_local_set;
1269
1270 /* If any of the registers in the new live_at_end set are
1271 conditionally set in this basic block, we must rescan.
1272 This is because conditional lifetimes at the end of the
1273 block do not just take the live_at_end set into
1274 account, but also the liveness at the start of each
1275 successor block. We can miss changes in those sets if
1276 we only compare the new live_at_end against the
1277 previous one. */
1278 cond_local_set = cond_local_sets[bb->index];
1279 rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
1280 }
1281
1282 if (!rescan)
1283 {
1284 regset local_set;
1285
1286 /* Find the set of changed bits. Take this opportunity
1287 to notice that this set is empty and early out. */
1288 bitmap_xor (tmp, bb->il.rtl->global_live_at_end, new_live_at_end);
1289 if (bitmap_empty_p (tmp))
1290 continue;
1291
1292 /* If any of the changed bits overlap with local_sets[bb],
1293 we'll have to rescan the block. */
1294 local_set = local_sets[bb->index];
1295 rescan = bitmap_intersect_p (tmp, local_set);
1296 }
1297 }
1298
1299 /* Let our caller know that BB changed enough to require its
1300 death notes updated. */
1301 if (blocks_out)
1302 SET_BIT (blocks_out, bb->index);
1303
1304 if (! rescan)
1305 {
1306 /* Add to live_at_start the set of all registers in
1307 new_live_at_end that aren't in the old live_at_end. */
1308
1309 changed = bitmap_ior_and_compl_into (bb->il.rtl->global_live_at_start,
1310 new_live_at_end,
1311 bb->il.rtl->global_live_at_end);
1312 COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
1313 if (! changed)
1314 continue;
1315 }
1316 else
1317 {
1318 COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
1319
1320 /* Rescan the block insn by insn to turn (a copy of) live_at_end
1321 into live_at_start. */
1322 propagate_block (bb, new_live_at_end,
1323 local_sets[bb->index],
1324 cond_local_sets[bb->index],
1325 flags);
1326
1327 /* If live_at start didn't change, no need to go farther. */
1328 if (REG_SET_EQUAL_P (bb->il.rtl->global_live_at_start,
1329 new_live_at_end))
1330 continue;
1331
1332 if (failure_strategy_required)
1333 {
1334 /* Get the list of registers that were removed from the
1335 bb->global_live_at_start set. */
1336 bitmap_and_compl (tmp, bb->il.rtl->global_live_at_start,
1337 new_live_at_end);
1338 if (!bitmap_empty_p (tmp))
1339 {
1340 bool pbb_changed;
1341 basic_block pbb;
1342
1343 /* It should not happen that one of registers we have
1344 removed last time is disappears again before any other
1345 register does. */
1346 pbb_changed = bitmap_ior_into (registers_made_dead, tmp);
1347 gcc_assert (pbb_changed);
1348
1349 /* Now remove the registers from all sets. */
1350 FOR_EACH_BB (pbb)
1351 {
1352 pbb_changed = false;
1353
1354 pbb_changed
1355 |= bitmap_and_compl_into
1356 (pbb->il.rtl->global_live_at_start,
1357 registers_made_dead);
1358 pbb_changed
1359 |= bitmap_and_compl_into
1360 (pbb->il.rtl->global_live_at_end,
1361 registers_made_dead);
1362 if (!pbb_changed)
1363 continue;
1364
1365 /* Note the (possible) change. */
1366 if (blocks_out)
1367 SET_BIT (blocks_out, pbb->index);
1368
1369 /* Makes sure to really rescan the block. */
1370 if (local_sets[pbb->index])
1371 {
1372 FREE_REG_SET (local_sets[pbb->index]);
1373 FREE_REG_SET (cond_local_sets[pbb->index]);
1374 local_sets[pbb->index] = 0;
1375 }
1376
1377 /* Add it to the queue. */
1378 if (pbb->aux == NULL)
1379 {
1380 *qtail++ = pbb;
1381 if (qtail == qend)
1382 qtail = queue;
1383 pbb->aux = pbb;
1384 }
1385 }
1386 continue;
1387 }
1388 } /* end of failure_strategy_required */
1389
1390 COPY_REG_SET (bb->il.rtl->global_live_at_start, new_live_at_end);
1391 }
1392
1393 /* Queue all predecessors of BB so that we may re-examine
1394 their live_at_end. */
1395 FOR_EACH_EDGE (e, ei, bb->preds)
1396 {
1397 basic_block pb = e->src;
1398 if (pb->aux == NULL)
1399 {
1400 *qtail++ = pb;
1401 if (qtail == qend)
1402 qtail = queue;
1403 pb->aux = pb;
1404 }
1405 }
1406 }
1407
1408 FREE_REG_SET (tmp);
1409 FREE_REG_SET (new_live_at_end);
1410 FREE_REG_SET (invalidated_by_call);
1411 FREE_REG_SET (registers_made_dead);
1412
1413 if (blocks_out)
1414 {
1415 sbitmap_iterator sbi;
1416
1417 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i, sbi)
1418 {
1419 basic_block bb = BASIC_BLOCK (i);
1420 FREE_REG_SET (local_sets[bb->index]);
1421 FREE_REG_SET (cond_local_sets[bb->index]);
1422 };
1423 }
1424 else
1425 {
1426 FOR_EACH_BB (bb)
1427 {
1428 FREE_REG_SET (local_sets[bb->index]);
1429 FREE_REG_SET (cond_local_sets[bb->index]);
1430 }
1431 }
1432
1433 free (block_accesses);
1434 free (queue);
1435 free (cond_local_sets);
1436 free (local_sets);
1437 }
1438
1439 \f
1440 /* This structure is used to pass parameters to and from the
1441 the function find_regno_partial(). It is used to pass in the
1442 register number we are looking, as well as to return any rtx
1443 we find. */
1444
1445 typedef struct {
1446 unsigned regno_to_find;
1447 rtx retval;
1448 } find_regno_partial_param;
1449
1450
1451 /* Find the rtx for the reg numbers specified in 'data' if it is
1452 part of an expression which only uses part of the register. Return
1453 it in the structure passed in. */
1454 static int
1455 find_regno_partial (rtx *ptr, void *data)
1456 {
1457 find_regno_partial_param *param = (find_regno_partial_param *)data;
1458 unsigned reg = param->regno_to_find;
1459 param->retval = NULL_RTX;
1460
1461 if (*ptr == NULL_RTX)
1462 return 0;
1463
1464 switch (GET_CODE (*ptr))
1465 {
1466 case ZERO_EXTRACT:
1467 case SIGN_EXTRACT:
1468 case STRICT_LOW_PART:
1469 if (REG_P (XEXP (*ptr, 0)) && REGNO (XEXP (*ptr, 0)) == reg)
1470 {
1471 param->retval = XEXP (*ptr, 0);
1472 return 1;
1473 }
1474 break;
1475
1476 case SUBREG:
1477 if (REG_P (SUBREG_REG (*ptr))
1478 && REGNO (SUBREG_REG (*ptr)) == reg)
1479 {
1480 param->retval = SUBREG_REG (*ptr);
1481 return 1;
1482 }
1483 break;
1484
1485 default:
1486 break;
1487 }
1488
1489 return 0;
1490 }
1491
1492 /* Process all immediate successors of the entry block looking for pseudo
1493 registers which are live on entry. Find all of those whose first
1494 instance is a partial register reference of some kind, and initialize
1495 them to 0 after the entry block. This will prevent bit sets within
1496 registers whose value is unknown, and may contain some kind of sticky
1497 bits we don't want. */
1498
1499 static int
1500 initialize_uninitialized_subregs (void)
1501 {
1502 rtx insn;
1503 edge e;
1504 unsigned reg, did_something = 0;
1505 find_regno_partial_param param;
1506 edge_iterator ei;
1507
1508 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1509 {
1510 basic_block bb = e->dest;
1511 regset map = bb->il.rtl->global_live_at_start;
1512 reg_set_iterator rsi;
1513
1514 EXECUTE_IF_SET_IN_REG_SET (map, FIRST_PSEUDO_REGISTER, reg, rsi)
1515 {
1516 int uid = REGNO_FIRST_UID (reg);
1517 rtx i;
1518
1519 /* Find an insn which mentions the register we are looking for.
1520 Its preferable to have an instance of the register's rtl since
1521 there may be various flags set which we need to duplicate.
1522 If we can't find it, its probably an automatic whose initial
1523 value doesn't matter, or hopefully something we don't care about. */
1524 for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
1525 ;
1526 if (i != NULL_RTX)
1527 {
1528 /* Found the insn, now get the REG rtx, if we can. */
1529 param.regno_to_find = reg;
1530 for_each_rtx (&i, find_regno_partial, &param);
1531 if (param.retval != NULL_RTX)
1532 {
1533 start_sequence ();
1534 emit_move_insn (param.retval,
1535 CONST0_RTX (GET_MODE (param.retval)));
1536 insn = get_insns ();
1537 end_sequence ();
1538 insert_insn_on_edge (insn, e);
1539 did_something = 1;
1540 }
1541 }
1542 }
1543 }
1544
1545 if (did_something)
1546 commit_edge_insertions ();
1547 return did_something;
1548 }
1549
1550 \f
1551 /* Subroutines of life analysis. */
1552
1553 /* Allocate the permanent data structures that represent the results
1554 of life analysis. */
1555
1556 static void
1557 allocate_bb_life_data (void)
1558 {
1559 basic_block bb;
1560
1561 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1562 {
1563 bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1564 bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1565 }
1566
1567 regs_live_at_setjmp = ALLOC_REG_SET (&reg_obstack);
1568 }
1569
1570 void
1571 allocate_reg_life_data (void)
1572 {
1573 int i;
1574
1575 max_regno = max_reg_num ();
1576 gcc_assert (!reg_deaths);
1577 reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
1578
1579 /* Recalculate the register space, in case it has grown. Old style
1580 vector oriented regsets would set regset_{size,bytes} here also. */
1581 allocate_reg_info (max_regno, FALSE, FALSE);
1582
1583 /* Reset all the data we'll collect in propagate_block and its
1584 subroutines. */
1585 for (i = 0; i < max_regno; i++)
1586 {
1587 REG_N_SETS (i) = 0;
1588 REG_N_REFS (i) = 0;
1589 REG_N_DEATHS (i) = 0;
1590 REG_N_CALLS_CROSSED (i) = 0;
1591 REG_N_THROWING_CALLS_CROSSED (i) = 0;
1592 REG_LIVE_LENGTH (i) = 0;
1593 REG_FREQ (i) = 0;
1594 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1595 }
1596 }
1597
1598 /* Delete dead instructions for propagate_block. */
1599
1600 static void
1601 propagate_block_delete_insn (rtx insn)
1602 {
1603 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1604
1605 /* If the insn referred to a label, and that label was attached to
1606 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
1607 pretty much mandatory to delete it, because the ADDR_VEC may be
1608 referencing labels that no longer exist.
1609
1610 INSN may reference a deleted label, particularly when a jump
1611 table has been optimized into a direct jump. There's no
1612 real good way to fix up the reference to the deleted label
1613 when the label is deleted, so we just allow it here. */
1614
1615 if (inote && LABEL_P (inote))
1616 {
1617 rtx label = XEXP (inote, 0);
1618 rtx next;
1619
1620 /* The label may be forced if it has been put in the constant
1621 pool. If that is the only use we must discard the table
1622 jump following it, but not the label itself. */
1623 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1624 && (next = next_nonnote_insn (label)) != NULL
1625 && JUMP_P (next)
1626 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1627 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1628 {
1629 rtx pat = PATTERN (next);
1630 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1631 int len = XVECLEN (pat, diff_vec_p);
1632 int i;
1633
1634 for (i = 0; i < len; i++)
1635 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1636
1637 delete_insn_and_edges (next);
1638 ndead++;
1639 }
1640 }
1641
1642 delete_insn_and_edges (insn);
1643 ndead++;
1644 }
1645
1646 /* Delete dead libcalls for propagate_block. Return the insn
1647 before the libcall. */
1648
1649 static rtx
1650 propagate_block_delete_libcall (rtx insn, rtx note)
1651 {
1652 rtx first = XEXP (note, 0);
1653 rtx before = PREV_INSN (first);
1654
1655 delete_insn_chain_and_edges (first, insn);
1656 ndead++;
1657 return before;
1658 }
1659
1660 /* Update the life-status of regs for one insn. Return the previous insn. */
1661
1662 rtx
1663 propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
1664 {
1665 rtx prev = PREV_INSN (insn);
1666 int flags = pbi->flags;
1667 int insn_is_dead = 0;
1668 int libcall_is_dead = 0;
1669 rtx note;
1670 unsigned i;
1671
1672 if (! INSN_P (insn))
1673 return prev;
1674
1675 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1676 if (flags & PROP_SCAN_DEAD_CODE)
1677 {
1678 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
1679 libcall_is_dead = (insn_is_dead && note != 0
1680 && libcall_dead_p (pbi, note, insn));
1681 }
1682
1683 /* If an instruction consists of just dead store(s) on final pass,
1684 delete it. */
1685 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1686 {
1687 /* If we're trying to delete a prologue or epilogue instruction
1688 that isn't flagged as possibly being dead, something is wrong.
1689 But if we are keeping the stack pointer depressed, we might well
1690 be deleting insns that are used to compute the amount to update
1691 it by, so they are fine. */
1692 if (reload_completed
1693 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1694 && (TYPE_RETURNS_STACK_DEPRESSED
1695 (TREE_TYPE (current_function_decl))))
1696 && (((HAVE_epilogue || HAVE_prologue)
1697 && prologue_epilogue_contains (insn))
1698 || (HAVE_sibcall_epilogue
1699 && sibcall_epilogue_contains (insn)))
1700 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1701 fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1702
1703 /* Record sets. Do this even for dead instructions, since they
1704 would have killed the values if they hadn't been deleted. To
1705 be consistent, we also have to emit a clobber when we delete
1706 an insn that clobbers a live register. */
1707 pbi->flags |= PROP_DEAD_INSN;
1708 mark_set_regs (pbi, PATTERN (insn), insn);
1709 pbi->flags &= ~PROP_DEAD_INSN;
1710
1711 /* CC0 is now known to be dead. Either this insn used it,
1712 in which case it doesn't anymore, or clobbered it,
1713 so the next insn can't use it. */
1714 pbi->cc0_live = 0;
1715
1716 if (libcall_is_dead)
1717 prev = propagate_block_delete_libcall (insn, note);
1718 else
1719 {
1720
1721 /* If INSN contains a RETVAL note and is dead, but the libcall
1722 as a whole is not dead, then we want to remove INSN, but
1723 not the whole libcall sequence.
1724
1725 However, we need to also remove the dangling REG_LIBCALL
1726 note so that we do not have mis-matched LIBCALL/RETVAL
1727 notes. In theory we could find a new location for the
1728 REG_RETVAL note, but it hardly seems worth the effort.
1729
1730 NOTE at this point will be the RETVAL note if it exists. */
1731 if (note)
1732 {
1733 rtx libcall_note;
1734
1735 libcall_note
1736 = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
1737 remove_note (XEXP (note, 0), libcall_note);
1738 }
1739
1740 /* Similarly if INSN contains a LIBCALL note, remove the
1741 dangling REG_RETVAL note. */
1742 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
1743 if (note)
1744 {
1745 rtx retval_note;
1746
1747 retval_note
1748 = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
1749 remove_note (XEXP (note, 0), retval_note);
1750 }
1751
1752 /* Now delete INSN. */
1753 propagate_block_delete_insn (insn);
1754 }
1755
1756 return prev;
1757 }
1758
1759 /* See if this is an increment or decrement that can be merged into
1760 a following memory address. */
1761 #ifdef AUTO_INC_DEC
1762 {
1763 rtx x = single_set (insn);
1764
1765 /* Does this instruction increment or decrement a register? */
1766 if ((flags & PROP_AUTOINC)
1767 && x != 0
1768 && REG_P (SET_DEST (x))
1769 && (GET_CODE (SET_SRC (x)) == PLUS
1770 || GET_CODE (SET_SRC (x)) == MINUS)
1771 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1772 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1773 /* Ok, look for a following memory ref we can combine with.
1774 If one is found, change the memory ref to a PRE_INC
1775 or PRE_DEC, cancel this insn, and return 1.
1776 Return 0 if nothing has been done. */
1777 && try_pre_increment_1 (pbi, insn))
1778 return prev;
1779 }
1780 #endif /* AUTO_INC_DEC */
1781
1782 CLEAR_REG_SET (pbi->new_set);
1783
1784 /* If this is not the final pass, and this insn is copying the value of
1785 a library call and it's dead, don't scan the insns that perform the
1786 library call, so that the call's arguments are not marked live. */
1787 if (libcall_is_dead)
1788 {
1789 /* Record the death of the dest reg. */
1790 mark_set_regs (pbi, PATTERN (insn), insn);
1791
1792 insn = XEXP (note, 0);
1793 return PREV_INSN (insn);
1794 }
1795 else if (GET_CODE (PATTERN (insn)) == SET
1796 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1797 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1798 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1799 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1800 {
1801 /* We have an insn to pop a constant amount off the stack.
1802 (Such insns use PLUS regardless of the direction of the stack,
1803 and any insn to adjust the stack by a constant is always a pop
1804 or part of a push.)
1805 These insns, if not dead stores, have no effect on life, though
1806 they do have an effect on the memory stores we are tracking. */
1807 invalidate_mems_from_set (pbi, stack_pointer_rtx);
1808 /* Still, we need to update local_set, lest ifcvt.c:dead_or_predicable
1809 concludes that the stack pointer is not modified. */
1810 mark_set_regs (pbi, PATTERN (insn), insn);
1811 }
1812 else
1813 {
1814 /* Any regs live at the time of a call instruction must not go
1815 in a register clobbered by calls. Find all regs now live and
1816 record this for them. */
1817
1818 if (CALL_P (insn) && (flags & PROP_REG_INFO))
1819 {
1820 reg_set_iterator rsi;
1821 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
1822 REG_N_CALLS_CROSSED (i)++;
1823 if (can_throw_internal (insn))
1824 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
1825 REG_N_THROWING_CALLS_CROSSED (i)++;
1826 }
1827
1828 /* Record sets. Do this even for dead instructions, since they
1829 would have killed the values if they hadn't been deleted. */
1830 mark_set_regs (pbi, PATTERN (insn), insn);
1831
1832 if (CALL_P (insn))
1833 {
1834 regset live_at_end;
1835 bool sibcall_p;
1836 rtx note, cond;
1837 int i;
1838
1839 cond = NULL_RTX;
1840 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1841 cond = COND_EXEC_TEST (PATTERN (insn));
1842
1843 /* Non-constant calls clobber memory, constant calls do not
1844 clobber memory, though they may clobber outgoing arguments
1845 on the stack. */
1846 if (! CONST_OR_PURE_CALL_P (insn))
1847 {
1848 free_EXPR_LIST_list (&pbi->mem_set_list);
1849 pbi->mem_set_list_len = 0;
1850 }
1851 else
1852 invalidate_mems_from_set (pbi, stack_pointer_rtx);
1853
1854 /* There may be extra registers to be clobbered. */
1855 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1856 note;
1857 note = XEXP (note, 1))
1858 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1859 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
1860 cond, insn, pbi->flags);
1861
1862 /* Calls change all call-used and global registers; sibcalls do not
1863 clobber anything that must be preserved at end-of-function,
1864 except for return values. */
1865
1866 sibcall_p = SIBLING_CALL_P (insn);
1867 live_at_end = EXIT_BLOCK_PTR->il.rtl->global_live_at_start;
1868 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1869 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
1870 && ! (sibcall_p
1871 && REGNO_REG_SET_P (live_at_end, i)
1872 && ! refers_to_regno_p (i, i+1,
1873 current_function_return_rtx,
1874 (rtx *) 0)))
1875 {
1876 enum rtx_code code = global_regs[i] ? SET : CLOBBER;
1877 /* We do not want REG_UNUSED notes for these registers. */
1878 mark_set_1 (pbi, code, regno_reg_rtx[i], cond, insn,
1879 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
1880 }
1881 }
1882
1883 /* If an insn doesn't use CC0, it becomes dead since we assume
1884 that every insn clobbers it. So show it dead here;
1885 mark_used_regs will set it live if it is referenced. */
1886 pbi->cc0_live = 0;
1887
1888 /* Record uses. */
1889 if (! insn_is_dead)
1890 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1891
1892 /* Sometimes we may have inserted something before INSN (such as a move)
1893 when we make an auto-inc. So ensure we will scan those insns. */
1894 #ifdef AUTO_INC_DEC
1895 prev = PREV_INSN (insn);
1896 #endif
1897
1898 if (! insn_is_dead && CALL_P (insn))
1899 {
1900 int i;
1901 rtx note, cond;
1902
1903 cond = NULL_RTX;
1904 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1905 cond = COND_EXEC_TEST (PATTERN (insn));
1906
1907 /* Calls use their arguments, and may clobber memory which
1908 address involves some register. */
1909 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1910 note;
1911 note = XEXP (note, 1))
1912 /* We find USE or CLOBBER entities in a FUNCTION_USAGE list: both
1913 of which mark_used_regs knows how to handle. */
1914 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0), cond, insn);
1915
1916 /* The stack ptr is used (honorarily) by a CALL insn. */
1917 if ((flags & PROP_REG_INFO)
1918 && !REGNO_REG_SET_P (pbi->reg_live, STACK_POINTER_REGNUM))
1919 reg_deaths[STACK_POINTER_REGNUM] = pbi->insn_num;
1920 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1921
1922 /* Calls may also reference any of the global registers,
1923 so they are made live. */
1924 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1925 if (global_regs[i])
1926 mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
1927 }
1928 }
1929
1930 pbi->insn_num++;
1931
1932 return prev;
1933 }
1934
1935 /* Initialize a propagate_block_info struct for public consumption.
1936 Note that the structure itself is opaque to this file, but that
1937 the user can use the regsets provided here. */
1938
1939 struct propagate_block_info *
1940 init_propagate_block_info (basic_block bb, regset live, regset local_set,
1941 regset cond_local_set, int flags)
1942 {
1943 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1944
1945 pbi->bb = bb;
1946 pbi->reg_live = live;
1947 pbi->mem_set_list = NULL_RTX;
1948 pbi->mem_set_list_len = 0;
1949 pbi->local_set = local_set;
1950 pbi->cond_local_set = cond_local_set;
1951 pbi->cc0_live = 0;
1952 pbi->flags = flags;
1953 pbi->insn_num = 0;
1954
1955 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1956 pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
1957 else
1958 pbi->reg_next_use = NULL;
1959
1960 pbi->new_set = BITMAP_ALLOC (NULL);
1961
1962 #ifdef HAVE_conditional_execution
1963 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
1964 free_reg_cond_life_info);
1965 pbi->reg_cond_reg = BITMAP_ALLOC (NULL);
1966
1967 /* If this block ends in a conditional branch, for each register
1968 live from one side of the branch and not the other, record the
1969 register as conditionally dead. */
1970 if (JUMP_P (BB_END (bb))
1971 && any_condjump_p (BB_END (bb)))
1972 {
1973 regset diff = ALLOC_REG_SET (&reg_obstack);
1974 basic_block bb_true, bb_false;
1975 unsigned i;
1976
1977 /* Identify the successor blocks. */
1978 bb_true = EDGE_SUCC (bb, 0)->dest;
1979 if (!single_succ_p (bb))
1980 {
1981 bb_false = EDGE_SUCC (bb, 1)->dest;
1982
1983 if (EDGE_SUCC (bb, 0)->flags & EDGE_FALLTHRU)
1984 {
1985 basic_block t = bb_false;
1986 bb_false = bb_true;
1987 bb_true = t;
1988 }
1989 else
1990 gcc_assert (EDGE_SUCC (bb, 1)->flags & EDGE_FALLTHRU);
1991 }
1992 else
1993 {
1994 /* This can happen with a conditional jump to the next insn. */
1995 gcc_assert (JUMP_LABEL (BB_END (bb)) == BB_HEAD (bb_true));
1996
1997 /* Simplest way to do nothing. */
1998 bb_false = bb_true;
1999 }
2000
2001 /* Compute which register lead different lives in the successors. */
2002 bitmap_xor (diff, bb_true->il.rtl->global_live_at_start,
2003 bb_false->il.rtl->global_live_at_start);
2004
2005 if (!bitmap_empty_p (diff))
2006 {
2007 /* Extract the condition from the branch. */
2008 rtx set_src = SET_SRC (pc_set (BB_END (bb)));
2009 rtx cond_true = XEXP (set_src, 0);
2010 rtx reg = XEXP (cond_true, 0);
2011 enum rtx_code inv_cond;
2012
2013 if (GET_CODE (reg) == SUBREG)
2014 reg = SUBREG_REG (reg);
2015
2016 /* We can only track conditional lifetimes if the condition is
2017 in the form of a reversible comparison of a register against
2018 zero. If the condition is more complex than that, then it is
2019 safe not to record any information. */
2020 inv_cond = reversed_comparison_code (cond_true, BB_END (bb));
2021 if (inv_cond != UNKNOWN
2022 && REG_P (reg)
2023 && XEXP (cond_true, 1) == const0_rtx)
2024 {
2025 rtx cond_false
2026 = gen_rtx_fmt_ee (inv_cond,
2027 GET_MODE (cond_true), XEXP (cond_true, 0),
2028 XEXP (cond_true, 1));
2029 reg_set_iterator rsi;
2030
2031 if (GET_CODE (XEXP (set_src, 1)) == PC)
2032 {
2033 rtx t = cond_false;
2034 cond_false = cond_true;
2035 cond_true = t;
2036 }
2037
2038 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
2039
2040 /* For each such register, mark it conditionally dead. */
2041 EXECUTE_IF_SET_IN_REG_SET (diff, 0, i, rsi)
2042 {
2043 struct reg_cond_life_info *rcli;
2044 rtx cond;
2045
2046 rcli = xmalloc (sizeof (*rcli));
2047
2048 if (REGNO_REG_SET_P (bb_true->il.rtl->global_live_at_start,
2049 i))
2050 cond = cond_false;
2051 else
2052 cond = cond_true;
2053 rcli->condition = cond;
2054 rcli->stores = const0_rtx;
2055 rcli->orig_condition = cond;
2056
2057 splay_tree_insert (pbi->reg_cond_dead, i,
2058 (splay_tree_value) rcli);
2059 }
2060 }
2061 }
2062
2063 FREE_REG_SET (diff);
2064 }
2065 #endif
2066
2067 /* If this block has no successors, any stores to the frame that aren't
2068 used later in the block are dead. So make a pass over the block
2069 recording any such that are made and show them dead at the end. We do
2070 a very conservative and simple job here. */
2071 if (optimize
2072 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
2073 && (TYPE_RETURNS_STACK_DEPRESSED
2074 (TREE_TYPE (current_function_decl))))
2075 && (flags & PROP_SCAN_DEAD_STORES)
2076 && (EDGE_COUNT (bb->succs) == 0
2077 || (single_succ_p (bb)
2078 && single_succ (bb) == EXIT_BLOCK_PTR
2079 && ! current_function_calls_eh_return)))
2080 {
2081 rtx insn, set;
2082 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
2083 if (NONJUMP_INSN_P (insn)
2084 && (set = single_set (insn))
2085 && MEM_P (SET_DEST (set)))
2086 {
2087 rtx mem = SET_DEST (set);
2088 rtx canon_mem = canon_rtx (mem);
2089
2090 if (XEXP (canon_mem, 0) == frame_pointer_rtx
2091 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
2092 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
2093 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
2094 add_to_mem_set_list (pbi, canon_mem);
2095 }
2096 }
2097
2098 return pbi;
2099 }
2100
2101 /* Release a propagate_block_info struct. */
2102
2103 void
2104 free_propagate_block_info (struct propagate_block_info *pbi)
2105 {
2106 free_EXPR_LIST_list (&pbi->mem_set_list);
2107
2108 BITMAP_FREE (pbi->new_set);
2109
2110 #ifdef HAVE_conditional_execution
2111 splay_tree_delete (pbi->reg_cond_dead);
2112 BITMAP_FREE (pbi->reg_cond_reg);
2113 #endif
2114
2115 if (pbi->flags & PROP_REG_INFO)
2116 {
2117 int num = pbi->insn_num;
2118 unsigned i;
2119 reg_set_iterator rsi;
2120
2121 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
2122 {
2123 REG_LIVE_LENGTH (i) += num - reg_deaths[i];
2124 reg_deaths[i] = 0;
2125 }
2126 }
2127 if (pbi->reg_next_use)
2128 free (pbi->reg_next_use);
2129
2130 free (pbi);
2131 }
2132
2133 /* Compute the registers live at the beginning of a basic block BB from
2134 those live at the end.
2135
2136 When called, REG_LIVE contains those live at the end. On return, it
2137 contains those live at the beginning.
2138
2139 LOCAL_SET, if non-null, will be set with all registers killed
2140 unconditionally by this basic block.
2141 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
2142 killed conditionally by this basic block. If there is any unconditional
2143 set of a register, then the corresponding bit will be set in LOCAL_SET
2144 and cleared in COND_LOCAL_SET.
2145 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
2146 case, the resulting set will be equal to the union of the two sets that
2147 would otherwise be computed.
2148
2149 Return nonzero if an INSN is deleted (i.e. by dead code removal). */
2150
2151 int
2152 propagate_block (basic_block bb, regset live, regset local_set,
2153 regset cond_local_set, int flags)
2154 {
2155 struct propagate_block_info *pbi;
2156 rtx insn, prev;
2157 int changed;
2158
2159 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2160
2161 if (flags & PROP_REG_INFO)
2162 {
2163 unsigned i;
2164 reg_set_iterator rsi;
2165
2166 /* Process the regs live at the end of the block.
2167 Mark them as not local to any one basic block. */
2168 EXECUTE_IF_SET_IN_REG_SET (live, 0, i, rsi)
2169 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2170 }
2171
2172 /* Scan the block an insn at a time from end to beginning. */
2173
2174 changed = 0;
2175 for (insn = BB_END (bb); ; insn = prev)
2176 {
2177 /* If this is a call to `setjmp' et al, warn if any
2178 non-volatile datum is live. */
2179 if ((flags & PROP_REG_INFO)
2180 && CALL_P (insn)
2181 && find_reg_note (insn, REG_SETJMP, NULL))
2182 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2183
2184 prev = propagate_one_insn (pbi, insn);
2185 if (!prev)
2186 changed |= insn != get_insns ();
2187 else
2188 changed |= NEXT_INSN (prev) != insn;
2189
2190 if (insn == BB_HEAD (bb))
2191 break;
2192 }
2193
2194 free_propagate_block_info (pbi);
2195
2196 return changed;
2197 }
2198 \f
2199 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2200 (SET expressions whose destinations are registers dead after the insn).
2201 NEEDED is the regset that says which regs are alive after the insn.
2202
2203 Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.
2204
2205 If X is the entire body of an insn, NOTES contains the reg notes
2206 pertaining to the insn. */
2207
2208 static int
2209 insn_dead_p (struct propagate_block_info *pbi, rtx x, int call_ok,
2210 rtx notes ATTRIBUTE_UNUSED)
2211 {
2212 enum rtx_code code = GET_CODE (x);
2213
2214 /* Don't eliminate insns that may trap. */
2215 if (flag_non_call_exceptions && may_trap_p (x))
2216 return 0;
2217
2218 #ifdef AUTO_INC_DEC
2219 /* As flow is invoked after combine, we must take existing AUTO_INC
2220 expressions into account. */
2221 for (; notes; notes = XEXP (notes, 1))
2222 {
2223 if (REG_NOTE_KIND (notes) == REG_INC)
2224 {
2225 int regno = REGNO (XEXP (notes, 0));
2226
2227 /* Don't delete insns to set global regs. */
2228 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2229 || REGNO_REG_SET_P (pbi->reg_live, regno))
2230 return 0;
2231 }
2232 }
2233 #endif
2234
2235 /* If setting something that's a reg or part of one,
2236 see if that register's altered value will be live. */
2237
2238 if (code == SET)
2239 {
2240 rtx r = SET_DEST (x);
2241
2242 #ifdef HAVE_cc0
2243 if (GET_CODE (r) == CC0)
2244 return ! pbi->cc0_live;
2245 #endif
2246
2247 /* A SET that is a subroutine call cannot be dead. */
2248 if (GET_CODE (SET_SRC (x)) == CALL)
2249 {
2250 if (! call_ok)
2251 return 0;
2252 }
2253
2254 /* Don't eliminate loads from volatile memory or volatile asms. */
2255 else if (volatile_refs_p (SET_SRC (x)))
2256 return 0;
2257
2258 if (MEM_P (r))
2259 {
2260 rtx temp, canon_r;
2261
2262 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
2263 return 0;
2264
2265 canon_r = canon_rtx (r);
2266
2267 /* Walk the set of memory locations we are currently tracking
2268 and see if one is an identical match to this memory location.
2269 If so, this memory write is dead (remember, we're walking
2270 backwards from the end of the block to the start). Since
2271 rtx_equal_p does not check the alias set or flags, we also
2272 must have the potential for them to conflict (anti_dependence). */
2273 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2274 if (anti_dependence (r, XEXP (temp, 0)))
2275 {
2276 rtx mem = XEXP (temp, 0);
2277
2278 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
2279 && (GET_MODE_SIZE (GET_MODE (canon_r))
2280 <= GET_MODE_SIZE (GET_MODE (mem))))
2281 return 1;
2282
2283 #ifdef AUTO_INC_DEC
2284 /* Check if memory reference matches an auto increment. Only
2285 post increment/decrement or modify are valid. */
2286 if (GET_MODE (mem) == GET_MODE (r)
2287 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
2288 || GET_CODE (XEXP (mem, 0)) == POST_INC
2289 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
2290 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
2291 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
2292 return 1;
2293 #endif
2294 }
2295 }
2296 else
2297 {
2298 while (GET_CODE (r) == SUBREG
2299 || GET_CODE (r) == STRICT_LOW_PART
2300 || GET_CODE (r) == ZERO_EXTRACT)
2301 r = XEXP (r, 0);
2302
2303 if (REG_P (r))
2304 {
2305 int regno = REGNO (r);
2306
2307 /* Obvious. */
2308 if (REGNO_REG_SET_P (pbi->reg_live, regno))
2309 return 0;
2310
2311 /* If this is a hard register, verify that subsequent
2312 words are not needed. */
2313 if (regno < FIRST_PSEUDO_REGISTER)
2314 {
2315 int n = hard_regno_nregs[regno][GET_MODE (r)];
2316
2317 while (--n > 0)
2318 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
2319 return 0;
2320 }
2321
2322 /* Don't delete insns to set global regs. */
2323 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2324 return 0;
2325
2326 /* Make sure insns to set the stack pointer aren't deleted. */
2327 if (regno == STACK_POINTER_REGNUM)
2328 return 0;
2329
2330 /* ??? These bits might be redundant with the force live bits
2331 in calculate_global_regs_live. We would delete from
2332 sequential sets; whether this actually affects real code
2333 for anything but the stack pointer I don't know. */
2334 /* Make sure insns to set the frame pointer aren't deleted. */
2335 if (regno == FRAME_POINTER_REGNUM
2336 && (! reload_completed || frame_pointer_needed))
2337 return 0;
2338 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2339 if (regno == HARD_FRAME_POINTER_REGNUM
2340 && (! reload_completed || frame_pointer_needed))
2341 return 0;
2342 #endif
2343
2344 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2345 /* Make sure insns to set arg pointer are never deleted
2346 (if the arg pointer isn't fixed, there will be a USE
2347 for it, so we can treat it normally). */
2348 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2349 return 0;
2350 #endif
2351
2352 /* Otherwise, the set is dead. */
2353 return 1;
2354 }
2355 }
2356 }
2357
2358 /* If performing several activities, insn is dead if each activity
2359 is individually dead. Also, CLOBBERs and USEs can be ignored; a
2360 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
2361 worth keeping. */
2362 else if (code == PARALLEL)
2363 {
2364 int i = XVECLEN (x, 0);
2365
2366 for (i--; i >= 0; i--)
2367 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2368 && GET_CODE (XVECEXP (x, 0, i)) != USE
2369 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
2370 return 0;
2371
2372 return 1;
2373 }
2374
2375 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2376 is not necessarily true for hard registers until after reload. */
2377 else if (code == CLOBBER)
2378 {
2379 if (REG_P (XEXP (x, 0))
2380 && (REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2381 || reload_completed)
2382 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
2383 return 1;
2384 }
2385
2386 /* ??? A base USE is a historical relic. It ought not be needed anymore.
2387 Instances where it is still used are either (1) temporary and the USE
2388 escaped the pass, (2) cruft and the USE need not be emitted anymore,
2389 or (3) hiding bugs elsewhere that are not properly representing data
2390 flow. */
2391
2392 return 0;
2393 }
2394
2395 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
2396 return 1 if the entire library call is dead.
2397 This is true if INSN copies a register (hard or pseudo)
2398 and if the hard return reg of the call insn is dead.
2399 (The caller should have tested the destination of the SET inside
2400 INSN already for death.)
2401
2402 If this insn doesn't just copy a register, then we don't
2403 have an ordinary libcall. In that case, cse could not have
2404 managed to substitute the source for the dest later on,
2405 so we can assume the libcall is dead.
2406
2407 PBI is the block info giving pseudoregs live before this insn.
2408 NOTE is the REG_RETVAL note of the insn. */
2409
2410 static int
2411 libcall_dead_p (struct propagate_block_info *pbi, rtx note, rtx insn)
2412 {
2413 rtx x = single_set (insn);
2414
2415 if (x)
2416 {
2417 rtx r = SET_SRC (x);
2418
2419 if (REG_P (r) || GET_CODE (r) == SUBREG)
2420 {
2421 rtx call = XEXP (note, 0);
2422 rtx call_pat;
2423 int i;
2424
2425 /* Find the call insn. */
2426 while (call != insn && !CALL_P (call))
2427 call = NEXT_INSN (call);
2428
2429 /* If there is none, do nothing special,
2430 since ordinary death handling can understand these insns. */
2431 if (call == insn)
2432 return 0;
2433
2434 /* See if the hard reg holding the value is dead.
2435 If this is a PARALLEL, find the call within it. */
2436 call_pat = PATTERN (call);
2437 if (GET_CODE (call_pat) == PARALLEL)
2438 {
2439 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2440 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2441 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2442 break;
2443
2444 /* This may be a library call that is returning a value
2445 via invisible pointer. Do nothing special, since
2446 ordinary death handling can understand these insns. */
2447 if (i < 0)
2448 return 0;
2449
2450 call_pat = XVECEXP (call_pat, 0, i);
2451 }
2452
2453 if (! insn_dead_p (pbi, call_pat, 1, REG_NOTES (call)))
2454 return 0;
2455
2456 while ((insn = PREV_INSN (insn)) != call)
2457 {
2458 if (! INSN_P (insn))
2459 continue;
2460 if (! insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn)))
2461 return 0;
2462 }
2463 return 1;
2464 }
2465 }
2466 return 0;
2467 }
2468
2469 /* 1 if register REGNO was alive at a place where `setjmp' was called
2470 and was set more than once or is an argument.
2471 Such regs may be clobbered by `longjmp'. */
2472
2473 int
2474 regno_clobbered_at_setjmp (int regno)
2475 {
2476 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2477 return 0;
2478
2479 return ((REG_N_SETS (regno) > 1
2480 || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end,
2481 regno))
2482 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2483 }
2484 \f
2485 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
2486 maximal list size; look for overlaps in mode and select the largest. */
2487 static void
2488 add_to_mem_set_list (struct propagate_block_info *pbi, rtx mem)
2489 {
2490 rtx i;
2491
2492 /* We don't know how large a BLKmode store is, so we must not
2493 take them into consideration. */
2494 if (GET_MODE (mem) == BLKmode)
2495 return;
2496
2497 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2498 {
2499 rtx e = XEXP (i, 0);
2500 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2501 {
2502 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2503 {
2504 #ifdef AUTO_INC_DEC
2505 /* If we must store a copy of the mem, we can just modify
2506 the mode of the stored copy. */
2507 if (pbi->flags & PROP_AUTOINC)
2508 PUT_MODE (e, GET_MODE (mem));
2509 else
2510 #endif
2511 XEXP (i, 0) = mem;
2512 }
2513 return;
2514 }
2515 }
2516
2517 if (pbi->mem_set_list_len < PARAM_VALUE (PARAM_MAX_FLOW_MEMORY_LOCATIONS))
2518 {
2519 #ifdef AUTO_INC_DEC
2520 /* Store a copy of mem, otherwise the address may be
2521 scrogged by find_auto_inc. */
2522 if (pbi->flags & PROP_AUTOINC)
2523 mem = shallow_copy_rtx (mem);
2524 #endif
2525 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2526 pbi->mem_set_list_len++;
2527 }
2528 }
2529
2530 /* INSN references memory, possibly using autoincrement addressing modes.
2531 Find any entries on the mem_set_list that need to be invalidated due
2532 to an address change. */
2533
2534 static int
2535 invalidate_mems_from_autoinc (rtx *px, void *data)
2536 {
2537 rtx x = *px;
2538 struct propagate_block_info *pbi = data;
2539
2540 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
2541 {
2542 invalidate_mems_from_set (pbi, XEXP (x, 0));
2543 return -1;
2544 }
2545
2546 return 0;
2547 }
2548
2549 /* EXP is a REG or MEM. Remove any dependent entries from
2550 pbi->mem_set_list. */
2551
2552 static void
2553 invalidate_mems_from_set (struct propagate_block_info *pbi, rtx exp)
2554 {
2555 rtx temp = pbi->mem_set_list;
2556 rtx prev = NULL_RTX;
2557 rtx next;
2558
2559 while (temp)
2560 {
2561 next = XEXP (temp, 1);
2562 if ((REG_P (exp) && reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2563 /* When we get an EXP that is a mem here, we want to check if EXP
2564 overlaps the *address* of any of the mems in the list (i.e. not
2565 whether the mems actually overlap; that's done elsewhere). */
2566 || (MEM_P (exp)
2567 && reg_overlap_mentioned_p (exp, XEXP (XEXP (temp, 0), 0))))
2568 {
2569 /* Splice this entry out of the list. */
2570 if (prev)
2571 XEXP (prev, 1) = next;
2572 else
2573 pbi->mem_set_list = next;
2574 free_EXPR_LIST_node (temp);
2575 pbi->mem_set_list_len--;
2576 }
2577 else
2578 prev = temp;
2579 temp = next;
2580 }
2581 }
2582
2583 /* Process the registers that are set within X. Their bits are set to
2584 1 in the regset DEAD, because they are dead prior to this insn.
2585
2586 If INSN is nonzero, it is the insn being processed.
2587
2588 FLAGS is the set of operations to perform. */
2589
2590 static void
2591 mark_set_regs (struct propagate_block_info *pbi, rtx x, rtx insn)
2592 {
2593 rtx cond = NULL_RTX;
2594 rtx link;
2595 enum rtx_code code;
2596 int flags = pbi->flags;
2597
2598 if (insn)
2599 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2600 {
2601 if (REG_NOTE_KIND (link) == REG_INC)
2602 mark_set_1 (pbi, SET, XEXP (link, 0),
2603 (GET_CODE (x) == COND_EXEC
2604 ? COND_EXEC_TEST (x) : NULL_RTX),
2605 insn, flags);
2606 }
2607 retry:
2608 switch (code = GET_CODE (x))
2609 {
2610 case SET:
2611 if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
2612 flags |= PROP_ASM_SCAN;
2613 /* Fall through */
2614 case CLOBBER:
2615 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
2616 return;
2617
2618 case COND_EXEC:
2619 cond = COND_EXEC_TEST (x);
2620 x = COND_EXEC_CODE (x);
2621 goto retry;
2622
2623 case PARALLEL:
2624 {
2625 int i;
2626
2627 /* We must scan forwards. If we have an asm, we need to set
2628 the PROP_ASM_SCAN flag before scanning the clobbers. */
2629 for (i = 0; i < XVECLEN (x, 0); i++)
2630 {
2631 rtx sub = XVECEXP (x, 0, i);
2632 switch (code = GET_CODE (sub))
2633 {
2634 case COND_EXEC:
2635 gcc_assert (!cond);
2636
2637 cond = COND_EXEC_TEST (sub);
2638 sub = COND_EXEC_CODE (sub);
2639 if (GET_CODE (sub) == SET)
2640 goto mark_set;
2641 if (GET_CODE (sub) == CLOBBER)
2642 goto mark_clob;
2643 break;
2644
2645 case SET:
2646 mark_set:
2647 if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
2648 flags |= PROP_ASM_SCAN;
2649 /* Fall through */
2650 case CLOBBER:
2651 mark_clob:
2652 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);
2653 break;
2654
2655 case ASM_OPERANDS:
2656 flags |= PROP_ASM_SCAN;
2657 break;
2658
2659 default:
2660 break;
2661 }
2662 }
2663 break;
2664 }
2665
2666 default:
2667 break;
2668 }
2669 }
2670
2671 /* Process a single set, which appears in INSN. REG (which may not
2672 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2673 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2674 If the set is conditional (because it appear in a COND_EXEC), COND
2675 will be the condition. */
2676
2677 static void
2678 mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx cond, rtx insn, int flags)
2679 {
2680 int regno_first = -1, regno_last = -1;
2681 unsigned long not_dead = 0;
2682 int i;
2683
2684 /* Modifying just one hardware register of a multi-reg value or just a
2685 byte field of a register does not mean the value from before this insn
2686 is now dead. Of course, if it was dead after it's unused now. */
2687
2688 switch (GET_CODE (reg))
2689 {
2690 case PARALLEL:
2691 /* Some targets place small structures in registers for return values of
2692 functions. We have to detect this case specially here to get correct
2693 flow information. */
2694 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2695 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2696 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2697 flags);
2698 return;
2699
2700 case SIGN_EXTRACT:
2701 /* SIGN_EXTRACT cannot be an lvalue. */
2702 gcc_unreachable ();
2703
2704 case ZERO_EXTRACT:
2705 case STRICT_LOW_PART:
2706 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
2707 do
2708 reg = XEXP (reg, 0);
2709 while (GET_CODE (reg) == SUBREG
2710 || GET_CODE (reg) == ZERO_EXTRACT
2711 || GET_CODE (reg) == STRICT_LOW_PART);
2712 if (MEM_P (reg))
2713 break;
2714 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
2715 /* Fall through. */
2716
2717 case REG:
2718 regno_last = regno_first = REGNO (reg);
2719 if (regno_first < FIRST_PSEUDO_REGISTER)
2720 regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
2721 break;
2722
2723 case SUBREG:
2724 if (REG_P (SUBREG_REG (reg)))
2725 {
2726 enum machine_mode outer_mode = GET_MODE (reg);
2727 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2728
2729 /* Identify the range of registers affected. This is moderately
2730 tricky for hard registers. See alter_subreg. */
2731
2732 regno_last = regno_first = REGNO (SUBREG_REG (reg));
2733 if (regno_first < FIRST_PSEUDO_REGISTER)
2734 {
2735 regno_first += subreg_regno_offset (regno_first, inner_mode,
2736 SUBREG_BYTE (reg),
2737 outer_mode);
2738 regno_last = (regno_first
2739 + hard_regno_nregs[regno_first][outer_mode] - 1);
2740
2741 /* Since we've just adjusted the register number ranges, make
2742 sure REG matches. Otherwise some_was_live will be clear
2743 when it shouldn't have been, and we'll create incorrect
2744 REG_UNUSED notes. */
2745 reg = gen_rtx_REG (outer_mode, regno_first);
2746 }
2747 else
2748 {
2749 /* If the number of words in the subreg is less than the number
2750 of words in the full register, we have a well-defined partial
2751 set. Otherwise the high bits are undefined.
2752
2753 This is only really applicable to pseudos, since we just took
2754 care of multi-word hard registers. */
2755 if (((GET_MODE_SIZE (outer_mode)
2756 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2757 < ((GET_MODE_SIZE (inner_mode)
2758 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2759 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2760 regno_first);
2761
2762 reg = SUBREG_REG (reg);
2763 }
2764 }
2765 else
2766 reg = SUBREG_REG (reg);
2767 break;
2768
2769 default:
2770 break;
2771 }
2772
2773 /* If this set is a MEM, then it kills any aliased writes and any
2774 other MEMs which use it.
2775 If this set is a REG, then it kills any MEMs which use the reg. */
2776 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
2777 {
2778 if (REG_P (reg) || MEM_P (reg))
2779 invalidate_mems_from_set (pbi, reg);
2780
2781 /* If the memory reference had embedded side effects (autoincrement
2782 address modes) then we may need to kill some entries on the
2783 memory set list. */
2784 if (insn && MEM_P (reg))
2785 for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
2786
2787 if (MEM_P (reg) && ! side_effects_p (reg)
2788 /* ??? With more effort we could track conditional memory life. */
2789 && ! cond)
2790 add_to_mem_set_list (pbi, canon_rtx (reg));
2791 }
2792
2793 if (REG_P (reg)
2794 && ! (regno_first == FRAME_POINTER_REGNUM
2795 && (! reload_completed || frame_pointer_needed))
2796 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2797 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2798 && (! reload_completed || frame_pointer_needed))
2799 #endif
2800 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2801 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2802 #endif
2803 )
2804 {
2805 int some_was_live = 0, some_was_dead = 0;
2806
2807 for (i = regno_first; i <= regno_last; ++i)
2808 {
2809 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2810 if (pbi->local_set)
2811 {
2812 /* Order of the set operation matters here since both
2813 sets may be the same. */
2814 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2815 if (cond != NULL_RTX
2816 && ! REGNO_REG_SET_P (pbi->local_set, i))
2817 SET_REGNO_REG_SET (pbi->cond_local_set, i);
2818 else
2819 SET_REGNO_REG_SET (pbi->local_set, i);
2820 }
2821 if (code != CLOBBER || needed_regno)
2822 SET_REGNO_REG_SET (pbi->new_set, i);
2823
2824 some_was_live |= needed_regno;
2825 some_was_dead |= ! needed_regno;
2826 }
2827
2828 #ifdef HAVE_conditional_execution
2829 /* Consider conditional death in deciding that the register needs
2830 a death note. */
2831 if (some_was_live && ! not_dead
2832 /* The stack pointer is never dead. Well, not strictly true,
2833 but it's very difficult to tell from here. Hopefully
2834 combine_stack_adjustments will fix up the most egregious
2835 errors. */
2836 && regno_first != STACK_POINTER_REGNUM)
2837 {
2838 for (i = regno_first; i <= regno_last; ++i)
2839 if (! mark_regno_cond_dead (pbi, i, cond))
2840 not_dead |= ((unsigned long) 1) << (i - regno_first);
2841 }
2842 #endif
2843
2844 /* Additional data to record if this is the final pass. */
2845 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2846 | PROP_DEATH_NOTES | PROP_AUTOINC))
2847 {
2848 rtx y;
2849 int blocknum = pbi->bb->index;
2850
2851 y = NULL_RTX;
2852 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2853 {
2854 y = pbi->reg_next_use[regno_first];
2855
2856 /* The next use is no longer next, since a store intervenes. */
2857 for (i = regno_first; i <= regno_last; ++i)
2858 pbi->reg_next_use[i] = 0;
2859 }
2860
2861 if (flags & PROP_REG_INFO)
2862 {
2863 for (i = regno_first; i <= regno_last; ++i)
2864 {
2865 /* Count (weighted) references, stores, etc. This counts a
2866 register twice if it is modified, but that is correct. */
2867 REG_N_SETS (i) += 1;
2868 REG_N_REFS (i) += 1;
2869 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2870
2871 /* The insns where a reg is live are normally counted
2872 elsewhere, but we want the count to include the insn
2873 where the reg is set, and the normal counting mechanism
2874 would not count it. */
2875 REG_LIVE_LENGTH (i) += 1;
2876 }
2877
2878 /* If this is a hard reg, record this function uses the reg. */
2879 if (regno_first < FIRST_PSEUDO_REGISTER)
2880 {
2881 for (i = regno_first; i <= regno_last; i++)
2882 regs_ever_live[i] = 1;
2883 if (flags & PROP_ASM_SCAN)
2884 for (i = regno_first; i <= regno_last; i++)
2885 regs_asm_clobbered[i] = 1;
2886 }
2887 else
2888 {
2889 /* Keep track of which basic blocks each reg appears in. */
2890 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2891 REG_BASIC_BLOCK (regno_first) = blocknum;
2892 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2893 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
2894 }
2895 }
2896
2897 if (! some_was_dead)
2898 {
2899 if (flags & PROP_LOG_LINKS)
2900 {
2901 /* Make a logical link from the next following insn
2902 that uses this register, back to this insn.
2903 The following insns have already been processed.
2904
2905 We don't build a LOG_LINK for hard registers containing
2906 in ASM_OPERANDs. If these registers get replaced,
2907 we might wind up changing the semantics of the insn,
2908 even if reload can make what appear to be valid
2909 assignments later.
2910
2911 We don't build a LOG_LINK for global registers to
2912 or from a function call. We don't want to let
2913 combine think that it knows what is going on with
2914 global registers. */
2915 if (y && (BLOCK_NUM (y) == blocknum)
2916 && (regno_first >= FIRST_PSEUDO_REGISTER
2917 || (asm_noperands (PATTERN (y)) < 0
2918 && ! ((CALL_P (insn)
2919 || CALL_P (y))
2920 && global_regs[regno_first]))))
2921 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
2922 }
2923 }
2924 else if (not_dead)
2925 ;
2926 else if (! some_was_live)
2927 {
2928 if (flags & PROP_REG_INFO)
2929 REG_N_DEATHS (regno_first) += 1;
2930
2931 if (flags & PROP_DEATH_NOTES
2932 #ifdef STACK_REGS
2933 && (!(flags & PROP_POST_REGSTACK)
2934 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
2935 LAST_STACK_REG))
2936 #endif
2937 )
2938 {
2939 /* Note that dead stores have already been deleted
2940 when possible. If we get here, we have found a
2941 dead store that cannot be eliminated (because the
2942 same insn does something useful). Indicate this
2943 by marking the reg being set as dying here. */
2944 REG_NOTES (insn)
2945 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2946 }
2947 }
2948 else
2949 {
2950 if (flags & PROP_DEATH_NOTES
2951 #ifdef STACK_REGS
2952 && (!(flags & PROP_POST_REGSTACK)
2953 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
2954 LAST_STACK_REG))
2955 #endif
2956 )
2957 {
2958 /* This is a case where we have a multi-word hard register
2959 and some, but not all, of the words of the register are
2960 needed in subsequent insns. Write REG_UNUSED notes
2961 for those parts that were not needed. This case should
2962 be rare. */
2963
2964 for (i = regno_first; i <= regno_last; ++i)
2965 if (! REGNO_REG_SET_P (pbi->reg_live, i))
2966 REG_NOTES (insn)
2967 = alloc_EXPR_LIST (REG_UNUSED,
2968 regno_reg_rtx[i],
2969 REG_NOTES (insn));
2970 }
2971 }
2972 }
2973
2974 /* Mark the register as being dead. */
2975 if (some_was_live
2976 /* The stack pointer is never dead. Well, not strictly true,
2977 but it's very difficult to tell from here. Hopefully
2978 combine_stack_adjustments will fix up the most egregious
2979 errors. */
2980 && regno_first != STACK_POINTER_REGNUM)
2981 {
2982 for (i = regno_first; i <= regno_last; ++i)
2983 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2984 {
2985 if ((pbi->flags & PROP_REG_INFO)
2986 && REGNO_REG_SET_P (pbi->reg_live, i))
2987 {
2988 REG_LIVE_LENGTH (i) += pbi->insn_num - reg_deaths[i];
2989 reg_deaths[i] = 0;
2990 }
2991 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
2992 }
2993 if (flags & PROP_DEAD_INSN)
2994 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, reg), insn);
2995 }
2996 }
2997 else if (REG_P (reg))
2998 {
2999 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3000 pbi->reg_next_use[regno_first] = 0;
3001
3002 if ((flags & PROP_REG_INFO) != 0
3003 && (flags & PROP_ASM_SCAN) != 0
3004 && regno_first < FIRST_PSEUDO_REGISTER)
3005 {
3006 for (i = regno_first; i <= regno_last; i++)
3007 regs_asm_clobbered[i] = 1;
3008 }
3009 }
3010
3011 /* If this is the last pass and this is a SCRATCH, show it will be dying
3012 here and count it. */
3013 else if (GET_CODE (reg) == SCRATCH)
3014 {
3015 if (flags & PROP_DEATH_NOTES
3016 #ifdef STACK_REGS
3017 && (!(flags & PROP_POST_REGSTACK)
3018 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
3019 #endif
3020 )
3021 REG_NOTES (insn)
3022 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3023 }
3024 }
3025 \f
3026 #ifdef HAVE_conditional_execution
3027 /* Mark REGNO conditionally dead.
3028 Return true if the register is now unconditionally dead. */
3029
3030 static int
3031 mark_regno_cond_dead (struct propagate_block_info *pbi, int regno, rtx cond)
3032 {
3033 /* If this is a store to a predicate register, the value of the
3034 predicate is changing, we don't know that the predicate as seen
3035 before is the same as that seen after. Flush all dependent
3036 conditions from reg_cond_dead. This will make all such
3037 conditionally live registers unconditionally live. */
3038 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
3039 flush_reg_cond_reg (pbi, regno);
3040
3041 /* If this is an unconditional store, remove any conditional
3042 life that may have existed. */
3043 if (cond == NULL_RTX)
3044 splay_tree_remove (pbi->reg_cond_dead, regno);
3045 else
3046 {
3047 splay_tree_node node;
3048 struct reg_cond_life_info *rcli;
3049 rtx ncond;
3050
3051 /* Otherwise this is a conditional set. Record that fact.
3052 It may have been conditionally used, or there may be a
3053 subsequent set with a complementary condition. */
3054
3055 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
3056 if (node == NULL)
3057 {
3058 /* The register was unconditionally live previously.
3059 Record the current condition as the condition under
3060 which it is dead. */
3061 rcli = xmalloc (sizeof (*rcli));
3062 rcli->condition = cond;
3063 rcli->stores = cond;
3064 rcli->orig_condition = const0_rtx;
3065 splay_tree_insert (pbi->reg_cond_dead, regno,
3066 (splay_tree_value) rcli);
3067
3068 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3069
3070 /* Not unconditionally dead. */
3071 return 0;
3072 }
3073 else
3074 {
3075 /* The register was conditionally live previously.
3076 Add the new condition to the old. */
3077 rcli = (struct reg_cond_life_info *) node->value;
3078 ncond = rcli->condition;
3079 ncond = ior_reg_cond (ncond, cond, 1);
3080 if (rcli->stores == const0_rtx)
3081 rcli->stores = cond;
3082 else if (rcli->stores != const1_rtx)
3083 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
3084
3085 /* If the register is now unconditionally dead, remove the entry
3086 in the splay_tree. A register is unconditionally dead if the
3087 dead condition ncond is true. A register is also unconditionally
3088 dead if the sum of all conditional stores is an unconditional
3089 store (stores is true), and the dead condition is identically the
3090 same as the original dead condition initialized at the end of
3091 the block. This is a pointer compare, not an rtx_equal_p
3092 compare. */
3093 if (ncond == const1_rtx
3094 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
3095 splay_tree_remove (pbi->reg_cond_dead, regno);
3096 else
3097 {
3098 rcli->condition = ncond;
3099
3100 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3101
3102 /* Not unconditionally dead. */
3103 return 0;
3104 }
3105 }
3106 }
3107
3108 return 1;
3109 }
3110
3111 /* Called from splay_tree_delete for pbi->reg_cond_life. */
3112
3113 static void
3114 free_reg_cond_life_info (splay_tree_value value)
3115 {
3116 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
3117 free (rcli);
3118 }
3119
3120 /* Helper function for flush_reg_cond_reg. */
3121
3122 static int
3123 flush_reg_cond_reg_1 (splay_tree_node node, void *data)
3124 {
3125 struct reg_cond_life_info *rcli;
3126 int *xdata = (int *) data;
3127 unsigned int regno = xdata[0];
3128
3129 /* Don't need to search if last flushed value was farther on in
3130 the in-order traversal. */
3131 if (xdata[1] >= (int) node->key)
3132 return 0;
3133
3134 /* Splice out portions of the expression that refer to regno. */
3135 rcli = (struct reg_cond_life_info *) node->value;
3136 rcli->condition = elim_reg_cond (rcli->condition, regno);
3137 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
3138 rcli->stores = elim_reg_cond (rcli->stores, regno);
3139
3140 /* If the entire condition is now false, signal the node to be removed. */
3141 if (rcli->condition == const0_rtx)
3142 {
3143 xdata[1] = node->key;
3144 return -1;
3145 }
3146 else
3147 gcc_assert (rcli->condition != const1_rtx);
3148
3149 return 0;
3150 }
3151
3152 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
3153
3154 static void
3155 flush_reg_cond_reg (struct propagate_block_info *pbi, int regno)
3156 {
3157 int pair[2];
3158
3159 pair[0] = regno;
3160 pair[1] = -1;
3161 while (splay_tree_foreach (pbi->reg_cond_dead,
3162 flush_reg_cond_reg_1, pair) == -1)
3163 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
3164
3165 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
3166 }
3167
3168 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
3169 For ior/and, the ADD flag determines whether we want to add the new
3170 condition X to the old one unconditionally. If it is zero, we will
3171 only return a new expression if X allows us to simplify part of
3172 OLD, otherwise we return NULL to the caller.
3173 If ADD is nonzero, we will return a new condition in all cases. The
3174 toplevel caller of one of these functions should always pass 1 for
3175 ADD. */
3176
3177 static rtx
3178 ior_reg_cond (rtx old, rtx x, int add)
3179 {
3180 rtx op0, op1;
3181
3182 if (COMPARISON_P (old))
3183 {
3184 if (COMPARISON_P (x)
3185 && REVERSE_CONDEXEC_PREDICATES_P (x, old)
3186 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3187 return const1_rtx;
3188 if (GET_CODE (x) == GET_CODE (old)
3189 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3190 return old;
3191 if (! add)
3192 return NULL;
3193 return gen_rtx_IOR (0, old, x);
3194 }
3195
3196 switch (GET_CODE (old))
3197 {
3198 case IOR:
3199 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3200 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3201 if (op0 != NULL || op1 != NULL)
3202 {
3203 if (op0 == const0_rtx)
3204 return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3205 if (op1 == const0_rtx)
3206 return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3207 if (op0 == const1_rtx || op1 == const1_rtx)
3208 return const1_rtx;
3209 if (op0 == NULL)
3210 op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3211 else if (rtx_equal_p (x, op0))
3212 /* (x | A) | x ~ (x | A). */
3213 return old;
3214 if (op1 == NULL)
3215 op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3216 else if (rtx_equal_p (x, op1))
3217 /* (A | x) | x ~ (A | x). */
3218 return old;
3219 return gen_rtx_IOR (0, op0, op1);
3220 }
3221 if (! add)
3222 return NULL;
3223 return gen_rtx_IOR (0, old, x);
3224
3225 case AND:
3226 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3227 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3228 if (op0 != NULL || op1 != NULL)
3229 {
3230 if (op0 == const1_rtx)
3231 return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3232 if (op1 == const1_rtx)
3233 return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3234 if (op0 == const0_rtx || op1 == const0_rtx)
3235 return const0_rtx;
3236 if (op0 == NULL)
3237 op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3238 else if (rtx_equal_p (x, op0))
3239 /* (x & A) | x ~ x. */
3240 return op0;
3241 if (op1 == NULL)
3242 op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3243 else if (rtx_equal_p (x, op1))
3244 /* (A & x) | x ~ x. */
3245 return op1;
3246 return gen_rtx_AND (0, op0, op1);
3247 }
3248 if (! add)
3249 return NULL;
3250 return gen_rtx_IOR (0, old, x);
3251
3252 case NOT:
3253 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3254 if (op0 != NULL)
3255 return not_reg_cond (op0);
3256 if (! add)
3257 return NULL;
3258 return gen_rtx_IOR (0, old, x);
3259
3260 default:
3261 gcc_unreachable ();
3262 }
3263 }
3264
3265 static rtx
3266 not_reg_cond (rtx x)
3267 {
3268 if (x == const0_rtx)
3269 return const1_rtx;
3270 else if (x == const1_rtx)
3271 return const0_rtx;
3272 if (GET_CODE (x) == NOT)
3273 return XEXP (x, 0);
3274 if (COMPARISON_P (x)
3275 && REG_P (XEXP (x, 0)))
3276 {
3277 gcc_assert (XEXP (x, 1) == const0_rtx);
3278
3279 return gen_rtx_fmt_ee (reversed_comparison_code (x, NULL),
3280 VOIDmode, XEXP (x, 0), const0_rtx);
3281 }
3282 return gen_rtx_NOT (0, x);
3283 }
3284
3285 static rtx
3286 and_reg_cond (rtx old, rtx x, int add)
3287 {
3288 rtx op0, op1;
3289
3290 if (COMPARISON_P (old))
3291 {
3292 if (COMPARISON_P (x)
3293 && GET_CODE (x) == reversed_comparison_code (old, NULL)
3294 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3295 return const0_rtx;
3296 if (GET_CODE (x) == GET_CODE (old)
3297 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3298 return old;
3299 if (! add)
3300 return NULL;
3301 return gen_rtx_AND (0, old, x);
3302 }
3303
3304 switch (GET_CODE (old))
3305 {
3306 case IOR:
3307 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3308 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3309 if (op0 != NULL || op1 != NULL)
3310 {
3311 if (op0 == const0_rtx)
3312 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3313 if (op1 == const0_rtx)
3314 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3315 if (op0 == const1_rtx || op1 == const1_rtx)
3316 return const1_rtx;
3317 if (op0 == NULL)
3318 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3319 else if (rtx_equal_p (x, op0))
3320 /* (x | A) & x ~ x. */
3321 return op0;
3322 if (op1 == NULL)
3323 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3324 else if (rtx_equal_p (x, op1))
3325 /* (A | x) & x ~ x. */
3326 return op1;
3327 return gen_rtx_IOR (0, op0, op1);
3328 }
3329 if (! add)
3330 return NULL;
3331 return gen_rtx_AND (0, old, x);
3332
3333 case AND:
3334 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3335 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3336 if (op0 != NULL || op1 != NULL)
3337 {
3338 if (op0 == const1_rtx)
3339 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3340 if (op1 == const1_rtx)
3341 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3342 if (op0 == const0_rtx || op1 == const0_rtx)
3343 return const0_rtx;
3344 if (op0 == NULL)
3345 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3346 else if (rtx_equal_p (x, op0))
3347 /* (x & A) & x ~ (x & A). */
3348 return old;
3349 if (op1 == NULL)
3350 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3351 else if (rtx_equal_p (x, op1))
3352 /* (A & x) & x ~ (A & x). */
3353 return old;
3354 return gen_rtx_AND (0, op0, op1);
3355 }
3356 if (! add)
3357 return NULL;
3358 return gen_rtx_AND (0, old, x);
3359
3360 case NOT:
3361 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3362 if (op0 != NULL)
3363 return not_reg_cond (op0);
3364 if (! add)
3365 return NULL;
3366 return gen_rtx_AND (0, old, x);
3367
3368 default:
3369 gcc_unreachable ();
3370 }
3371 }
3372
3373 /* Given a condition X, remove references to reg REGNO and return the
3374 new condition. The removal will be done so that all conditions
3375 involving REGNO are considered to evaluate to false. This function
3376 is used when the value of REGNO changes. */
3377
3378 static rtx
3379 elim_reg_cond (rtx x, unsigned int regno)
3380 {
3381 rtx op0, op1;
3382
3383 if (COMPARISON_P (x))
3384 {
3385 if (REGNO (XEXP (x, 0)) == regno)
3386 return const0_rtx;
3387 return x;
3388 }
3389
3390 switch (GET_CODE (x))
3391 {
3392 case AND:
3393 op0 = elim_reg_cond (XEXP (x, 0), regno);
3394 op1 = elim_reg_cond (XEXP (x, 1), regno);
3395 if (op0 == const0_rtx || op1 == const0_rtx)
3396 return const0_rtx;
3397 if (op0 == const1_rtx)
3398 return op1;
3399 if (op1 == const1_rtx)
3400 return op0;
3401 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3402 return x;
3403 return gen_rtx_AND (0, op0, op1);
3404
3405 case IOR:
3406 op0 = elim_reg_cond (XEXP (x, 0), regno);
3407 op1 = elim_reg_cond (XEXP (x, 1), regno);
3408 if (op0 == const1_rtx || op1 == const1_rtx)
3409 return const1_rtx;
3410 if (op0 == const0_rtx)
3411 return op1;
3412 if (op1 == const0_rtx)
3413 return op0;
3414 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3415 return x;
3416 return gen_rtx_IOR (0, op0, op1);
3417
3418 case NOT:
3419 op0 = elim_reg_cond (XEXP (x, 0), regno);
3420 if (op0 == const0_rtx)
3421 return const1_rtx;
3422 if (op0 == const1_rtx)
3423 return const0_rtx;
3424 if (op0 != XEXP (x, 0))
3425 return not_reg_cond (op0);
3426 return x;
3427
3428 default:
3429 gcc_unreachable ();
3430 }
3431 }
3432 #endif /* HAVE_conditional_execution */
3433 \f
3434 #ifdef AUTO_INC_DEC
3435
3436 /* Try to substitute the auto-inc expression INC as the address inside
3437 MEM which occurs in INSN. Currently, the address of MEM is an expression
3438 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
3439 that has a single set whose source is a PLUS of INCR_REG and something
3440 else. */
3441
3442 static void
3443 attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
3444 rtx mem, rtx incr, rtx incr_reg)
3445 {
3446 int regno = REGNO (incr_reg);
3447 rtx set = single_set (incr);
3448 rtx q = SET_DEST (set);
3449 rtx y = SET_SRC (set);
3450 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
3451 int changed;
3452
3453 /* Make sure this reg appears only once in this insn. */
3454 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
3455 return;
3456
3457 if (dead_or_set_p (incr, incr_reg)
3458 /* Mustn't autoinc an eliminable register. */
3459 && (regno >= FIRST_PSEUDO_REGISTER
3460 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
3461 {
3462 /* This is the simple case. Try to make the auto-inc. If
3463 we can't, we are done. Otherwise, we will do any
3464 needed updates below. */
3465 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
3466 return;
3467 }
3468 else if (REG_P (q)
3469 /* PREV_INSN used here to check the semi-open interval
3470 [insn,incr). */
3471 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3472 /* We must also check for sets of q as q may be
3473 a call clobbered hard register and there may
3474 be a call between PREV_INSN (insn) and incr. */
3475 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3476 {
3477 /* We have *p followed sometime later by q = p+size.
3478 Both p and q must be live afterward,
3479 and q is not used between INSN and its assignment.
3480 Change it to q = p, ...*q..., q = q+size.
3481 Then fall into the usual case. */
3482 rtx insns, temp;
3483
3484 start_sequence ();
3485 emit_move_insn (q, incr_reg);
3486 insns = get_insns ();
3487 end_sequence ();
3488
3489 /* If we can't make the auto-inc, or can't make the
3490 replacement into Y, exit. There's no point in making
3491 the change below if we can't do the auto-inc and doing
3492 so is not correct in the pre-inc case. */
3493
3494 XEXP (inc, 0) = q;
3495 validate_change (insn, &XEXP (mem, 0), inc, 1);
3496 validate_change (incr, &XEXP (y, opnum), q, 1);
3497 if (! apply_change_group ())
3498 return;
3499
3500 /* We now know we'll be doing this change, so emit the
3501 new insn(s) and do the updates. */
3502 emit_insn_before (insns, insn);
3503
3504 if (BB_HEAD (pbi->bb) == insn)
3505 BB_HEAD (pbi->bb) = insns;
3506
3507 /* INCR will become a NOTE and INSN won't contain a
3508 use of INCR_REG. If a use of INCR_REG was just placed in
3509 the insn before INSN, make that the next use.
3510 Otherwise, invalidate it. */
3511 if (NONJUMP_INSN_P (PREV_INSN (insn))
3512 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3513 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
3514 pbi->reg_next_use[regno] = PREV_INSN (insn);
3515 else
3516 pbi->reg_next_use[regno] = 0;
3517
3518 incr_reg = q;
3519 regno = REGNO (q);
3520
3521 if ((pbi->flags & PROP_REG_INFO)
3522 && !REGNO_REG_SET_P (pbi->reg_live, regno))
3523 reg_deaths[regno] = pbi->insn_num;
3524
3525 /* REGNO is now used in INCR which is below INSN, but
3526 it previously wasn't live here. If we don't mark
3527 it as live, we'll put a REG_DEAD note for it
3528 on this insn, which is incorrect. */
3529 SET_REGNO_REG_SET (pbi->reg_live, regno);
3530
3531 /* If there are any calls between INSN and INCR, show
3532 that REGNO now crosses them. */
3533 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3534 if (CALL_P (temp))
3535 {
3536 REG_N_CALLS_CROSSED (regno)++;
3537 if (can_throw_internal (temp))
3538 REG_N_THROWING_CALLS_CROSSED (regno)++;
3539 }
3540
3541 /* Invalidate alias info for Q since we just changed its value. */
3542 clear_reg_alias_info (q);
3543 }
3544 else
3545 return;
3546
3547 /* If we haven't returned, it means we were able to make the
3548 auto-inc, so update the status. First, record that this insn
3549 has an implicit side effect. */
3550
3551 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
3552
3553 /* Modify the old increment-insn to simply copy
3554 the already-incremented value of our register. */
3555 changed = validate_change (incr, &SET_SRC (set), incr_reg, 0);
3556 gcc_assert (changed);
3557
3558 /* If that makes it a no-op (copying the register into itself) delete
3559 it so it won't appear to be a "use" and a "set" of this
3560 register. */
3561 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
3562 {
3563 /* If the original source was dead, it's dead now. */
3564 rtx note;
3565
3566 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
3567 {
3568 remove_note (incr, note);
3569 if (XEXP (note, 0) != incr_reg)
3570 {
3571 unsigned int regno = REGNO (XEXP (note, 0));
3572
3573 if ((pbi->flags & PROP_REG_INFO)
3574 && REGNO_REG_SET_P (pbi->reg_live, regno))
3575 {
3576 REG_LIVE_LENGTH (regno) += pbi->insn_num - reg_deaths[regno];
3577 reg_deaths[regno] = 0;
3578 }
3579 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
3580 }
3581 }
3582
3583 SET_INSN_DELETED (incr);
3584 }
3585
3586 if (regno >= FIRST_PSEUDO_REGISTER)
3587 {
3588 /* Count an extra reference to the reg. When a reg is
3589 incremented, spilling it is worse, so we want to make
3590 that less likely. */
3591 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3592
3593 /* Count the increment as a setting of the register,
3594 even though it isn't a SET in rtl. */
3595 REG_N_SETS (regno)++;
3596 }
3597 }
3598
3599 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3600 reference. */
3601
3602 static void
3603 find_auto_inc (struct propagate_block_info *pbi, rtx x, rtx insn)
3604 {
3605 rtx addr = XEXP (x, 0);
3606 HOST_WIDE_INT offset = 0;
3607 rtx set, y, incr, inc_val;
3608 int regno;
3609 int size = GET_MODE_SIZE (GET_MODE (x));
3610
3611 if (JUMP_P (insn))
3612 return;
3613
3614 /* Here we detect use of an index register which might be good for
3615 postincrement, postdecrement, preincrement, or predecrement. */
3616
3617 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3618 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3619
3620 if (!REG_P (addr))
3621 return;
3622
3623 regno = REGNO (addr);
3624
3625 /* Is the next use an increment that might make auto-increment? */
3626 incr = pbi->reg_next_use[regno];
3627 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
3628 return;
3629 set = single_set (incr);
3630 if (set == 0 || GET_CODE (set) != SET)
3631 return;
3632 y = SET_SRC (set);
3633
3634 if (GET_CODE (y) != PLUS)
3635 return;
3636
3637 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
3638 inc_val = XEXP (y, 1);
3639 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
3640 inc_val = XEXP (y, 0);
3641 else
3642 return;
3643
3644 if (GET_CODE (inc_val) == CONST_INT)
3645 {
3646 if (HAVE_POST_INCREMENT
3647 && (INTVAL (inc_val) == size && offset == 0))
3648 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
3649 incr, addr);
3650 else if (HAVE_POST_DECREMENT
3651 && (INTVAL (inc_val) == -size && offset == 0))
3652 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
3653 incr, addr);
3654 else if (HAVE_PRE_INCREMENT
3655 && (INTVAL (inc_val) == size && offset == size))
3656 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
3657 incr, addr);
3658 else if (HAVE_PRE_DECREMENT
3659 && (INTVAL (inc_val) == -size && offset == -size))
3660 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
3661 incr, addr);
3662 else if (HAVE_POST_MODIFY_DISP && offset == 0)
3663 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3664 gen_rtx_PLUS (Pmode,
3665 addr,
3666 inc_val)),
3667 insn, x, incr, addr);
3668 else if (HAVE_PRE_MODIFY_DISP && offset == INTVAL (inc_val))
3669 attempt_auto_inc (pbi, gen_rtx_PRE_MODIFY (Pmode, addr,
3670 gen_rtx_PLUS (Pmode,
3671 addr,
3672 inc_val)),
3673 insn, x, incr, addr);
3674 }
3675 else if (REG_P (inc_val)
3676 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
3677 NEXT_INSN (incr)))
3678
3679 {
3680 if (HAVE_POST_MODIFY_REG && offset == 0)
3681 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3682 gen_rtx_PLUS (Pmode,
3683 addr,
3684 inc_val)),
3685 insn, x, incr, addr);
3686 }
3687 }
3688
3689 #endif /* AUTO_INC_DEC */
3690 \f
3691 static void
3692 mark_used_reg (struct propagate_block_info *pbi, rtx reg,
3693 rtx cond ATTRIBUTE_UNUSED, rtx insn)
3694 {
3695 unsigned int regno_first, regno_last, i;
3696 int some_was_live, some_was_dead, some_not_set;
3697
3698 regno_last = regno_first = REGNO (reg);
3699 if (regno_first < FIRST_PSEUDO_REGISTER)
3700 regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
3701
3702 /* Find out if any of this register is live after this instruction. */
3703 some_was_live = some_was_dead = 0;
3704 for (i = regno_first; i <= regno_last; ++i)
3705 {
3706 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
3707 some_was_live |= needed_regno;
3708 some_was_dead |= ! needed_regno;
3709 }
3710
3711 /* Find out if any of the register was set this insn. */
3712 some_not_set = 0;
3713 for (i = regno_first; i <= regno_last; ++i)
3714 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
3715
3716 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3717 {
3718 /* Record where each reg is used, so when the reg is set we know
3719 the next insn that uses it. */
3720 pbi->reg_next_use[regno_first] = insn;
3721 }
3722
3723 if (pbi->flags & PROP_REG_INFO)
3724 {
3725 if (regno_first < FIRST_PSEUDO_REGISTER)
3726 {
3727 /* If this is a register we are going to try to eliminate,
3728 don't mark it live here. If we are successful in
3729 eliminating it, it need not be live unless it is used for
3730 pseudos, in which case it will have been set live when it
3731 was allocated to the pseudos. If the register will not
3732 be eliminated, reload will set it live at that point.
3733
3734 Otherwise, record that this function uses this register. */
3735 /* ??? The PPC backend tries to "eliminate" on the pic
3736 register to itself. This should be fixed. In the mean
3737 time, hack around it. */
3738
3739 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
3740 && (regno_first == FRAME_POINTER_REGNUM
3741 || regno_first == ARG_POINTER_REGNUM)))
3742 for (i = regno_first; i <= regno_last; ++i)
3743 regs_ever_live[i] = 1;
3744 }
3745 else
3746 {
3747 /* Keep track of which basic block each reg appears in. */
3748
3749 int blocknum = pbi->bb->index;
3750 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
3751 REG_BASIC_BLOCK (regno_first) = blocknum;
3752 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
3753 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
3754
3755 /* Count (weighted) number of uses of each reg. */
3756 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
3757 REG_N_REFS (regno_first)++;
3758 }
3759 for (i = regno_first; i <= regno_last; ++i)
3760 if (! REGNO_REG_SET_P (pbi->reg_live, i))
3761 {
3762 gcc_assert (!reg_deaths[i]);
3763 reg_deaths[i] = pbi->insn_num;
3764 }
3765 }
3766
3767 /* Record and count the insns in which a reg dies. If it is used in
3768 this insn and was dead below the insn then it dies in this insn.
3769 If it was set in this insn, we do not make a REG_DEAD note;
3770 likewise if we already made such a note. */
3771 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
3772 && some_was_dead
3773 && some_not_set)
3774 {
3775 /* Check for the case where the register dying partially
3776 overlaps the register set by this insn. */
3777 if (regno_first != regno_last)
3778 for (i = regno_first; i <= regno_last; ++i)
3779 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
3780
3781 /* If none of the words in X is needed, make a REG_DEAD note.
3782 Otherwise, we must make partial REG_DEAD notes. */
3783 if (! some_was_live)
3784 {
3785 if ((pbi->flags & PROP_DEATH_NOTES)
3786 #ifdef STACK_REGS
3787 && (!(pbi->flags & PROP_POST_REGSTACK)
3788 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
3789 #endif
3790 && ! find_regno_note (insn, REG_DEAD, regno_first))
3791 REG_NOTES (insn)
3792 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3793
3794 if (pbi->flags & PROP_REG_INFO)
3795 REG_N_DEATHS (regno_first)++;
3796 }
3797 else
3798 {
3799 /* Don't make a REG_DEAD note for a part of a register
3800 that is set in the insn. */
3801 for (i = regno_first; i <= regno_last; ++i)
3802 if (! REGNO_REG_SET_P (pbi->reg_live, i)
3803 && ! dead_or_set_regno_p (insn, i))
3804 REG_NOTES (insn)
3805 = alloc_EXPR_LIST (REG_DEAD,
3806 regno_reg_rtx[i],
3807 REG_NOTES (insn));
3808 }
3809 }
3810
3811 /* Mark the register as being live. */
3812 for (i = regno_first; i <= regno_last; ++i)
3813 {
3814 #ifdef HAVE_conditional_execution
3815 int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
3816 #endif
3817
3818 SET_REGNO_REG_SET (pbi->reg_live, i);
3819
3820 #ifdef HAVE_conditional_execution
3821 /* If this is a conditional use, record that fact. If it is later
3822 conditionally set, we'll know to kill the register. */
3823 if (cond != NULL_RTX)
3824 {
3825 splay_tree_node node;
3826 struct reg_cond_life_info *rcli;
3827 rtx ncond;
3828
3829 if (this_was_live)
3830 {
3831 node = splay_tree_lookup (pbi->reg_cond_dead, i);
3832 if (node == NULL)
3833 {
3834 /* The register was unconditionally live previously.
3835 No need to do anything. */
3836 }
3837 else
3838 {
3839 /* The register was conditionally live previously.
3840 Subtract the new life cond from the old death cond. */
3841 rcli = (struct reg_cond_life_info *) node->value;
3842 ncond = rcli->condition;
3843 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
3844
3845 /* If the register is now unconditionally live,
3846 remove the entry in the splay_tree. */
3847 if (ncond == const0_rtx)
3848 splay_tree_remove (pbi->reg_cond_dead, i);
3849 else
3850 {
3851 rcli->condition = ncond;
3852 SET_REGNO_REG_SET (pbi->reg_cond_reg,
3853 REGNO (XEXP (cond, 0)));
3854 }
3855 }
3856 }
3857 else
3858 {
3859 /* The register was not previously live at all. Record
3860 the condition under which it is still dead. */
3861 rcli = xmalloc (sizeof (*rcli));
3862 rcli->condition = not_reg_cond (cond);
3863 rcli->stores = const0_rtx;
3864 rcli->orig_condition = const0_rtx;
3865 splay_tree_insert (pbi->reg_cond_dead, i,
3866 (splay_tree_value) rcli);
3867
3868 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3869 }
3870 }
3871 else if (this_was_live)
3872 {
3873 /* The register may have been conditionally live previously, but
3874 is now unconditionally live. Remove it from the conditionally
3875 dead list, so that a conditional set won't cause us to think
3876 it dead. */
3877 splay_tree_remove (pbi->reg_cond_dead, i);
3878 }
3879 #endif
3880 }
3881 }
3882
3883 /* Scan expression X for registers which have to be marked used in PBI.
3884 X is considered to be the SET_DEST rtx of SET. TRUE is returned if
3885 X could be handled by this function. */
3886
3887 static bool
3888 mark_used_dest_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
3889 {
3890 int regno;
3891 bool mark_dest = false;
3892 rtx dest = x;
3893
3894 /* On some platforms calls return values spread over several
3895 locations. These locations are wrapped in a EXPR_LIST rtx
3896 together with a CONST_INT offset. */
3897 if (GET_CODE (x) == EXPR_LIST
3898 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3899 x = XEXP (x, 0);
3900
3901 if (x == NULL_RTX)
3902 return false;
3903
3904 /* If storing into MEM, don't show it as being used. But do
3905 show the address as being used. */
3906 if (MEM_P (x))
3907 {
3908 #ifdef AUTO_INC_DEC
3909 if (pbi->flags & PROP_AUTOINC)
3910 find_auto_inc (pbi, x, insn);
3911 #endif
3912 mark_used_regs (pbi, XEXP (x, 0), cond, insn);
3913 return true;
3914 }
3915
3916 /* Storing in STRICT_LOW_PART is like storing in a reg
3917 in that this SET might be dead, so ignore it in TESTREG.
3918 but in some other ways it is like using the reg.
3919
3920 Storing in a SUBREG or a bit field is like storing the entire
3921 register in that if the register's value is not used
3922 then this SET is not needed. */
3923 while (GET_CODE (x) == STRICT_LOW_PART
3924 || GET_CODE (x) == ZERO_EXTRACT
3925 || GET_CODE (x) == SUBREG)
3926 {
3927 #ifdef CANNOT_CHANGE_MODE_CLASS
3928 if ((pbi->flags & PROP_REG_INFO) && GET_CODE (x) == SUBREG)
3929 record_subregs_of_mode (x);
3930 #endif
3931
3932 /* Modifying a single register in an alternate mode
3933 does not use any of the old value. But these other
3934 ways of storing in a register do use the old value. */
3935 if (GET_CODE (x) == SUBREG
3936 && !((REG_BYTES (SUBREG_REG (x))
3937 + UNITS_PER_WORD - 1) / UNITS_PER_WORD
3938 > (REG_BYTES (x)
3939 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
3940 ;
3941 else
3942 mark_dest = true;
3943
3944 x = XEXP (x, 0);
3945 }
3946
3947 /* If this is a store into a register or group of registers,
3948 recursively scan the value being stored. */
3949 if (REG_P (x)
3950 && (regno = REGNO (x),
3951 !(regno == FRAME_POINTER_REGNUM
3952 && (!reload_completed || frame_pointer_needed)))
3953 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3954 && !(regno == HARD_FRAME_POINTER_REGNUM
3955 && (!reload_completed || frame_pointer_needed))
3956 #endif
3957 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3958 && !(regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3959 #endif
3960 )
3961 {
3962 if (mark_dest)
3963 mark_used_regs (pbi, dest, cond, insn);
3964 return true;
3965 }
3966 return false;
3967 }
3968
3969 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
3970 This is done assuming the registers needed from X are those that
3971 have 1-bits in PBI->REG_LIVE.
3972
3973 INSN is the containing instruction. If INSN is dead, this function
3974 is not called. */
3975
3976 static void
3977 mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
3978 {
3979 RTX_CODE code;
3980 int flags = pbi->flags;
3981
3982 retry:
3983 if (!x)
3984 return;
3985 code = GET_CODE (x);
3986 switch (code)
3987 {
3988 case LABEL_REF:
3989 case SYMBOL_REF:
3990 case CONST_INT:
3991 case CONST:
3992 case CONST_DOUBLE:
3993 case CONST_VECTOR:
3994 case PC:
3995 case ADDR_VEC:
3996 case ADDR_DIFF_VEC:
3997 return;
3998
3999 #ifdef HAVE_cc0
4000 case CC0:
4001 pbi->cc0_live = 1;
4002 return;
4003 #endif
4004
4005 case CLOBBER:
4006 /* If we are clobbering a MEM, mark any registers inside the address
4007 as being used. */
4008 if (MEM_P (XEXP (x, 0)))
4009 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4010 return;
4011
4012 case MEM:
4013 /* Don't bother watching stores to mems if this is not the
4014 final pass. We'll not be deleting dead stores this round. */
4015 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
4016 {
4017 /* Invalidate the data for the last MEM stored, but only if MEM is
4018 something that can be stored into. */
4019 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4020 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4021 /* Needn't clear the memory set list. */
4022 ;
4023 else
4024 {
4025 rtx temp = pbi->mem_set_list;
4026 rtx prev = NULL_RTX;
4027 rtx next;
4028
4029 while (temp)
4030 {
4031 next = XEXP (temp, 1);
4032 if (anti_dependence (XEXP (temp, 0), x))
4033 {
4034 /* Splice temp out of the list. */
4035 if (prev)
4036 XEXP (prev, 1) = next;
4037 else
4038 pbi->mem_set_list = next;
4039 free_EXPR_LIST_node (temp);
4040 pbi->mem_set_list_len--;
4041 }
4042 else
4043 prev = temp;
4044 temp = next;
4045 }
4046 }
4047
4048 /* If the memory reference had embedded side effects (autoincrement
4049 address modes. Then we may need to kill some entries on the
4050 memory set list. */
4051 if (insn)
4052 for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
4053 }
4054
4055 #ifdef AUTO_INC_DEC
4056 if (flags & PROP_AUTOINC)
4057 find_auto_inc (pbi, x, insn);
4058 #endif
4059 break;
4060
4061 case SUBREG:
4062 #ifdef CANNOT_CHANGE_MODE_CLASS
4063 if (flags & PROP_REG_INFO)
4064 record_subregs_of_mode (x);
4065 #endif
4066
4067 /* While we're here, optimize this case. */
4068 x = SUBREG_REG (x);
4069 if (!REG_P (x))
4070 goto retry;
4071 /* Fall through. */
4072
4073 case REG:
4074 /* See a register other than being set => mark it as needed. */
4075 mark_used_reg (pbi, x, cond, insn);
4076 return;
4077
4078 case SET:
4079 {
4080 rtx dest = SET_DEST (x);
4081 int i;
4082 bool ret = false;
4083
4084 if (GET_CODE (dest) == PARALLEL)
4085 for (i = 0; i < XVECLEN (dest, 0); i++)
4086 ret |= mark_used_dest_regs (pbi, XVECEXP (dest, 0, i), cond, insn);
4087 else
4088 ret = mark_used_dest_regs (pbi, dest, cond, insn);
4089
4090 if (ret)
4091 {
4092 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4093 return;
4094 }
4095 }
4096 break;
4097
4098 case ASM_OPERANDS:
4099 case UNSPEC_VOLATILE:
4100 case TRAP_IF:
4101 case ASM_INPUT:
4102 {
4103 /* Traditional and volatile asm instructions must be considered to use
4104 and clobber all hard registers, all pseudo-registers and all of
4105 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4106
4107 Consider for instance a volatile asm that changes the fpu rounding
4108 mode. An insn should not be moved across this even if it only uses
4109 pseudo-regs because it might give an incorrectly rounded result.
4110
4111 ?!? Unfortunately, marking all hard registers as live causes massive
4112 problems for the register allocator and marking all pseudos as live
4113 creates mountains of uninitialized variable warnings.
4114
4115 So for now, just clear the memory set list and mark any regs
4116 we can find in ASM_OPERANDS as used. */
4117 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4118 {
4119 free_EXPR_LIST_list (&pbi->mem_set_list);
4120 pbi->mem_set_list_len = 0;
4121 }
4122
4123 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4124 We can not just fall through here since then we would be confused
4125 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4126 traditional asms unlike their normal usage. */
4127 if (code == ASM_OPERANDS)
4128 {
4129 int j;
4130
4131 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4132 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4133 }
4134 break;
4135 }
4136
4137 case COND_EXEC:
4138 gcc_assert (!cond);
4139
4140 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4141
4142 cond = COND_EXEC_TEST (x);
4143 x = COND_EXEC_CODE (x);
4144 goto retry;
4145
4146 default:
4147 break;
4148 }
4149
4150 /* Recursively scan the operands of this expression. */
4151
4152 {
4153 const char * const fmt = GET_RTX_FORMAT (code);
4154 int i;
4155
4156 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4157 {
4158 if (fmt[i] == 'e')
4159 {
4160 /* Tail recursive case: save a function call level. */
4161 if (i == 0)
4162 {
4163 x = XEXP (x, 0);
4164 goto retry;
4165 }
4166 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4167 }
4168 else if (fmt[i] == 'E')
4169 {
4170 int j;
4171 for (j = 0; j < XVECLEN (x, i); j++)
4172 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4173 }
4174 }
4175 }
4176 }
4177 \f
4178 #ifdef AUTO_INC_DEC
4179
4180 static int
4181 try_pre_increment_1 (struct propagate_block_info *pbi, rtx insn)
4182 {
4183 /* Find the next use of this reg. If in same basic block,
4184 make it do pre-increment or pre-decrement if appropriate. */
4185 rtx x = single_set (insn);
4186 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4187 * INTVAL (XEXP (SET_SRC (x), 1)));
4188 int regno = REGNO (SET_DEST (x));
4189 rtx y = pbi->reg_next_use[regno];
4190 if (y != 0
4191 && SET_DEST (x) != stack_pointer_rtx
4192 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4193 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4194 mode would be better. */
4195 && ! dead_or_set_p (y, SET_DEST (x))
4196 && try_pre_increment (y, SET_DEST (x), amount))
4197 {
4198 /* We have found a suitable auto-increment and already changed
4199 insn Y to do it. So flush this increment instruction. */
4200 propagate_block_delete_insn (insn);
4201
4202 /* Count a reference to this reg for the increment insn we are
4203 deleting. When a reg is incremented, spilling it is worse,
4204 so we want to make that less likely. */
4205 if (regno >= FIRST_PSEUDO_REGISTER)
4206 {
4207 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
4208 REG_N_SETS (regno)++;
4209 }
4210
4211 /* Flush any remembered memories depending on the value of
4212 the incremented register. */
4213 invalidate_mems_from_set (pbi, SET_DEST (x));
4214
4215 return 1;
4216 }
4217 return 0;
4218 }
4219
4220 /* Try to change INSN so that it does pre-increment or pre-decrement
4221 addressing on register REG in order to add AMOUNT to REG.
4222 AMOUNT is negative for pre-decrement.
4223 Returns 1 if the change could be made.
4224 This checks all about the validity of the result of modifying INSN. */
4225
4226 static int
4227 try_pre_increment (rtx insn, rtx reg, HOST_WIDE_INT amount)
4228 {
4229 rtx use;
4230
4231 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4232 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4233 int pre_ok = 0;
4234 /* Nonzero if we can try to make a post-increment or post-decrement.
4235 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4236 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4237 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4238 int post_ok = 0;
4239
4240 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4241 int do_post = 0;
4242
4243 /* From the sign of increment, see which possibilities are conceivable
4244 on this target machine. */
4245 if (HAVE_PRE_INCREMENT && amount > 0)
4246 pre_ok = 1;
4247 if (HAVE_POST_INCREMENT && amount > 0)
4248 post_ok = 1;
4249
4250 if (HAVE_PRE_DECREMENT && amount < 0)
4251 pre_ok = 1;
4252 if (HAVE_POST_DECREMENT && amount < 0)
4253 post_ok = 1;
4254
4255 if (! (pre_ok || post_ok))
4256 return 0;
4257
4258 /* It is not safe to add a side effect to a jump insn
4259 because if the incremented register is spilled and must be reloaded
4260 there would be no way to store the incremented value back in memory. */
4261
4262 if (JUMP_P (insn))
4263 return 0;
4264
4265 use = 0;
4266 if (pre_ok)
4267 use = find_use_as_address (PATTERN (insn), reg, 0);
4268 if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
4269 {
4270 use = find_use_as_address (PATTERN (insn), reg, -amount);
4271 do_post = 1;
4272 }
4273
4274 if (use == 0 || use == (rtx) (size_t) 1)
4275 return 0;
4276
4277 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4278 return 0;
4279
4280 /* See if this combination of instruction and addressing mode exists. */
4281 if (! validate_change (insn, &XEXP (use, 0),
4282 gen_rtx_fmt_e (amount > 0
4283 ? (do_post ? POST_INC : PRE_INC)
4284 : (do_post ? POST_DEC : PRE_DEC),
4285 Pmode, reg), 0))
4286 return 0;
4287
4288 /* Record that this insn now has an implicit side effect on X. */
4289 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4290 return 1;
4291 }
4292
4293 #endif /* AUTO_INC_DEC */
4294 \f
4295 /* Find the place in the rtx X where REG is used as a memory address.
4296 Return the MEM rtx that so uses it.
4297 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4298 (plus REG (const_int PLUSCONST)).
4299
4300 If such an address does not appear, return 0.
4301 If REG appears more than once, or is used other than in such an address,
4302 return (rtx) 1. */
4303
4304 rtx
4305 find_use_as_address (rtx x, rtx reg, HOST_WIDE_INT plusconst)
4306 {
4307 enum rtx_code code = GET_CODE (x);
4308 const char * const fmt = GET_RTX_FORMAT (code);
4309 int i;
4310 rtx value = 0;
4311 rtx tem;
4312
4313 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4314 return x;
4315
4316 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4317 && XEXP (XEXP (x, 0), 0) == reg
4318 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4319 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4320 return x;
4321
4322 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4323 {
4324 /* If REG occurs inside a MEM used in a bit-field reference,
4325 that is unacceptable. */
4326 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4327 return (rtx) (size_t) 1;
4328 }
4329
4330 if (x == reg)
4331 return (rtx) (size_t) 1;
4332
4333 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4334 {
4335 if (fmt[i] == 'e')
4336 {
4337 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4338 if (value == 0)
4339 value = tem;
4340 else if (tem != 0)
4341 return (rtx) (size_t) 1;
4342 }
4343 else if (fmt[i] == 'E')
4344 {
4345 int j;
4346 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4347 {
4348 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4349 if (value == 0)
4350 value = tem;
4351 else if (tem != 0)
4352 return (rtx) (size_t) 1;
4353 }
4354 }
4355 }
4356
4357 return value;
4358 }
4359 \f
4360 /* Write information about registers and basic blocks into FILE.
4361 This is part of making a debugging dump. */
4362
4363 void
4364 dump_regset (regset r, FILE *outf)
4365 {
4366 unsigned i;
4367 reg_set_iterator rsi;
4368
4369 if (r == NULL)
4370 {
4371 fputs (" (nil)", outf);
4372 return;
4373 }
4374
4375 EXECUTE_IF_SET_IN_REG_SET (r, 0, i, rsi)
4376 {
4377 fprintf (outf, " %d", i);
4378 if (i < FIRST_PSEUDO_REGISTER)
4379 fprintf (outf, " [%s]",
4380 reg_names[i]);
4381 }
4382 }
4383
4384 /* Print a human-readable representation of R on the standard error
4385 stream. This function is designed to be used from within the
4386 debugger. */
4387
4388 void
4389 debug_regset (regset r)
4390 {
4391 dump_regset (r, stderr);
4392 putc ('\n', stderr);
4393 }
4394
4395 /* Recompute register set/reference counts immediately prior to register
4396 allocation.
4397
4398 This avoids problems with set/reference counts changing to/from values
4399 which have special meanings to the register allocators.
4400
4401 Additionally, the reference counts are the primary component used by the
4402 register allocators to prioritize pseudos for allocation to hard regs.
4403 More accurate reference counts generally lead to better register allocation.
4404
4405 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4406 possibly other information which is used by the register allocators. */
4407
4408 void
4409 recompute_reg_usage (void)
4410 {
4411 allocate_reg_life_data ();
4412 /* distribute_notes in combiner fails to convert some of the
4413 REG_UNUSED notes to REG_DEAD notes. This causes CHECK_DEAD_NOTES
4414 in sched1 to die. To solve this update the DEATH_NOTES
4415 here. */
4416 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO | PROP_DEATH_NOTES);
4417
4418 if (dump_file)
4419 dump_flow_info (dump_file);
4420 }
4421
4422 struct tree_opt_pass pass_recompute_reg_usage =
4423 {
4424 "life2", /* name */
4425 NULL, /* gate */
4426 recompute_reg_usage, /* execute */
4427 NULL, /* sub */
4428 NULL, /* next */
4429 0, /* static_pass_number */
4430 0, /* tv_id */
4431 0, /* properties_required */
4432 0, /* properties_provided */
4433 0, /* properties_destroyed */
4434 0, /* todo_flags_start */
4435 TODO_dump_func, /* todo_flags_finish */
4436 'f' /* letter */
4437 };
4438
4439 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4440 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
4441 of the number of registers that died.
4442 If KILL is 1, remove old REG_DEAD / REG_UNUSED notes. If it is 0, don't.
4443 if it is -1, remove them unless they pertain to a stack reg. */
4444
4445 int
4446 count_or_remove_death_notes (sbitmap blocks, int kill)
4447 {
4448 int count = 0;
4449 unsigned int i = 0;
4450 basic_block bb;
4451
4452 /* This used to be a loop over all the blocks with a membership test
4453 inside the loop. That can be amazingly expensive on a large CFG
4454 when only a small number of bits are set in BLOCKs (for example,
4455 the calls from the scheduler typically have very few bits set).
4456
4457 For extra credit, someone should convert BLOCKS to a bitmap rather
4458 than an sbitmap. */
4459 if (blocks)
4460 {
4461 sbitmap_iterator sbi;
4462
4463 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
4464 {
4465 basic_block bb = BASIC_BLOCK (i);
4466 /* The bitmap may be flawed in that one of the basic blocks
4467 may have been deleted before you get here. */
4468 if (bb)
4469 count += count_or_remove_death_notes_bb (bb, kill);
4470 };
4471 }
4472 else
4473 {
4474 FOR_EACH_BB (bb)
4475 {
4476 count += count_or_remove_death_notes_bb (bb, kill);
4477 }
4478 }
4479
4480 return count;
4481 }
4482
4483 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from basic
4484 block BB. Returns a count of the number of registers that died. */
4485
4486 static int
4487 count_or_remove_death_notes_bb (basic_block bb, int kill)
4488 {
4489 int count = 0;
4490 rtx insn;
4491
4492 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
4493 {
4494 if (INSN_P (insn))
4495 {
4496 rtx *pprev = &REG_NOTES (insn);
4497 rtx link = *pprev;
4498
4499 while (link)
4500 {
4501 switch (REG_NOTE_KIND (link))
4502 {
4503 case REG_DEAD:
4504 if (REG_P (XEXP (link, 0)))
4505 {
4506 rtx reg = XEXP (link, 0);
4507 int n;
4508
4509 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
4510 n = 1;
4511 else
4512 n = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
4513 count += n;
4514 }
4515
4516 /* Fall through. */
4517
4518 case REG_UNUSED:
4519 if (kill > 0
4520 || (kill
4521 #ifdef STACK_REGS
4522 && (!REG_P (XEXP (link, 0))
4523 || !IN_RANGE (REGNO (XEXP (link, 0)),
4524 FIRST_STACK_REG, LAST_STACK_REG))
4525 #endif
4526 ))
4527 {
4528 rtx next = XEXP (link, 1);
4529 free_EXPR_LIST_node (link);
4530 *pprev = link = next;
4531 break;
4532 }
4533 /* Fall through. */
4534
4535 default:
4536 pprev = &XEXP (link, 1);
4537 link = *pprev;
4538 break;
4539 }
4540 }
4541 }
4542
4543 if (insn == BB_END (bb))
4544 break;
4545 }
4546
4547 return count;
4548 }
4549
4550 /* Clear LOG_LINKS fields of insns in a selected blocks or whole chain
4551 if blocks is NULL. */
4552
4553 static void
4554 clear_log_links (sbitmap blocks)
4555 {
4556 rtx insn;
4557
4558 if (!blocks)
4559 {
4560 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4561 if (INSN_P (insn))
4562 free_INSN_LIST_list (&LOG_LINKS (insn));
4563 }
4564 else
4565 {
4566 unsigned int i = 0;
4567 sbitmap_iterator sbi;
4568
4569 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
4570 {
4571 basic_block bb = BASIC_BLOCK (i);
4572
4573 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
4574 insn = NEXT_INSN (insn))
4575 if (INSN_P (insn))
4576 free_INSN_LIST_list (&LOG_LINKS (insn));
4577 }
4578 }
4579 }
4580
4581 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4582 correspond to the hard registers, if any, set in that map. This
4583 could be done far more efficiently by having all sorts of special-cases
4584 with moving single words, but probably isn't worth the trouble. */
4585
4586 void
4587 reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
4588 {
4589 unsigned i;
4590 bitmap_iterator bi;
4591
4592 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4593 {
4594 if (i >= FIRST_PSEUDO_REGISTER)
4595 return;
4596 SET_HARD_REG_BIT (*to, i);
4597 }
4598 }
4599 \f
4600
4601 static bool
4602 gate_remove_death_notes (void)
4603 {
4604 return flag_profile_values;
4605 }
4606
4607 static void
4608 rest_of_handle_remove_death_notes (void)
4609 {
4610 count_or_remove_death_notes (NULL, 1);
4611 }
4612
4613 struct tree_opt_pass pass_remove_death_notes =
4614 {
4615 "ednotes", /* name */
4616 gate_remove_death_notes, /* gate */
4617 rest_of_handle_remove_death_notes, /* execute */
4618 NULL, /* sub */
4619 NULL, /* next */
4620 0, /* static_pass_number */
4621 0, /* tv_id */
4622 0, /* properties_required */
4623 0, /* properties_provided */
4624 0, /* properties_destroyed */
4625 0, /* todo_flags_start */
4626 0, /* todo_flags_finish */
4627 0 /* letter */
4628 };
4629
4630 /* Perform life analysis. */
4631 static void
4632 rest_of_handle_life (void)
4633 {
4634 regclass_init ();
4635
4636 life_analysis (dump_file, PROP_FINAL);
4637 if (optimize)
4638 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE | CLEANUP_LOG_LINKS
4639 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
4640
4641 if (extra_warnings)
4642 {
4643 setjmp_vars_warning (DECL_INITIAL (current_function_decl));
4644 setjmp_args_warning ();
4645 }
4646
4647 if (optimize)
4648 {
4649 if (initialize_uninitialized_subregs ())
4650 {
4651 /* Insns were inserted, and possibly pseudos created, so
4652 things might look a bit different. */
4653 allocate_reg_life_data ();
4654 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
4655 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
4656 }
4657 }
4658
4659 no_new_pseudos = 1;
4660 }
4661
4662 struct tree_opt_pass pass_life =
4663 {
4664 "life1", /* name */
4665 NULL, /* gate */
4666 rest_of_handle_life, /* execute */
4667 NULL, /* sub */
4668 NULL, /* next */
4669 0, /* static_pass_number */
4670 TV_FLOW, /* tv_id */
4671 0, /* properties_required */
4672 0, /* properties_provided */
4673 0, /* properties_destroyed */
4674 TODO_verify_flow, /* todo_flags_start */
4675 TODO_dump_func |
4676 TODO_ggc_collect, /* todo_flags_finish */
4677 'f' /* letter */
4678 };
4679
4680 static void
4681 rest_of_handle_flow2 (void)
4682 {
4683 /* If optimizing, then go ahead and split insns now. */
4684 #ifndef STACK_REGS
4685 if (optimize > 0)
4686 #endif
4687 split_all_insns (0);
4688
4689 if (flag_branch_target_load_optimize)
4690 branch_target_load_optimize (epilogue_completed);
4691
4692 if (optimize)
4693 cleanup_cfg (CLEANUP_EXPENSIVE);
4694
4695 /* On some machines, the prologue and epilogue code, or parts thereof,
4696 can be represented as RTL. Doing so lets us schedule insns between
4697 it and the rest of the code and also allows delayed branch
4698 scheduling to operate in the epilogue. */
4699 thread_prologue_and_epilogue_insns (get_insns ());
4700 epilogue_completed = 1;
4701 flow2_completed = 1;
4702 }
4703
4704 struct tree_opt_pass pass_flow2 =
4705 {
4706 "flow2", /* name */
4707 NULL, /* gate */
4708 rest_of_handle_flow2, /* execute */
4709 NULL, /* sub */
4710 NULL, /* next */
4711 0, /* static_pass_number */
4712 TV_FLOW2, /* tv_id */
4713 0, /* properties_required */
4714 0, /* properties_provided */
4715 0, /* properties_destroyed */
4716 TODO_verify_flow, /* todo_flags_start */
4717 TODO_dump_func |
4718 TODO_ggc_collect, /* todo_flags_finish */
4719 'w' /* letter */
4720 };
4721