/* Data flow analysis for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
+ Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* This file contains the data flow analysis pass of the compiler. It
computes data flow information which tells combine_instructions
life_analysis fills in certain vectors containing information about
register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
- REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
+ REG_N_CALLS_CROSSED, REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
life_analysis sets current_function_sp_is_unchanging if the function
doesn't modify the stack pointer. */
/* TODO:
Split out from life_analysis:
- - local property discovery (bb->local_live, bb->local_set)
+ - local property discovery
- global property computation
- log links creation
- pre/post modify transformation
#include "obstack.h"
#include "splay-tree.h"
-
-/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
- the stack pointer does not matter. The value is tested only in
- functions that have frame pointers.
- No definition is equivalent to always zero. */
-#ifndef EXIT_IGNORE_STACK
-#define EXIT_IGNORE_STACK 0
-#endif
+#include "tree-pass.h"
+#include "params.h"
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#ifdef HAVE_conditional_execution
#ifndef REVERSE_CONDEXEC_PREDICATES_P
-#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
+#define REVERSE_CONDEXEC_PREDICATES_P(x, y) \
+ (GET_CODE ((x)) == reversed_comparison_code ((y), NULL))
#endif
#endif
+/* This is the maximum number of times we process any given block if the
+ latest loop depth count is smaller than this number. Only used for the
+ failure strategy to avoid infinite loops in calculate_global_regs_live. */
+#define MAX_LIVENESS_ROUNDS 20
+
/* Nonzero if the second flow pass has completed. */
int flow2_completed;
varray_type reg_n_info;
-/* Size of a regset for the current function,
- in (1) bytes and (2) elements. */
-
-int regset_bytes;
-int regset_size;
-
/* Regset of regs live when calls to `setjmp'-like functions happen. */
/* ??? Does this exist only for the setjmp-clobbered warning message? */
-regset regs_live_at_setjmp;
+static regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
that have to go in the same hard reg.
are another pair, etc. */
rtx regs_may_share;
-/* Callback that determines if it's ok for a function to have no
- noreturn attribute. */
-int (*lang_missing_noreturn_ok_p) (tree);
-
/* Set of registers that may be eliminable. These are handled specially
in updating regs_ever_live. */
/* Flags controlling the set of information propagate_block collects. */
int flags;
+ /* Index of instruction being processed. */
+ int insn_num;
};
/* Number of dead insns removed. */
static int ndead;
-/* Maximum length of pbi->mem_set_list before we start dropping
- new elements on the floor. */
-#define MAX_MEM_SET_LIST_LEN 100
+/* When PROP_REG_INFO is set, this array contains pbi->insn_num of the
+ instruction where each given register died. When a register is marked
+ alive, we use the information to compute the number of instructions its
+ life range crosses (remember, we are walking backward). This can be
+ computed as current pbi->insn_num - reg_deaths[regno].
+ At the end of processing each basic block, the remaining live registers
+ are inspected and their live ranges are increased the same way, so the
+ live ranges of global registers are computed correctly.
+
+ The array is kept clear for dead registers, so it can be safely reused
+ for the next basic block without an expensive memset of the whole array
+ after resetting pbi->insn_num to 0. */
+
+static int *reg_deaths;
/* Forward declarations */
static int verify_wide_reg_1 (rtx *, void *);
static void verify_wide_reg (int, basic_block);
static void verify_local_live_at_start (regset, basic_block);
static void notice_stack_pointer_modification_1 (rtx, rtx, void *);
-static void notice_stack_pointer_modification (rtx);
+static void notice_stack_pointer_modification (void);
static void mark_reg (rtx, void *);
static void mark_regs_live_at_end (regset);
static void calculate_global_regs_live (sbitmap, sbitmap, int);
static void invalidate_mems_from_set (struct propagate_block_info *, rtx);
static void clear_log_links (sbitmap);
static int count_or_remove_death_notes_bb (basic_block, int);
-\f
-
-void
-check_function_return_warnings (void)
-{
- if (warn_missing_noreturn
- && !TREE_THIS_VOLATILE (cfun->decl)
- && EXIT_BLOCK_PTR->pred == NULL
- && (lang_missing_noreturn_ok_p
- && !lang_missing_noreturn_ok_p (cfun->decl)))
- warning ("function might be possible candidate for attribute `noreturn'");
-
- /* If we have a path to EXIT, then we do return. */
- if (TREE_THIS_VOLATILE (cfun->decl)
- && EXIT_BLOCK_PTR->pred != NULL)
- warning ("`noreturn' function does return");
-
- /* If the clobber_return_insn appears in some basic block, then we
- do reach the end without returning a value. */
- else if (warn_return_type
- && cfun->x_clobber_return_insn != NULL
- && EXIT_BLOCK_PTR->pred != NULL)
- {
- int max_uid = get_max_uid ();
-
- /* If clobber_return_insn was excised by jump1, then renumber_insns
- can make max_uid smaller than the number still recorded in our rtx.
- That's fine, since this is a quick way of verifying that the insn
- is no longer in the chain. */
- if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
- {
- rtx insn;
-
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (insn == cfun->x_clobber_return_insn)
- {
- warning ("control reaches end of non-void function");
- break;
- }
- }
- }
-}
+static void allocate_bb_life_data (void);
\f
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
note associated with the BLOCK. */
rtx insn;
/* Get the first instruction in the block. */
- insn = block->head;
+ insn = BB_HEAD (block);
if (insn == NULL_RTX)
return NULL_RTX;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
insn = NEXT_INSN (insn);
- if (!NOTE_INSN_BASIC_BLOCK_P (insn))
- abort ();
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
return NEXT_INSN (insn);
}
\f
-/* Perform data flow analysis.
- F is the first insn of the function; FLAGS is a set of PROP_* flags
- to be used in accumulating flow info. */
+/* Perform data flow analysis for the whole control flow graph.
+ FLAGS is a set of PROP_* flags to be used in accumulating flow info. */
void
-life_analysis (rtx f, FILE *file, int flags)
+life_analysis (int flags)
{
#ifdef ELIMINABLE_REGS
int i;
#ifdef CANNOT_CHANGE_MODE_CLASS
if (flags & PROP_REG_INFO)
- bitmap_initialize (&subregs_of_mode, 1);
+ init_subregs_of_mode ();
#endif
if (! optimize)
/* Always remove no-op moves. Do this before other processing so
that we don't have to keep re-scanning them. */
- delete_noop_moves (f);
+ delete_noop_moves ();
/* Some targets can emit simpler epilogues if they know that sp was
not ever modified during the function. After reload, of course,
we've already emitted the epilogue so there's no sense searching. */
if (! reload_completed)
- notice_stack_pointer_modification (f);
+ notice_stack_pointer_modification ();
/* Allocate and zero out data structures that will record the
data from lifetime analysis. */
allocate_bb_life_data ();
/* Find the set of registers live on function exit. */
- mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
+ mark_regs_live_at_end (EXIT_BLOCK_PTR->il.rtl->global_live_at_start);
/* "Update" life info from zero. It'd be nice to begin the
relaxation with just the exit and noreturn blocks, but that set
memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
}
update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
+ if (reg_deaths)
+ {
+ free (reg_deaths);
+ reg_deaths = NULL;
+ }
/* Clean up. */
if (optimize && (flags & PROP_SCAN_DEAD_STORES))
end_alias_analysis ();
- if (file)
- dump_flow_info (file);
-
- free_basic_block_vars (1);
+ if (dump_file)
+ dump_flow_info (dump_file, dump_flags);
- /* Removing dead insns should've made jumptables really dead. */
+ /* Removing dead insns should have made jumptables really dead. */
delete_dead_jumptables ();
}
rtx x = *px;
unsigned int regno = *(int *) pregno;
- if (GET_CODE (x) == REG && REGNO (x) == regno)
+ if (REG_P (x) && REGNO (x) == regno)
{
if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
return 2;
static void
verify_wide_reg (int regno, basic_block bb)
{
- rtx head = bb->head, end = bb->end;
+ rtx head = BB_HEAD (bb), end = BB_END (bb);
while (1)
{
break;
head = NEXT_INSN (head);
}
-
- if (rtl_dump_file)
+ if (dump_file)
{
- fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
- dump_bb (bb, rtl_dump_file);
+ fprintf (dump_file, "Register %d died unexpectedly.\n", regno);
+ dump_bb (bb, dump_file, 0);
}
- abort ();
+ fatal_error ("internal consistency failure");
}
/* A subroutine of update_life_info. Verify that there are no untoward
{
/* After reload, there are no pseudos, nor subregs of multi-word
registers. The regsets should exactly match. */
- if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
+ if (! REG_SET_EQUAL_P (new_live_at_start,
+ bb->il.rtl->global_live_at_start))
{
- if (rtl_dump_file)
+ if (dump_file)
{
- fprintf (rtl_dump_file,
+ fprintf (dump_file,
"live_at_start mismatch in bb %d, aborting\nNew:\n",
bb->index);
- debug_bitmap_file (rtl_dump_file, new_live_at_start);
- fputs ("Old:\n", rtl_dump_file);
- dump_bb (bb, rtl_dump_file);
+ debug_bitmap_file (dump_file, new_live_at_start);
+ fputs ("Old:\n", dump_file);
+ dump_bb (bb, dump_file, 0);
}
- abort ();
+ fatal_error ("internal consistency failure");
}
}
else
{
- int i;
+ unsigned i;
+ reg_set_iterator rsi;
/* Find the set of changed registers. */
- XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
+ XOR_REG_SET (new_live_at_start, bb->il.rtl->global_live_at_start);
- EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
+ EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i, rsi)
{
/* No registers should die. */
- if (REGNO_REG_SET_P (bb->global_live_at_start, i))
+ if (REGNO_REG_SET_P (bb->il.rtl->global_live_at_start, i))
{
- if (rtl_dump_file)
+ if (dump_file)
{
- fprintf (rtl_dump_file,
+ fprintf (dump_file,
"Register %d died unexpectedly.\n", i);
- dump_bb (bb, rtl_dump_file);
+ dump_bb (bb, dump_file, 0);
}
- abort ();
+ fatal_error ("internal consistency failure");
}
-
/* Verify that the now-live register is wider than word_mode. */
verify_wide_reg (i, bb);
- });
+ }
}
}
unless the caller resets it to zero. */
int
-update_life_info (sbitmap blocks, enum update_life_extent extent, int prop_flags)
+update_life_info (sbitmap blocks, enum update_life_extent extent,
+ int prop_flags)
{
regset tmp;
- regset_head tmp_head;
- int i;
+ unsigned i = 0;
int stabilized_prop_flags = prop_flags;
basic_block bb;
- tmp = INITIALIZE_REG_SET (tmp_head);
+ tmp = ALLOC_REG_SET (®_obstack);
ndead = 0;
+ if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
+ reg_deaths = XCNEWVEC (int, max_regno);
+
timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
? TV_LIFE_UPDATE : TV_LIFE);
/* Changes to the CFG are only allowed when
doing a global update for the entire CFG. */
- if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
- && (extent == UPDATE_LIFE_LOCAL || blocks))
- abort ();
+ gcc_assert (!(prop_flags & PROP_ALLOW_CFG_CHANGES)
+ || (extent != UPDATE_LIFE_LOCAL && !blocks));
/* For a global update, we go through the relaxation process again. */
if (extent != UPDATE_LIFE_LOCAL)
in turn may allow for further dead code detection / removal. */
FOR_EACH_BB_REVERSE (bb)
{
- COPY_REG_SET (tmp, bb->global_live_at_end);
+ COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
changed |= propagate_block (bb, tmp, NULL, NULL,
prop_flags & (PROP_SCAN_DEAD_CODE
| PROP_SCAN_DEAD_STORES
/* We repeat regardless of what cleanup_cfg says. If there were
instructions deleted above, that might have been only a
- partial improvement (see MAX_MEM_SET_LIST_LEN usage).
+ partial improvement (see PARAM_MAX_FLOW_MEMORY_LOCATIONS usage).
Further improvement may be possible. */
cleanup_cfg (CLEANUP_EXPENSIVE);
/* Zap the life information from the last round. If we don't
do this, we can wind up with registers that no longer appear
- in the code being marked live at entry, which twiggs bogus
- warnings from regno_uninitialized. */
+ in the code being marked live at entry. */
FOR_EACH_BB (bb)
{
- CLEAR_REG_SET (bb->global_live_at_start);
- CLEAR_REG_SET (bb->global_live_at_end);
+ CLEAR_REG_SET (bb->il.rtl->global_live_at_start);
+ CLEAR_REG_SET (bb->il.rtl->global_live_at_end);
}
}
/* If asked, remove notes from the blocks we'll update. */
if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
- count_or_remove_death_notes (blocks, 1);
+ count_or_remove_death_notes (blocks,
+ prop_flags & PROP_POST_REGSTACK ? -1 : 1);
}
/* Clear log links in case we are asked to (re)compute them. */
if (blocks)
{
- EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
+ sbitmap_iterator sbi;
+
+ EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
{
bb = BASIC_BLOCK (i);
-
- COPY_REG_SET (tmp, bb->global_live_at_end);
- propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
-
- if (extent == UPDATE_LIFE_LOCAL)
- verify_local_live_at_start (tmp, bb);
- });
+ if (bb)
+ {
+ /* The bitmap may be flawed in that one of the basic
+ blocks may have been deleted before you get here. */
+ COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
+ propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
+
+ if (extent == UPDATE_LIFE_LOCAL)
+ verify_local_live_at_start (tmp, bb);
+ }
+ };
}
else
{
FOR_EACH_BB_REVERSE (bb)
{
- COPY_REG_SET (tmp, bb->global_live_at_end);
+ COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
if (prop_flags & PROP_REG_INFO)
{
+ reg_set_iterator rsi;
+
/* The only pseudos that are live at the beginning of the function
are those that were not set anywhere in the function. local-alloc
doesn't know how to handle these correctly, so mark them as not
local to any one basic block. */
- EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
- FIRST_PSEUDO_REGISTER, i,
- { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
+ EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end,
+ FIRST_PSEUDO_REGISTER, i, rsi)
+ REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
/* We have a problem with any pseudoreg that lives across the setjmp.
ANSI says that if a user variable does not change in value between
that hard reg where this pseudo is dead, thus clobbering the pseudo.
Conclusion: such a pseudo must not go in a hard reg. */
EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
- FIRST_PSEUDO_REGISTER, i,
- {
- if (regno_reg_rtx[i] != 0)
- {
- REG_LIVE_LENGTH (i) = -1;
- REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
- }
- });
+ FIRST_PSEUDO_REGISTER, i, rsi)
+ {
+ if (regno_reg_rtx[i] != 0)
+ {
+ REG_LIVE_LENGTH (i) = -1;
+ REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
+ }
+ }
+ }
+ if (reg_deaths)
+ {
+ free (reg_deaths);
+ reg_deaths = NULL;
}
timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
? TV_LIFE_UPDATE : TV_LIFE);
- if (ndead && rtl_dump_file)
- fprintf (rtl_dump_file, "deleted %i dead insns\n", ndead);
+ if (ndead && dump_file)
+ fprintf (dump_file, "deleted %i dead insns\n", ndead);
return ndead;
}
sbitmap_zero (update_life_blocks);
FOR_EACH_BB (bb)
{
- if (extent == UPDATE_LIFE_LOCAL)
+ if (bb->flags & BB_DIRTY)
{
- if (bb->flags & BB_DIRTY)
- {
- SET_BIT (update_life_blocks, bb->index);
- n++;
- }
- }
- else
- {
- /* ??? Bootstrap with -march=pentium4 fails to terminate
- with only a partial life update. */
SET_BIT (update_life_blocks, bb->index);
- if (bb->flags & BB_DIRTY)
- n++;
+ n++;
}
}
return retval;
}
-/* Free the variables allocated by find_basic_blocks.
-
- KEEP_HEAD_END_P is nonzero if basic_block_info is not to be freed. */
+/* Free the variables allocated by find_basic_blocks. */
void
-free_basic_block_vars (int keep_head_end_p)
+free_basic_block_vars (void)
{
- if (! keep_head_end_p)
+ if (basic_block_info)
{
- if (basic_block_info)
- {
- clear_edges ();
- VARRAY_FREE (basic_block_info);
- }
- n_basic_blocks = 0;
- last_basic_block = 0;
-
- ENTRY_BLOCK_PTR->aux = NULL;
- ENTRY_BLOCK_PTR->global_live_at_end = NULL;
- EXIT_BLOCK_PTR->aux = NULL;
- EXIT_BLOCK_PTR->global_live_at_start = NULL;
+ clear_edges ();
+ basic_block_info = NULL;
}
+ n_basic_blocks = 0;
+ last_basic_block = 0;
+ n_edges = 0;
+
+ label_to_block_map = NULL;
+
+ ENTRY_BLOCK_PTR->aux = NULL;
+ ENTRY_BLOCK_PTR->il.rtl->global_live_at_end = NULL;
+ EXIT_BLOCK_PTR->aux = NULL;
+ EXIT_BLOCK_PTR->il.rtl->global_live_at_start = NULL;
}
/* Delete any insns that copy a register to itself. */
int
-delete_noop_moves (rtx f ATTRIBUTE_UNUSED)
+delete_noop_moves (void)
{
rtx insn, next;
basic_block bb;
FOR_EACH_BB (bb)
{
- for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
+ for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
{
next = NEXT_INSN (insn);
if (INSN_P (insn) && noop_move_p (insn))
}
}
}
- if (nnoops && rtl_dump_file)
- fprintf (rtl_dump_file, "deleted %i noop moves", nnoops);
+
+ if (nnoops && dump_file)
+ fprintf (dump_file, "deleted %i noop moves\n", nnoops);
+
return nnoops;
}
void
delete_dead_jumptables (void)
{
- rtx insn, next;
- for (insn = get_insns (); insn; insn = next)
+ basic_block bb;
+
+ /* A dead jump table does not belong to any basic block. Scan insns
+ between two adjacent basic blocks. */
+ FOR_EACH_BB (bb)
{
- next = NEXT_INSN (insn);
- if (GET_CODE (insn) == CODE_LABEL
- && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
- && GET_CODE (next) == JUMP_INSN
- && (GET_CODE (PATTERN (next)) == ADDR_VEC
- || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ rtx insn, next;
+
+ for (insn = NEXT_INSN (BB_END (bb));
+ insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
+ insn = next)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
- delete_insn (NEXT_INSN (insn));
- delete_insn (insn);
- next = NEXT_INSN (next);
+ next = NEXT_INSN (insn);
+ if (LABEL_P (insn)
+ && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
+ && JUMP_P (next)
+ && (GET_CODE (PATTERN (next)) == ADDR_VEC
+ || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ {
+ rtx label = insn, jump = next;
+
+ if (dump_file)
+ fprintf (dump_file, "Dead jumptable %i removed\n",
+ INSN_UID (insn));
+
+ next = NEXT_INSN (next);
+ delete_insn (jump);
+ delete_insn (label);
+ }
}
}
}
/* The stack pointer is only modified indirectly as the result
of a push until later in flow. See the comments in rtl.texi
regarding Embedded Side-Effects on Addresses. */
- || (GET_CODE (x) == MEM
- && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
+ || (MEM_P (x)
+ && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
&& XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
current_function_sp_is_unchanging = 0;
}
static void
-notice_stack_pointer_modification (rtx f)
+notice_stack_pointer_modification (void)
{
+ basic_block bb;
rtx insn;
/* Assume that the stack pointer is unchanging if alloca hasn't
if (! current_function_sp_is_unchanging)
return;
- for (insn = f; insn; insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- {
- /* Check if insn modifies the stack pointer. */
- note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
- NULL);
- if (! current_function_sp_is_unchanging)
- return;
- }
- }
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ {
+ if (INSN_P (insn))
+ {
+ /* Check if insn modifies the stack pointer. */
+ note_stores (PATTERN (insn),
+ notice_stack_pointer_modification_1,
+ NULL);
+ if (! current_function_sp_is_unchanging)
+ return;
+ }
+ }
}
/* Mark a register in SET. Hard registers in large modes get all
regset set = (regset) xset;
int regno = REGNO (reg);
- if (GET_MODE (reg) == BLKmode)
- abort ();
+ gcc_assert (GET_MODE (reg) != BLKmode);
SET_REGNO_REG_SET (set, regno);
if (regno < FIRST_PSEUDO_REGISTER)
{
- int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
+ int n = hard_regno_nregs[regno][GET_MODE (reg)];
while (--n > 0)
SET_REGNO_REG_SET (set, regno + n);
}
{
basic_block *queue, *qhead, *qtail, *qend, bb;
regset tmp, new_live_at_end, invalidated_by_call;
- regset_head tmp_head, invalidated_by_call_head;
- regset_head new_live_at_end_head;
- int i;
+ regset registers_made_dead;
+ bool failure_strategy_required = false;
+ int *block_accesses;
+
+ /* The registers that are modified within this block. */
+ regset *local_sets;
+
+ /* The registers that are conditionally modified within this block.
+ In other words, regs that are set only as part of a COND_EXEC. */
+ regset *cond_local_sets;
+
+ unsigned int i;
/* Some passes used to forget clear aux field of basic block causing
sick behavior here. */
#ifdef ENABLE_CHECKING
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
- if (bb->aux)
- abort ();
+ gcc_assert (!bb->aux);
#endif
- tmp = INITIALIZE_REG_SET (tmp_head);
- new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
- invalidated_by_call = INITIALIZE_REG_SET (invalidated_by_call_head);
+ tmp = ALLOC_REG_SET (®_obstack);
+ new_live_at_end = ALLOC_REG_SET (®_obstack);
+ invalidated_by_call = ALLOC_REG_SET (®_obstack);
+ registers_made_dead = ALLOC_REG_SET (®_obstack);
/* Inconveniently, this is only readily available in hard reg set form. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
SET_REGNO_REG_SET (invalidated_by_call, i);
- /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
- because the `head == tail' style test for an empty queue doesn't
- work with a full queue. */
- queue = xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
+ /* Allocate space for the sets of local properties. */
+ local_sets = XCNEWVEC (bitmap, last_basic_block);
+ cond_local_sets = XCNEWVEC (bitmap, last_basic_block);
+
+ /* Create a worklist. Allocate an extra slot for the `head == tail'
+ style test for an empty queue doesn't work with a full queue. */
+ queue = XNEWVEC (basic_block, n_basic_blocks + 1);
qtail = queue;
- qhead = qend = queue + n_basic_blocks + 2;
+ qhead = qend = queue + n_basic_blocks;
/* Queue the blocks set in the initial mask. Do this in reverse block
number order so that we are more likely for the first round to do
}
}
+ block_accesses = XCNEWVEC (int, last_basic_block);
+
/* We clean aux when we remove the initially-enqueued bbs, but we
don't enqueue ENTRY and EXIT initially, so clean them upfront and
unconditionally. */
from GLOBAL_LIVE_AT_START. In the former case, the register
could go away only if it disappeared from GLOBAL_LIVE_AT_START
for one of the successor blocks. By induction, that cannot
- occur. */
+ occur.
+
+ ??? This reasoning doesn't work if we start from non-empty initial
+ GLOBAL_LIVE_AT_START sets. And there are actually two problems:
+ 1) Updating may not terminate (endless oscillation).
+ 2) Even if it does (and it usually does), the resulting information
+ may be inaccurate. Consider for example the following case:
+
+ a = ...;
+ while (...) {...} -- 'a' not mentioned at all
+ ... = a;
+
+ If the use of 'a' is deleted between two calculations of liveness
+ information and the initial sets are not cleared, the information
+ about a's liveness will get stuck inside the loop and the set will
+ appear not to be dead.
+
+ We do not attempt to solve 2) -- the information is conservatively
+ correct (i.e. we never claim that something live is dead) and the
+ amount of optimization opportunities missed due to this problem is
+ not significant.
+
+ 1) is more serious. In order to fix it, we monitor the number of times
+ each block is processed. Once one of the blocks has been processed more
+ times than the maximum number of rounds, we use the following strategy:
+ When a register disappears from one of the sets, we add it to a MADE_DEAD
+ set, remove all registers in this set from all GLOBAL_LIVE_AT_* sets and
+ add the blocks with changed sets into the queue. Thus we are guaranteed
+ to terminate (the worst case corresponds to all registers in MADE_DEAD,
+ in which case the original reasoning above is valid), but in general we
+ only fix up a few offending registers.
+
+ The maximum number of rounds for computing liveness is the largest of
+ MAX_LIVENESS_ROUNDS and the latest loop depth count for this function. */
+
while (qhead != qtail)
{
int rescan, changed;
basic_block bb;
edge e;
+ edge_iterator ei;
bb = *qhead++;
if (qhead == qend)
qhead = queue;
bb->aux = NULL;
+ /* Should we start using the failure strategy? */
+ if (bb != ENTRY_BLOCK_PTR)
+ {
+ int max_liveness_rounds =
+ MAX (MAX_LIVENESS_ROUNDS, cfun->max_loop_depth);
+
+ block_accesses[bb->index]++;
+ if (block_accesses[bb->index] > max_liveness_rounds)
+ failure_strategy_required = true;
+ }
+
/* Begin by propagating live_at_start from the successor blocks. */
CLEAR_REG_SET (new_live_at_end);
- if (bb->succ)
- for (e = bb->succ; e; e = e->succ_next)
+ if (EDGE_COUNT (bb->succs) > 0)
+ FOR_EACH_EDGE (e, ei, bb->succs)
{
basic_block sb = e->dest;
/* ??? Abnormal call edges ignored for the moment, as this gets
confused by sibling call edges, which crashes reg-stack. */
if (e->flags & EDGE_EH)
- {
- bitmap_operation (tmp, sb->global_live_at_start,
- invalidated_by_call, BITMAP_AND_COMPL);
- IOR_REG_SET (new_live_at_end, tmp);
- }
+ bitmap_ior_and_compl_into (new_live_at_end,
+ sb->il.rtl->global_live_at_start,
+ invalidated_by_call);
else
- IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
+ IOR_REG_SET (new_live_at_end, sb->il.rtl->global_live_at_start);
/* If a target saves one register in another (instead of on
the stack) the save register will need to be live for EH. */
if (bb == ENTRY_BLOCK_PTR)
{
- COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
+ COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
continue;
}
/* On our first pass through this block, we'll go ahead and continue.
- Recognize first pass by local_set NULL. On subsequent passes, we
- get to skip out early if live_at_end wouldn't have changed. */
+ Recognize first pass by checking if local_set is NULL for this
+ basic block. On subsequent passes, we get to skip out early if
+ live_at_end wouldn't have changed. */
- if (bb->local_set == NULL)
+ if (local_sets[bb->index] == NULL)
{
- bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ local_sets[bb->index] = ALLOC_REG_SET (®_obstack);
+ cond_local_sets[bb->index] = ALLOC_REG_SET (®_obstack);
rescan = 1;
}
else
rescan the block. This wouldn't be necessary if we had
precalculated local_live, however with PROP_SCAN_DEAD_CODE
local_live is really dependent on live_at_end. */
- CLEAR_REG_SET (tmp);
- rescan = bitmap_operation (tmp, bb->global_live_at_end,
- new_live_at_end, BITMAP_AND_COMPL);
+ rescan = bitmap_intersect_compl_p (bb->il.rtl->global_live_at_end,
+ new_live_at_end);
- if (! rescan)
+ if (!rescan)
{
- /* If any of the registers in the new live_at_end set are
- conditionally set in this basic block, we must rescan.
- This is because conditional lifetimes at the end of the
- block do not just take the live_at_end set into account,
- but also the liveness at the start of each successor
- block. We can miss changes in those sets if we only
- compare the new live_at_end against the previous one. */
- CLEAR_REG_SET (tmp);
- rescan = bitmap_operation (tmp, new_live_at_end,
- bb->cond_local_set, BITMAP_AND);
+ regset cond_local_set;
+
+ /* If any of the registers in the new live_at_end set are
+ conditionally set in this basic block, we must rescan.
+ This is because conditional lifetimes at the end of the
+ block do not just take the live_at_end set into
+ account, but also the liveness at the start of each
+ successor block. We can miss changes in those sets if
+ we only compare the new live_at_end against the
+ previous one. */
+ cond_local_set = cond_local_sets[bb->index];
+ rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
}
- if (! rescan)
+ if (!rescan)
{
+ regset local_set;
+
/* Find the set of changed bits. Take this opportunity
to notice that this set is empty and early out. */
- CLEAR_REG_SET (tmp);
- changed = bitmap_operation (tmp, bb->global_live_at_end,
- new_live_at_end, BITMAP_XOR);
- if (! changed)
+ bitmap_xor (tmp, bb->il.rtl->global_live_at_end, new_live_at_end);
+ if (bitmap_empty_p (tmp))
continue;
-
- /* If any of the changed bits overlap with local_set,
- we'll have to rescan the block. Detect overlap by
- the AND with ~local_set turning off bits. */
- rescan = bitmap_operation (tmp, tmp, bb->local_set,
- BITMAP_AND_COMPL);
+
+ /* If any of the changed bits overlap with local_sets[bb],
+ we'll have to rescan the block. */
+ local_set = local_sets[bb->index];
+ rescan = bitmap_intersect_p (tmp, local_set);
}
}
{
/* Add to live_at_start the set of all registers in
new_live_at_end that aren't in the old live_at_end. */
-
- bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
- BITMAP_AND_COMPL);
- COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
-
- changed = bitmap_operation (bb->global_live_at_start,
- bb->global_live_at_start,
- tmp, BITMAP_IOR);
+
+ changed = bitmap_ior_and_compl_into (bb->il.rtl->global_live_at_start,
+ new_live_at_end,
+ bb->il.rtl->global_live_at_end);
+ COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
if (! changed)
continue;
}
else
{
- COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
+ COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
/* Rescan the block insn by insn to turn (a copy of) live_at_end
into live_at_start. */
- propagate_block (bb, new_live_at_end, bb->local_set,
- bb->cond_local_set, flags);
+ propagate_block (bb, new_live_at_end,
+ local_sets[bb->index],
+ cond_local_sets[bb->index],
+ flags);
/* If live_at start didn't change, no need to go farther. */
- if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
+ if (REG_SET_EQUAL_P (bb->il.rtl->global_live_at_start,
+ new_live_at_end))
continue;
- COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
+ if (failure_strategy_required)
+ {
+ /* Get the list of registers that were removed from the
+ bb->global_live_at_start set. */
+ bitmap_and_compl (tmp, bb->il.rtl->global_live_at_start,
+ new_live_at_end);
+ if (!bitmap_empty_p (tmp))
+ {
+ bool pbb_changed;
+ basic_block pbb;
+
+ /* It should not happen that one of the registers we have
+ removed last time disappears again before any other
+ register does. */
+ pbb_changed = bitmap_ior_into (registers_made_dead, tmp);
+ gcc_assert (pbb_changed);
+
+ /* Now remove the registers from all sets. */
+ FOR_EACH_BB (pbb)
+ {
+ pbb_changed = false;
+
+ pbb_changed
+ |= bitmap_and_compl_into
+ (pbb->il.rtl->global_live_at_start,
+ registers_made_dead);
+ pbb_changed
+ |= bitmap_and_compl_into
+ (pbb->il.rtl->global_live_at_end,
+ registers_made_dead);
+ if (!pbb_changed)
+ continue;
+
+ /* Note the (possible) change. */
+ if (blocks_out)
+ SET_BIT (blocks_out, pbb->index);
+
+ /* Make sure we really rescan the block. */
+ if (local_sets[pbb->index])
+ {
+ FREE_REG_SET (local_sets[pbb->index]);
+ FREE_REG_SET (cond_local_sets[pbb->index]);
+ local_sets[pbb->index] = 0;
+ }
+
+ /* Add it to the queue. */
+ if (pbb->aux == NULL)
+ {
+ *qtail++ = pbb;
+ if (qtail == qend)
+ qtail = queue;
+ pbb->aux = pbb;
+ }
+ }
+ continue;
+ }
+ } /* end of failure_strategy_required */
+
+ COPY_REG_SET (bb->il.rtl->global_live_at_start, new_live_at_end);
}
/* Queue all predecessors of BB so that we may re-examine
their live_at_end. */
- for (e = bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
{
basic_block pb = e->src;
+
+ gcc_assert ((e->flags & EDGE_FAKE) == 0);
+
if (pb->aux == NULL)
{
*qtail++ = pb;
FREE_REG_SET (tmp);
FREE_REG_SET (new_live_at_end);
FREE_REG_SET (invalidated_by_call);
+ FREE_REG_SET (registers_made_dead);
if (blocks_out)
{
- EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
+ sbitmap_iterator sbi;
+
+ EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i, sbi)
{
basic_block bb = BASIC_BLOCK (i);
- FREE_REG_SET (bb->local_set);
- FREE_REG_SET (bb->cond_local_set);
- });
+ FREE_REG_SET (local_sets[bb->index]);
+ FREE_REG_SET (cond_local_sets[bb->index]);
+ };
}
else
{
FOR_EACH_BB (bb)
{
- FREE_REG_SET (bb->local_set);
- FREE_REG_SET (bb->cond_local_set);
+ FREE_REG_SET (local_sets[bb->index]);
+ FREE_REG_SET (cond_local_sets[bb->index]);
}
}
+ free (block_accesses);
free (queue);
+ free (cond_local_sets);
+ free (local_sets);
}
\f
case ZERO_EXTRACT:
case SIGN_EXTRACT:
case STRICT_LOW_PART:
- if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
+ if (REG_P (XEXP (*ptr, 0)) && REGNO (XEXP (*ptr, 0)) == reg)
{
param->retval = XEXP (*ptr, 0);
return 1;
break;
case SUBREG:
- if (GET_CODE (SUBREG_REG (*ptr)) == REG
+ if (REG_P (SUBREG_REG (*ptr))
&& REGNO (SUBREG_REG (*ptr)) == reg)
{
param->retval = SUBREG_REG (*ptr);
registers whose value is unknown, and may contain some kind of sticky
bits we don't want. */
-int
+static int
initialize_uninitialized_subregs (void)
{
rtx insn;
edge e;
- int reg, did_something = 0;
+ unsigned reg, did_something = 0;
find_regno_partial_param param;
+ edge_iterator ei;
- for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
{
basic_block bb = e->dest;
- regset map = bb->global_live_at_start;
- EXECUTE_IF_SET_IN_REG_SET (map,
- FIRST_PSEUDO_REGISTER, reg,
+ regset map = bb->il.rtl->global_live_at_start;
+ reg_set_iterator rsi;
+
+ EXECUTE_IF_SET_IN_REG_SET (map, FIRST_PSEUDO_REGISTER, reg, rsi)
{
int uid = REGNO_FIRST_UID (reg);
rtx i;
did_something = 1;
}
}
- });
+ }
}
if (did_something)
/* Subroutines of life analysis. */
/* Allocate the permanent data structures that represent the results
- of life analysis. Not static since used also for stupid life analysis. */
+ of life analysis. */
-void
+static void
allocate_bb_life_data (void)
{
basic_block bb;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
- bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+ bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
}
- regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ regs_live_at_setjmp = ALLOC_REG_SET (&reg_obstack);
}
void
int i;
max_regno = max_reg_num ();
+ gcc_assert (!reg_deaths);
+ reg_deaths = XCNEWVEC (int, max_regno);
/* Recalculate the register space, in case it has grown. Old style
vector oriented regsets would set regset_{size,bytes} here also. */
REG_N_REFS (i) = 0;
REG_N_DEATHS (i) = 0;
REG_N_CALLS_CROSSED (i) = 0;
+ REG_N_THROWING_CALLS_CROSSED (i) = 0;
REG_LIVE_LENGTH (i) = 0;
REG_FREQ (i) = 0;
REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
real good way to fix up the reference to the deleted label
when the label is deleted, so we just allow it here. */
- if (inote && GET_CODE (inote) == CODE_LABEL)
+ if (inote && LABEL_P (inote))
{
rtx label = XEXP (inote, 0);
rtx next;
jump following it, but not the label itself. */
if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
&& (next = next_nonnote_insn (label)) != NULL
- && GET_CODE (next) == JUMP_INSN
+ && JUMP_P (next)
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
{
int insn_is_dead = 0;
int libcall_is_dead = 0;
rtx note;
- int i;
+ unsigned i;
if (! INSN_P (insn))
return prev;
fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
/* Record sets. Do this even for dead instructions, since they
- would have killed the values if they hadn't been deleted. */
+ would have killed the values if they hadn't been deleted. To
+ be consistent, we also have to emit a clobber when we delete
+ an insn that clobbers a live register. */
+ pbi->flags |= PROP_DEAD_INSN;
mark_set_regs (pbi, PATTERN (insn), insn);
+ pbi->flags &= ~PROP_DEAD_INSN;
/* CC0 is now known to be dead. Either this insn used it,
in which case it doesn't anymore, or clobbered it,
pbi->cc0_live = 0;
if (libcall_is_dead)
- prev = propagate_block_delete_libcall ( insn, note);
+ prev = propagate_block_delete_libcall (insn, note);
else
{
/* Does this instruction increment or decrement a register? */
if ((flags & PROP_AUTOINC)
&& x != 0
- && GET_CODE (SET_DEST (x)) == REG
+ && REG_P (SET_DEST (x))
&& (GET_CODE (SET_SRC (x)) == PLUS
|| GET_CODE (SET_SRC (x)) == MINUS)
&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
&& GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
&& XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
&& GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
- /* We have an insn to pop a constant amount off the stack.
- (Such insns use PLUS regardless of the direction of the stack,
- and any insn to adjust the stack by a constant is always a pop.)
- These insns, if not dead stores, have no effect on life, though
- they do have an effect on the memory stores we are tracking. */
- invalidate_mems_from_set (pbi, stack_pointer_rtx);
+ {
+ /* We have an insn to pop a constant amount off the stack.
+ (Such insns use PLUS regardless of the direction of the stack,
+ and any insn to adjust the stack by a constant is always a pop
+ or part of a push.)
+ These insns, if not dead stores, have no effect on life, though
+ they do have an effect on the memory stores we are tracking. */
+ invalidate_mems_from_set (pbi, stack_pointer_rtx);
+ /* Still, we need to update local_set, lest ifcvt.c:dead_or_predicable
+ concludes that the stack pointer is not modified. */
+ mark_set_regs (pbi, PATTERN (insn), insn);
+ }
else
{
- rtx note;
/* Any regs live at the time of a call instruction must not go
in a register clobbered by calls. Find all regs now live and
record this for them. */
- if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
- EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
- { REG_N_CALLS_CROSSED (i)++; });
+ if (CALL_P (insn) && (flags & PROP_REG_INFO))
+ {
+ reg_set_iterator rsi;
+ EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
+ REG_N_CALLS_CROSSED (i)++;
+ if (can_throw_internal (insn))
+ EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
+ REG_N_THROWING_CALLS_CROSSED (i)++;
+ }
/* Record sets. Do this even for dead instructions, since they
would have killed the values if they hadn't been deleted. */
mark_set_regs (pbi, PATTERN (insn), insn);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
regset live_at_end;
bool sibcall_p;
except for return values. */
sibcall_p = SIBLING_CALL_P (insn);
- live_at_end = EXIT_BLOCK_PTR->global_live_at_start;
+ live_at_end = EXIT_BLOCK_PTR->il.rtl->global_live_at_start;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
&& ! (sibcall_p
current_function_return_rtx,
(rtx *) 0)))
{
+ enum rtx_code code = global_regs[i] ? SET : CLOBBER;
/* We do not want REG_UNUSED notes for these registers. */
- mark_set_1 (pbi, CLOBBER, regno_reg_rtx[i], cond, insn,
+ mark_set_1 (pbi, code, regno_reg_rtx[i], cond, insn,
pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
}
}
/* Record uses. */
if (! insn_is_dead)
mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
- if ((flags & PROP_EQUAL_NOTES)
- && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
- || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
- mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);
/* Sometimes we may have inserted something before INSN (such as a move)
when we make an auto-inc. So ensure we will scan those insns. */
prev = PREV_INSN (insn);
#endif
- if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
+ if (! insn_is_dead && CALL_P (insn))
{
int i;
rtx note, cond;
mark_used_regs (pbi, XEXP (XEXP (note, 0), 0), cond, insn);
/* The stack ptr is used (honorarily) by a CALL insn. */
+ if ((flags & PROP_REG_INFO)
+ && !REGNO_REG_SET_P (pbi->reg_live, STACK_POINTER_REGNUM))
+ reg_deaths[STACK_POINTER_REGNUM] = pbi->insn_num;
SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
/* Calls may also reference any of the global registers,
}
}
- /* On final pass, update counts of how many insns in which each reg
- is live. */
- if (flags & PROP_REG_INFO)
- EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
- { REG_LIVE_LENGTH (i)++; });
+ pbi->insn_num++;
return prev;
}
init_propagate_block_info (basic_block bb, regset live, regset local_set,
regset cond_local_set, int flags)
{
- struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
+ struct propagate_block_info *pbi = XNEW (struct propagate_block_info);
pbi->bb = bb;
pbi->reg_live = live;
pbi->cond_local_set = cond_local_set;
pbi->cc0_live = 0;
pbi->flags = flags;
+ pbi->insn_num = 0;
if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
- pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
+ pbi->reg_next_use = XCNEWVEC (rtx, max_reg_num ());
else
pbi->reg_next_use = NULL;
- pbi->new_set = BITMAP_XMALLOC ();
+ pbi->new_set = BITMAP_ALLOC (NULL);
#ifdef HAVE_conditional_execution
pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
free_reg_cond_life_info);
- pbi->reg_cond_reg = BITMAP_XMALLOC ();
+ pbi->reg_cond_reg = BITMAP_ALLOC (NULL);
/* If this block ends in a conditional branch, for each register
live from one side of the branch and not the other, record the
register as conditionally dead. */
- if (GET_CODE (bb->end) == JUMP_INSN
- && any_condjump_p (bb->end))
+ if (JUMP_P (BB_END (bb))
+ && any_condjump_p (BB_END (bb)))
{
- regset_head diff_head;
- regset diff = INITIALIZE_REG_SET (diff_head);
+ regset diff = ALLOC_REG_SET (&reg_obstack);
basic_block bb_true, bb_false;
- int i;
+ unsigned i;
/* Identify the successor blocks. */
- bb_true = bb->succ->dest;
- if (bb->succ->succ_next != NULL)
+ bb_true = EDGE_SUCC (bb, 0)->dest;
+ if (!single_succ_p (bb))
{
- bb_false = bb->succ->succ_next->dest;
+ bb_false = EDGE_SUCC (bb, 1)->dest;
- if (bb->succ->flags & EDGE_FALLTHRU)
+ if (EDGE_SUCC (bb, 0)->flags & EDGE_FALLTHRU)
{
basic_block t = bb_false;
bb_false = bb_true;
bb_true = t;
}
- else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
- abort ();
+ else
+ gcc_assert (EDGE_SUCC (bb, 1)->flags & EDGE_FALLTHRU);
}
else
{
/* This can happen with a conditional jump to the next insn. */
- if (JUMP_LABEL (bb->end) != bb_true->head)
- abort ();
+ gcc_assert (JUMP_LABEL (BB_END (bb)) == BB_HEAD (bb_true));
/* Simplest way to do nothing. */
bb_false = bb_true;
}
/* Compute which register lead different lives in the successors. */
- if (bitmap_operation (diff, bb_true->global_live_at_start,
- bb_false->global_live_at_start, BITMAP_XOR))
- {
+ bitmap_xor (diff, bb_true->il.rtl->global_live_at_start,
+ bb_false->il.rtl->global_live_at_start);
+
+ if (!bitmap_empty_p (diff))
+ {
/* Extract the condition from the branch. */
- rtx set_src = SET_SRC (pc_set (bb->end));
+ rtx set_src = SET_SRC (pc_set (BB_END (bb)));
rtx cond_true = XEXP (set_src, 0);
rtx reg = XEXP (cond_true, 0);
+ enum rtx_code inv_cond;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
/* We can only track conditional lifetimes if the condition is
- in the form of a comparison of a register against zero.
- If the condition is more complex than that, then it is safe
- not to record any information. */
- if (GET_CODE (reg) == REG
+ in the form of a reversible comparison of a register against
+ zero. If the condition is more complex than that, then it is
+ safe not to record any information. */
+ inv_cond = reversed_comparison_code (cond_true, BB_END (bb));
+ if (inv_cond != UNKNOWN
+ && REG_P (reg)
&& XEXP (cond_true, 1) == const0_rtx)
{
rtx cond_false
- = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
+ = gen_rtx_fmt_ee (inv_cond,
GET_MODE (cond_true), XEXP (cond_true, 0),
XEXP (cond_true, 1));
+ reg_set_iterator rsi;
+
if (GET_CODE (XEXP (set_src, 1)) == PC)
{
rtx t = cond_false;
SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
/* For each such register, mark it conditionally dead. */
- EXECUTE_IF_SET_IN_REG_SET
- (diff, 0, i,
- {
- struct reg_cond_life_info *rcli;
- rtx cond;
-
- rcli = xmalloc (sizeof (*rcli));
-
- if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
- cond = cond_false;
- else
- cond = cond_true;
- rcli->condition = cond;
- rcli->stores = const0_rtx;
- rcli->orig_condition = cond;
-
- splay_tree_insert (pbi->reg_cond_dead, i,
- (splay_tree_value) rcli);
- });
+ EXECUTE_IF_SET_IN_REG_SET (diff, 0, i, rsi)
+ {
+ struct reg_cond_life_info *rcli;
+ rtx cond;
+
+ rcli = XNEW (struct reg_cond_life_info);
+
+ if (REGNO_REG_SET_P (bb_true->il.rtl->global_live_at_start,
+ i))
+ cond = cond_false;
+ else
+ cond = cond_true;
+ rcli->condition = cond;
+ rcli->stores = const0_rtx;
+ rcli->orig_condition = cond;
+
+ splay_tree_insert (pbi->reg_cond_dead, i,
+ (splay_tree_value) rcli);
+ }
}
}
&& (TYPE_RETURNS_STACK_DEPRESSED
(TREE_TYPE (current_function_decl))))
&& (flags & PROP_SCAN_DEAD_STORES)
- && (bb->succ == NULL
- || (bb->succ->succ_next == NULL
- && bb->succ->dest == EXIT_BLOCK_PTR
+ && (EDGE_COUNT (bb->succs) == 0
+ || (single_succ_p (bb)
+ && single_succ (bb) == EXIT_BLOCK_PTR
&& ! current_function_calls_eh_return)))
{
rtx insn, set;
- for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == INSN
+ for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
+ if (NONJUMP_INSN_P (insn)
&& (set = single_set (insn))
- && GET_CODE (SET_DEST (set)) == MEM)
+ && MEM_P (SET_DEST (set)))
{
rtx mem = SET_DEST (set);
rtx canon_mem = canon_rtx (mem);
{
free_EXPR_LIST_list (&pbi->mem_set_list);
- BITMAP_XFREE (pbi->new_set);
+ BITMAP_FREE (pbi->new_set);
#ifdef HAVE_conditional_execution
splay_tree_delete (pbi->reg_cond_dead);
- BITMAP_XFREE (pbi->reg_cond_reg);
+ BITMAP_FREE (pbi->reg_cond_reg);
#endif
+ if (pbi->flags & PROP_REG_INFO)
+ {
+ int num = pbi->insn_num;
+ unsigned i;
+ reg_set_iterator rsi;
+
+ EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
+ {
+ REG_LIVE_LENGTH (i) += num - reg_deaths[i];
+ reg_deaths[i] = 0;
+ }
+ }
if (pbi->reg_next_use)
free (pbi->reg_next_use);
if (flags & PROP_REG_INFO)
{
- int i;
+ unsigned i;
+ reg_set_iterator rsi;
/* Process the regs live at the end of the block.
Mark them as not local to any one basic block. */
- EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
- { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
+ EXECUTE_IF_SET_IN_REG_SET (live, 0, i, rsi)
+ REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
}
/* Scan the block an insn at a time from end to beginning. */
changed = 0;
- for (insn = bb->end;; insn = prev)
+ for (insn = BB_END (bb); ; insn = prev)
{
/* If this is a call to `setjmp' et al, warn if any
non-volatile datum is live. */
if ((flags & PROP_REG_INFO)
- && GET_CODE (insn) == CALL_INSN
+ && CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
else
changed |= NEXT_INSN (prev) != insn;
- if (insn == bb->head)
+ if (insn == BB_HEAD (bb))
break;
}
else if (volatile_refs_p (SET_SRC (x)))
return 0;
- if (GET_CODE (r) == MEM)
+ if (MEM_P (r))
{
rtx temp, canon_r;
rtx_equal_p does not check the alias set or flags, we also
must have the potential for them to conflict (anti_dependence). */
for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
- if (unchanging_anti_dependence (r, XEXP (temp, 0)))
+ if (anti_dependence (r, XEXP (temp, 0)))
{
rtx mem = XEXP (temp, 0);
|| GET_CODE (r) == ZERO_EXTRACT)
r = XEXP (r, 0);
- if (GET_CODE (r) == REG)
+ if (REG_P (r))
{
int regno = REGNO (r);
words are not needed. */
if (regno < FIRST_PSEUDO_REGISTER)
{
- int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
+ int n = hard_regno_nregs[regno][GET_MODE (r)];
while (--n > 0)
if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
}
/* A CLOBBER of a pseudo-register that is dead serves no purpose. That
- is not necessarily true for hard registers. */
- else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
- && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
- && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
- return 1;
-
- /* We do not check other CLOBBER or USE here. An insn consisting of just
- a CLOBBER or just a USE should not be deleted. */
+ is not necessarily true for hard registers until after reload. */
+ else if (code == CLOBBER)
+ {
+ if (REG_P (XEXP (x, 0))
+ && (REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
+ || reload_completed)
+ && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
+ return 1;
+ }
+
+ /* ??? A base USE is a historical relic. It ought not be needed anymore.
+ Instances where it is still used are either (1) temporary and the USE
+ escaped the pass, (2) cruft and the USE need not be emitted anymore,
+ or (3) hiding bugs elsewhere that are not properly representing data
+ flow. */
+
return 0;
}
{
rtx r = SET_SRC (x);
- if (GET_CODE (r) == REG)
+ if (REG_P (r) || GET_CODE (r) == SUBREG)
{
rtx call = XEXP (note, 0);
rtx call_pat;
int i;
/* Find the call insn. */
- while (call != insn && GET_CODE (call) != CALL_INSN)
+ while (call != insn && !CALL_P (call))
call = NEXT_INSN (call);
/* If there is none, do nothing special,
call_pat = XVECEXP (call_pat, 0, i);
}
- return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
+ if (! insn_dead_p (pbi, call_pat, 1, REG_NOTES (call)))
+ return 0;
+
+ while ((insn = PREV_INSN (insn)) != call)
+ {
+ if (! INSN_P (insn))
+ continue;
+ if (! insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn)))
+ return 0;
+ }
+ return 1;
}
}
- return 1;
-}
-
-/* Return 1 if register REGNO was used before it was set, i.e. if it is
- live at function entry. Don't count global register variables, variables
- in registers that can be used for function arg passing, or variables in
- fixed hard registers. */
-
-int
-regno_uninitialized (unsigned int regno)
-{
- if (n_basic_blocks == 0
- || (regno < FIRST_PSEUDO_REGISTER
- && (global_regs[regno]
- || fixed_regs[regno]
- || FUNCTION_ARG_REGNO_P (regno))))
- return 0;
-
- return REGNO_REG_SET_P (ENTRY_BLOCK_PTR->global_live_at_end, regno);
+ return 0;
}
/* 1 if register REGNO was alive at a place where `setjmp' was called
int
regno_clobbered_at_setjmp (int regno)
{
- if (n_basic_blocks == 0)
+ if (n_basic_blocks == NUM_FIXED_BLOCKS)
return 0;
return ((REG_N_SETS (regno) > 1
- || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->global_live_at_end, regno))
+ || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end,
+ regno))
&& REGNO_REG_SET_P (regs_live_at_setjmp, regno));
}
\f
}
}
- if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
+ if (pbi->mem_set_list_len < PARAM_VALUE (PARAM_MAX_FLOW_MEMORY_LOCATIONS))
{
#ifdef AUTO_INC_DEC
/* Store a copy of mem, otherwise the address may be
rtx x = *px;
struct propagate_block_info *pbi = data;
- if (GET_RTX_CLASS (GET_CODE (x)) == 'a')
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
{
invalidate_mems_from_set (pbi, XEXP (x, 0));
return -1;
return 0;
}
-/* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
+/* EXP is a REG or MEM. Remove any dependent entries from
+ pbi->mem_set_list. */
static void
invalidate_mems_from_set (struct propagate_block_info *pbi, rtx exp)
while (temp)
{
next = XEXP (temp, 1);
- if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
+ if ((REG_P (exp) && reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
+ /* When we get an EXP that is a mem here, we want to check if EXP
+ overlaps the *address* of any of the mems in the list (i.e. not
+ whether the mems actually overlap; that's done elsewhere). */
+ || (MEM_P (exp)
+ && reg_overlap_mentioned_p (exp, XEXP (XEXP (temp, 0), 0))))
{
/* Splice this entry out of the list. */
if (prev)
case SET:
if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
flags |= PROP_ASM_SCAN;
- /* Fall thru */
+ /* Fall through */
case CLOBBER:
mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
return;
{
int i;
- for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
+ /* We must scan forwards. If we have an asm, we need to set
+ the PROP_ASM_SCAN flag before scanning the clobbers. */
+ for (i = 0; i < XVECLEN (x, 0); i++)
{
rtx sub = XVECEXP (x, 0, i);
switch (code = GET_CODE (sub))
{
case COND_EXEC:
- if (cond != NULL_RTX)
- abort ();
+ gcc_assert (!cond);
cond = COND_EXEC_TEST (sub);
sub = COND_EXEC_CODE (sub);
mark_set:
if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
flags |= PROP_ASM_SCAN;
- /* Fall thru */
+ /* Fall through */
case CLOBBER:
mark_clob:
mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);
break;
+ case ASM_OPERANDS:
+ flags |= PROP_ASM_SCAN;
+ break;
+
default:
break;
}
flags);
return;
- case ZERO_EXTRACT:
case SIGN_EXTRACT:
+ /* SIGN_EXTRACT cannot be an lvalue. */
+ gcc_unreachable ();
+
+ case ZERO_EXTRACT:
case STRICT_LOW_PART:
/* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
do
reg = XEXP (reg, 0);
while (GET_CODE (reg) == SUBREG
|| GET_CODE (reg) == ZERO_EXTRACT
- || GET_CODE (reg) == SIGN_EXTRACT
|| GET_CODE (reg) == STRICT_LOW_PART);
- if (GET_CODE (reg) == MEM)
+ if (MEM_P (reg))
break;
not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
/* Fall through. */
case REG:
regno_last = regno_first = REGNO (reg);
if (regno_first < FIRST_PSEUDO_REGISTER)
- regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
+ regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
break;
case SUBREG:
- if (GET_CODE (SUBREG_REG (reg)) == REG)
+ if (REG_P (SUBREG_REG (reg)))
{
enum machine_mode outer_mode = GET_MODE (reg);
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
SUBREG_BYTE (reg),
outer_mode);
regno_last = (regno_first
- + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
+ + hard_regno_nregs[regno_first][outer_mode] - 1);
/* Since we've just adjusted the register number ranges, make
sure REG matches. Otherwise some_was_live will be clear
break;
}
- /* If this set is a MEM, then it kills any aliased writes.
+ /* If this set is a MEM, then it kills any aliased writes and any
+ other MEMs which use it.
If this set is a REG, then it kills any MEMs which use the reg. */
if (optimize && (flags & PROP_SCAN_DEAD_STORES))
{
- if (GET_CODE (reg) == REG)
+ if (REG_P (reg) || MEM_P (reg))
invalidate_mems_from_set (pbi, reg);
/* If the memory reference had embedded side effects (autoincrement
- address modes. Then we may need to kill some entries on the
+ address modes) then we may need to kill some entries on the
memory set list. */
- if (insn && GET_CODE (reg) == MEM)
+ if (insn && MEM_P (reg))
for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
- if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
+ if (MEM_P (reg) && ! side_effects_p (reg)
/* ??? With more effort we could track conditional memory life. */
&& ! cond)
add_to_mem_set_list (pbi, canon_rtx (reg));
}
- if (GET_CODE (reg) == REG
+ if (REG_P (reg)
&& ! (regno_first == FRAME_POINTER_REGNUM
&& (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
else
SET_REGNO_REG_SET (pbi->local_set, i);
}
- if (code != CLOBBER)
+ if (code != CLOBBER || needed_regno)
SET_REGNO_REG_SET (pbi->new_set, i);
some_was_live |= needed_regno;
in ASM_OPERANDs. If these registers get replaced,
we might wind up changing the semantics of the insn,
even if reload can make what appear to be valid
- assignments later. */
+ assignments later.
+
+ We don't build a LOG_LINK for global registers to
+ or from a function call. We don't want to let
+ combine think that it knows what is going on with
+ global registers. */
if (y && (BLOCK_NUM (y) == blocknum)
&& (regno_first >= FIRST_PSEUDO_REGISTER
- || asm_noperands (PATTERN (y)) < 0))
+ || (asm_noperands (PATTERN (y)) < 0
+ && ! ((CALL_P (insn)
+ || CALL_P (y))
+ && global_regs[regno_first]))))
LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
}
}
if (flags & PROP_REG_INFO)
REG_N_DEATHS (regno_first) += 1;
- if (flags & PROP_DEATH_NOTES)
+ if (flags & PROP_DEATH_NOTES
+#ifdef STACK_REGS
+ && (!(flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
+ LAST_STACK_REG))
+#endif
+ )
{
/* Note that dead stores have already been deleted
when possible. If we get here, we have found a
}
else
{
- if (flags & PROP_DEATH_NOTES)
+ if (flags & PROP_DEATH_NOTES
+#ifdef STACK_REGS
+ && (!(flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
+ LAST_STACK_REG))
+#endif
+ )
{
/* This is a case where we have a multi-word hard register
and some, but not all, of the words of the register are
{
for (i = regno_first; i <= regno_last; ++i)
if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
- CLEAR_REGNO_REG_SET (pbi->reg_live, i);
+ {
+ if ((pbi->flags & PROP_REG_INFO)
+ && REGNO_REG_SET_P (pbi->reg_live, i))
+ {
+ REG_LIVE_LENGTH (i) += pbi->insn_num - reg_deaths[i];
+ reg_deaths[i] = 0;
+ }
+ CLEAR_REGNO_REG_SET (pbi->reg_live, i);
+ }
+ if (flags & PROP_DEAD_INSN)
+ emit_insn_after (gen_rtx_CLOBBER (VOIDmode, reg), insn);
}
}
- else if (GET_CODE (reg) == REG)
+ else if (REG_P (reg))
{
if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
pbi->reg_next_use[regno_first] = 0;
here and count it. */
else if (GET_CODE (reg) == SCRATCH)
{
- if (flags & PROP_DEATH_NOTES)
+ if (flags & PROP_DEATH_NOTES
+#ifdef STACK_REGS
+ && (!(flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
+#endif
+ )
REG_NOTES (insn)
= alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
}
/* Otherwise this is a conditional set. Record that fact.
It may have been conditionally used, or there may be a
- subsequent set with a complimentary condition. */
+ subsequent set with a complementary condition. */
node = splay_tree_lookup (pbi->reg_cond_dead, regno);
if (node == NULL)
/* The register was unconditionally live previously.
Record the current condition as the condition under
which it is dead. */
- rcli = xmalloc (sizeof (*rcli));
+ rcli = XNEW (struct reg_cond_life_info);
rcli->condition = cond;
rcli->stores = cond;
rcli->orig_condition = const0_rtx;
xdata[1] = node->key;
return -1;
}
- else if (rcli->condition == const1_rtx)
- abort ();
+ else
+ gcc_assert (rcli->condition != const1_rtx);
return 0;
}
{
rtx op0, op1;
- if (GET_RTX_CLASS (GET_CODE (old)) == '<')
+ if (COMPARISON_P (old))
{
- if (GET_RTX_CLASS (GET_CODE (x)) == '<'
- && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
+ if (COMPARISON_P (x)
+ && REVERSE_CONDEXEC_PREDICATES_P (x, old)
&& REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
return const1_rtx;
if (GET_CODE (x) == GET_CODE (old)
return gen_rtx_IOR (0, old, x);
default:
- abort ();
+ gcc_unreachable ();
}
}
static rtx
not_reg_cond (rtx x)
{
- enum rtx_code x_code;
-
if (x == const0_rtx)
return const1_rtx;
else if (x == const1_rtx)
return const0_rtx;
- x_code = GET_CODE (x);
- if (x_code == NOT)
+ if (GET_CODE (x) == NOT)
return XEXP (x, 0);
- if (GET_RTX_CLASS (x_code) == '<'
- && GET_CODE (XEXP (x, 0)) == REG)
+ if (COMPARISON_P (x)
+ && REG_P (XEXP (x, 0)))
{
- if (XEXP (x, 1) != const0_rtx)
- abort ();
+ gcc_assert (XEXP (x, 1) == const0_rtx);
- return gen_rtx_fmt_ee (reverse_condition (x_code),
+ return gen_rtx_fmt_ee (reversed_comparison_code (x, NULL),
VOIDmode, XEXP (x, 0), const0_rtx);
}
return gen_rtx_NOT (0, x);
{
rtx op0, op1;
- if (GET_RTX_CLASS (GET_CODE (old)) == '<')
+ if (COMPARISON_P (old))
{
- if (GET_RTX_CLASS (GET_CODE (x)) == '<'
- && GET_CODE (x) == reverse_condition (GET_CODE (old))
+ if (COMPARISON_P (x)
+ && GET_CODE (x) == reversed_comparison_code (old, NULL)
&& REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
return const0_rtx;
if (GET_CODE (x) == GET_CODE (old)
return gen_rtx_AND (0, old, x);
default:
- abort ();
+ gcc_unreachable ();
}
}
{
rtx op0, op1;
- if (GET_RTX_CLASS (GET_CODE (x)) == '<')
+ if (COMPARISON_P (x))
{
if (REGNO (XEXP (x, 0)) == regno)
return const0_rtx;
return x;
default:
- abort ();
+ gcc_unreachable ();
}
}
#endif /* HAVE_conditional_execution */
rtx q = SET_DEST (set);
rtx y = SET_SRC (set);
int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
+ int changed;
/* Make sure this reg appears only once in this insn. */
if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
return;
}
- else if (GET_CODE (q) == REG
+ else if (REG_P (q)
/* PREV_INSN used here to check the semi-open interval
[insn,incr). */
&& ! reg_used_between_p (q, PREV_INSN (insn), incr)
new insn(s) and do the updates. */
emit_insn_before (insns, insn);
- if (pbi->bb->head == insn)
- pbi->bb->head = insns;
+ if (BB_HEAD (pbi->bb) == insn)
+ BB_HEAD (pbi->bb) = insns;
/* INCR will become a NOTE and INSN won't contain a
use of INCR_REG. If a use of INCR_REG was just placed in
the insn before INSN, make that the next use.
Otherwise, invalidate it. */
- if (GET_CODE (PREV_INSN (insn)) == INSN
+ if (NONJUMP_INSN_P (PREV_INSN (insn))
&& GET_CODE (PATTERN (PREV_INSN (insn))) == SET
&& SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
pbi->reg_next_use[regno] = PREV_INSN (insn);
incr_reg = q;
regno = REGNO (q);
+ if ((pbi->flags & PROP_REG_INFO)
+ && !REGNO_REG_SET_P (pbi->reg_live, regno))
+ reg_deaths[regno] = pbi->insn_num;
+
/* REGNO is now used in INCR which is below INSN, but
it previously wasn't live here. If we don't mark
it as live, we'll put a REG_DEAD note for it
/* If there are any calls between INSN and INCR, show
that REGNO now crosses them. */
for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
- if (GET_CODE (temp) == CALL_INSN)
- REG_N_CALLS_CROSSED (regno)++;
+ if (CALL_P (temp))
+ {
+ REG_N_CALLS_CROSSED (regno)++;
+ if (can_throw_internal (temp))
+ REG_N_THROWING_CALLS_CROSSED (regno)++;
+ }
/* Invalidate alias info for Q since we just changed its value. */
clear_reg_alias_info (q);
/* Modify the old increment-insn to simply copy
the already-incremented value of our register. */
- if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
- abort ();
+ changed = validate_change (incr, &SET_SRC (set), incr_reg, 0);
+ gcc_assert (changed);
/* If that makes it a no-op (copying the register into itself) delete
it so it won't appear to be a "use" and a "set" of this
{
remove_note (incr, note);
if (XEXP (note, 0) != incr_reg)
- CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
+ {
+ unsigned int regno = REGNO (XEXP (note, 0));
+
+ if ((pbi->flags & PROP_REG_INFO)
+ && REGNO_REG_SET_P (pbi->reg_live, regno))
+ {
+ REG_LIVE_LENGTH (regno) += pbi->insn_num - reg_deaths[regno];
+ reg_deaths[regno] = 0;
+ }
+ CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
+ }
}
- PUT_CODE (incr, NOTE);
- NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (incr) = 0;
+ SET_INSN_DELETED (incr);
}
if (regno >= FIRST_PSEUDO_REGISTER)
int regno;
int size = GET_MODE_SIZE (GET_MODE (x));
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return;
/* Here we detect use of an index register which might be good for
if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
- if (GET_CODE (addr) != REG)
+ if (!REG_P (addr))
return;
regno = REGNO (addr);
inc_val)),
insn, x, incr, addr);
}
- else if (GET_CODE (inc_val) == REG
+ else if (REG_P (inc_val)
&& ! reg_set_between_p (inc_val, PREV_INSN (insn),
NEXT_INSN (incr)))
regno_last = regno_first = REGNO (reg);
if (regno_first < FIRST_PSEUDO_REGISTER)
- regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
+ regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
/* Find out if any of this register is live after this instruction. */
some_was_live = some_was_dead = 0;
REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
REG_N_REFS (regno_first)++;
}
+ for (i = regno_first; i <= regno_last; ++i)
+ if (! REGNO_REG_SET_P (pbi->reg_live, i))
+ {
+ gcc_assert (!reg_deaths[i]);
+ reg_deaths[i] = pbi->insn_num;
+ }
}
/* Record and count the insns in which a reg dies. If it is used in
if (! some_was_live)
{
if ((pbi->flags & PROP_DEATH_NOTES)
+#ifdef STACK_REGS
+ && (!(pbi->flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
+#endif
&& ! find_regno_note (insn, REG_DEAD, regno_first))
REG_NOTES (insn)
= alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
{
/* The register was not previously live at all. Record
the condition under which it is still dead. */
- rcli = xmalloc (sizeof (*rcli));
+ rcli = XNEW (struct reg_cond_life_info);
rcli->condition = not_reg_cond (cond);
rcli->stores = const0_rtx;
rcli->orig_condition = const0_rtx;
}
}
+/* Scan expression X for registers which have to be marked used in PBI.
+ X is considered to be the SET_DEST rtx of SET. TRUE is returned if
+ X could be handled by this function. */
+
+static bool
+mark_used_dest_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
+{
+ int regno;
+ bool mark_dest = false;
+ rtx dest = x;
+
+ /* On some platforms calls return values spread over several
+ locations. These locations are wrapped in a EXPR_LIST rtx
+ together with a CONST_INT offset. */
+ if (GET_CODE (x) == EXPR_LIST
+ && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ x = XEXP (x, 0);
+
+ if (x == NULL_RTX)
+ return false;
+
+ /* If storing into MEM, don't show it as being used. But do
+ show the address as being used. */
+ if (MEM_P (x))
+ {
+#ifdef AUTO_INC_DEC
+ if (pbi->flags & PROP_AUTOINC)
+ find_auto_inc (pbi, x, insn);
+#endif
+ mark_used_regs (pbi, XEXP (x, 0), cond, insn);
+ return true;
+ }
+
+ /* Storing in STRICT_LOW_PART is like storing in a reg
+ in that this SET might be dead, so ignore it in TESTREG.
+ but in some other ways it is like using the reg.
+
+ Storing in a SUBREG or a bit field is like storing the entire
+ register in that if the register's value is not used
+ then this SET is not needed. */
+ while (GET_CODE (x) == STRICT_LOW_PART
+ || GET_CODE (x) == ZERO_EXTRACT
+ || GET_CODE (x) == SUBREG)
+ {
+#ifdef CANNOT_CHANGE_MODE_CLASS
+ if ((pbi->flags & PROP_REG_INFO) && GET_CODE (x) == SUBREG)
+ record_subregs_of_mode (x);
+#endif
+
+ /* Modifying a single register in an alternate mode
+ does not use any of the old value. But these other
+ ways of storing in a register do use the old value. */
+ if (GET_CODE (x) == SUBREG
+ && !((REG_BYTES (SUBREG_REG (x))
+ + UNITS_PER_WORD - 1) / UNITS_PER_WORD
+ > (REG_BYTES (x)
+ + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
+ ;
+ else
+ mark_dest = true;
+
+ x = XEXP (x, 0);
+ }
+
+ /* If this is a store into a register or group of registers,
+ recursively scan the value being stored. */
+ if (REG_P (x)
+ && (regno = REGNO (x),
+ !(regno == FRAME_POINTER_REGNUM
+ && (!reload_completed || frame_pointer_needed)))
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ && !(regno == HARD_FRAME_POINTER_REGNUM
+ && (!reload_completed || frame_pointer_needed))
+#endif
+#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
+ && !(regno == ARG_POINTER_REGNUM && fixed_regs[regno])
+#endif
+ )
+ {
+ if (mark_dest)
+ mark_used_regs (pbi, dest, cond, insn);
+ return true;
+ }
+ return false;
+}
+
/* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
This is done assuming the registers needed from X are those that
have 1-bits in PBI->REG_LIVE.
mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
{
RTX_CODE code;
- int regno;
int flags = pbi->flags;
retry:
case CLOBBER:
/* If we are clobbering a MEM, mark any registers inside the address
as being used. */
- if (GET_CODE (XEXP (x, 0)) == MEM)
+ if (MEM_P (XEXP (x, 0)))
mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
return;
while (temp)
{
next = XEXP (temp, 1);
- if (unchanging_anti_dependence (XEXP (temp, 0), x))
+ if (anti_dependence (XEXP (temp, 0), x))
{
/* Splice temp out of the list. */
if (prev)
case SUBREG:
#ifdef CANNOT_CHANGE_MODE_CLASS
- if ((flags & PROP_REG_INFO)
- && GET_CODE (SUBREG_REG (x)) == REG
- && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
- bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (x))
- * MAX_MACHINE_MODE
- + GET_MODE (x));
+ if (flags & PROP_REG_INFO)
+ record_subregs_of_mode (x);
#endif
/* While we're here, optimize this case. */
x = SUBREG_REG (x);
- if (GET_CODE (x) != REG)
+ if (!REG_P (x))
goto retry;
/* Fall through. */
case SET:
{
- rtx testreg = SET_DEST (x);
- int mark_dest = 0;
-
- /* If storing into MEM, don't show it as being used. But do
- show the address as being used. */
- if (GET_CODE (testreg) == MEM)
- {
-#ifdef AUTO_INC_DEC
- if (flags & PROP_AUTOINC)
- find_auto_inc (pbi, testreg, insn);
-#endif
- mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
- mark_used_regs (pbi, SET_SRC (x), cond, insn);
- return;
- }
-
- /* Storing in STRICT_LOW_PART is like storing in a reg
- in that this SET might be dead, so ignore it in TESTREG.
- but in some other ways it is like using the reg.
-
- Storing in a SUBREG or a bit field is like storing the entire
- register in that if the register's value is not used
- then this SET is not needed. */
- while (GET_CODE (testreg) == STRICT_LOW_PART
- || GET_CODE (testreg) == ZERO_EXTRACT
- || GET_CODE (testreg) == SIGN_EXTRACT
- || GET_CODE (testreg) == SUBREG)
- {
-#ifdef CANNOT_CHANGE_MODE_CLASS
- if ((flags & PROP_REG_INFO)
- && GET_CODE (testreg) == SUBREG
- && GET_CODE (SUBREG_REG (testreg)) == REG
- && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER)
- bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (testreg))
- * MAX_MACHINE_MODE
- + GET_MODE (testreg));
-#endif
-
- /* Modifying a single register in an alternate mode
- does not use any of the old value. But these other
- ways of storing in a register do use the old value. */
- if (GET_CODE (testreg) == SUBREG
- && !((REG_BYTES (SUBREG_REG (testreg))
- + UNITS_PER_WORD - 1) / UNITS_PER_WORD
- > (REG_BYTES (testreg)
- + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
- ;
- else
- mark_dest = 1;
-
- testreg = XEXP (testreg, 0);
- }
-
- /* If this is a store into a register or group of registers,
- recursively scan the value being stored. */
-
- if ((GET_CODE (testreg) == PARALLEL
- && GET_MODE (testreg) == BLKmode)
- || (GET_CODE (testreg) == REG
- && (regno = REGNO (testreg),
- ! (regno == FRAME_POINTER_REGNUM
- && (! reload_completed || frame_pointer_needed)))
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- && ! (regno == HARD_FRAME_POINTER_REGNUM
- && (! reload_completed || frame_pointer_needed))
-#endif
-#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
- && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
-#endif
- ))
+ rtx dest = SET_DEST (x);
+ int i;
+ bool ret = false;
+
+ if (GET_CODE (dest) == PARALLEL)
+ for (i = 0; i < XVECLEN (dest, 0); i++)
+ ret |= mark_used_dest_regs (pbi, XVECEXP (dest, 0, i), cond, insn);
+ else
+ ret = mark_used_dest_regs (pbi, dest, cond, insn);
+
+ if (ret)
{
- if (mark_dest)
- mark_used_regs (pbi, SET_DEST (x), cond, insn);
mark_used_regs (pbi, SET_SRC (x), cond, insn);
return;
}
}
case COND_EXEC:
- if (cond != NULL_RTX)
- abort ();
+ gcc_assert (!cond);
mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
because if the incremented register is spilled and must be reloaded
there would be no way to store the incremented value back in memory. */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return 0;
use = 0;
void
dump_regset (regset r, FILE *outf)
{
- int i;
+ unsigned i;
+ reg_set_iterator rsi;
+
if (r == NULL)
{
fputs (" (nil)", outf);
return;
}
- EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
+ EXECUTE_IF_SET_IN_REG_SET (r, 0, i, rsi)
{
fprintf (outf, " %d", i);
if (i < FIRST_PSEUDO_REGISTER)
fprintf (outf, " [%s]",
reg_names[i]);
- });
+ }
}
/* Print a human-readable representation of R on the standard error
register allocators to prioritize pseudos for allocation to hard regs.
More accurate reference counts generally lead to better register allocation.
- F is the first insn to be scanned.
-
- LOOP_STEP denotes how much loop_depth should be incremented per
- loop nesting level in order to increase the ref count more for
- references in a loop.
-
It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
possibly other information which is used by the register allocators. */
-void
-recompute_reg_usage (rtx f ATTRIBUTE_UNUSED, int loop_step ATTRIBUTE_UNUSED)
+static unsigned int
+recompute_reg_usage (void)
{
allocate_reg_life_data ();
- update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
+ /* distribute_notes in combiner fails to convert some of the
+ REG_UNUSED notes to REG_DEAD notes. This causes CHECK_DEAD_NOTES
+ in sched1 to die. To solve this, update the DEATH_NOTES
+ here. */
+ update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO | PROP_DEATH_NOTES);
+
+ if (dump_file)
+ dump_flow_info (dump_file, dump_flags);
+ return 0;
}
+struct tree_opt_pass pass_recompute_reg_usage =
+{
+ "life2", /* name */
+ NULL, /* gate */
+ recompute_reg_usage, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 'f' /* letter */
+};
+
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
blocks. If BLOCKS is NULL, assume the universal set. Returns a count
- of the number of registers that died. */
+ of the number of registers that died.
+ If KILL is 1, remove old REG_DEAD / REG_UNUSED notes. If it is 0, don't.
+ If it is -1, remove them unless they pertain to a stack reg. */
int
count_or_remove_death_notes (sbitmap blocks, int kill)
{
int count = 0;
- int i;
+ unsigned int i = 0;
basic_block bb;
-
/* This used to be a loop over all the blocks with a membership test
inside the loop. That can be amazingly expensive on a large CFG
when only a small number of bits are set in BLOCKs (for example,
than an sbitmap. */
if (blocks)
{
- EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
+ sbitmap_iterator sbi;
+
+ EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
{
- count += count_or_remove_death_notes_bb (BASIC_BLOCK (i), kill);
- });
+ basic_block bb = BASIC_BLOCK (i);
+ /* The bitmap may be flawed in that one of the basic blocks
+ may have been deleted before you get here. */
+ if (bb)
+ count += count_or_remove_death_notes_bb (bb, kill);
+ };
}
else
{
int count = 0;
rtx insn;
- for (insn = bb->head;; insn = NEXT_INSN (insn))
+ for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
{
switch (REG_NOTE_KIND (link))
{
case REG_DEAD:
- if (GET_CODE (XEXP (link, 0)) == REG)
+ if (REG_P (XEXP (link, 0)))
{
rtx reg = XEXP (link, 0);
int n;
if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
n = 1;
else
- n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
+ n = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
count += n;
}
/* Fall through. */
case REG_UNUSED:
- if (kill)
+ if (kill > 0
+ || (kill
+#ifdef STACK_REGS
+ && (!REG_P (XEXP (link, 0))
+ || !IN_RANGE (REGNO (XEXP (link, 0)),
+ FIRST_STACK_REG, LAST_STACK_REG))
+#endif
+ ))
{
rtx next = XEXP (link, 1);
free_EXPR_LIST_node (link);
}
}
- if (insn == bb->end)
+ if (insn == BB_END (bb))
break;
}
clear_log_links (sbitmap blocks)
{
rtx insn;
- int i;
if (!blocks)
{
free_INSN_LIST_list (&LOG_LINKS (insn));
}
else
- EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
- {
- basic_block bb = BASIC_BLOCK (i);
+ {
+ unsigned int i = 0;
+ sbitmap_iterator sbi;
- for (insn = bb->head; insn != NEXT_INSN (bb->end);
- insn = NEXT_INSN (insn))
- if (INSN_P (insn))
- free_INSN_LIST_list (&LOG_LINKS (insn));
- });
+ EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
+ {
+ basic_block bb = BASIC_BLOCK (i);
+
+ for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
+ insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ free_INSN_LIST_list (&LOG_LINKS (insn));
+ }
+ }
}
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
void
reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
{
- int i;
+ unsigned i;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
+ {
+ if (i >= FIRST_PSEUDO_REGISTER)
+ return;
+ SET_HARD_REG_BIT (*to, i);
+ }
+}
+\f
+
+static bool
+gate_remove_death_notes (void)
+{
+ return flag_profile_values;
+}
+
+static unsigned int
+rest_of_handle_remove_death_notes (void)
+{
+ count_or_remove_death_notes (NULL, 1);
+ return 0;
+}
+
+struct tree_opt_pass pass_remove_death_notes =
+{
+ "ednotes", /* name */
+ gate_remove_death_notes, /* gate */
+ rest_of_handle_remove_death_notes, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
+
+/* Perform life analysis. */
+static unsigned int
+rest_of_handle_life (void)
+{
+ regclass_init ();
+
+ life_analysis (PROP_FINAL);
+ if (optimize)
+ cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE | CLEANUP_LOG_LINKS
+ | (flag_thread_jumps ? CLEANUP_THREADING : 0));
+
+ if (extra_warnings)
+ {
+ setjmp_vars_warning (DECL_INITIAL (current_function_decl));
+ setjmp_args_warning ();
+ }
- EXECUTE_IF_SET_IN_BITMAP
- (from, 0, i,
- {
- if (i >= FIRST_PSEUDO_REGISTER)
- return;
- SET_HARD_REG_BIT (*to, i);
- });
+ if (optimize)
+ {
+ if (initialize_uninitialized_subregs ())
+ {
+ /* Insns were inserted, and possibly pseudos created, so
+ things might look a bit different. */
+ allocate_reg_life_data ();
+ update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
+ PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
+ }
+ }
+
+ no_new_pseudos = 1;
+ return 0;
}
+
+struct tree_opt_pass pass_life =
+{
+ "life1", /* name */
+ NULL, /* gate */
+ rest_of_handle_life, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_FLOW, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ TODO_verify_flow, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect, /* todo_flags_finish */
+ 'f' /* letter */
+};
+
+static unsigned int
+rest_of_handle_flow2 (void)
+{
+ /* If optimizing, then go ahead and split insns now. */
+#ifndef STACK_REGS
+ if (optimize > 0)
+#endif
+ split_all_insns (0);
+
+ if (flag_branch_target_load_optimize)
+ branch_target_load_optimize (epilogue_completed);
+
+ if (optimize)
+ cleanup_cfg (CLEANUP_EXPENSIVE);
+
+ /* On some machines, the prologue and epilogue code, or parts thereof,
+ can be represented as RTL. Doing so lets us schedule insns between
+ it and the rest of the code and also allows delayed branch
+ scheduling to operate in the epilogue. */
+ thread_prologue_and_epilogue_insns (get_insns ());
+ epilogue_completed = 1;
+ flow2_completed = 1;
+ return 0;
+}
+
+struct tree_opt_pass pass_flow2 =
+{
+ "flow2", /* name */
+ NULL, /* gate */
+ rest_of_handle_flow2, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_FLOW2, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ TODO_verify_flow, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect, /* todo_flags_finish */
+ 'w' /* letter */
+};
+