X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=gcc%2Fdf-problems.c;h=ff08abd6daa468f68ca1e4f7b5bee886bc0e434f;hb=8349613899e2e1cf996052e2dba79e0551bfe880;hp=1a28e9de434bcc7e6ff314c50ac0f3d62360b69e;hpb=d725a1a5c025055d65ffa52009ee12bec0f34301;p=gcc.git diff --git a/gcc/df-problems.c b/gcc/df-problems.c index 1a28e9de434..ff08abd6daa 100644 --- a/gcc/df-problems.c +++ b/gcc/df-problems.c @@ -1,6 +1,5 @@ /* Standard problems for dataflow support routines. - Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, - 2008, 2009, 2010 Free Software Foundation, Inc. + Copyright (C) 1999-2015 Free Software Foundation, Inc. Originally contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com) Major rewrite contributed by Danny Berlin (dberlin@dberlin.org) @@ -30,69 +29,42 @@ along with GCC; see the file COPYING3. If not see #include "tm_p.h" #include "insn-config.h" #include "recog.h" +#include "hashtab.h" +#include "hash-set.h" +#include "vec.h" +#include "machmode.h" +#include "hard-reg-set.h" +#include "input.h" #include "function.h" #include "regs.h" -#include "output.h" #include "alloc-pool.h" #include "flags.h" -#include "hard-reg-set.h" +#include "predict.h" +#include "dominance.h" +#include "cfg.h" +#include "cfganal.h" #include "basic-block.h" #include "sbitmap.h" #include "bitmap.h" +#include "target.h" #include "timevar.h" #include "df.h" #include "except.h" #include "dce.h" -#include "vecprim.h" +#include "valtrack.h" +#include "dumpfile.h" +#include "rtl-iter.h" /* Note that turning REG_DEAD_DEBUGGING on will cause gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints addresses in the dumps. */ -#if 0 -#define REG_DEAD_DEBUGGING -#endif +#define REG_DEAD_DEBUGGING 0 #define DF_SPARSE_THRESHOLD 32 static bitmap_head seen_in_block; static bitmap_head seen_in_insn; - -/*---------------------------------------------------------------------------- - Public functions access functions for the dataflow problems. -----------------------------------------------------------------------------*/ -/* Get the live at out set for BB no matter what problem happens to be - defined. This function is used by the register allocators who - choose different dataflow problems depending on the optimization - level. */ - -bitmap -df_get_live_out (basic_block bb) -{ - gcc_assert (df_lr); - - if (df_live) - return DF_LIVE_OUT (bb); - else - return DF_LR_OUT (bb); -} - -/* Get the live at in set for BB no matter what problem happens to be - defined. This function is used by the register allocators who - choose different dataflow problems depending on the optimization - level. */ - -bitmap -df_get_live_in (basic_block bb) -{ - gcc_assert (df_lr); - - if (df_live) - return DF_LIVE_IN (bb); - else - return DF_LR_IN (bb); -} - /*---------------------------------------------------------------------------- Utility functions. ----------------------------------------------------------------------------*/ @@ -100,22 +72,6 @@ df_get_live_in (basic_block bb) /* Generic versions to get the void* version of the block info. Only used inside the problem instance vectors. */ -/* Grow the bb_info array. 
*/ - -void -df_grow_bb_info (struct dataflow *dflow) -{ - unsigned int new_size = last_basic_block + 1; - if (dflow->block_info_size < new_size) - { - new_size += new_size / 4; - dflow->block_info = XRESIZEVEC (void *, dflow->block_info, new_size); - memset (dflow->block_info + dflow->block_info_size, 0, - (new_size - dflow->block_info_size) *sizeof (void *)); - dflow->block_info_size = new_size; - } -} - /* Dump a def-use or use-def chain for REF to FILE. */ void @@ -125,10 +81,13 @@ df_chain_dump (struct df_link *link, FILE *file) for (; link; link = link->next) { fprintf (file, "%c%d(bb %d insn %d) ", - DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u', + DF_REF_REG_DEF_P (link->ref) + ? 'd' + : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u', DF_REF_ID (link->ref), DF_REF_BBNO (link->ref), - DF_REF_IS_ARTIFICIAL (link->ref) ? -1 : DF_REF_INSN_UID (link->ref)); + DF_REF_IS_ARTIFICIAL (link->ref) + ? -1 : DF_REF_INSN_UID (link->ref)); } fprintf (file, "}"); } @@ -165,6 +124,17 @@ df_print_bb_index (basic_block bb, FILE *file) pseudo reaches. In and out bitvectors are built for each basic block. The id field in the ref is used to index into these sets. See df.h for details. + + If the DF_RD_PRUNE_DEAD_DEFS changeable flag is set, only DEFs reaching + existing uses are included in the global reaching DEFs set, or in other + words only DEFs that are still live. This is a kind of pruned version + of the traditional reaching definitions problem that is much less + complex to compute and produces enough information to compute UD-chains. + In this context, live must be interpreted in the DF_LR sense: Uses that + are upward exposed but maybe not initialized on all paths through the + CFG. For a USE that is not reached by a DEF on all paths, we still want + to make those DEFs that do reach the USE visible, and pruning based on + DF_LIVE would make that impossible. ----------------------------------------------------------------------------*/ /* This problem plays a large number of games for the sake of @@ -202,17 +172,6 @@ struct df_rd_problem_data bitmap_obstack rd_bitmaps; }; -/* Set basic block info. */ - -static void -df_rd_set_bb_info (unsigned int index, - struct df_rd_bb_info *bb_info) -{ - gcc_assert (df_rd); - gcc_assert (index < df_rd->block_info_size); - df_rd->block_info[index] = bb_info; -} - /* Free basic block info. */ @@ -228,7 +187,6 @@ df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, bitmap_clear (&bb_info->gen); bitmap_clear (&bb_info->in); bitmap_clear (&bb_info->out); - pool_free (df_rd->block_pool, bb_info); } } @@ -243,10 +201,6 @@ df_rd_alloc (bitmap all_blocks) bitmap_iterator bi; struct df_rd_problem_data *problem_data; - if (!df_rd->block_pool) - df_rd->block_pool = create_alloc_pool ("df_rd_block pool", - sizeof (struct df_rd_bb_info), 50); - if (df_rd->problem_data) { problem_data = (struct df_rd_problem_data *) df_rd->problem_data; @@ -268,13 +222,14 @@ df_rd_alloc (bitmap all_blocks) df_grow_bb_info (df_rd); /* Because of the clustering of all use sites for the same pseudo, - we have to process all of the blocks before doing the - analysis. */ + we have to process all of the blocks before doing the analysis. */ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); - if (bb_info) + + /* When bitmaps are already initialized, just clear them. 
*/ + if (bb_info->kill.obstack) { bitmap_clear (&bb_info->kill); bitmap_clear (&bb_info->sparse_kill); @@ -282,8 +237,6 @@ df_rd_alloc (bitmap all_blocks) } else { - bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool); - df_rd_set_bb_info (bb_index, bb_info); bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps); bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps); bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps); @@ -302,35 +255,30 @@ void df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd) { int bb_index = bb->index; - df_ref *def_rec; - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - { - unsigned int dregno = DF_REF_REGNO (def); - if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) - bitmap_clear_range (local_rd, - DF_DEFS_BEGIN (dregno), - DF_DEFS_COUNT (dregno)); - bitmap_set_bit (local_rd, DF_REF_ID (def)); - } - } + df_ref def; + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) + { + unsigned int dregno = DF_REF_REGNO (def); + if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) + bitmap_clear_range (local_rd, + DF_DEFS_BEGIN (dregno), + DF_DEFS_COUNT (dregno)); + bitmap_set_bit (local_rd, DF_REF_ID (def)); + } } /* Add the effect of the defs of INSN to the reaching definitions bitmap LOCAL_RD. */ void -df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, +df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn, bitmap local_rd) { - unsigned uid = INSN_UID (insn); - df_ref *def_rec; + df_ref def; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + FOR_EACH_INSN_DEF (def, insn) { - df_ref def = *def_rec; unsigned int dregno = DF_REF_REGNO (def); if ((!(df->changeable_flags & DF_NO_HARD_REGS)) || (dregno >= FIRST_PSEUDO_REGISTER)) @@ -353,12 +301,11 @@ df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, static void df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, - df_ref *def_rec, + df_ref def, int top_flag) { - while (*def_rec) + for (; def; def = DF_REF_NEXT_LOC (def)) { - df_ref def = *def_rec; if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP)) { unsigned int regno = DF_REF_REGNO (def); @@ -383,7 +330,7 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, if (n_defs > DF_SPARSE_THRESHOLD) { bitmap_set_bit (&bb_info->sparse_kill, regno); - bitmap_clear_range(&bb_info->gen, begin, n_defs); + bitmap_clear_range (&bb_info->gen, begin, n_defs); } else { @@ -401,7 +348,6 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, } } } - def_rec++; } } @@ -410,9 +356,9 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, static void df_rd_bb_local_compute (unsigned int bb_index) { - basic_block bb = BASIC_BLOCK (bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); - rtx insn; + rtx_insn *insn; bitmap_clear (&seen_in_block); bitmap_clear (&seen_in_insn); @@ -479,12 +425,16 @@ df_rd_local_compute (bitmap all_blocks) /* Set up the knockout bit vectors to be applied across EH_EDGES. */ EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi) { - if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD) - bitmap_set_bit (sparse_invalidated, regno); - else - bitmap_set_range (dense_invalidated, - DF_DEFS_BEGIN (regno), - DF_DEFS_COUNT (regno)); + if (! 
HARD_REGISTER_NUM_P (regno) + || !(df->changeable_flags & DF_NO_HARD_REGS)) + { + if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD) + bitmap_set_bit (sparse_invalidated, regno); + else + bitmap_set_range (dense_invalidated, + DF_DEFS_BEGIN (regno), + DF_DEFS_COUNT (regno)); + } } bitmap_clear (&seen_in_block); @@ -511,14 +461,15 @@ df_rd_init_solution (bitmap all_blocks) /* In of target gets or of out of source. */ -static void +static bool df_rd_confluence_n (edge e) { bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in; bitmap op2 = &df_rd_get_bb_info (e->src->index)->out; + bool changed = false; if (e->flags & EDGE_FAKE) - return; + return false; if (e->flags & EDGE_EH) { @@ -531,8 +482,7 @@ df_rd_confluence_n (edge e) bitmap_head tmp; bitmap_initialize (&tmp, &df_bitmap_obstack); - bitmap_copy (&tmp, op2); - bitmap_and_compl_into (&tmp, dense_invalidated); + bitmap_and_compl (&tmp, op2, dense_invalidated); EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi) { @@ -540,11 +490,12 @@ df_rd_confluence_n (edge e) DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); } - bitmap_ior_into (op1, &tmp); + changed |= bitmap_ior_into (op1, &tmp); bitmap_clear (&tmp); + return changed; } else - bitmap_ior_into (op1, op2); + return bitmap_ior_into (op1, op2); } @@ -561,13 +512,13 @@ df_rd_transfer_function (int bb_index) bitmap gen = &bb_info->gen; bitmap kill = &bb_info->kill; bitmap sparse_kill = &bb_info->sparse_kill; + bool changed = false; if (bitmap_empty_p (sparse_kill)) - return bitmap_ior_and_compl (out, gen, in, kill); + changed = bitmap_ior_and_compl (out, gen, in, kill); else { struct df_rd_problem_data *problem_data; - bool changed = false; bitmap_head tmp; /* Note that TMP is _not_ a temporary bitmap if we end up replacing @@ -575,14 +526,13 @@ df_rd_transfer_function (int bb_index) problem_data = (struct df_rd_problem_data *) df_rd->problem_data; bitmap_initialize (&tmp, &problem_data->rd_bitmaps); - bitmap_copy (&tmp, in); + bitmap_and_compl (&tmp, in, kill); EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi) { bitmap_clear_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); } - bitmap_and_compl_into (&tmp, kill); bitmap_ior_into (&tmp, gen); changed = !bitmap_equal_p (&tmp, out); if (changed) @@ -591,11 +541,31 @@ df_rd_transfer_function (int bb_index) bb_info->out = tmp; } else - bitmap_clear (&tmp); - return changed; + bitmap_clear (&tmp); + } + + if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS) + { + /* Create a mask of DEFs for all registers live at the end of this + basic block, and mask out DEFs of registers that are not live. + Computing the mask looks costly, but the benefit of the pruning + outweighs the cost. */ + struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); + bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out; + bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack); + unsigned int regno; + bitmap_iterator bi; + + EXECUTE_IF_SET_IN_BITMAP (regs_live_out, 0, regno, bi) + bitmap_set_range (live_defs, + DF_DEFS_BEGIN (regno), + DF_DEFS_COUNT (regno)); + changed |= bitmap_and_into (&bb_info->out, live_defs); + BITMAP_FREE (live_defs); } -} + return changed; +} /* Free all storage associated with the problem. 
*/ @@ -607,11 +577,11 @@ df_rd_free (void) if (problem_data) { - free_alloc_pool (df_rd->block_pool); bitmap_obstack_release (&problem_data->rd_bitmaps); df_rd->block_info_size = 0; free (df_rd->block_info); + df_rd->block_info = NULL; free (df_rd->problem_data); } free (df_rd); @@ -625,29 +595,72 @@ df_rd_start_dump (FILE *file) { struct df_rd_problem_data *problem_data = (struct df_rd_problem_data *) df_rd->problem_data; - unsigned int m = DF_REG_SIZE(df); + unsigned int m = DF_REG_SIZE (df); unsigned int regno; if (!df_rd->block_info) return; - fprintf (file, ";; Reaching defs:\n\n"); + fprintf (file, ";; Reaching defs:\n"); - fprintf (file, " sparse invalidated \t"); + fprintf (file, ";; sparse invalidated \t"); dump_bitmap (file, &problem_data->sparse_invalidated_by_call); - fprintf (file, " dense invalidated \t"); + fprintf (file, ";; dense invalidated \t"); dump_bitmap (file, &problem_data->dense_invalidated_by_call); + fprintf (file, ";; reg->defs[] map:\t"); for (regno = 0; regno < m; regno++) if (DF_DEFS_COUNT (regno)) fprintf (file, "%d[%d,%d] ", regno, DF_DEFS_BEGIN (regno), - DF_DEFS_COUNT (regno)); + DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno) - 1); fprintf (file, "\n"); - } +static void +df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file) +{ + bitmap_head tmp; + unsigned int regno; + unsigned int m = DF_REG_SIZE (df); + bool first_reg = true; + + fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set)); + + bitmap_initialize (&tmp, &df_bitmap_obstack); + for (regno = 0; regno < m; regno++) + { + if (HARD_REGISTER_NUM_P (regno) + && (df->changeable_flags & DF_NO_HARD_REGS)) + continue; + bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); + bitmap_and_into (&tmp, defs_set); + if (! bitmap_empty_p (&tmp)) + { + bitmap_iterator bi; + unsigned int ix; + bool first_def = true; + + if (! first_reg) + fprintf (file, ","); + first_reg = false; + + fprintf (file, "%u[", regno); + EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, ix, bi) + { + fprintf (file, "%s%u", first_def ? "" : ",", ix); + first_def = false; + } + fprintf (file, "]"); + } + bitmap_clear (&tmp); + } + + fprintf (file, "\n"); + bitmap_clear (&tmp); +} + /* Debugging info at top of bb. */ static void @@ -657,16 +670,13 @@ df_rd_top_dump (basic_block bb, FILE *file) if (!bb_info) return; - fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (&bb_info->in)); - dump_bitmap (file, &bb_info->in); - fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (&bb_info->gen)); - dump_bitmap (file, &bb_info->gen); - fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (&bb_info->kill)); - dump_bitmap (file, &bb_info->kill); + df_rd_dump_defs_set (&bb_info->in, ";; rd in ", file); + df_rd_dump_defs_set (&bb_info->gen, ";; rd gen ", file); + df_rd_dump_defs_set (&bb_info->kill, ";; rd kill", file); } -/* Debugging info at top of bb. */ +/* Debugging info at bottom of bb. */ static void df_rd_bottom_dump (basic_block bb, FILE *file) @@ -675,8 +685,7 @@ df_rd_bottom_dump (basic_block bb, FILE *file) if (!bb_info) return; - fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (&bb_info->out)); - dump_bitmap (file, &bb_info->out); + df_rd_dump_defs_set (&bb_info->out, ";; rd out ", file); } /* All of the information associated with every instance of the problem. */ @@ -700,9 +709,12 @@ static struct df_problem problem_RD = df_rd_start_dump, /* Debugging. */ df_rd_top_dump, /* Debugging start block. */ df_rd_bottom_dump, /* Debugging end block. 
*/ + NULL, /* Debugging start insn. */ + NULL, /* Debugging end insn. */ NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ + sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */ TV_DF_RD, /* Timing variable. */ true /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -738,19 +750,6 @@ struct df_lr_problem_data bitmap_obstack lr_bitmaps; }; - -/* Set basic block info. */ - -static void -df_lr_set_bb_info (unsigned int index, - struct df_lr_bb_info *bb_info) -{ - gcc_assert (df_lr); - gcc_assert (index < df_lr->block_info_size); - df_lr->block_info[index] = bb_info; -} - - /* Free basic block info. */ static void @@ -764,7 +763,6 @@ df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, bitmap_clear (&bb_info->def); bitmap_clear (&bb_info->in); bitmap_clear (&bb_info->out); - pool_free (df_lr->block_pool, bb_info); } } @@ -779,10 +777,6 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) bitmap_iterator bi; struct df_lr_problem_data *problem_data; - if (!df_lr->block_pool) - df_lr->block_pool = create_alloc_pool ("df_lr_block pool", - sizeof (struct df_lr_bb_info), 50); - df_grow_bb_info (df_lr); if (df_lr->problem_data) problem_data = (struct df_lr_problem_data *) df_lr->problem_data; @@ -799,15 +793,15 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi) { struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index); - if (bb_info) + + /* When bitmaps are already initialized, just clear them. */ + if (bb_info->use.obstack) { bitmap_clear (&bb_info->def); bitmap_clear (&bb_info->use); } else { - bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool); - df_lr_set_bb_info (bb_index, bb_info); bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps); bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps); bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps); @@ -842,84 +836,64 @@ df_lr_reset (bitmap all_blocks) static void df_lr_bb_local_compute (unsigned int bb_index) { - basic_block bb = BASIC_BLOCK (bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index); - rtx insn; - df_ref *def_rec; - df_ref *use_rec; + rtx_insn *insn; + df_ref def, use; /* Process the registers set in an exception handler. */ - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) - { - unsigned int dregno = DF_REF_REGNO (def); - bitmap_set_bit (&bb_info->def, dregno); - bitmap_clear_bit (&bb_info->use, dregno); - } - } + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) + { + unsigned int dregno = DF_REF_REGNO (def); + bitmap_set_bit (&bb_info->def, dregno); + bitmap_clear_bit (&bb_info->use, dregno); + } /* Process the hardware registers that are always live. */ - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - /* Add use to set of uses in this BB. */ - if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) - bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use)); - } + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + /* Add use to set of uses in this BB. 
*/ + if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) + bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use)); FOR_BB_INSNS_REVERSE (bb, insn) { - unsigned int uid = INSN_UID (insn); - if (!NONDEBUG_INSN_P (insn)) continue; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) - { - df_ref def = *def_rec; - /* If the def is to only part of the reg, it does - not kill the other defs that reach here. */ - if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) - { - unsigned int dregno = DF_REF_REGNO (def); - bitmap_set_bit (&bb_info->def, dregno); - bitmap_clear_bit (&bb_info->use, dregno); - } - } + df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + FOR_EACH_INSN_INFO_DEF (def, insn_info) + /* If the def is to only part of the reg, it does + not kill the other defs that reach here. */ + if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) + { + unsigned int dregno = DF_REF_REGNO (def); + bitmap_set_bit (&bb_info->def, dregno); + bitmap_clear_bit (&bb_info->use, dregno); + } - for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) - { - df_ref use = *use_rec; - /* Add use to set of uses in this BB. */ - bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use)); - } + FOR_EACH_INSN_INFO_USE (use, insn_info) + /* Add use to set of uses in this BB. */ + bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use)); } /* Process the registers set in an exception handler or the hard frame pointer if this block is the target of a non local goto. */ - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - { - unsigned int dregno = DF_REF_REGNO (def); - bitmap_set_bit (&bb_info->def, dregno); - bitmap_clear_bit (&bb_info->use, dregno); - } - } + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) + { + unsigned int dregno = DF_REF_REGNO (def); + bitmap_set_bit (&bb_info->def, dregno); + bitmap_clear_bit (&bb_info->use, dregno); + } #ifdef EH_USES /* Process the uses that are live into an exception handler. */ - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - /* Add use to set of uses in this BB. */ - if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) - bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use)); - } + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + /* Add use to set of uses in this BB. */ + if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) + bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use)); #endif /* If the df_live problem is not defined, such as at -O0 and -O1, we @@ -936,7 +910,7 @@ df_lr_bb_local_compute (unsigned int bb_index) static void df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) { - unsigned int bb_index; + unsigned int bb_index, i; bitmap_iterator bi; bitmap_clear (&df->hardware_regs_used); @@ -944,27 +918,32 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) /* The all-important stack pointer must always be live. */ bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM); + /* Global regs are always live, too. */ + for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) + if (global_regs[i]) + bitmap_set_bit (&df->hardware_regs_used, i); + /* Before reload, there are a few registers that must be forced live everywhere -- which might not already be the case for blocks within infinite loops. */ if (!reload_completed) { + unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM; /* Any reference to any pseudo before reload is a potential reference of the frame pointer. 
*/ bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM); -#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM /* Pseudos with argument area equivalences may require reloading via the argument pointer. */ - if (fixed_regs[ARG_POINTER_REGNUM]) + if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM + && fixed_regs[ARG_POINTER_REGNUM]) bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM); -#endif /* Any constant, or pseudo with constant equivalences, may require reloading from memory using the pic register. */ - if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM - && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) - bitmap_set_bit (&df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM); + if (pic_offset_table_regnum != INVALID_REGNUM + && fixed_regs[pic_offset_table_regnum]) + bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum); } EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi) @@ -1008,28 +987,30 @@ static void df_lr_confluence_0 (basic_block bb) { bitmap op1 = &df_lr_get_bb_info (bb->index)->out; - if (bb != EXIT_BLOCK_PTR) + if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)) bitmap_copy (op1, &df->hardware_regs_used); } /* Confluence function that ignores fake edges. */ -static void +static bool df_lr_confluence_n (edge e) { bitmap op1 = &df_lr_get_bb_info (e->src->index)->out; bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in; + bool changed = false; /* Call-clobbered registers die across exception and call edges. */ /* ??? Abnormal call edges ignored for the moment, as this gets confused by sibling call edges, which crashes reg-stack. */ if (e->flags & EDGE_EH) - bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset); + changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset); else - bitmap_ior_into (op1, op2); + changed = bitmap_ior_into (op1, op2); - bitmap_ior_into (op1, &df->hardware_regs_used); + changed |= bitmap_ior_into (op1, &df->hardware_regs_used); + return changed; } @@ -1091,10 +1072,10 @@ df_lr_free (void) = (struct df_lr_problem_data *) df_lr->problem_data; if (df_lr->block_info) { - free_alloc_pool (df_lr->block_pool); df_lr->block_info_size = 0; free (df_lr->block_info); + df_lr->block_info = NULL; bitmap_obstack_release (&problem_data->lr_bitmaps); free (df_lr->problem_data); df_lr->problem_data = NULL; @@ -1172,10 +1153,10 @@ df_lr_verify_solution_start (void) df_lr->solutions_dirty = true; problem_data = (struct df_lr_problem_data *)df_lr->problem_data; - problem_data->in = XNEWVEC (bitmap_head, last_basic_block); - problem_data->out = XNEWVEC (bitmap_head, last_basic_block); + problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); + problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps); bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps); @@ -1204,7 +1185,7 @@ df_lr_verify_solution_end (void) in df_lr_finalize for details. */ df_lr->solutions_dirty = false; else - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb))) || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb)))) @@ -1216,7 +1197,7 @@ df_lr_verify_solution_end (void) /* Cannot delete them immediately because you may want to dump them if the comparison fails. 
*/ - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { bitmap_clear (&problem_data->in[bb->index]); bitmap_clear (&problem_data->out[bb->index]); @@ -1250,9 +1231,12 @@ static struct df_problem problem_LR = NULL, /* Debugging. */ df_lr_top_dump, /* Debugging start block. */ df_lr_bottom_dump, /* Debugging end block. */ + NULL, /* Debugging start insn. */ + NULL, /* Debugging end insn. */ df_lr_verify_solution_start,/* Incremental solution verify start. */ df_lr_verify_solution_end, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ + sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */ TV_DF_LR, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -1268,7 +1252,7 @@ df_lr_add_problem (void) df_add_problem (&problem_LR); /* These will be initialized when df_scan_blocks processes each block. */ - df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL); + df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack); } @@ -1290,7 +1274,7 @@ df_lr_verify_transfer_functions (void) bitmap_initialize (&saved_use, &bitmap_default_obstack); bitmap_initialize (&all_blocks, &bitmap_default_obstack); - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index); bitmap_set_bit (&all_blocks, bb->index); @@ -1370,17 +1354,6 @@ struct df_live_problem_data combined lr and live analysis. */ static bitmap_head df_live_scratch; -/* Set basic block info. */ - -static void -df_live_set_bb_info (unsigned int index, - struct df_live_bb_info *bb_info) -{ - gcc_assert (df_live); - gcc_assert (index < df_live->block_info_size); - df_live->block_info[index] = bb_info; -} - /* Free basic block info. */ @@ -1395,7 +1368,6 @@ df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, bitmap_clear (&bb_info->kill); bitmap_clear (&bb_info->in); bitmap_clear (&bb_info->out); - pool_free (df_live->block_pool, bb_info); } } @@ -1410,9 +1382,6 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) bitmap_iterator bi; struct df_live_problem_data *problem_data; - if (!df_live->block_pool) - df_live->block_pool = create_alloc_pool ("df_live_block pool", - sizeof (struct df_live_bb_info), 100); if (df_live->problem_data) problem_data = (struct df_live_problem_data *) df_live->problem_data; else @@ -1431,15 +1400,15 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi) { struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index); - if (bb_info) + + /* When bitmaps are already initialized, just clear them. 
*/ + if (bb_info->kill.obstack) { bitmap_clear (&bb_info->kill); bitmap_clear (&bb_info->gen); } else { - bb_info = (struct df_live_bb_info *) pool_alloc (df_live->block_pool); - df_live_set_bb_info (bb_index, bb_info); bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps); bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps); bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps); @@ -1473,10 +1442,10 @@ df_live_reset (bitmap all_blocks) static void df_live_bb_local_compute (unsigned int bb_index) { - basic_block bb = BASIC_BLOCK (bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index); - rtx insn; - df_ref *def_rec; + rtx_insn *insn; + df_ref def; int luid = 0; FOR_BB_INSNS (bb, insn) @@ -1497,9 +1466,8 @@ df_live_bb_local_compute (unsigned int bb_index) continue; luid++; - for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++) + FOR_EACH_INSN_INFO_DEF (def, insn_info) { - df_ref def = *def_rec; unsigned int regno = DF_REF_REGNO (def); if (DF_REF_FLAGS_IS_SET (def, @@ -1516,11 +1484,8 @@ df_live_bb_local_compute (unsigned int bb_index) } } - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def)); - } + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def)); } @@ -1566,16 +1531,16 @@ df_live_init (bitmap all_blocks) /* Forward confluence function that ignores fake edges. */ -static void +static bool df_live_confluence_n (edge e) { bitmap op1 = &df_live_get_bb_info (e->dest->index)->in; bitmap op2 = &df_live_get_bb_info (e->src->index)->out; if (e->flags & EDGE_FAKE) - return; + return false; - bitmap_ior_into (op1, op2); + return bitmap_ior_into (op1, op2); } @@ -1639,9 +1604,9 @@ df_live_free (void) = (struct df_live_problem_data *) df_live->problem_data; if (df_live->block_info) { - free_alloc_pool (df_live->block_pool); df_live->block_info_size = 0; free (df_live->block_info); + df_live->block_info = NULL; bitmap_clear (&df_live_scratch); bitmap_obstack_release (&problem_data->live_bitmaps); free (problem_data); @@ -1721,10 +1686,10 @@ df_live_verify_solution_start (void) df_live->solutions_dirty = true; problem_data = (struct df_live_problem_data *)df_live->problem_data; - problem_data->in = XNEWVEC (bitmap_head, last_basic_block); - problem_data->out = XNEWVEC (bitmap_head, last_basic_block); + problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); + problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps); bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps); @@ -1747,7 +1712,7 @@ df_live_verify_solution_end (void) if (!problem_data->out) return; - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb))) || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb)))) @@ -1759,7 +1724,7 @@ df_live_verify_solution_end (void) /* Cannot delete them immediately because you may want to dump them if the comparison fails. */ - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { bitmap_clear (&problem_data->in[bb->index]); bitmap_clear (&problem_data->out[bb->index]); @@ -1793,9 +1758,12 @@ static struct df_problem problem_LIVE = NULL, /* Debugging. */ df_live_top_dump, /* Debugging start block. 
*/ df_live_bottom_dump, /* Debugging end block. */ + NULL, /* Debugging start insn. */ + NULL, /* Debugging end insn. */ df_live_verify_solution_start,/* Incremental solution verify start. */ df_live_verify_solution_end, /* Incremental solution verify end. */ &problem_LR, /* Dependent problem. */ + sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */ TV_DF_LIVE, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -1811,7 +1779,7 @@ df_live_add_problem (void) df_add_problem (&problem_LIVE); /* These will be initialized when df_scan_blocks processes each block. */ - df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL); + df_live->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack); } @@ -1822,7 +1790,7 @@ void df_live_set_all_dirty (void) { basic_block bb; - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) bitmap_set_bit (df_live->out_of_date_transfer_functions, bb->index); } @@ -1848,7 +1816,7 @@ df_live_verify_transfer_functions (void) df_grow_insn_info (); - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) { struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index); bitmap_set_bit (&all_blocks, bb->index); @@ -1992,36 +1960,32 @@ df_chain_remove_problem (void) EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi) { - rtx insn; - df_ref *def_rec; - df_ref *use_rec; - basic_block bb = BASIC_BLOCK (bb_index); + rtx_insn *insn; + df_ref def, use; + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); if (df_chain_problem_p (DF_DU_CHAIN)) - for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++) - DF_REF_CHAIN (*def_rec) = NULL; + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + DF_REF_CHAIN (def) = NULL; if (df_chain_problem_p (DF_UD_CHAIN)) - for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++) - DF_REF_CHAIN (*use_rec) = NULL; + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + DF_REF_CHAIN (use) = NULL; FOR_BB_INSNS (bb, insn) - { - unsigned int uid = INSN_UID (insn); - - if (INSN_P (insn)) - { - if (df_chain_problem_p (DF_DU_CHAIN)) - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) - DF_REF_CHAIN (*def_rec) = NULL; - if (df_chain_problem_p (DF_UD_CHAIN)) - { - for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) - DF_REF_CHAIN (*use_rec) = NULL; - for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++) - DF_REF_CHAIN (*use_rec) = NULL; - } - } - } + if (INSN_P (insn)) + { + df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + if (df_chain_problem_p (DF_DU_CHAIN)) + FOR_EACH_INSN_INFO_DEF (def, insn_info) + DF_REF_CHAIN (def) = NULL; + if (df_chain_problem_p (DF_UD_CHAIN)) + { + FOR_EACH_INSN_INFO_USE (use, insn_info) + DF_REF_CHAIN (use) = NULL; + FOR_EACH_INSN_INFO_EQ_USE (use, insn_info) + DF_REF_CHAIN (use) = NULL; + } + } } bitmap_clear (df_chain->out_of_date_transfer_functions); @@ -2065,15 +2029,14 @@ df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED) static void df_chain_create_bb_process_use (bitmap local_rd, - df_ref *use_rec, + df_ref use, int top_flag) { bitmap_iterator bi; unsigned int def_index; - while (*use_rec) + for (; use; use = DF_REF_NEXT_LOC (use)) { - df_ref use = *use_rec; unsigned int uregno = DF_REF_REGNO (use); if ((!(df->changeable_flags & DF_NO_HARD_REGS)) || (uregno >= FIRST_PSEUDO_REGISTER)) @@ -2102,8 +2065,6 @@ df_chain_create_bb_process_use (bitmap local_rd, } } } - - use_rec++; } } @@ -2113,9 +2074,9 @@ df_chain_create_bb_process_use (bitmap local_rd, static void df_chain_create_bb (unsigned 
int bb_index) { - basic_block bb = BASIC_BLOCK (bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); - rtx insn; + rtx_insn *insn; bitmap_head cpy; bitmap_initialize (&cpy, &bitmap_default_obstack); @@ -2194,111 +2155,110 @@ df_chain_free (void) /* Debugging info. */ static void -df_chain_top_dump (basic_block bb, FILE *file) +df_chain_bb_dump (basic_block bb, FILE *file, bool top) { + /* Artificials are only hard regs. */ + if (df->changeable_flags & DF_NO_HARD_REGS) + return; + if (df_chain_problem_p (DF_UD_CHAIN)) + { + df_ref use; + + fprintf (file, + ";; UD chains for artificial uses at %s\n", + top ? "top" : "bottom"); + FOR_EACH_ARTIFICIAL_USE (use, bb->index) + if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP)) + || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP))) + { + fprintf (file, ";; reg %d ", DF_REF_REGNO (use)); + df_chain_dump (DF_REF_CHAIN (use), file); + fprintf (file, "\n"); + } + } if (df_chain_problem_p (DF_DU_CHAIN)) { - rtx insn; - df_ref *def_rec = df_get_artificial_defs (bb->index); - if (*def_rec) - { - - fprintf (file, ";; DU chains for artificial defs\n"); - while (*def_rec) - { - df_ref def = *def_rec; - fprintf (file, ";; reg %d ", DF_REF_REGNO (def)); - df_chain_dump (DF_REF_CHAIN (def), file); - fprintf (file, "\n"); - def_rec++; - } - } - - FOR_BB_INSNS (bb, insn) - { - if (INSN_P (insn)) - { - struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); - def_rec = DF_INSN_INFO_DEFS (insn_info); - if (*def_rec) - { - fprintf (file, ";; DU chains for insn luid %d uid %d\n", - DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); - - while (*def_rec) - { - df_ref def = *def_rec; - fprintf (file, ";; reg %d ", DF_REF_REGNO (def)); - if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE) - fprintf (file, "read/write "); - df_chain_dump (DF_REF_CHAIN (def), file); - fprintf (file, "\n"); - def_rec++; - } - } - } - } + df_ref def; + + fprintf (file, + ";; DU chains for artificial defs at %s\n", + top ? 
"top" : "bottom"); + FOR_EACH_ARTIFICIAL_DEF (def, bb->index) + if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP)) + || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP))) + { + fprintf (file, ";; reg %d ", DF_REF_REGNO (def)); + df_chain_dump (DF_REF_CHAIN (def), file); + fprintf (file, "\n"); + } } } +static void +df_chain_top_dump (basic_block bb, FILE *file) +{ + df_chain_bb_dump (bb, file, /*top=*/true); +} static void df_chain_bottom_dump (basic_block bb, FILE *file) { - if (df_chain_problem_p (DF_UD_CHAIN)) - { - rtx insn; - df_ref *use_rec = df_get_artificial_uses (bb->index); - - if (*use_rec) - { - fprintf (file, ";; UD chains for artificial uses\n"); - while (*use_rec) - { - df_ref use = *use_rec; - fprintf (file, ";; reg %d ", DF_REF_REGNO (use)); - df_chain_dump (DF_REF_CHAIN (use), file); - fprintf (file, "\n"); - use_rec++; - } - } - - FOR_BB_INSNS (bb, insn) - { - if (INSN_P (insn)) - { - struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); - df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info); - use_rec = DF_INSN_INFO_USES (insn_info); - if (*use_rec || *eq_use_rec) - { - fprintf (file, ";; UD chains for insn luid %d uid %d\n", - DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); + df_chain_bb_dump (bb, file, /*top=*/false); +} - while (*use_rec) - { - df_ref use = *use_rec; - fprintf (file, ";; reg %d ", DF_REF_REGNO (use)); - if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE) - fprintf (file, "read/write "); - df_chain_dump (DF_REF_CHAIN (use), file); - fprintf (file, "\n"); - use_rec++; - } - while (*eq_use_rec) - { - df_ref use = *eq_use_rec; - fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use)); - df_chain_dump (DF_REF_CHAIN (use), file); - fprintf (file, "\n"); - eq_use_rec++; - } - } - } - } +static void +df_chain_insn_top_dump (const rtx_insn *insn, FILE *file) +{ + if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn)) + { + struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + df_ref use; + + fprintf (file, ";; UD chains for insn luid %d uid %d\n", + DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); + FOR_EACH_INSN_INFO_USE (use, insn_info) + if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use)) + || !(df->changeable_flags & DF_NO_HARD_REGS)) + { + fprintf (file, ";; reg %d ", DF_REF_REGNO (use)); + if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE) + fprintf (file, "read/write "); + df_chain_dump (DF_REF_CHAIN (use), file); + fprintf (file, "\n"); + } + FOR_EACH_INSN_INFO_EQ_USE (use, insn_info) + if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use)) + || !(df->changeable_flags & DF_NO_HARD_REGS)) + { + fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use)); + df_chain_dump (DF_REF_CHAIN (use), file); + fprintf (file, "\n"); + } } } +static void +df_chain_insn_bottom_dump (const rtx_insn *insn, FILE *file) +{ + if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn)) + { + struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + df_ref def; + fprintf (file, ";; DU chains for insn luid %d uid %d\n", + DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); + FOR_EACH_INSN_INFO_DEF (def, insn_info) + if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def)) + || !(df->changeable_flags & DF_NO_HARD_REGS)) + { + fprintf (file, ";; reg %d ", DF_REF_REGNO (def)); + if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE) + fprintf (file, "read/write "); + df_chain_dump (DF_REF_CHAIN (def), file); + fprintf (file, "\n"); + } + fprintf (file, "\n"); + } +} static struct df_problem problem_CHAIN = { @@ -2319,9 +2279,12 @@ static struct df_problem problem_CHAIN = NULL, /* Debugging. 
*/ df_chain_top_dump, /* Debugging start block. */ df_chain_bottom_dump, /* Debugging end block. */ + df_chain_insn_top_dump, /* Debugging start insn. */ + df_chain_insn_bottom_dump, /* Debugging end insn. */ NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ &problem_RD, /* Dependent problem. */ + sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */ TV_DF_CHAIN, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -2336,177 +2299,63 @@ df_chain_add_problem (unsigned int chain_flags) { df_add_problem (&problem_CHAIN); df_chain->local_flags = chain_flags; - df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL); + df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack); } #undef df_chain_problem_p /*---------------------------------------------------------------------------- - BYTE LEVEL LIVE REGISTERS + WORD LEVEL LIVE REGISTERS Find the locations in the function where any use of a pseudo can reach in the backwards direction. In and out bitvectors are built - for each basic block. There are two mapping functions, - df_byte_lr_get_regno_start and df_byte_lr_get_regno_len that are - used to map regnos into bit vector positions. - - This problem differs from the regular df_lr function in the way - that subregs, *_extracts and strict_low_parts are handled. In lr - these are consider partial kills, here, the exact set of bytes is - modeled. Note that any reg that has none of these operations is - only modeled with a single bit since all operations access the - entire register. - - This problem is more brittle that the regular lr. It currently can - be used in dce incrementally, but cannot be used in an environment - where insns are created or modified. The problem is that the - mapping of regnos to bitmap positions is relatively compact, in - that if a pseudo does not do any of the byte wise operations, only - one slot is allocated, rather than a slot for each byte. If insn - are created, where a subreg is used for a reg that had no subregs, - the mapping would be wrong. Likewise, there are no checks to see - that new pseudos have been added. These issues could be addressed - by adding a problem specific flag to not use the compact mapping, - if there was a need to do so. + for each basic block. We only track pseudo registers that have a + size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and + contain two bits corresponding to each of the subwords. ----------------------------------------------------------------------------*/ /* Private data used to verify the solution for this problem. */ -struct df_byte_lr_problem_data +struct df_word_lr_problem_data { - /* Expanded versions of bitvectors used in lr. */ - bitmap_head invalidated_by_call; - bitmap_head hardware_regs_used; - - /* Indexed by regno, this is true if there are subregs, extracts or - strict_low_parts for this regno. */ - bitmap_head needs_expansion; - - /* The start position and len for each regno in the various bit - vectors. */ - unsigned int* regno_start; - unsigned int* regno_len; /* An obstack for the bitmaps we need for this problem. */ - bitmap_obstack byte_lr_bitmaps; + bitmap_obstack word_lr_bitmaps; }; -/* Get the starting location for REGNO in the df_byte_lr bitmaps. 
*/ - -int -df_byte_lr_get_regno_start (unsigned int regno) -{ - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;; - return problem_data->regno_start[regno]; -} - - -/* Get the len for REGNO in the df_byte_lr bitmaps. */ - -int -df_byte_lr_get_regno_len (unsigned int regno) -{ - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;; - return problem_data->regno_len[regno]; -} - - -/* Set basic block info. */ - -static void -df_byte_lr_set_bb_info (unsigned int index, - struct df_byte_lr_bb_info *bb_info) -{ - gcc_assert (df_byte_lr); - gcc_assert (index < df_byte_lr->block_info_size); - df_byte_lr->block_info[index] = bb_info; -} - - /* Free basic block info. */ static void -df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, +df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, void *vbb_info) { - struct df_byte_lr_bb_info *bb_info = (struct df_byte_lr_bb_info *) vbb_info; + struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info; if (bb_info) { bitmap_clear (&bb_info->use); bitmap_clear (&bb_info->def); bitmap_clear (&bb_info->in); bitmap_clear (&bb_info->out); - pool_free (df_byte_lr->block_pool, bb_info); } } -/* Check all of the refs in REF_REC to see if any of them are - extracts, subregs or strict_low_parts. */ +/* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are + not touched unless the block is new. */ static void -df_byte_lr_check_regs (df_ref *ref_rec) +df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - - for (; *ref_rec; ref_rec++) - { - df_ref ref = *ref_rec; - if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT - | DF_REF_ZERO_EXTRACT - | DF_REF_STRICT_LOW_PART) - || GET_CODE (DF_REF_REG (ref)) == SUBREG) - bitmap_set_bit (&problem_data->needs_expansion, DF_REF_REGNO (ref)); - } -} + unsigned int bb_index; + bitmap_iterator bi; + basic_block bb; + struct df_word_lr_problem_data *problem_data + = XNEW (struct df_word_lr_problem_data); + df_word_lr->problem_data = problem_data; -/* Expand bitmap SRC which is indexed by regno to DEST which is indexed by - regno_start and regno_len. */ - -static void -df_byte_lr_expand_bitmap (bitmap dest, bitmap src) -{ - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - bitmap_iterator bi; - unsigned int i; - - bitmap_clear (dest); - EXECUTE_IF_SET_IN_BITMAP (src, 0, i, bi) - { - bitmap_set_range (dest, problem_data->regno_start[i], - problem_data->regno_len[i]); - } -} - - -/* Allocate or reset bitmaps for DF_BYTE_LR blocks. The solution bits are - not touched unless the block is new. */ - -static void -df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) -{ - unsigned int bb_index; - bitmap_iterator bi; - basic_block bb; - unsigned int regno; - unsigned int index = 0; - unsigned int max_reg = max_reg_num(); - struct df_byte_lr_problem_data *problem_data - = XNEW (struct df_byte_lr_problem_data); - - df_byte_lr->problem_data = problem_data; - - if (!df_byte_lr->block_pool) - df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool", - sizeof (struct df_byte_lr_bb_info), 50); - - df_grow_bb_info (df_byte_lr); + df_grow_bb_info (df_word_lr); /* Create the mapping from regnos to slots. This does not change unless the problem is destroyed and recreated. 
In particular, if @@ -2514,306 +2363,205 @@ df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) want to redo the mapping because this would invalidate everything else. */ - bitmap_obstack_initialize (&problem_data->byte_lr_bitmaps); - problem_data->regno_start = XNEWVEC (unsigned int, max_reg); - problem_data->regno_len = XNEWVEC (unsigned int, max_reg); - bitmap_initialize (&problem_data->hardware_regs_used, - &problem_data->byte_lr_bitmaps); - bitmap_initialize (&problem_data->invalidated_by_call, - &problem_data->byte_lr_bitmaps); - bitmap_initialize (&problem_data->needs_expansion, - &problem_data->byte_lr_bitmaps); - - /* Discover which regno's use subregs, extracts or - strict_low_parts. */ - FOR_EACH_BB (bb) - { - rtx insn; - FOR_BB_INSNS (bb, insn) - { - if (INSN_P (insn)) - { - struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); - df_byte_lr_check_regs (DF_INSN_INFO_DEFS (insn_info)); - df_byte_lr_check_regs (DF_INSN_INFO_USES (insn_info)); - } - } - bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, bb->index); - } - - bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, ENTRY_BLOCK); - bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, EXIT_BLOCK); + bitmap_obstack_initialize (&problem_data->word_lr_bitmaps); - /* Allocate the slots for each regno. */ - for (regno = 0; regno < max_reg; regno++) - { - int len; - problem_data->regno_start[regno] = index; - if (bitmap_bit_p (&problem_data->needs_expansion, regno)) - len = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno])); - else - len = 1; + FOR_EACH_BB_FN (bb, cfun) + bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index); - problem_data->regno_len[regno] = len; - index += len; - } + bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK); + bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK); - df_byte_lr_expand_bitmap (&problem_data->hardware_regs_used, - &df->hardware_regs_used); - df_byte_lr_expand_bitmap (&problem_data->invalidated_by_call, - regs_invalidated_by_call_regset); - - EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi) + EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi) { - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); - if (bb_info) + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index); + + /* When bitmaps are already initialized, just clear them. */ + if (bb_info->use.obstack) { bitmap_clear (&bb_info->def); bitmap_clear (&bb_info->use); } else { - bb_info = (struct df_byte_lr_bb_info *) pool_alloc (df_byte_lr->block_pool); - df_byte_lr_set_bb_info (bb_index, bb_info); - bitmap_initialize (&bb_info->use, &problem_data->byte_lr_bitmaps); - bitmap_initialize (&bb_info->def, &problem_data->byte_lr_bitmaps); - bitmap_initialize (&bb_info->in, &problem_data->byte_lr_bitmaps); - bitmap_initialize (&bb_info->out, &problem_data->byte_lr_bitmaps); + bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps); + bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps); + bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps); + bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps); } } - df_byte_lr->optional_p = true; + df_word_lr->optional_p = true; } /* Reset the global solution for recalculation. 
*/ static void -df_byte_lr_reset (bitmap all_blocks) +df_word_lr_reset (bitmap all_blocks) { unsigned int bb_index; bitmap_iterator bi; EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index); gcc_assert (bb_info); bitmap_clear (&bb_info->in); bitmap_clear (&bb_info->out); } } +/* Examine REF, and if it is for a reg we're interested in, set or + clear the bits corresponding to its subwords from the bitmap + according to IS_SET. LIVE is the bitmap we should update. We do + not track hard regs or pseudos of any size other than 2 * + UNITS_PER_WORD. + We return true if we changed the bitmap, or if we encountered a register + we're not tracking. */ + +bool +df_word_lr_mark_ref (df_ref ref, bool is_set, regset live) +{ + rtx orig_reg = DF_REF_REG (ref); + rtx reg = orig_reg; + machine_mode reg_mode; + unsigned regno; + /* Left at -1 for whole accesses. */ + int which_subword = -1; + bool changed = false; + + if (GET_CODE (reg) == SUBREG) + reg = SUBREG_REG (orig_reg); + regno = REGNO (reg); + reg_mode = GET_MODE (reg); + if (regno < FIRST_PSEUDO_REGISTER + || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD) + return true; + + if (GET_CODE (orig_reg) == SUBREG + && df_read_modify_subreg_p (orig_reg)) + { + gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL)); + if (subreg_lowpart_p (orig_reg)) + which_subword = 0; + else + which_subword = 1; + } + if (is_set) + { + if (which_subword != 1) + changed |= bitmap_set_bit (live, regno * 2); + if (which_subword != 0) + changed |= bitmap_set_bit (live, regno * 2 + 1); + } + else + { + if (which_subword != 1) + changed |= bitmap_clear_bit (live, regno * 2); + if (which_subword != 0) + changed |= bitmap_clear_bit (live, regno * 2 + 1); + } + return changed; +} /* Compute local live register info for basic block BB. */ static void -df_byte_lr_bb_local_compute (unsigned int bb_index) +df_word_lr_bb_local_compute (unsigned int bb_index) { - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - basic_block bb = BASIC_BLOCK (bb_index); - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); - rtx insn; - df_ref *def_rec; - df_ref *use_rec; + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index); + rtx_insn *insn; + df_ref def, use; - /* Process the registers set in an exception handler. */ - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) - { - unsigned int dregno = DF_REF_REGNO (def); - unsigned int start = problem_data->regno_start[dregno]; - unsigned int len = problem_data->regno_len[dregno]; - bitmap_set_range (&bb_info->def, start, len); - bitmap_clear_range (&bb_info->use, start, len); - } - } + /* Ensure that artificial refs don't contain references to pseudos. */ + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER); - /* Process the hardware registers that are always live. */ - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - /* Add use to set of uses in this BB. 
*/ - if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) - { - unsigned int uregno = DF_REF_REGNO (use); - unsigned int start = problem_data->regno_start[uregno]; - unsigned int len = problem_data->regno_len[uregno]; - bitmap_set_range (&bb_info->use, start, len); - } - } + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER); FOR_BB_INSNS_REVERSE (bb, insn) { - unsigned int uid = INSN_UID (insn); - - if (!INSN_P (insn)) + if (!NONDEBUG_INSN_P (insn)) continue; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) - { - df_ref def = *def_rec; - /* If the def is to only part of the reg, it does - not kill the other defs that reach here. */ - if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL))) - { - unsigned int dregno = DF_REF_REGNO (def); - unsigned int start = problem_data->regno_start[dregno]; - unsigned int len = problem_data->regno_len[dregno]; - unsigned int sb; - unsigned int lb; - if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb)) - { - start += sb; - len = lb - sb; - } - if (len) - { - bitmap_set_range (&bb_info->def, start, len); - bitmap_clear_range (&bb_info->use, start, len); - } - } - } - - for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) - { - df_ref use = *use_rec; - unsigned int uregno = DF_REF_REGNO (use); - unsigned int start = problem_data->regno_start[uregno]; - unsigned int len = problem_data->regno_len[uregno]; - unsigned int sb; - unsigned int lb; - if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb)) - { - start += sb; - len = lb - sb; - } - /* Add use to set of uses in this BB. */ - if (len) - bitmap_set_range (&bb_info->use, start, len); - } - } - - /* Process the registers set in an exception handler or the hard - frame pointer if this block is the target of a non local - goto. */ - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - { - unsigned int dregno = DF_REF_REGNO (def); - unsigned int start = problem_data->regno_start[dregno]; - unsigned int len = problem_data->regno_len[dregno]; - bitmap_set_range (&bb_info->def, start, len); - bitmap_clear_range (&bb_info->use, start, len); - } - } - -#ifdef EH_USES - /* Process the uses that are live into an exception handler. */ - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - /* Add use to set of uses in this BB. */ - if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) - { - unsigned int uregno = DF_REF_REGNO (use); - unsigned int start = problem_data->regno_start[uregno]; - unsigned int len = problem_data->regno_len[uregno]; - bitmap_set_range (&bb_info->use, start, len); - } + df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + FOR_EACH_INSN_INFO_DEF (def, insn_info) + /* If the def is to only part of the reg, it does + not kill the other defs that reach here. */ + if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL))) + { + df_word_lr_mark_ref (def, true, &bb_info->def); + df_word_lr_mark_ref (def, false, &bb_info->use); + } + FOR_EACH_INSN_INFO_USE (use, insn_info) + df_word_lr_mark_ref (use, true, &bb_info->use); } -#endif } /* Compute local live register info for each basic block within BLOCKS. 
*/ static void -df_byte_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) +df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) { unsigned int bb_index; bitmap_iterator bi; - EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi) + EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi) { if (bb_index == EXIT_BLOCK) { - /* The exit block is special for this problem and its bits are - computed from thin air. */ - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (EXIT_BLOCK); - df_byte_lr_expand_bitmap (&bb_info->use, df->exit_block_uses); + unsigned regno; + bitmap_iterator bi; + EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER, + regno, bi) + gcc_unreachable (); } else - df_byte_lr_bb_local_compute (bb_index); + df_word_lr_bb_local_compute (bb_index); } - bitmap_clear (df_byte_lr->out_of_date_transfer_functions); + bitmap_clear (df_word_lr->out_of_date_transfer_functions); } /* Initialize the solution vectors. */ static void -df_byte_lr_init (bitmap all_blocks) +df_word_lr_init (bitmap all_blocks) { unsigned int bb_index; bitmap_iterator bi; EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index); bitmap_copy (&bb_info->in, &bb_info->use); bitmap_clear (&bb_info->out); } } -/* Confluence function that processes infinite loops. This might be a - noreturn function that throws. And even if it isn't, getting the - unwind info right helps debugging. */ -static void -df_byte_lr_confluence_0 (basic_block bb) -{ - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - bitmap op1 = &df_byte_lr_get_bb_info (bb->index)->out; - if (bb != EXIT_BLOCK_PTR) - bitmap_copy (op1, &problem_data->hardware_regs_used); -} - - /* Confluence function that ignores fake edges. */ -static void -df_byte_lr_confluence_n (edge e) +static bool +df_word_lr_confluence_n (edge e) { - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - bitmap op1 = &df_byte_lr_get_bb_info (e->src->index)->out; - bitmap op2 = &df_byte_lr_get_bb_info (e->dest->index)->in; - - /* Call-clobbered registers die across exception and call edges. */ - /* ??? Abnormal call edges ignored for the moment, as this gets - confused by sibling call edges, which crashes reg-stack. */ - if (e->flags & EDGE_EH) - bitmap_ior_and_compl_into (op1, op2, &problem_data->invalidated_by_call); - else - bitmap_ior_into (op1, op2); + bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out; + bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in; - bitmap_ior_into (op1, &problem_data->hardware_regs_used); + return bitmap_ior_into (op1, op2); } /* Transfer function. */ static bool -df_byte_lr_transfer_function (int bb_index) +df_word_lr_transfer_function (int bb_index) { - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index); bitmap in = &bb_info->in; bitmap out = &bb_info->out; bitmap use = &bb_info->use; @@ -2826,85 +2574,85 @@ df_byte_lr_transfer_function (int bb_index) /* Free all storage associated with the problem. 
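
The confluence and transfer callbacks above implement ordinary backward liveness over the word-granular sets. As a rough sketch, assuming a uint64_t bitset is wide enough to stand in for GCC's bitmaps, the two equations are IN = USE | (OUT & ~DEF) and OUT |= IN of each successor, with the boolean return telling the worklist solver whether anything changed:

#include <stdbool.h>
#include <stdint.h>

struct lr_bb { uint64_t use, def, in, out; };

/* Transfer function: recompute IN from USE, DEF and OUT.  */
bool
lr_transfer (struct lr_bb *bb)
{
  uint64_t new_in = bb->use | (bb->out & ~bb->def);
  bool changed = new_in != bb->in;
  bb->in = new_in;
  return changed;
}

/* Confluence over one edge: OUT of the source absorbs IN of the dest.  */
bool
lr_confluence_n (struct lr_bb *src, const struct lr_bb *dest)
{
  uint64_t new_out = src->out | dest->in;
  bool changed = new_out != src->out;
  src->out = new_out;
  return changed;
}
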
*/ static void -df_byte_lr_free (void) +df_word_lr_free (void) { - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - + struct df_word_lr_problem_data *problem_data + = (struct df_word_lr_problem_data *)df_word_lr->problem_data; - if (df_byte_lr->block_info) + if (df_word_lr->block_info) { - free_alloc_pool (df_byte_lr->block_pool); - df_byte_lr->block_info_size = 0; - free (df_byte_lr->block_info); + df_word_lr->block_info_size = 0; + free (df_word_lr->block_info); + df_word_lr->block_info = NULL; } - BITMAP_FREE (df_byte_lr->out_of_date_transfer_functions); - bitmap_obstack_release (&problem_data->byte_lr_bitmaps); - free (problem_data->regno_start); - free (problem_data->regno_len); + BITMAP_FREE (df_word_lr->out_of_date_transfer_functions); + bitmap_obstack_release (&problem_data->word_lr_bitmaps); free (problem_data); - free (df_byte_lr); + free (df_word_lr); } /* Debugging info at top of bb. */ static void -df_byte_lr_top_dump (basic_block bb, FILE *file) +df_word_lr_top_dump (basic_block bb, FILE *file) { - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index); + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index); if (!bb_info) return; fprintf (file, ";; blr in \t"); - df_print_byte_regset (file, &bb_info->in); + df_print_word_regset (file, &bb_info->in); fprintf (file, ";; blr use \t"); - df_print_byte_regset (file, &bb_info->use); + df_print_word_regset (file, &bb_info->use); fprintf (file, ";; blr def \t"); - df_print_byte_regset (file, &bb_info->def); + df_print_word_regset (file, &bb_info->def); } /* Debugging info at bottom of bb. */ static void -df_byte_lr_bottom_dump (basic_block bb, FILE *file) +df_word_lr_bottom_dump (basic_block bb, FILE *file) { - struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index); + struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index); if (!bb_info) return; fprintf (file, ";; blr out \t"); - df_print_byte_regset (file, &bb_info->out); + df_print_word_regset (file, &bb_info->out); } /* All of the information associated with every instance of the problem. */ -static struct df_problem problem_BYTE_LR = +static struct df_problem problem_WORD_LR = { - DF_BYTE_LR, /* Problem id. */ + DF_WORD_LR, /* Problem id. */ DF_BACKWARD, /* Direction. */ - df_byte_lr_alloc, /* Allocate the problem specific data. */ - df_byte_lr_reset, /* Reset global information. */ - df_byte_lr_free_bb_info, /* Free basic block info. */ - df_byte_lr_local_compute, /* Local compute function. */ - df_byte_lr_init, /* Init the solution specific data. */ + df_word_lr_alloc, /* Allocate the problem specific data. */ + df_word_lr_reset, /* Reset global information. */ + df_word_lr_free_bb_info, /* Free basic block info. */ + df_word_lr_local_compute, /* Local compute function. */ + df_word_lr_init, /* Init the solution specific data. */ df_worklist_dataflow, /* Worklist solver. */ - df_byte_lr_confluence_0, /* Confluence operator 0. */ - df_byte_lr_confluence_n, /* Confluence operator n. */ - df_byte_lr_transfer_function, /* Transfer function. */ + NULL, /* Confluence operator 0. */ + df_word_lr_confluence_n, /* Confluence operator n. */ + df_word_lr_transfer_function, /* Transfer function. */ NULL, /* Finalize function. */ - df_byte_lr_free, /* Free all of the problem information. */ - df_byte_lr_free, /* Remove this problem from the stack of dataflow problems. */ + df_word_lr_free, /* Free all of the problem information. 
*/ + df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */ NULL, /* Debugging. */ - df_byte_lr_top_dump, /* Debugging start block. */ - df_byte_lr_bottom_dump, /* Debugging end block. */ + df_word_lr_top_dump, /* Debugging start block. */ + df_word_lr_bottom_dump, /* Debugging end block. */ + NULL, /* Debugging start insn. */ + NULL, /* Debugging end insn. */ NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ - TV_DF_BYTE_LR, /* Timing variable. */ + sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */ + TV_DF_WORD_LR, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -2914,163 +2662,45 @@ static struct df_problem problem_BYTE_LR = solution. */ void -df_byte_lr_add_problem (void) +df_word_lr_add_problem (void) { - df_add_problem (&problem_BYTE_LR); + df_add_problem (&problem_WORD_LR); /* These will be initialized when df_scan_blocks processes each block. */ - df_byte_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL); + df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack); } -/* Simulate the effects of the defs of INSN on LIVE. */ +/* Simulate the effects of the defs of INSN on LIVE. Return true if we changed + any bits, which is used by the caller to determine whether a set is + necessary. We also return true if there are other reasons not to delete + an insn. */ -void -df_byte_lr_simulate_defs (rtx insn, bitmap live) +bool +df_word_lr_simulate_defs (rtx_insn *insn, bitmap live) { - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - df_ref *def_rec; - unsigned int uid = INSN_UID (insn); - - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) - { - df_ref def = *def_rec; - - /* If the def is to only part of the reg, it does - not kill the other defs that reach here. */ - if (!(DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)) - { - unsigned int dregno = DF_REF_REGNO (def); - unsigned int start = problem_data->regno_start[dregno]; - unsigned int len = problem_data->regno_len[dregno]; - unsigned int sb; - unsigned int lb; - if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb)) - { - start += sb; - len = lb - sb; - } + bool changed = false; + df_ref def; - if (len) - bitmap_clear_range (live, start, len); - } - } + FOR_EACH_INSN_DEF (def, insn) + if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL) + changed = true; + else + changed |= df_word_lr_mark_ref (def, false, live); + return changed; } /* Simulate the effects of the uses of INSN on LIVE. */ void -df_byte_lr_simulate_uses (rtx insn, bitmap live) +df_word_lr_simulate_uses (rtx_insn *insn, bitmap live) { - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - df_ref *use_rec; - unsigned int uid = INSN_UID (insn); - - for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) - { - df_ref use = *use_rec; - unsigned int uregno = DF_REF_REGNO (use); - unsigned int start = problem_data->regno_start[uregno]; - unsigned int len = problem_data->regno_len[uregno]; - unsigned int sb; - unsigned int lb; - - if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb)) - { - start += sb; - len = lb - sb; - } - - /* Add use to set of uses in this BB. */ - if (len) - bitmap_set_range (live, start, len); - } -} - - -/* Apply the artificial uses and defs at the top of BB in a forwards - direction. 
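
problem_WORD_LR above is one instance of the general pattern: a dataflow problem is a table of callbacks (allocate, local compute, init, confluence, transfer, free, dumps) plus the size of its per-block record, and df_word_lr_add_problem hands that table to the framework. A rough sketch of the shape of such a descriptor, with all names invented for the illustration rather than taken from GCC's df.h:

#include <stdbool.h>
#include <stddef.h>

/* One backward or forward dataflow problem, seen by a generic worklist
   solver purely through these callbacks.  */
struct dataflow_problem
{
  const char *name;
  void (*alloc) (void);                       /* allocate problem data */
  void (*local_compute) (void);               /* per-block use/def or gen/kill */
  void (*init_solution) (void);               /* seed the IN/OUT sets */
  bool (*confluence_n) (int src, int dest);   /* merge over one edge */
  bool (*transfer) (int bb);                  /* true if the block's IN changed */
  void (*free_problem) (void);
  size_t block_info_size;                     /* one entry of block_info */
};

The boolean results of the confluence and transfer hooks are what lets the solver decide whether a block's neighbours have to be revisited at all.
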
*/ - -void -df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live) -{ - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - df_ref *def_rec; -#ifdef EH_USES - df_ref *use_rec; -#endif - int bb_index = bb->index; - -#ifdef EH_USES - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) - { - unsigned int uregno = DF_REF_REGNO (use); - unsigned int start = problem_data->regno_start[uregno]; - unsigned int len = problem_data->regno_len[uregno]; - bitmap_set_range (live, start, len); - } - } -#endif - - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - { - unsigned int dregno = DF_REF_REGNO (def); - unsigned int start = problem_data->regno_start[dregno]; - unsigned int len = problem_data->regno_len[dregno]; - bitmap_clear_range (live, start, len); - } - } -} - - -/* Apply the artificial uses and defs at the end of BB in a backwards - direction. */ - -void -df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live) -{ - struct df_byte_lr_problem_data *problem_data - = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; - df_ref *def_rec; - df_ref *use_rec; - int bb_index = bb->index; - - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) - { - unsigned int dregno = DF_REF_REGNO (def); - unsigned int start = problem_data->regno_start[dregno]; - unsigned int len = problem_data->regno_len[dregno]; - bitmap_clear_range (live, start, len); - } - } + df_ref use; - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) - { - unsigned int uregno = DF_REF_REGNO (use); - unsigned int start = problem_data->regno_start[uregno]; - unsigned int len = problem_data->regno_len[uregno]; - bitmap_set_range (live, start, len); - } - } + FOR_EACH_INSN_USE (use, insn) + df_word_lr_mark_ref (use, true, live); } - - /*---------------------------------------------------------------------------- This problem computes REG_DEAD and REG_UNUSED notes. @@ -3082,9 +2712,9 @@ df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) df_note->optional_p = true; } -#ifdef REG_DEAD_DEBUGGING +/* This is only used if REG_DEAD_DEBUGGING is in effect. */ static void -df_print_note (const char *prefix, rtx insn, rtx note) +df_print_note (const char *prefix, rtx_insn *insn, rtx note) { if (dump_file) { @@ -3093,7 +2723,6 @@ df_print_note (const char *prefix, rtx insn, rtx note) fprintf (dump_file, "\n"); } } -#endif /* After reg-stack, the x86 floating point stack regs are difficult to @@ -3116,16 +2745,13 @@ df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED) #endif -/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN and add - them to OLD_DEAD_NOTES and OLD_UNUSED_NOTES. */ +/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. 
*/ static void -df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) +df_remove_dead_and_unused_notes (rtx_insn *insn) { rtx *pprev = ®_NOTES (insn); rtx link = *pprev; - rtx dead = NULL; - rtx unused = NULL; while (link) { @@ -3142,11 +2768,9 @@ df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) else { rtx next = XEXP (link, 1); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("deleting: ", insn, link); -#endif - XEXP (link, 1) = dead; - dead = link; + if (REG_DEAD_DEBUGGING) + df_print_note ("deleting: ", insn, link); + free_EXPR_LIST_node (link); *pprev = link = next; } break; @@ -3162,11 +2786,9 @@ df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) else { rtx next = XEXP (link, 1); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("deleting: ", insn, link); -#endif - XEXP (link, 1) = unused; - unused = link; + if (REG_DEAD_DEBUGGING) + df_print_note ("deleting: ", insn, link); + free_EXPR_LIST_node (link); *pprev = link = next; } break; @@ -3177,43 +2799,74 @@ df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) break; } } - - *old_dead_notes = dead; - *old_unused_notes = unused; } +/* Remove REG_EQUAL/REG_EQUIV notes referring to dead pseudos using LIVE + as the bitmap of currently live registers. */ -/* Set a NOTE_TYPE note for REG in INSN. Try to pull it from the OLD - list, otherwise create a new one. */ - -static inline rtx -df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg) +static void +df_remove_dead_eq_notes (rtx_insn *insn, bitmap live) { - rtx curr = old; - rtx prev = NULL; + rtx *pprev = ®_NOTES (insn); + rtx link = *pprev; + + while (link) + { + switch (REG_NOTE_KIND (link)) + { + case REG_EQUAL: + case REG_EQUIV: + { + /* Remove the notes that refer to dead registers. As we have at most + one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note + so we need to purge the complete EQ_USES vector when removing + the note using df_notes_rescan. */ + df_ref use; + bool deleted = false; + + FOR_EACH_INSN_EQ_USE (use, insn) + if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER + && DF_REF_LOC (use) + && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE) + && !bitmap_bit_p (live, DF_REF_REGNO (use)) + && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0))) + { + deleted = true; + break; + } + if (deleted) + { + rtx next; + if (REG_DEAD_DEBUGGING) + df_print_note ("deleting: ", insn, link); + next = XEXP (link, 1); + free_EXPR_LIST_node (link); + *pprev = link = next; + df_notes_rescan (insn); + } + else + { + pprev = &XEXP (link, 1); + link = *pprev; + } + break; + } - gcc_assert (!DEBUG_INSN_P (insn)); + default: + pprev = &XEXP (link, 1); + link = *pprev; + break; + } + } +} - while (curr) - if (XEXP (curr, 0) == reg) - { - if (prev) - XEXP (prev, 1) = XEXP (curr, 1); - else - old = XEXP (curr, 1); - XEXP (curr, 1) = REG_NOTES (insn); - REG_NOTES (insn) = curr; - return old; - } - else - { - prev = curr; - curr = XEXP (curr, 1); - } +/* Set a NOTE_TYPE note for REG in INSN. */ - /* Did not find the note. */ +static inline void +df_set_note (enum reg_note note_type, rtx_insn *insn, rtx reg) +{ + gcc_checking_assert (!DEBUG_INSN_P (insn)); add_reg_note (insn, note_type, reg); - return old; } /* A subroutine of df_set_unused_notes_for_mw, with a selection of its @@ -3242,6 +2895,7 @@ df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws, return true; } + /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN based on the bits in LIVE. Do not generate notes for registers in artificial uses. 
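
df_remove_dead_and_unused_notes and df_remove_dead_eq_notes both splice notes out of REG_NOTES with the same pointer-to-pointer idiom. A stripped-down sketch of that idiom on a plain singly linked list; struct note and remove_notes_of_kind are invented, and free here plays the role of free_EXPR_LIST_node:

#include <stdlib.h>

struct note { int kind; struct note *next; };

void
remove_notes_of_kind (struct note **pprev, int kind)
{
  struct note *link = *pprev;
  while (link)
    {
      if (link->kind == kind)
        {
          struct note *next = link->next;
          free (link);            /* free_EXPR_LIST_node in the real code */
          *pprev = link = next;   /* splice out without special-casing the head */
        }
      else
        {
          pprev = &link->next;
          link = *pprev;
        }
    }
}
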
DO_NOT_GEN is updated so that REG_DEAD notes are @@ -3249,27 +2903,27 @@ df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws, instruction. */ -static rtx -df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, +static void +df_set_unused_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws, bitmap live, bitmap do_not_gen, - bitmap artificial_uses) + bitmap artificial_uses, + struct dead_debug_local *debug) { unsigned int r; -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n", mws->start_regno, mws->end_regno); -#endif if (df_whole_mw_reg_unused_p (mws, live, artificial_uses)) { unsigned int regno = mws->start_regno; - old = df_set_note (REG_UNUSED, insn, old, mws->mw_reg); + df_set_note (REG_UNUSED, insn, mws->mw_reg); + dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG); + + if (REG_DEAD_DEBUGGING) + df_print_note ("adding 1: ", insn, REG_NOTES (insn)); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("adding 1: ", insn, REG_NOTES (insn)); -#endif bitmap_set_bit (do_not_gen, regno); /* Only do this if the value is totally dead. */ } @@ -3279,14 +2933,13 @@ df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, if (!bitmap_bit_p (live, r) && !bitmap_bit_p (artificial_uses, r)) { - old = df_set_note (REG_UNUSED, insn, old, regno_reg_rtx[r]); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("adding 2: ", insn, REG_NOTES (insn)); -#endif + df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]); + dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG); + if (REG_DEAD_DEBUGGING) + df_print_note ("adding 2: ", insn, REG_NOTES (insn)); } bitmap_set_bit (do_not_gen, r); } - return old; } @@ -3323,8 +2976,8 @@ df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws, from being set if the instruction both reads and writes the register. */ -static rtx -df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, +static void +df_set_dead_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws, bitmap live, bitmap do_not_gen, bitmap artificial_uses, bool *added_notes_p) { @@ -3333,8 +2986,7 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, *added_notes_p = false; -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) { fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =", mws->start_regno, mws->end_regno); @@ -3344,20 +2996,18 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, fprintf (dump_file, " artificial uses ="); df_print_regset (dump_file, artificial_uses); } -#endif if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen)) { - /* Add a dead note for the entire multi word register. */ if (is_debug) { *added_notes_p = true; - return old; + return; } - old = df_set_note (REG_DEAD, insn, old, mws->mw_reg); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("adding 1: ", insn, REG_NOTES (insn)); -#endif + /* Add a dead note for the entire multi word register. 
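
The multiword handling above boils down to one decision: if every constituent hard register of the group is dead and not an artificial use, a single REG_UNUSED note for mw_reg suffices; otherwise notes are generated register by register. A sketch with plain bool arrays in place of the live and artificial_uses bitmaps, and printf standing in for df_set_note:

#include <stdbool.h>
#include <stdio.h>

bool live[64], artificial_use[64];

void
set_unused_notes_for_group (unsigned start_regno, unsigned end_regno)
{
  bool whole_group_dead = true;
  for (unsigned r = start_regno; r <= end_regno; r++)
    if (live[r] || artificial_use[r])
      {
        whole_group_dead = false;
        break;
      }

  if (whole_group_dead)
    printf ("REG_UNUSED for the whole group %u..%u\n", start_regno, end_regno);
  else
    for (unsigned r = start_regno; r <= end_regno; r++)
      if (!live[r] && !artificial_use[r])
        printf ("REG_UNUSED for register %u\n", r);
}
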
*/ + df_set_note (REG_DEAD, insn, mws->mw_reg); + if (REG_DEAD_DEBUGGING) + df_print_note ("adding 1: ", insn, REG_NOTES (insn)); } else { @@ -3369,34 +3019,32 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, if (is_debug) { *added_notes_p = true; - return old; + return; } - old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("adding 2: ", insn, REG_NOTES (insn)); -#endif + df_set_note (REG_DEAD, insn, regno_reg_rtx[r]); + if (REG_DEAD_DEBUGGING) + df_print_note ("adding 2: ", insn, REG_NOTES (insn)); } } - return old; + return; } /* Create a REG_UNUSED note if necessary for DEF in INSN updating LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */ -static rtx -df_create_unused_note (rtx insn, rtx old, df_ref def, - bitmap live, bitmap artificial_uses) +static void +df_create_unused_note (rtx_insn *insn, df_ref def, + bitmap live, bitmap artificial_uses, + struct dead_debug_local *debug) { unsigned int dregno = DF_REF_REGNO (def); -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) { fprintf (dump_file, " regular looking at def "); df_ref_debug (def, dump_file); } -#endif if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG) || bitmap_bit_p (live, dregno) @@ -3405,167 +3053,15 @@ df_create_unused_note (rtx insn, rtx old, df_ref def, { rtx reg = (DF_REF_LOC (def)) ? *DF_REF_REAL_LOC (def): DF_REF_REG (def); - old = df_set_note (REG_UNUSED, insn, old, reg); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("adding 3: ", insn, REG_NOTES (insn)); -#endif + df_set_note (REG_UNUSED, insn, reg); + dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG); + if (REG_DEAD_DEBUGGING) + df_print_note ("adding 3: ", insn, REG_NOTES (insn)); } - return old; + return; } -/* Node of a linked list of uses of dead REGs in debug insns. */ -struct dead_debug_use -{ - df_ref use; - struct dead_debug_use *next; -}; - -/* Linked list of the above, with a bitmap of the REGs in the - list. */ -struct dead_debug -{ - struct dead_debug_use *head; - bitmap used; - bitmap to_rescan; -}; - -/* Initialize DEBUG to an empty list, and clear USED, if given. */ -static inline void -dead_debug_init (struct dead_debug *debug, bitmap used) -{ - debug->head = NULL; - debug->used = used; - debug->to_rescan = NULL; - if (used) - bitmap_clear (used); -} - -/* Reset all debug insns with pending uses. Release the bitmap in it, - unless it is USED. USED must be the same bitmap passed to - dead_debug_init. */ -static inline void -dead_debug_finish (struct dead_debug *debug, bitmap used) -{ - struct dead_debug_use *head; - rtx insn = NULL; - - if (debug->used != used) - BITMAP_FREE (debug->used); - - while ((head = debug->head)) - { - insn = DF_REF_INSN (head->use); - if (!head->next || DF_REF_INSN (head->next->use) != insn) - { - INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); - df_insn_rescan_debug_internal (insn); - if (debug->to_rescan) - bitmap_clear_bit (debug->to_rescan, INSN_UID (insn)); - } - debug->head = head->next; - XDELETE (head); - } - - if (debug->to_rescan) - { - bitmap_iterator bi; - unsigned int uid; - - EXECUTE_IF_SET_IN_BITMAP (debug->to_rescan, 0, uid, bi) - { - struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid); - if (insn_info) - df_insn_rescan (insn_info->insn); - } - BITMAP_FREE (debug->to_rescan); - } -} - -/* Add USE to DEBUG. It must be a dead reference to UREGNO in a debug - insn. Create a bitmap for DEBUG as needed. 
*/ -static inline void -dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno) -{ - struct dead_debug_use *newddu = XNEW (struct dead_debug_use); - - newddu->use = use; - newddu->next = debug->head; - debug->head = newddu; - - if (!debug->used) - debug->used = BITMAP_ALLOC (NULL); - - bitmap_set_bit (debug->used, uregno); -} - -/* If UREGNO is referenced by any entry in DEBUG, emit a debug insn - before INSN that binds the REG to a debug temp, and replace all - uses of UREGNO in DEBUG with uses of the debug temp. INSN must be - the insn where UREGNO dies. */ -static inline void -dead_debug_insert_before (struct dead_debug *debug, unsigned int uregno, - rtx insn) -{ - struct dead_debug_use **tailp = &debug->head; - struct dead_debug_use *cur; - struct dead_debug_use *uses = NULL; - struct dead_debug_use **usesp = &uses; - rtx reg = NULL; - rtx dval; - rtx bind; - - if (!debug->used || !bitmap_clear_bit (debug->used, uregno)) - return; - - /* Move all uses of uregno from debug->head to uses, setting mode to - the widest referenced mode. */ - while ((cur = *tailp)) - { - if (DF_REF_REGNO (cur->use) == uregno) - { - *usesp = cur; - usesp = &cur->next; - *tailp = cur->next; - cur->next = NULL; - if (!reg - || (GET_MODE_BITSIZE (GET_MODE (reg)) - < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use))))) - reg = *DF_REF_REAL_LOC (cur->use); - } - else - tailp = &(*tailp)->next; - } - - gcc_assert (reg); - - /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */ - dval = make_debug_expr_from_rtl (reg); - - /* Emit a debug bind insn before the insn in which reg dies. */ - bind = gen_rtx_VAR_LOCATION (GET_MODE (reg), - DEBUG_EXPR_TREE_DECL (dval), reg, - VAR_INIT_STATUS_INITIALIZED); - - bind = emit_debug_insn_before (bind, insn); - df_insn_rescan (bind); - - /* Adjust all uses. */ - while ((cur = uses)) - { - if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg)) - *DF_REF_REAL_LOC (cur->use) = dval; - else - *DF_REF_REAL_LOC (cur->use) - = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval); - /* ??? Should we simplify subreg of subreg? */ - if (debug->to_rescan == NULL) - debug->to_rescan = BITMAP_ALLOC (NULL); - bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use))); - uses = cur->next; - XDELETE (cur); - } -} /* Recompute the REG_DEAD and REG_UNUSED notes and compute register info: lifetime, bb, and number of defs and uses for basic block @@ -3575,67 +3071,54 @@ static void df_note_bb_compute (unsigned int bb_index, bitmap live, bitmap do_not_gen, bitmap artificial_uses) { - basic_block bb = BASIC_BLOCK (bb_index); - rtx insn; - df_ref *def_rec; - df_ref *use_rec; - struct dead_debug debug; + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); + rtx_insn *insn; + df_ref def, use; + struct dead_debug_local debug; - dead_debug_init (&debug, NULL); + dead_debug_local_init (&debug, NULL, NULL); bitmap_copy (live, df_get_live_out (bb)); bitmap_clear (artificial_uses); -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) { fprintf (dump_file, "live at bottom "); df_print_regset (dump_file, live); } -#endif /* Process the artificial defs and uses at the bottom of the block to begin processing. 
*/ - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) { - df_ref def = *def_rec; -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def)); -#endif if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) bitmap_clear_bit (live, DF_REF_REGNO (def)); } - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) - { - unsigned int regno = DF_REF_REGNO (use); - bitmap_set_bit (live, regno); + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) + { + unsigned int regno = DF_REF_REGNO (use); + bitmap_set_bit (live, regno); - /* Notes are not generated for any of the artificial registers - at the bottom of the block. */ - bitmap_set_bit (artificial_uses, regno); - } - } + /* Notes are not generated for any of the artificial registers + at the bottom of the block. */ + bitmap_set_bit (artificial_uses, regno); + } -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) { fprintf (dump_file, "live before artificials out "); df_print_regset (dump_file, live); } -#endif FOR_BB_INSNS_REVERSE (bb, insn) { - unsigned int uid = INSN_UID (insn); - struct df_mw_hardreg **mws_rec; - rtx old_dead_notes; - rtx old_unused_notes; + df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + df_mw_hardreg *mw; int debug_insn; if (!INSN_P (insn)) @@ -3644,44 +3127,35 @@ df_note_bb_compute (unsigned int bb_index, debug_insn = DEBUG_INSN_P (insn); bitmap_clear (do_not_gen); - df_kill_notes (insn, &old_dead_notes, &old_unused_notes); + df_remove_dead_and_unused_notes (insn); /* Process the defs. */ if (CALL_P (insn)) { -#ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (REG_DEAD_DEBUGGING && dump_file) { - fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn)); + fprintf (dump_file, "processing call %d\n live =", + INSN_UID (insn)); df_print_regset (dump_file, live); } -#endif + /* We only care about real sets for calls. Clobbers cannot be depended on to really die. */ - mws_rec = DF_INSN_UID_MWS (uid); - while (*mws_rec) - { - struct df_mw_hardreg *mws = *mws_rec; - if ((DF_MWS_REG_DEF_P (mws)) - && !df_ignore_stack_reg (mws->start_regno)) - old_unused_notes - = df_set_unused_notes_for_mw (insn, old_unused_notes, - mws, live, do_not_gen, - artificial_uses); - mws_rec++; - } + FOR_EACH_INSN_INFO_MW (mw, insn_info) + if ((DF_MWS_REG_DEF_P (mw)) + && !df_ignore_stack_reg (mw->start_regno)) + df_set_unused_notes_for_mw (insn, mw, live, do_not_gen, + artificial_uses, &debug); /* All of the defs except the return value are some sort of clobber. This code is for the return. */ - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + FOR_EACH_INSN_INFO_DEF (def, insn_info) { - df_ref def = *def_rec; unsigned int dregno = DF_REF_REGNO (def); if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)) { - old_unused_notes - = df_create_unused_note (insn, old_unused_notes, - def, live, artificial_uses); + df_create_unused_note (insn, + def, live, artificial_uses, &debug); bitmap_set_bit (do_not_gen, dregno); } @@ -3692,25 +3166,16 @@ df_note_bb_compute (unsigned int bb_index, else { /* Regular insn. 
*/ - mws_rec = DF_INSN_UID_MWS (uid); - while (*mws_rec) - { - struct df_mw_hardreg *mws = *mws_rec; - if (DF_MWS_REG_DEF_P (mws)) - old_unused_notes - = df_set_unused_notes_for_mw (insn, old_unused_notes, - mws, live, do_not_gen, - artificial_uses); - mws_rec++; - } + FOR_EACH_INSN_INFO_MW (mw, insn_info) + if (DF_MWS_REG_DEF_P (mw)) + df_set_unused_notes_for_mw (insn, mw, live, do_not_gen, + artificial_uses, &debug); - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + FOR_EACH_INSN_INFO_DEF (def, insn_info) { - df_ref def = *def_rec; unsigned int dregno = DF_REF_REGNO (def); - old_unused_notes - = df_create_unused_note (insn, old_unused_notes, - def, live, artificial_uses); + df_create_unused_note (insn, + def, live, artificial_uses, &debug); if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)) bitmap_set_bit (do_not_gen, dregno); @@ -3721,52 +3186,49 @@ df_note_bb_compute (unsigned int bb_index, } /* Process the uses. */ - mws_rec = DF_INSN_UID_MWS (uid); - while (*mws_rec) - { - struct df_mw_hardreg *mws = *mws_rec; - if ((DF_MWS_REG_DEF_P (mws)) - && !df_ignore_stack_reg (mws->start_regno)) - { - bool really_add_notes = debug_insn != 0; + FOR_EACH_INSN_INFO_MW (mw, insn_info) + if (DF_MWS_REG_USE_P (mw) + && !df_ignore_stack_reg (mw->start_regno)) + { + bool really_add_notes = debug_insn != 0; - old_dead_notes - = df_set_dead_notes_for_mw (insn, old_dead_notes, - mws, live, do_not_gen, - artificial_uses, - &really_add_notes); + df_set_dead_notes_for_mw (insn, mw, live, do_not_gen, + artificial_uses, + &really_add_notes); - if (really_add_notes) - debug_insn = -1; - } - mws_rec++; - } + if (really_add_notes) + debug_insn = -1; + } - for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) + FOR_EACH_INSN_INFO_USE (use, insn_info) { - df_ref use = *use_rec; unsigned int uregno = DF_REF_REGNO (use); -#ifdef REG_DEAD_DEBUGGING - if (dump_file && !debug_insn) + if (REG_DEAD_DEBUGGING && dump_file && !debug_insn) { fprintf (dump_file, " regular looking at use "); df_ref_debug (use, dump_file); } -#endif + if (!bitmap_bit_p (live, uregno)) { if (debug_insn) { if (debug_insn > 0) { - dead_debug_add (&debug, use, uregno); + /* We won't add REG_UNUSED or REG_DEAD notes for + these, so we don't have to mess with them in + debug insns either. */ + if (!bitmap_bit_p (artificial_uses, uregno) + && !df_ignore_stack_reg (uregno)) + dead_debug_add (&debug, use, uregno); continue; } break; } else - dead_debug_insert_before (&debug, uregno, insn); + dead_debug_insert_temp (&debug, uregno, insn, + DEBUG_TEMP_BEFORE_WITH_REG); if ( (!(DF_REF_FLAGS (use) & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE))) @@ -3776,29 +3238,17 @@ df_note_bb_compute (unsigned int bb_index, { rtx reg = (DF_REF_LOC (use)) ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use); - old_dead_notes = df_set_note (REG_DEAD, insn, old_dead_notes, reg); + df_set_note (REG_DEAD, insn, reg); -#ifdef REG_DEAD_DEBUGGING - df_print_note ("adding 4: ", insn, REG_NOTES (insn)); -#endif + if (REG_DEAD_DEBUGGING) + df_print_note ("adding 4: ", insn, REG_NOTES (insn)); } /* This register is now live. 
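
Taken together, the def and use loops above form a single backward sweep per block: starting from live-out, a def whose register is not live gets a REG_UNUSED note and then kills the register, and a use whose register is not yet live gets a REG_DEAD note before making it live again. A compressed sketch of that sweep, with a made-up struct insn and printf standing in for df_set_note:

#include <stdbool.h>
#include <stdio.h>

struct insn { int ndefs, defs[4]; int nuses, uses[4]; };

void
note_compute_block (struct insn *insns, int n_insns, bool *live)
{
  /* LIVE starts out as the block's live-out set.  */
  for (int i = n_insns - 1; i >= 0; i--)
    {
      struct insn *in = &insns[i];
      for (int d = 0; d < in->ndefs; d++)
        {
          if (!live[in->defs[d]])
            printf ("insn %d: REG_UNUSED r%d\n", i, in->defs[d]);
          live[in->defs[d]] = false;   /* the def kills liveness above it */
        }
      for (int u = 0; u < in->nuses; u++)
        if (!live[in->uses[u]])
          {
            printf ("insn %d: REG_DEAD r%d\n", i, in->uses[u]);
            live[in->uses[u]] = true;  /* the register is live above the use */
          }
    }
}
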
*/ bitmap_set_bit (live, uregno); } } - while (old_unused_notes) - { - rtx next = XEXP (old_unused_notes, 1); - free_EXPR_LIST_node (old_unused_notes); - old_unused_notes = next; - } - while (old_dead_notes) - { - rtx next = XEXP (old_dead_notes, 1); - free_EXPR_LIST_node (old_dead_notes); - old_dead_notes = next; - } + df_remove_dead_eq_notes (insn, live); if (debug_insn == -1) { @@ -3809,7 +3259,7 @@ df_note_bb_compute (unsigned int bb_index, } } - dead_debug_finish (&debug, NULL); + dead_debug_local_finish (&debug, NULL); } @@ -3825,13 +3275,13 @@ df_note_compute (bitmap all_blocks) bitmap_initialize (&do_not_gen, &df_bitmap_obstack); bitmap_initialize (&artificial_uses, &df_bitmap_obstack); -#ifdef REG_DEAD_DEBUGGING - if (dump_file) - print_rtl_with_bb (dump_file, get_insns()); -#endif - EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { + /* ??? Unlike fast DCE, we don't use global_debug for uses of dead + pseudos in debug insns because we don't always (re)visit blocks + with death points after visiting dead uses. Even changing this + loop to postorder would still leave room for visiting a death + point before visiting a subsequent debug use. */ df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses); } @@ -3871,9 +3321,12 @@ static struct df_problem problem_NOTE = NULL, /* Debugging. */ NULL, /* Debugging start block. */ NULL, /* Debugging end block. */ + NULL, /* Debugging start insn. */ + NULL, /* Debugging end insn. */ NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ &problem_LR, /* Dependent problem. */ + sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */ TV_DF_NOTE, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -3913,46 +3366,51 @@ df_note_add_problem (void) /* Find the set of DEFs for INSN. */ void -df_simulate_find_defs (rtx insn, bitmap defs) +df_simulate_find_defs (rtx_insn *insn, bitmap defs) { - df_ref *def_rec; - unsigned int uid = INSN_UID (insn); + df_ref def; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) - { - df_ref def = *def_rec; - bitmap_set_bit (defs, DF_REF_REGNO (def)); - } + FOR_EACH_INSN_DEF (def, insn) + bitmap_set_bit (defs, DF_REF_REGNO (def)); +} + +/* Find the set of uses for INSN. This includes partial defs. */ + +static void +df_simulate_find_uses (rtx_insn *insn, bitmap uses) +{ + df_ref def, use; + struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); + + FOR_EACH_INSN_INFO_DEF (def, insn_info) + if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)) + bitmap_set_bit (uses, DF_REF_REGNO (def)); + FOR_EACH_INSN_INFO_USE (use, insn_info) + bitmap_set_bit (uses, DF_REF_REGNO (use)); } /* Find the set of real DEFs, which are not clobbers, for INSN. */ void -df_simulate_find_noclobber_defs (rtx insn, bitmap defs) +df_simulate_find_noclobber_defs (rtx_insn *insn, bitmap defs) { - df_ref *def_rec; - unsigned int uid = INSN_UID (insn); + df_ref def; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))) - bitmap_set_bit (defs, DF_REF_REGNO (def)); - } + FOR_EACH_INSN_DEF (def, insn) + if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))) + bitmap_set_bit (defs, DF_REF_REGNO (def)); } /* Simulate the effects of the defs of INSN on LIVE. 
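
df_simulate_find_uses records partial and conditional defs as uses on purpose: a def that writes only part of a register, or writes it conditionally, still needs the incoming value, so for liveness purposes it also reads the register. A sketch with an invented struct ref in place of df_ref and plain flag constants:

#include <stdbool.h>

enum { REF_PARTIAL = 1, REF_CONDITIONAL = 2 };
struct ref { unsigned regno; unsigned flags; };

void
find_uses (const struct ref *defs, int ndefs,
           const struct ref *uses, int nuses, bool *use_set)
{
  for (int i = 0; i < ndefs; i++)
    if (defs[i].flags & (REF_PARTIAL | REF_CONDITIONAL))
      use_set[defs[i].regno] = true;   /* read-modify-write: counts as a use */
  for (int i = 0; i < nuses; i++)
    use_set[uses[i].regno] = true;
}
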
*/ void -df_simulate_defs (rtx insn, bitmap live) +df_simulate_defs (rtx_insn *insn, bitmap live) { - df_ref *def_rec; - unsigned int uid = INSN_UID (insn); + df_ref def; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + FOR_EACH_INSN_DEF (def, insn) { - df_ref def = *def_rec; unsigned int dregno = DF_REF_REGNO (def); /* If the def is to only part of the reg, it does @@ -3966,20 +3424,16 @@ df_simulate_defs (rtx insn, bitmap live) /* Simulate the effects of the uses of INSN on LIVE. */ void -df_simulate_uses (rtx insn, bitmap live) +df_simulate_uses (rtx_insn *insn, bitmap live) { - df_ref *use_rec; - unsigned int uid = INSN_UID (insn); + df_ref use; if (DEBUG_INSN_P (insn)) return; - for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) - { - df_ref use = *use_rec; - /* Add use to set of uses in this BB. */ - bitmap_set_bit (live, DF_REF_REGNO (use)); - } + FOR_EACH_INSN_USE (use, insn) + /* Add use to set of uses in this BB. */ + bitmap_set_bit (live, DF_REF_REGNO (use)); } @@ -4015,30 +3469,23 @@ df_simulate_fixup_sets (basic_block bb, bitmap live) void df_simulate_initialize_backwards (basic_block bb, bitmap live) { - df_ref *def_rec; - df_ref *use_rec; + df_ref def, use; int bb_index = bb->index; - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) - bitmap_clear_bit (live, DF_REF_REGNO (def)); - } + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) + bitmap_clear_bit (live, DF_REF_REGNO (def)); - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) - bitmap_set_bit (live, DF_REF_REGNO (use)); - } + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) + bitmap_set_bit (live, DF_REF_REGNO (use)); } /* Simulate the backwards effects of INSN on the bitmap LIVE. 
*/ void -df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live) +df_simulate_one_insn_backwards (basic_block bb, rtx_insn *insn, bitmap live) { if (!NONDEBUG_INSN_P (insn)) return; @@ -4055,26 +3502,20 @@ df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live) void df_simulate_finalize_backwards (basic_block bb, bitmap live) { - df_ref *def_rec; + df_ref def; #ifdef EH_USES - df_ref *use_rec; + df_ref use; #endif int bb_index = bb->index; - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - bitmap_clear_bit (live, DF_REF_REGNO (def)); - } + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) + bitmap_clear_bit (live, DF_REF_REGNO (def)); #ifdef EH_USES - for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) - { - df_ref use = *use_rec; - if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) - bitmap_set_bit (live, DF_REF_REGNO (use)); - } + FOR_EACH_ARTIFICIAL_USE (use, bb_index) + if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) + bitmap_set_bit (live, DF_REF_REGNO (use)); #endif } /*---------------------------------------------------------------------------- @@ -4096,21 +3537,18 @@ df_simulate_finalize_backwards (basic_block bb, bitmap live) void df_simulate_initialize_forwards (basic_block bb, bitmap live) { - df_ref *def_rec; + df_ref def; int bb_index = bb->index; - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - bitmap_set_bit (live, DF_REF_REGNO (def)); - } + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) + bitmap_set_bit (live, DF_REF_REGNO (def)); } /* Simulate the forwards effects of INSN on the bitmap LIVE. */ void -df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) +df_simulate_one_insn_forwards (basic_block bb, rtx_insn *insn, bitmap live) { rtx link; if (! INSN_P (insn)) @@ -4135,15 +3573,7 @@ df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) case REG_UNUSED: { rtx reg = XEXP (link, 0); - int regno = REGNO (reg); - if (regno < FIRST_PSEUDO_REGISTER) - { - int n = hard_regno_nregs[regno][GET_MODE (reg)]; - while (--n >= 0) - bitmap_clear_bit (live, regno + n); - } - else - bitmap_clear_bit (live, regno); + bitmap_clear_range (live, REGNO (reg), REG_NREGS (reg)); } break; default: @@ -4152,7 +3582,326 @@ df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) } df_simulate_fixup_sets (bb, live); } + +/* Used by the next two functions to encode information about the + memory references we found. */ +#define MEMREF_NORMAL 1 +#define MEMREF_VOLATILE 2 +/* Return an OR of MEMREF_NORMAL or MEMREF_VOLATILE for the MEMs in X. */ + +static int +find_memory (rtx_insn *insn) +{ + int flags = 0; + subrtx_iterator::array_type array; + FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST) + { + const_rtx x = *iter; + if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x)) + flags |= MEMREF_VOLATILE; + else if (MEM_P (x)) + { + if (MEM_VOLATILE_P (x)) + flags |= MEMREF_VOLATILE; + else if (!MEM_READONLY_P (x)) + flags |= MEMREF_NORMAL; + } + } + return flags; +} + +/* A subroutine of can_move_insns_across_p called through note_stores. + DATA points to an integer in which we set either the bit for + MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM + of either kind. 
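
The order inside df_simulate_one_insn_backwards matters: defs are simulated (killed) before uses are added back, so a register that an insn both writes and reads correctly ends up live before the insn, and partial or conditional defs are skipped because they do not fully overwrite the old value. A minimal sketch, with struct sim_insn invented for the example:

#include <stdbool.h>

struct sim_insn
{
  int ndefs, defs[4];
  bool partial_or_conditional[4];   /* such defs don't kill the old value */
  int nuses, uses[4];
};

void
simulate_one_insn_backwards (const struct sim_insn *in, bool *live)
{
  for (int d = 0; d < in->ndefs; d++)
    if (!in->partial_or_conditional[d])
      live[in->defs[d]] = false;     /* full defs kill the register */
  for (int u = 0; u < in->nuses; u++)
    live[in->uses[u]] = true;        /* then every use makes it live */
}
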
*/ + +static void +find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED, + void *data ATTRIBUTE_UNUSED) +{ + int *pflags = (int *)data; + if (GET_CODE (x) == SUBREG) + x = XEXP (x, 0); + /* Treat stores to SP as stores to memory, this will prevent problems + when there are references to the stack frame. */ + if (x == stack_pointer_rtx) + *pflags |= MEMREF_VOLATILE; + if (!MEM_P (x)) + return; + *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL; +} + +/* Scan BB backwards, using df_simulate functions to keep track of + lifetimes, up to insn POINT. The result is stored in LIVE. */ + +void +simulate_backwards_to_point (basic_block bb, regset live, rtx point) +{ + rtx_insn *insn; + bitmap_copy (live, df_get_live_out (bb)); + df_simulate_initialize_backwards (bb, live); + + /* Scan and update life information until we reach the point we're + interested in. */ + for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn)) + df_simulate_one_insn_backwards (bb, insn, live); +} + +/* Return true if it is safe to move a group of insns, described by + the range FROM to TO, backwards across another group of insns, + described by ACROSS_FROM to ACROSS_TO. It is assumed that there + are no insns between ACROSS_TO and FROM, but they may be in + different basic blocks; MERGE_BB is the block from which the + insns will be moved. The caller must pass in a regset MERGE_LIVE + which specifies the registers live after TO. + + This function may be called in one of two cases: either we try to + move identical instructions from all successor blocks into their + predecessor, or we try to move from only one successor block. If + OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with + the second case. It should contain a set of registers live at the + end of ACROSS_TO which must not be clobbered by moving the insns. + In that case, we're also more careful about moving memory references + and trapping insns. + + We return false if it is not safe to move the entire group, but it + may still be possible to move a subgroup. PMOVE_UPTO, if nonnull, + is set to point at the last moveable insn in such a case. */ + +bool +can_move_insns_across (rtx_insn *from, rtx_insn *to, + rtx_insn *across_from, rtx_insn *across_to, + basic_block merge_bb, regset merge_live, + regset other_branch_live, rtx_insn **pmove_upto) +{ + rtx_insn *insn, *next, *max_to; + bitmap merge_set, merge_use, local_merge_live; + bitmap test_set, test_use; + unsigned i, fail = 0; + bitmap_iterator bi; + int memrefs_in_across = 0; + int mem_sets_in_across = 0; + bool trapping_insns_in_across = false; + + if (pmove_upto != NULL) + *pmove_upto = NULL; + + /* Find real bounds, ignoring debug insns. */ + while (!NONDEBUG_INSN_P (from) && from != to) + from = NEXT_INSN (from); + while (!NONDEBUG_INSN_P (to) && from != to) + to = PREV_INSN (to); + + for (insn = across_to; ; insn = next) + { + if (CALL_P (insn)) + { + if (RTL_CONST_OR_PURE_CALL_P (insn)) + /* Pure functions can read from memory. Const functions can + read from arguments that the ABI has forced onto the stack. + Neither sort of read can be volatile. */ + memrefs_in_across |= MEMREF_NORMAL; + else + { + memrefs_in_across |= MEMREF_VOLATILE; + mem_sets_in_across |= MEMREF_VOLATILE; + } + } + if (NONDEBUG_INSN_P (insn)) + { + if (volatile_insn_p (PATTERN (insn))) + return false; + memrefs_in_across |= find_memory (insn); + note_stores (PATTERN (insn), find_memory_stores, + &mem_sets_in_across); + /* This is used just to find sets of the stack pointer. 
*/ + memrefs_in_across |= mem_sets_in_across; + trapping_insns_in_across |= may_trap_p (PATTERN (insn)); + } + next = PREV_INSN (insn); + if (insn == across_from) + break; + } + + /* Collect: + MERGE_SET = set of registers set in MERGE_BB + MERGE_USE = set of registers used in MERGE_BB and live at its top + MERGE_LIVE = set of registers live at the point inside the MERGE + range that we've reached during scanning + TEST_SET = set of registers set between ACROSS_FROM and ACROSS_END. + TEST_USE = set of registers used between ACROSS_FROM and ACROSS_END, + and live before ACROSS_FROM. */ + + merge_set = BITMAP_ALLOC (®_obstack); + merge_use = BITMAP_ALLOC (®_obstack); + local_merge_live = BITMAP_ALLOC (®_obstack); + test_set = BITMAP_ALLOC (®_obstack); + test_use = BITMAP_ALLOC (®_obstack); + + /* Compute the set of registers set and used in the ACROSS range. */ + if (other_branch_live != NULL) + bitmap_copy (test_use, other_branch_live); + df_simulate_initialize_backwards (merge_bb, test_use); + for (insn = across_to; ; insn = next) + { + if (NONDEBUG_INSN_P (insn)) + { + df_simulate_find_defs (insn, test_set); + df_simulate_defs (insn, test_use); + df_simulate_uses (insn, test_use); + } + next = PREV_INSN (insn); + if (insn == across_from) + break; + } + + /* Compute an upper bound for the amount of insns moved, by finding + the first insn in MERGE that sets a register in TEST_USE, or uses + a register in TEST_SET. We also check for calls, trapping operations, + and memory references. */ + max_to = NULL; + for (insn = from; ; insn = next) + { + if (CALL_P (insn)) + break; + if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG) + break; + if (NONDEBUG_INSN_P (insn)) + { + if (may_trap_or_fault_p (PATTERN (insn)) + && (trapping_insns_in_across + || other_branch_live != NULL + || volatile_insn_p (PATTERN (insn)))) + break; + + /* We cannot move memory stores past each other, or move memory + reads past stores, at least not without tracking them and + calling true_dependence on every pair. + + If there is no other branch and no memory references or + sets in the ACROSS range, we can move memory references + freely, even volatile ones. + + Otherwise, the rules are as follows: volatile memory + references and stores can't be moved at all, and any type + of memory reference can't be moved if there are volatile + accesses or stores in the ACROSS range. That leaves + normal reads, which can be moved, as the trapping case is + dealt with elsewhere. */ + if (other_branch_live != NULL || memrefs_in_across != 0) + { + int mem_ref_flags = 0; + int mem_set_flags = 0; + note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags); + mem_ref_flags = find_memory (insn); + /* Catch sets of the stack pointer. */ + mem_ref_flags |= mem_set_flags; + + if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE) + break; + if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0) + break; + if (mem_set_flags != 0 + || (mem_sets_in_across != 0 && mem_ref_flags != 0)) + break; + } + df_simulate_find_uses (insn, merge_use); + /* We're only interested in uses which use a value live at + the top, not one previously set in this block. 
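
The heart of the upper-bound scan above is a pair of intersection tests: the insns considered for moving may neither define a register that the ACROSS range still uses (TEST_USE) nor use a value that the ACROSS range defines (TEST_SET). Reduced to uint64_t bitsets instead of GCC's bitmaps, the check looks like this; move_would_interfere is an invented name for the illustration:

#include <stdbool.h>
#include <stdint.h>

bool
move_would_interfere (uint64_t merge_set, uint64_t merge_use,
                      uint64_t test_set, uint64_t test_use)
{
  /* Registers set by the moved insns that ACROSS still needs ...  */
  if (merge_set & test_use)
    return true;
  /* ... or values the moved insns read that ACROSS overwrites.  */
  if (merge_use & test_set)
    return true;
  return false;
}
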
*/ + bitmap_and_compl_into (merge_use, merge_set); + df_simulate_find_defs (insn, merge_set); + if (bitmap_intersect_p (merge_set, test_use) + || bitmap_intersect_p (merge_use, test_set)) + break; + if (!HAVE_cc0 || !sets_cc0_p (insn)) + max_to = insn; + } + next = NEXT_INSN (insn); + if (insn == to) + break; + } + if (max_to != to) + fail = 1; + + if (max_to == NULL_RTX || (fail && pmove_upto == NULL)) + goto out; + + /* Now, lower this upper bound by also taking into account that + a range of insns moved across ACROSS must not leave a register + live at the end that will be clobbered in ACROSS. We need to + find a point where TEST_SET & LIVE == 0. + + Insns in the MERGE range that set registers which are also set + in the ACROSS range may still be moved as long as we also move + later insns which use the results of the set, and make the + register dead again. This is verified by the condition stated + above. We only need to test it for registers that are set in + the moved region. + + MERGE_LIVE is provided by the caller and holds live registers after + TO. */ + bitmap_copy (local_merge_live, merge_live); + for (insn = to; insn != max_to; insn = PREV_INSN (insn)) + df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live); + + /* We're not interested in registers that aren't set in the moved + region at all. */ + bitmap_and_into (local_merge_live, merge_set); + for (;;) + { + if (NONDEBUG_INSN_P (insn)) + { + if (!bitmap_intersect_p (test_set, local_merge_live) + && (!HAVE_cc0 || !sets_cc0_p (insn))) + { + max_to = insn; + break; + } + + df_simulate_one_insn_backwards (merge_bb, insn, + local_merge_live); + } + if (insn == from) + { + fail = 1; + goto out; + } + insn = PREV_INSN (insn); + } + + if (max_to != to) + fail = 1; + + if (pmove_upto) + *pmove_upto = max_to; + + /* For small register class machines, don't lengthen lifetimes of + hard registers before reload. */ + if (! reload_completed + && targetm.small_register_classes_for_mode_p (VOIDmode)) + { + EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi) + { + if (i < FIRST_PSEUDO_REGISTER + && ! fixed_regs[i] + && ! global_regs[i]) + { + fail = 1; + break; + } + } + } + + out: + BITMAP_FREE (merge_set); + BITMAP_FREE (merge_use); + BITMAP_FREE (local_merge_live); + BITMAP_FREE (test_set); + BITMAP_FREE (test_use); + + return !fail; +} /*---------------------------------------------------------------------------- @@ -4213,17 +3962,6 @@ struct df_md_problem_data only for live registers. */ static bitmap_head df_md_scratch; -/* Set basic block info. 
*/ - -static void -df_md_set_bb_info (unsigned int index, - struct df_md_bb_info *bb_info) -{ - gcc_assert (df_md); - gcc_assert (index < df_md->block_info_size); - df_md->block_info[index] = bb_info; -} - static void df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, @@ -4237,7 +3975,6 @@ df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, bitmap_clear (&bb_info->init); bitmap_clear (&bb_info->in); bitmap_clear (&bb_info->out); - pool_free (df_md->block_pool, bb_info); } } @@ -4252,10 +3989,6 @@ df_md_alloc (bitmap all_blocks) bitmap_iterator bi; struct df_md_problem_data *problem_data; - if (!df_md->block_pool) - df_md->block_pool = create_alloc_pool ("df_md_block pool", - sizeof (struct df_md_bb_info), 50); - df_grow_bb_info (df_md); if (df_md->problem_data) problem_data = (struct df_md_problem_data *) df_md->problem_data; @@ -4270,7 +4003,8 @@ df_md_alloc (bitmap all_blocks) EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); - if (bb_info) + /* When bitmaps are already initialized, just clear them. */ + if (bb_info->init.obstack) { bitmap_clear (&bb_info->init); bitmap_clear (&bb_info->gen); @@ -4280,8 +4014,6 @@ df_md_alloc (bitmap all_blocks) } else { - bb_info = (struct df_md_bb_info *) pool_alloc (df_md->block_pool); - df_md_set_bb_info (bb_index, bb_info); bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps); bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps); bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps); @@ -4300,20 +4032,17 @@ void df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md) { int bb_index = bb->index; - df_ref *def_rec; - for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) - { - df_ref def = *def_rec; - if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - { - unsigned int dregno = DF_REF_REGNO (def); - if (DF_REF_FLAGS (def) - & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER)) - bitmap_set_bit (local_md, dregno); - else - bitmap_clear_bit (local_md, dregno); - } - } + df_ref def; + FOR_EACH_ARTIFICIAL_DEF (def, bb_index) + if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) + { + unsigned int dregno = DF_REF_REGNO (def); + if (DF_REF_FLAGS (def) + & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER)) + bitmap_set_bit (local_md, dregno); + else + bitmap_clear_bit (local_md, dregno); + } } @@ -4321,15 +4050,13 @@ df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md) LOCAL_MD. 
*/ void -df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, - bitmap local_md) +df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn, + bitmap local_md) { - unsigned uid = INSN_UID (insn); - df_ref *def_rec; + df_ref def; - for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + FOR_EACH_INSN_DEF (def, insn) { - df_ref def = *def_rec; unsigned int dregno = DF_REF_REGNO (def); if ((!(df->changeable_flags & DF_NO_HARD_REGS)) || (dregno >= FIRST_PSEUDO_REGISTER)) @@ -4345,13 +4072,12 @@ df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, static void df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info, - df_ref *def_rec, + df_ref def, int top_flag) { - df_ref def; bitmap_clear (&seen_in_insn); - while ((def = *def_rec++) != NULL) + for (; def; def = DF_REF_NEXT_LOC (def)) { unsigned int dregno = DF_REF_REGNO (def); if (((!(df->changeable_flags & DF_NO_HARD_REGS)) @@ -4385,9 +4111,9 @@ df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info, static void df_md_bb_local_compute (unsigned int bb_index) { - basic_block bb = BASIC_BLOCK (bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); - rtx insn; + rtx_insn *insn; /* Artificials are only hard regs. */ if (!(df->changeable_flags & DF_NO_HARD_REGS)) @@ -4429,8 +4155,8 @@ df_md_local_compute (bitmap all_blocks) bitmap_clear (&seen_in_insn); - frontiers = XNEWVEC (bitmap_head, last_basic_block); - FOR_ALL_BB (bb) + frontiers = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); + FOR_ALL_BB_FN (bb, cfun) bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack); compute_dominance_frontiers (frontiers); @@ -4441,14 +4167,14 @@ df_md_local_compute (bitmap all_blocks) bitmap kill = &df_md_get_bb_info (bb_index)->kill; EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2) { - basic_block bb = BASIC_BLOCK (df_bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, df_bb_index); if (bitmap_bit_p (all_blocks, df_bb_index)) bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill, df_get_live_in (bb)); } } - FOR_ALL_BB (bb) + FOR_ALL_BB_FN (bb, cfun) bitmap_clear (&frontiers[bb->index]); free (frontiers); } @@ -4474,7 +4200,7 @@ df_md_reset (bitmap all_blocks) static bool df_md_transfer_function (int bb_index) { - basic_block bb = BASIC_BLOCK (bb_index); + basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index); struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); bitmap in = &bb_info->in; bitmap out = &bb_info->out; @@ -4519,19 +4245,20 @@ df_md_confluence_0 (basic_block bb) /* In of target gets or of out of source. */ -static void +static bool df_md_confluence_n (edge e) { bitmap op1 = &df_md_get_bb_info (e->dest->index)->in; bitmap op2 = &df_md_get_bb_info (e->src->index)->out; if (e->flags & EDGE_FAKE) - return; + return false; if (e->flags & EDGE_EH) - bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset); + return bitmap_ior_and_compl_into (op1, op2, + regs_invalidated_by_call_regset); else - bitmap_ior_into (op1, op2); + return bitmap_ior_into (op1, op2); } /* Free all storage associated with the problem. 
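
The gen/kill bookkeeping in df_md_bb_local_compute_process_def follows the usual shape of the multiple-definitions problem: a partial, conditional or may-clobber def only adds the register to GEN (older defs stay visible through it), while a complete def kills it. A simplified sketch that ignores the seen_in_insn/seen_in_block handling for repeated refs of the same register, with uint64_t bitsets in place of the bitmaps and regno assumed to be below 64:

#include <stdbool.h>
#include <stdint.h>

struct md_bb { uint64_t gen, kill; };

void
md_process_def (struct md_bb *bb, unsigned regno, bool partial_or_conditional)
{
  uint64_t bit = UINT64_C (1) << regno;
  if (partial_or_conditional)
    {
      bb->gen |= bit;      /* this def may reach; earlier defs stay visible */
      bb->kill &= ~bit;
    }
  else
    {
      bb->kill |= bit;     /* a complete def hides every def above it */
      bb->gen &= ~bit;
    }
}
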
*/ @@ -4543,12 +4270,12 @@ df_md_free (void) = (struct df_md_problem_data *) df_md->problem_data; bitmap_obstack_release (&problem_data->md_bitmaps); - free_alloc_pool (df_md->block_pool); free (problem_data); df_md->problem_data = NULL; df_md->block_info_size = 0; free (df_md->block_info); + df_md->block_info = NULL; free (df_md); } @@ -4604,9 +4331,12 @@ static struct df_problem problem_MD = NULL, /* Debugging. */ df_md_top_dump, /* Debugging start block. */ df_md_bottom_dump, /* Debugging end block. */ + NULL, /* Debugging start insn. */ + NULL, /* Debugging end insn. */ NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ + sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */ TV_DF_MD, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ };