/* Scanning of rtl for dataflow analysis.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
- 2008 Free Software Foundation, Inc.
- Originally contributed by Michael P. Hayes
+ 2008, 2009, 2010 Free Software Foundation, Inc.
+ Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
and Kenneth Zadeck (zadeck@naturalbridge.com).
#include "target-def.h"
#include "df.h"
#include "tree-pass.h"
+#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
+
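+/* Vector types for df_ref and df_mw_hardreg pointers.  These vecs
+   live on the stack while the refs of a single insn or block are
+   being collected (see struct df_collection_rec below).  */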
+DEF_VEC_P(df_ref);
+DEF_VEC_ALLOC_P_STACK(df_ref);
+
+#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
+
+typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
+
+DEF_VEC_P(df_mw_hardreg_ptr);
+DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);
+
+#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
+ VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO) 0
#endif
-/* The bitmap_obstack is used to hold some static variables that
- should not be reset after each function is compiled. */
-
-static bitmap_obstack persistent_obstack;
+/* The following two macros free the vecs that hold either the refs or
+   the mw refs.  They are a little tricky because a vec that has 0
+   elements is special and must not be freed.  */
+#define df_scan_free_ref_vec(V) \
+ do { \
+ if (V && *V) \
+ free (V); \
+ } while (0)
+
+#define df_scan_free_mws_vec(V) \
+ do { \
+ if (V && *V) \
+ free (V); \
+ } while (0)
/* The set of hard registers in eliminables[i].from. */
static HARD_REG_SET elim_reg_set;
-/* This is a bitmap copy of regs_invalidated_by_call so that we can
- easily add it into bitmaps, etc. */
-
-bitmap df_invalidated_by_call = NULL;
-
/* Initialize ur_in and ur_out as if all hard registers were partially
available. */
struct df_collection_rec
{
- struct df_ref ** def_vec;
- unsigned int next_def;
- struct df_ref ** use_vec;
- unsigned int next_use;
- struct df_ref ** eq_use_vec;
- unsigned int next_eq_use;
- struct df_mw_hardreg **mw_vec;
- unsigned int next_mw;
+ VEC(df_ref,stack) *def_vec;
+ VEC(df_ref,stack) *use_vec;
+ VEC(df_ref,stack) *eq_use_vec;
+ VEC(df_mw_hardreg_ptr,stack) *mw_vec;
};
-static struct df_ref * df_null_ref_rec[1];
+static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];
-static void df_ref_record (struct df_collection_rec *,
- rtx, rtx *,
- basic_block, rtx, enum df_ref_type,
- enum df_ref_flags, int, int, enum machine_mode);
-static void df_def_record_1 (struct df_collection_rec *,
- rtx, basic_block, rtx,
- enum df_ref_flags);
-static void df_defs_record (struct df_collection_rec *,
- rtx, basic_block, rtx,
- enum df_ref_flags);
+static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
+ rtx, rtx *,
+ basic_block, struct df_insn_info *,
+ enum df_ref_type, int ref_flags);
+static void df_def_record_1 (struct df_collection_rec *, rtx,
+ basic_block, struct df_insn_info *,
+ int ref_flags);
+static void df_defs_record (struct df_collection_rec *, rtx,
+ basic_block, struct df_insn_info *,
+ int ref_flags);
static void df_uses_record (struct df_collection_rec *,
rtx *, enum df_ref_type,
- basic_block, rtx, enum df_ref_flags,
- int, int, enum machine_mode);
-
-static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
- basic_block, rtx, enum df_ref_type,
- enum df_ref_flags,
- int, int, enum machine_mode);
-
-static void df_insn_refs_collect (struct df_collection_rec*,
- basic_block, rtx);
+ basic_block, struct df_insn_info *,
+ int ref_flags);
+
+static df_ref df_ref_create_structure (enum df_ref_class,
+ struct df_collection_rec *, rtx, rtx *,
+ basic_block, struct df_insn_info *,
+ enum df_ref_type, int ref_flags);
+static void df_insn_refs_collect (struct df_collection_rec*,
+ basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);
static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
-static void df_ref_chain_delete_du_chain (struct df_ref **);
-static void df_ref_chain_delete (struct df_ref **);
+static void df_ref_chain_delete_du_chain (df_ref *);
+static void df_ref_chain_delete (df_ref *);
-static void df_refs_add_to_chains (struct df_collection_rec *,
+static void df_refs_add_to_chains (struct df_collection_rec *,
basic_block, rtx);
static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
-static void df_install_ref (struct df_ref *, struct df_reg_info *,
+static void df_install_ref (df_ref, struct df_reg_info *,
struct df_ref_info *, bool);
static int df_ref_compare (const void *, const void *);
/* Problem data for the scanning dataflow function. */
struct df_scan_problem_data
{
- alloc_pool ref_pool;
- alloc_pool ref_extract_pool;
+ alloc_pool ref_base_pool;
+ alloc_pool ref_artificial_pool;
+ alloc_pool ref_regular_pool;
alloc_pool insn_pool;
alloc_pool reg_pool;
alloc_pool mw_reg_pool;
- alloc_pool mw_link_pool;
bitmap_obstack reg_bitmaps;
bitmap_obstack insn_bitmaps;
};
typedef struct df_scan_bb_info *df_scan_bb_info_t;
-static void
+
+/* Internal function to shut down the scanning problem. */
+static void
df_scan_free_internal (void)
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
+ unsigned int i;
+ basic_block bb;
+
+ /* The vectors that hold the refs are not pool allocated because
+ they come in many sizes. This makes them impossible to delete
+ all at once. */
+  for (i = 0; i < DF_INSN_SIZE (); i++)
+    {
+      struct df_insn_info *insn_info = DF_INSN_UID_GET (i);
+ /* Skip the insns that have no insn_info or have been
+ deleted. */
+ if (insn_info)
+ {
+ df_scan_free_ref_vec (insn_info->defs);
+ df_scan_free_ref_vec (insn_info->uses);
+ df_scan_free_ref_vec (insn_info->eq_uses);
+ df_scan_free_mws_vec (insn_info->mw_hardregs);
+ }
+ }
+
+ FOR_ALL_BB (bb)
+ {
+ unsigned int bb_index = bb->index;
+ struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
+ if (bb_info)
+ {
+ df_scan_free_ref_vec (bb_info->artificial_defs);
+ df_scan_free_ref_vec (bb_info->artificial_uses);
+ }
+ }
free (df->def_info.refs);
free (df->def_info.begin);
df_scan->block_info = NULL;
df_scan->block_info_size = 0;
- BITMAP_FREE (df->hardware_regs_used);
- BITMAP_FREE (df->regular_block_artificial_uses);
- BITMAP_FREE (df->eh_block_artificial_uses);
+ bitmap_clear (&df->hardware_regs_used);
+ bitmap_clear (&df->regular_block_artificial_uses);
+ bitmap_clear (&df->eh_block_artificial_uses);
BITMAP_FREE (df->entry_block_defs);
BITMAP_FREE (df->exit_block_uses);
- BITMAP_FREE (df->insns_to_delete);
- BITMAP_FREE (df->insns_to_rescan);
- BITMAP_FREE (df->insns_to_notes_rescan);
+ bitmap_clear (&df->insns_to_delete);
+ bitmap_clear (&df->insns_to_rescan);
+ bitmap_clear (&df->insns_to_notes_rescan);
- free_alloc_pool (df_scan->block_pool);
- free_alloc_pool (problem_data->ref_pool);
- free_alloc_pool (problem_data->ref_extract_pool);
+ free_alloc_pool (problem_data->ref_base_pool);
+ free_alloc_pool (problem_data->ref_artificial_pool);
+ free_alloc_pool (problem_data->ref_regular_pool);
free_alloc_pool (problem_data->insn_pool);
free_alloc_pool (problem_data->reg_pool);
free_alloc_pool (problem_data->mw_reg_pool);
- free_alloc_pool (problem_data->mw_link_pool);
bitmap_obstack_release (&problem_data->reg_bitmaps);
bitmap_obstack_release (&problem_data->insn_bitmaps);
free (df_scan->problem_data);
}
-/* Set basic block info. */
-
-static void
-df_scan_set_bb_info (unsigned int index,
- struct df_scan_bb_info *bb_info)
-{
- gcc_assert (df_scan);
- df_grow_bb_info (df_scan);
- df_scan->block_info[index] = (void *) bb_info;
-}
-
-
/* Free basic block info. */
static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
unsigned int bb_index = bb->index;
- if (bb_info)
+
+ /* See if bb_info is initialized. */
+ if (bb_info->artificial_defs)
{
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn))
	  /* Delete the refs recorded for INSN. */
	  df_insn_delete (bb, INSN_UID (insn));
}
-
+
if (bb_index < df_scan->block_info_size)
bb_info = df_scan_get_bb_info (bb_index);
-
+
/* Get rid of any artificial uses or defs. */
- df_ref_chain_delete_du_chain (bb_info->artificial_defs);
- df_ref_chain_delete_du_chain (bb_info->artificial_uses);
- df_ref_chain_delete (bb_info->artificial_defs);
- df_ref_chain_delete (bb_info->artificial_uses);
- bb_info->artificial_defs = NULL;
- bb_info->artificial_uses = NULL;
- pool_free (df_scan->block_pool, bb_info);
+ if (bb_info->artificial_defs)
+ {
+ df_ref_chain_delete_du_chain (bb_info->artificial_defs);
+ df_ref_chain_delete_du_chain (bb_info->artificial_uses);
+ df_ref_chain_delete (bb_info->artificial_defs);
+ df_ref_chain_delete (bb_info->artificial_uses);
+ bb_info->artificial_defs = NULL;
+ bb_info->artificial_uses = NULL;
+ }
}
}
/* Allocate the problem data for the scanning problem. This should be
called when the problem is created or when the entire function is to
be rescanned. */
-void
+void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
struct df_scan_problem_data *problem_data;
if (df_scan->problem_data)
df_scan_free_internal ();
- df_scan->block_pool
- = create_alloc_pool ("df_scan_block pool",
- sizeof (struct df_scan_bb_info),
- block_size);
-
problem_data = XNEW (struct df_scan_problem_data);
df_scan->problem_data = problem_data;
df_scan->computed = true;
- problem_data->ref_pool
- = create_alloc_pool ("df_scan_ref pool",
- sizeof (struct df_ref), block_size);
- problem_data->ref_extract_pool
- = create_alloc_pool ("df_scan_ref extract pool",
- sizeof (struct df_ref_extract), block_size);
- problem_data->insn_pool
- = create_alloc_pool ("df_scan_insn pool",
+ problem_data->ref_base_pool
+ = create_alloc_pool ("df_scan ref base",
+ sizeof (struct df_base_ref), block_size);
+ problem_data->ref_artificial_pool
+ = create_alloc_pool ("df_scan ref artificial",
+ sizeof (struct df_artificial_ref), block_size);
+ problem_data->ref_regular_pool
+ = create_alloc_pool ("df_scan ref regular",
+ sizeof (struct df_regular_ref), block_size);
+ problem_data->insn_pool
+ = create_alloc_pool ("df_scan insn",
sizeof (struct df_insn_info), block_size);
- problem_data->reg_pool
- = create_alloc_pool ("df_scan_reg pool",
+ problem_data->reg_pool
+ = create_alloc_pool ("df_scan reg",
sizeof (struct df_reg_info), block_size);
- problem_data->mw_reg_pool
- = create_alloc_pool ("df_scan_mw_reg pool",
+ problem_data->mw_reg_pool
+ = create_alloc_pool ("df_scan mw_reg",
sizeof (struct df_mw_hardreg), block_size);
- problem_data->mw_link_pool
- = create_alloc_pool ("df_scan_mw_link pool",
- sizeof (struct df_link), block_size);
bitmap_obstack_initialize (&problem_data->reg_bitmaps);
bitmap_obstack_initialize (&problem_data->insn_bitmaps);
- insn_num += insn_num / 4;
+ insn_num += insn_num / 4;
df_grow_reg_info ();
df_grow_insn_info ();
+  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
unsigned int bb_index = bb->index;
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
- if (!bb_info)
- {
- bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
- df_scan_set_bb_info (bb_index, bb_info);
- }
bb_info->artificial_defs = NULL;
bb_info->artificial_uses = NULL;
}
- df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
- df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
- df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
+ bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
+ bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
+ bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
- df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
- df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
- df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
+ bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
+ bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
+ bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
df_scan->optional_p = false;
}
/* Free all of the data associated with the scan problem. */
-static void
+static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();
}
/* Dump the preamble for DF_SCAN dump. */
-static void
+static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
int i;
+ int dcount = 0;
+ int ucount = 0;
+ int ecount = 0;
+ int icount = 0;
+ int ccount = 0;
+ basic_block bb;
+ rtx insn;
fprintf (file, ";; invalidated by call \t");
- df_print_regset (file, df_invalidated_by_call);
+ df_print_regset (file, regs_invalidated_by_call_regset);
fprintf (file, ";; hardware regs used \t");
- df_print_regset (file, df->hardware_regs_used);
+ df_print_regset (file, &df->hardware_regs_used);
fprintf (file, ";; regular block artificial uses \t");
- df_print_regset (file, df->regular_block_artificial_uses);
+ df_print_regset (file, &df->regular_block_artificial_uses);
fprintf (file, ";; eh block artificial uses \t");
- df_print_regset (file, df->eh_block_artificial_uses);
+ df_print_regset (file, &df->eh_block_artificial_uses);
fprintf (file, ";; entry block defs \t");
df_print_regset (file, df->entry_block_defs);
fprintf (file, ";; exit block uses \t");
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (df_regs_ever_live_p (i))
fprintf (file, " %d[%s]", i, reg_names[i]);
+ fprintf (file, "\n;; ref usage \t");
+
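+  /* Print the number of defs, uses and eq-uses for each register
+     that has any, followed by a grand total over all insns.  */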
+ for (i = 0; i < (int)df->regs_inited; i++)
+ if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
+ {
+ const char * sep = "";
+
+ fprintf (file, "r%d={", i);
+ if (DF_REG_DEF_COUNT (i))
+ {
+ fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
+ sep = ",";
+ dcount += DF_REG_DEF_COUNT (i);
+ }
+ if (DF_REG_USE_COUNT (i))
+ {
+ fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
+ sep = ",";
+ ucount += DF_REG_USE_COUNT (i);
+ }
+ if (DF_REG_EQ_USE_COUNT (i))
+ {
+ fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
+ ecount += DF_REG_EQ_USE_COUNT (i);
+ }
+ fprintf (file, "} ");
+ }
- fprintf (file, "\n");
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ if (INSN_P (insn))
+ {
+ if (CALL_P (insn))
+ ccount++;
+ else
+ icount++;
+ }
+
+ fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
+ dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
}
/* Dump the bb_info for a given basic block. */
-static void
+static void
df_scan_start_block (basic_block bb, FILE *file)
{
struct df_scan_bb_info *bb_info
NULL, /* Local compute function. */
NULL, /* Init the solution specific data. */
NULL, /* Iterative solver. */
- NULL, /* Confluence operator 0. */
- NULL, /* Confluence operator n. */
+ NULL, /* Confluence operator 0. */
+ NULL, /* Confluence operator n. */
NULL, /* Transfer function. */
NULL, /* Finalize function. */
df_scan_free, /* Free all of the problem information. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
+ sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
TV_DF_SCAN, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* First, grow the reg_info information. If the current size is less than
- the number of psuedos, grow to 25% more than the number of
- pseudos.
+ the number of pseudos, grow to 25% more than the number of
+ pseudos.
Second, assure that all of the slots up to max_reg_num have been
filled with reg_info structures. */
-void
+void
df_grow_reg_info (void)
{
unsigned int max_reg = max_reg_num ();
if (df->regs_size < new_size)
{
new_size += new_size / 4;
- df->def_regs = xrealloc (df->def_regs,
- new_size *sizeof (struct df_reg_info*));
- df->use_regs = xrealloc (df->use_regs,
- new_size *sizeof (struct df_reg_info*));
- df->eq_use_regs = xrealloc (df->eq_use_regs,
- new_size *sizeof (struct df_reg_info*));
- df->def_info.begin = xrealloc (df->def_info.begin,
- new_size *sizeof (int));
- df->def_info.count = xrealloc (df->def_info.count,
- new_size *sizeof (int));
- df->use_info.begin = xrealloc (df->use_info.begin,
- new_size *sizeof (int));
- df->use_info.count = xrealloc (df->use_info.count,
- new_size *sizeof (int));
+ df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
+ df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
+ df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
+ new_size);
+ df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
+ df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
+ df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
+ df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
df->regs_size = new_size;
}
{
struct df_reg_info *reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->def_regs[i] = reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->use_regs[i] = reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
df->use_info.begin[i] = 0;
df->use_info.count[i] = 0;
}
-
+
df->regs_inited = max_reg;
}
/* Grow the ref information. */
-static void
+static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
if (ref_info->refs_size < new_size)
{
- ref_info->refs = xrealloc (ref_info->refs,
- new_size *sizeof (struct df_ref *));
+ ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
memset (ref_info->refs + ref_info->refs_size, 0,
- (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
+ (new_size - ref_info->refs_size) *sizeof (df_ref));
ref_info->refs_size = new_size;
}
}
ref_info->total_size. */
static void
-df_check_and_grow_ref_info (struct df_ref_info *ref_info,
+df_check_and_grow_ref_info (struct df_ref_info *ref_info,
unsigned bitmap_addend)
{
if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
number of instructions, grow to 25% more than the number of
instructions. */
-void
+void
df_grow_insn_info (void)
{
unsigned int new_size = get_max_uid () + 1;
if (DF_INSN_SIZE () < new_size)
{
new_size += new_size / 4;
- df->insns = xrealloc (df->insns,
- new_size *sizeof (struct df_insn_info *));
+ df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
memset (df->insns + df->insns_size, 0,
(new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
DF_INSN_SIZE () = new_size;
df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
- df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
- df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
+ df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
+ df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
- bitmap_ior_into (df->eh_block_artificial_uses,
- df->regular_block_artificial_uses);
+ bitmap_ior_into (&df->eh_block_artificial_uses,
+ &df->regular_block_artificial_uses);
/* ENTRY and EXIT blocks have special defs/uses. */
df_get_entry_block_def_set (df->entry_block_defs);
/* Create a new ref of type DF_REF_TYPE for register REG at address
- LOC within INSN of BB. This function is only used externally.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
- fields if they were constants. Otherwise they should be -1 if
- those flags were set. */
+ LOC within INSN of BB. This function is only used externally. */
-struct df_ref *
-df_ref_create (rtx reg, rtx *loc, rtx insn,
+df_ref
+df_ref_create (rtx reg, rtx *loc, rtx insn,
basic_block bb,
- enum df_ref_type ref_type,
- enum df_ref_flags ref_flags,
- int width, int offset, enum machine_mode mode)
+ enum df_ref_type ref_type,
+ int ref_flags)
{
- struct df_ref *ref;
+ df_ref ref;
struct df_reg_info **reg_info;
struct df_ref_info *ref_info;
- struct df_ref **ref_rec;
- struct df_ref ***ref_rec_ptr;
+ df_ref *ref_rec;
+ df_ref **ref_rec_ptr;
unsigned int count = 0;
bool add_to_table;
+ enum df_ref_class cl;
df_grow_reg_info ();
/* You cannot hack artificial refs. */
gcc_assert (insn);
- ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
- ref_type, ref_flags,
- width, offset, mode);
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
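+  /* A ref created with a location inside the insn needs the larger
+     "regular" variant; a ref created without a loc only needs the
+     smaller base variant.  */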
+ if (loc)
+ cl = DF_REF_REGULAR;
+ else
+ cl = DF_REF_BASE;
+ ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
+ ref_type, ref_flags);
+
+ if (DF_REF_REG_DEF_P (ref))
{
reg_info = df->def_regs;
ref_info = &df->def_info;
add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
-
+
if (add_to_table)
switch (ref_info->ref_order)
{
ref_rec = *ref_rec_ptr;
if (count)
{
- ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
+ ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
*ref_rec_ptr = ref_rec;
ref_rec[count] = ref;
ref_rec[count+1] = NULL;
- qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
}
else
{
- struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
+ df_ref *ref_rec = XNEWVEC (df_ref, 2);
ref_rec[0] = ref;
ref_rec[1] = NULL;
*ref_rec_ptr = ref_rec;
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
differences even though the block will have changed. So we need
- to mark the block dirty ourselves. */
- df_set_bb_dirty (bb);
+ to mark the block dirty ourselves. */
+ if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
+ df_set_bb_dirty (bb);
return ref;
}
----------------------------------------------------------------------------*/
static void
-df_free_ref (struct df_ref *ref)
+df_free_ref (df_ref ref)
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- pool_free (problem_data->ref_extract_pool, (struct df_ref_extract *)ref);
- else
- pool_free (problem_data->ref_pool, ref);
+ switch (DF_REF_CLASS (ref))
+ {
+ case DF_REF_BASE:
+ pool_free (problem_data->ref_base_pool, ref);
+ break;
+
+ case DF_REF_ARTIFICIAL:
+ pool_free (problem_data->ref_artificial_pool, ref);
+ break;
+
+ case DF_REF_REGULAR:
+ pool_free (problem_data->ref_regular_pool, ref);
+ break;
+ }
}
Also delete the def-use or use-def chain if it exists. */
static void
-df_reg_chain_unlink (struct df_ref *ref)
+df_reg_chain_unlink (df_ref ref)
{
- struct df_ref *next = DF_REF_NEXT_REG (ref);
- struct df_ref *prev = DF_REF_PREV_REG (ref);
+ df_ref next = DF_REF_NEXT_REG (ref);
+ df_ref prev = DF_REF_PREV_REG (ref);
int id = DF_REF_ID (ref);
struct df_reg_info *reg_info;
- struct df_ref **refs = NULL;
+ df_ref *refs = NULL;
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (ref))
{
- reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
+ int regno = DF_REF_REGNO (ref);
+ reg_info = DF_REG_DEF_GET (regno);
refs = df->def_info.refs;
}
- else
+ else
{
if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
{
{
if (df->analyze_subset)
{
- if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
+ if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
refs[id] = NULL;
}
else
refs[id] = NULL;
}
-
+
/* Delete any def-use or use-def chains that start here. It is
possible that there is trash in this field. This happens for
insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns. */
if (df_chain && DF_REF_CHAIN (ref))
df_chain_unlink (ref);
-
+
reg_info->n_refs--;
if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
{
/* Remove REF from VEC. */
static void
-df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
+df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
- struct df_ref **vec = *vec_ptr;
+ df_ref *vec = *vec_ptr;
if (vec[1])
{
while (*vec && *vec != ref)
vec++;
-
+
while (*vec)
{
*vec = *(vec+1);
/* Unlink REF from all def-use/use-def chains, etc. */
void
-df_ref_remove (struct df_ref *ref)
+df_ref_remove (df_ref ref)
{
#if 0
if (dump_file)
{
if (DF_REF_IS_ARTIFICIAL (ref))
{
- struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (DF_REF_BB (ref)->index);
+ struct df_scan_bb_info *bb_info
+ = df_scan_get_bb_info (DF_REF_BBNO (ref));
df_ref_compress_rec (&bb_info->artificial_defs, ref);
}
else
{
if (DF_REF_IS_ARTIFICIAL (ref))
{
- struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (DF_REF_BB (ref)->index);
+ struct df_scan_bb_info *bb_info
+ = df_scan_get_bb_info (DF_REF_BBNO (ref));
df_ref_compress_rec (&bb_info->artificial_uses, ref);
}
- else
+ else
{
unsigned int uid = DF_REF_INSN_UID (ref);
struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
  /* By deleting the ref directly, df_insn_rescan may not find any
differences even though the block will have changed. So we need
- to mark the block dirty ourselves. */
- df_set_bb_dirty (DF_REF_BB (ref));
+ to mark the block dirty ourselves. */
+ if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
+ df_set_bb_dirty (DF_REF_BB (ref));
df_reg_chain_unlink (ref);
}
struct df_insn_info *insn_rec;
df_grow_insn_info ();
- insn_rec = DF_INSN_GET (insn);
+ insn_rec = DF_INSN_INFO_GET (insn);
if (!insn_rec)
{
- insn_rec = pool_alloc (problem_data->insn_pool);
- DF_INSN_SET (insn, insn_rec);
+ insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
+ DF_INSN_INFO_SET (insn, insn_rec);
}
memset (insn_rec, 0, sizeof (struct df_insn_info));
insn_rec->insn = insn;
/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
static void
-df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
+df_ref_chain_delete_du_chain (df_ref *ref_rec)
{
while (*ref_rec)
{
- struct df_ref *ref = *ref_rec;
- /* CHAIN is allocated by DF_CHAIN. So make sure to
+ df_ref ref = *ref_rec;
+ /* CHAIN is allocated by DF_CHAIN. So make sure to
pass df_scan instance for the problem. */
if (DF_REF_CHAIN (ref))
df_chain_unlink (ref);
/* Delete all refs in the ref chain. */
static void
-df_ref_chain_delete (struct df_ref **ref_rec)
+df_ref_chain_delete (df_ref *ref_rec)
{
- struct df_ref **start = ref_rec;
+ df_ref *start = ref_rec;
while (*ref_rec)
{
df_reg_chain_unlink (*ref_rec);
except when called from df_process_deferred_rescans to mark the block
as dirty. */
-void
+void
df_insn_delete (basic_block bb, unsigned int uid)
{
struct df_insn_info *insn_info = NULL;
{
if (insn_info)
{
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
- bitmap_set_bit (df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ bitmap_set_bit (&df->insns_to_delete, uid);
}
if (dump_file)
fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
if (dump_file)
fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (insn_info)
{
- struct df_scan_problem_data *problem_data
+ struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
/* In general, notes do not have the insn_info fields
if (insn_info->defs)
{
df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
-
+
if (df_chain)
{
df_ref_chain_delete_du_chain (insn_info->defs);
- df_ref_chain_delete_du_chain (insn_info->uses);
+ df_ref_chain_delete_du_chain (insn_info->uses);
df_ref_chain_delete_du_chain (insn_info->eq_uses);
}
-
+
df_ref_chain_delete (insn_info->defs);
df_ref_chain_delete (insn_info->uses);
df_ref_chain_delete (insn_info->eq_uses);
static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
- struct df_scan_problem_data *problem_data
+ unsigned int ix;
+ struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- struct df_ref **ref;
- struct df_mw_hardreg **mw;
-
- if (collection_rec->def_vec)
- for (ref = collection_rec->def_vec; *ref; ref++)
- df_free_ref (*ref);
- if (collection_rec->use_vec)
- for (ref = collection_rec->use_vec; *ref; ref++)
- df_free_ref (*ref);
- if (collection_rec->eq_use_vec)
- for (ref = collection_rec->eq_use_vec; *ref; ref++)
- df_free_ref (*ref);
- if (collection_rec->mw_vec)
- for (mw = collection_rec->mw_vec; *mw; mw++)
- pool_free (problem_data->mw_reg_pool, *mw);
+ df_ref ref;
+ struct df_mw_hardreg *mw;
+
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
+ df_free_ref (ref);
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
+ df_free_ref (ref);
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
+ df_free_ref (ref);
+ FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
+ pool_free (problem_data->mw_reg_pool, mw);
+
+ VEC_free (df_ref, stack, collection_rec->def_vec);
+ VEC_free (df_ref, stack, collection_rec->use_vec);
+ VEC_free (df_ref, stack, collection_rec->eq_use_vec);
+ VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
}
-
/* Rescan INSN. Return TRUE if the rescanning produced any changes. */
-bool
+bool
df_insn_rescan (rtx insn)
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = NULL;
basic_block bb = BLOCK_FOR_INSN (insn);
struct df_collection_rec collection_rec;
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
if ((!df) || (!INSN_P (insn)))
return false;
}
if (dump_file)
fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
-
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
- bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
+
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
return false;
}
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
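+  /* Collect the refs of INSN into stack vectors; the initial sizes
+     are only hints, the vecs grow as needed.  */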
+ collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
+ collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
+
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (insn_info)
{
+ int luid;
bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
/* If there's no change, return false. */
if (the_same)
if (dump_file)
fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
- /* There's change - we need to delete the existing info. */
+ /* There's change - we need to delete the existing info.
+ Since the insn isn't moved, we can salvage its LUID. */
+ luid = DF_INSN_LUID (insn);
df_insn_delete (NULL, uid);
df_insn_create_insn_record (insn);
+ DF_INSN_LUID (insn) = luid;
}
else
{
- df_insn_create_insn_record (insn);
- df_insn_refs_collect (&collection_rec, bb, insn);
+ struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
+ df_insn_refs_collect (&collection_rec, bb, insn_info);
if (dump_file)
fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
}
df_refs_add_to_chains (&collection_rec, bb, insn);
- df_set_bb_dirty (bb);
+ if (DEBUG_INSN_P (insn))
+ df_set_bb_dirty_nonlr (bb);
+ else
+ df_set_bb_dirty (bb);
+
+ VEC_free (df_ref, stack, collection_rec.def_vec);
+ VEC_free (df_ref, stack, collection_rec.use_vec);
+ VEC_free (df_ref, stack, collection_rec.eq_use_vec);
+ VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
+
+ return true;
+}
+
+/* Same as df_insn_rescan, but don't mark the basic block as
+ dirty. */
+
+bool
+df_insn_rescan_debug_internal (rtx insn)
+{
+ unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info;
+
+ gcc_assert (DEBUG_INSN_P (insn)
+ && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
+
+ if (!df)
+ return false;
+
+ insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
+ if (!insn_info)
+ return false;
+
+ if (dump_file)
+ fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
+
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+
+ if (!insn_info->defs)
+ return false;
+
+ if (insn_info->defs == df_null_ref_rec
+ && insn_info->uses == df_null_ref_rec
+ && insn_info->eq_uses == df_null_ref_rec
+ && insn_info->mw_hardregs == df_null_mw_rec)
+ return false;
+
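+  /* The debug insn still has refs: delete them all, leaving the
+     empty null-terminated records behind, without marking the
+     containing block dirty.  */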
+ df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
+
+ if (df_chain)
+ {
+ df_ref_chain_delete_du_chain (insn_info->defs);
+ df_ref_chain_delete_du_chain (insn_info->uses);
+ df_ref_chain_delete_du_chain (insn_info->eq_uses);
+ }
+
+ df_ref_chain_delete (insn_info->defs);
+ df_ref_chain_delete (insn_info->uses);
+ df_ref_chain_delete (insn_info->eq_uses);
+
+ insn_info->defs = df_null_ref_rec;
+ insn_info->uses = df_null_ref_rec;
+ insn_info->eq_uses = df_null_ref_rec;
+ insn_info->mw_hardregs = df_null_mw_rec;
+
return true;
}
basic_block bb;
bitmap_iterator bi;
unsigned int uid;
- bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
-
+ bitmap_head tmp;
+
+ bitmap_initialize (&tmp, &df_bitmap_obstack);
+
if (df->changeable_flags & DF_NO_INSN_RESCAN)
{
df_clear_flags (DF_NO_INSN_RESCAN);
no_insn_rescan = true;
}
-
+
if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
{
df_clear_flags (DF_DEFER_INSN_RESCAN);
defer_insn_rescan = true;
}
- bitmap_copy (tmp, df->insns_to_delete);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_delete);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
df_insn_delete (NULL, uid);
}
- BITMAP_FREE (tmp);
- bitmap_clear (df->insns_to_delete);
- bitmap_clear (df->insns_to_rescan);
- bitmap_clear (df->insns_to_notes_rescan);
+ bitmap_clear (&tmp);
+ bitmap_clear (&df->insns_to_delete);
+ bitmap_clear (&df->insns_to_rescan);
+ bitmap_clear (&df->insns_to_notes_rescan);
- FOR_EACH_BB (bb)
+ FOR_EACH_BB (bb)
{
rtx insn;
FOR_BB_INSNS (bb, insn)
bool defer_insn_rescan = false;
bitmap_iterator bi;
unsigned int uid;
- bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
-
+ bitmap_head tmp;
+
+ bitmap_initialize (&tmp, &df_bitmap_obstack);
+
if (df->changeable_flags & DF_NO_INSN_RESCAN)
{
df_clear_flags (DF_NO_INSN_RESCAN);
no_insn_rescan = true;
}
-
+
if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
{
df_clear_flags (DF_DEFER_INSN_RESCAN);
if (dump_file)
fprintf (dump_file, "starting the processing of deferred insns\n");
- bitmap_copy (tmp, df->insns_to_delete);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_delete);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
df_insn_delete (NULL, uid);
}
- bitmap_copy (tmp, df->insns_to_rescan);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_rescan);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
df_insn_rescan (insn_info->insn);
}
- bitmap_copy (tmp, df->insns_to_notes_rescan);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_notes_rescan);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
if (dump_file)
fprintf (dump_file, "ending the processing of deferred insns\n");
- BITMAP_FREE (tmp);
- bitmap_clear (df->insns_to_delete);
- bitmap_clear (df->insns_to_rescan);
- bitmap_clear (df->insns_to_notes_rescan);
+ bitmap_clear (&tmp);
+ bitmap_clear (&df->insns_to_delete);
+ bitmap_clear (&df->insns_to_rescan);
+ bitmap_clear (&df->insns_to_notes_rescan);
if (no_insn_rescan)
df_set_flags (DF_NO_INSN_RESCAN);
INCLUDE_EQ_USES. */
static unsigned int
-df_count_refs (bool include_defs, bool include_uses,
+df_count_refs (bool include_defs, bool include_uses,
bool include_eq_uses)
{
unsigned int regno;
int size = 0;
unsigned int m = df->regs_inited;
-
+
for (regno = 0; regno < m; regno++)
{
if (include_defs)
or reg-def chains. This version processes the refs in reg order
which is likely to be best if processing the whole function. */
-static void
+static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
- bool include_defs,
- bool include_uses,
+ bool include_defs,
+ bool include_uses,
bool include_eq_uses)
{
unsigned int m = df->regs_inited;
else
start = 0;
- ref_info->total_size
+ ref_info->total_size
= df_count_refs (include_defs, include_uses, include_eq_uses);
df_check_and_grow_ref_info (ref_info, 1);
ref_info->begin[regno] = offset;
if (include_defs)
{
- struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
- while (ref)
+ df_ref ref = DF_REG_DEF_CHAIN (regno);
+ while (ref)
{
ref_info->refs[offset] = ref;
DF_REF_ID (ref) = offset++;
count++;
ref = DF_REF_NEXT_REG (ref);
- gcc_assert (offset < ref_info->refs_size);
+ gcc_checking_assert (offset < ref_info->refs_size);
}
}
if (include_uses)
{
- struct df_ref *ref = DF_REG_USE_CHAIN (regno);
- while (ref)
+ df_ref ref = DF_REG_USE_CHAIN (regno);
+ while (ref)
{
ref_info->refs[offset] = ref;
DF_REF_ID (ref) = offset++;
count++;
ref = DF_REF_NEXT_REG (ref);
- gcc_assert (offset < ref_info->refs_size);
+ gcc_checking_assert (offset < ref_info->refs_size);
}
}
if (include_eq_uses)
{
- struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
- while (ref)
+ df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
+ while (ref)
{
ref_info->refs[offset] = ref;
DF_REF_ID (ref) = offset++;
count++;
ref = DF_REF_NEXT_REG (ref);
- gcc_assert (offset < ref_info->refs_size);
+ gcc_checking_assert (offset < ref_info->refs_size);
}
}
ref_info->count[regno] = count;
}
-
+
/* The bitmap size is not decremented when refs are deleted. So
reset it now that we have squished out all of the empty
slots. */
which is likely to be best if processing some segment of the
function. */
-static void
+static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
- bool include_defs,
- bool include_uses,
+ bool include_defs,
+ bool include_uses,
bool include_eq_uses)
{
bitmap_iterator bi;
unsigned int m = df->regs_inited;
unsigned int offset = 0;
unsigned int r;
- unsigned int start
+ unsigned int start
= (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **ref_rec;
+ df_ref *ref_rec;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
if (INSN_P (insn))
{
unsigned int uid = INSN_UID (insn);
-
+
if (include_defs)
for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
{
offset += ref_info->count[r];
ref_info->count[r] = 0;
}
-
+
EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **ref_rec;
+ df_ref *ref_rec;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_uses)
for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (INSN_P (insn))
{
unsigned int uid = INSN_UID (insn);
-
+
if (include_defs)
for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_uses)
for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_eq_uses)
for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
/* Build the ref table for either the uses or defs from the reg-use
or reg-def chains. */
-static void
+static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
- bool include_defs,
- bool include_uses,
+ bool include_defs,
+ bool include_uses,
bool include_eq_uses)
{
if (df->analyze_subset)
- df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
+ df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
include_uses, include_eq_uses);
else
- df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
+ df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
include_uses, include_eq_uses);
}
/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
-static unsigned int
-df_add_refs_to_table (unsigned int offset,
- struct df_ref_info *ref_info,
- struct df_ref **ref_vec)
+static unsigned int
+df_add_refs_to_table (unsigned int offset,
+ struct df_ref_info *ref_info,
+ df_ref *ref_vec)
{
while (*ref_vec)
{
- struct df_ref *ref = *ref_vec;
+ df_ref ref = *ref_vec;
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
{
eq_uses if INCLUDE_EQ_USES. */
static unsigned int
-df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
+df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
struct df_ref_info *ref_info,
- bool include_defs, bool include_uses,
+ bool include_defs, bool include_uses,
bool include_eq_uses)
{
rtx insn;
if (include_defs)
- offset = df_add_refs_to_table (offset, ref_info,
+ offset = df_add_refs_to_table (offset, ref_info,
df_get_artificial_defs (bb->index));
if (include_uses)
- offset = df_add_refs_to_table (offset, ref_info,
+ offset = df_add_refs_to_table (offset, ref_info,
df_get_artificial_uses (bb->index));
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (include_defs)
- offset = df_add_refs_to_table (offset, ref_info,
+ offset = df_add_refs_to_table (offset, ref_info,
DF_INSN_UID_DEFS (uid));
if (include_uses)
- offset = df_add_refs_to_table (offset, ref_info,
+ offset = df_add_refs_to_table (offset, ref_info,
DF_INSN_UID_USES (uid));
if (include_eq_uses)
- offset = df_add_refs_to_table (offset, ref_info,
+ offset = df_add_refs_to_table (offset, ref_info,
DF_INSN_UID_EQ_USES (uid));
}
return offset;
static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
- bool include_defs, bool include_uses,
+ bool include_defs, bool include_uses,
bool include_eq_uses)
{
basic_block bb;
EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
{
- offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
- include_defs, include_uses,
+ offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
+ include_defs, include_uses,
include_eq_uses);
}
else
{
FOR_ALL_BB (bb)
- offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
- include_defs, include_uses,
+ offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
+ include_defs, include_uses,
include_eq_uses);
    }

  ref_info->table_size = offset;
}
/* If the use refs in DF are not organized, reorganize them. */
-void
+void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
if (order == df->use_info.ref_order)
gcc_unreachable ();
break;
}
-
+
df->use_info.ref_order = order;
}
/* If the def refs in DF are not organized, reorganize them. */
-void
+void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
if (order == df->def_info.ref_order)
gcc_unreachable ();
break;
}
-
- df->def_info.ref_order = order;
-}
-
-
-/* Change the BB of all refs in the ref chain from OLD_BB to NEW_BB.
- Assumes that all refs in the chain have the same BB. */
-
-static void
-df_ref_chain_change_bb (struct df_ref **ref_rec,
- basic_block old_bb,
- basic_block new_bb)
-{
- while (*ref_rec)
- {
- struct df_ref *ref = *ref_rec;
- gcc_assert (DF_REF_BB (ref) == old_bb);
- DF_REF_BB (ref) = new_bb;
- ref_rec++;
- }
+ df->def_info.ref_order = order;
}
/* Change all of the basic block references in INSN to use the insn's
- current basic block. This function is called from routines that move
- instructions from one block to another. */
+ current basic block. This function is called from routines that move
+ instructions from one block to another. */
void
df_insn_change_bb (rtx insn, basic_block new_bb)
if (!INSN_P (insn))
return;
- df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
- df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
- df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
-
df_set_bb_dirty (new_bb);
if (old_bb)
{
if (dump_file)
- fprintf (dump_file, " from %d to %d\n",
+ fprintf (dump_file, " from %d to %d\n",
old_bb->index, new_bb->index);
df_set_bb_dirty (old_bb);
}
/* Helper function for df_ref_change_reg_with_loc. */
static void
-df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
+df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
+ struct df_reg_info *new_df,
int new_regno, rtx loc)
{
- struct df_ref *the_ref = old->reg_chain;
+ df_ref the_ref = old_df->reg_chain;
while (the_ref)
{
- if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
+ if ((!DF_REF_IS_ARTIFICIAL (the_ref))
+ && DF_REF_LOC (the_ref)
+ && (*DF_REF_LOC (the_ref) == loc))
{
- struct df_ref *next_ref = the_ref->next_reg;
- struct df_ref *prev_ref = the_ref->prev_reg;
- struct df_ref **ref_vec, **ref_vec_t;
+ df_ref next_ref = DF_REF_NEXT_REG (the_ref);
+ df_ref prev_ref = DF_REF_PREV_REG (the_ref);
+ df_ref *ref_vec, *ref_vec_t;
+ struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
unsigned int count = 0;
DF_REF_REGNO (the_ref) = new_regno;
/* Pull the_ref out of the old regno chain. */
if (prev_ref)
- prev_ref->next_reg = next_ref;
+ DF_REF_NEXT_REG (prev_ref) = next_ref;
else
- old->reg_chain = next_ref;
+ old_df->reg_chain = next_ref;
if (next_ref)
- next_ref->prev_reg = prev_ref;
- old->n_refs--;
+ DF_REF_PREV_REG (next_ref) = prev_ref;
+ old_df->n_refs--;
/* Put the ref into the new regno chain. */
- the_ref->prev_reg = NULL;
- the_ref->next_reg = new->reg_chain;
- if (new->reg_chain)
- new->reg_chain->prev_reg = the_ref;
- new->reg_chain = the_ref;
- new->n_refs++;
- df_set_bb_dirty (DF_REF_BB (the_ref));
-
- /* Need to resort the record that the ref was in because the
- regno is a sorting key. First, find the right record. */
- if (DF_REF_IS_ARTIFICIAL (the_ref))
- {
- unsigned int bb_index = DF_REF_BB (the_ref)->index;
- if (DF_REF_REG_DEF_P (the_ref))
- ref_vec = df_get_artificial_defs (bb_index);
- else
- ref_vec = df_get_artificial_uses (bb_index);
- }
+ DF_REF_PREV_REG (the_ref) = NULL;
+ DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
+ if (new_df->reg_chain)
+ DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
+ new_df->reg_chain = the_ref;
+ new_df->n_refs++;
+ if (DF_REF_BB (the_ref))
+ df_set_bb_dirty (DF_REF_BB (the_ref));
+
+	  /* Need to re-sort the record the ref was in, because the
+	     regno is a sorting key.  First, find the right
+	     record.  */
+ if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
+ ref_vec = insn_info->eq_uses;
else
- {
- struct df_insn_info *insn_info
- = DF_INSN_GET (DF_REF_INSN (the_ref));
- if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
- ref_vec = insn_info->eq_uses;
- else
- ref_vec = insn_info->uses;
- if (dump_file)
- fprintf (dump_file, "changing reg in insn %d\n",
- INSN_UID (DF_REF_INSN (the_ref)));
- }
+ ref_vec = insn_info->uses;
+ if (dump_file)
+ fprintf (dump_file, "changing reg in insn %d\n",
+ DF_REF_INSN_UID (the_ref));
+
ref_vec_t = ref_vec;
	  /* Find the length. */
	  while (*ref_vec_t)
	    {
count++;
ref_vec_t++;
}
- qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
+	  qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
the_ref = next_ref;
}
else
- the_ref = the_ref->next_reg;
+ the_ref = DF_REF_NEXT_REG (the_ref);
}
}
/* Change the regno of all refs that contained LOC from OLD_REGNO to
- NEW_REGNO. Refs that do not match LOC are not changed. This call
- is to support the SET_REGNO macro. */
+ NEW_REGNO. Refs that do not match LOC are not changed which means
+ that artificial refs are not changed since they have no loc. This
+ call is to support the SET_REGNO macro. */
void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
df_grow_reg_info ();
- df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
+ df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
DF_REG_DEF_GET (new_regno), new_regno, loc);
- df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
+ df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
DF_REG_USE_GET (new_regno), new_regno, loc);
- df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
+ df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}
struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
unsigned int deleted = 0;
unsigned int count = 0;
- struct df_scan_problem_data *problem_data
+ struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
if (!*mw_vec)
/* Shove the remaining ones down one to fill the gap. While
this looks n**2, it is highly unusual to have any mw regs
in eq_notes and the chances of more than one are almost
- non existent. */
+	 non-existent.  */
while (*temp_vec)
{
*temp_vec = *(temp_vec + 1);
if (count == 0)
{
- free (insn_info->mw_hardregs);
+ df_scan_free_mws_vec (insn_info->mw_hardregs);
insn_info->mw_hardregs = df_null_mw_rec;
return 0;
}
insn_info->eq_uses = df_null_ref_rec;
insn_info->mw_hardregs = df_null_mw_rec;
}
-
- bitmap_clear_bit (df->insns_to_delete, uid);
+
+ bitmap_clear_bit (&df->insns_to_delete, uid);
/* If the insn is set to be rescanned, it does not need to also
be notes rescanned. */
- if (!bitmap_bit_p (df->insns_to_rescan, uid))
- bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
+ if (!bitmap_bit_p (&df->insns_to_rescan, uid))
+ bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
return;
}
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (insn_info)
{
rtx note;
struct df_collection_rec collection_rec;
unsigned int num_deleted;
+ unsigned int mw_len;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
+ collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
df_ref_chain_delete (insn_info->eq_uses);
	  case REG_EQUIV:
	  case REG_EQUAL:
df_uses_record (&collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
- bb, insn, DF_REF_IN_NOTE, -1, -1, 0);
+ bb, insn_info, DF_REF_IN_NOTE);
default:
break;
}
/* Find some place to put any new mw_hardregs. */
df_canonize_collection_rec (&collection_rec);
- if (collection_rec.next_mw)
+ mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
+ if (mw_len)
{
unsigned int count = 0;
struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
{
/* Append to the end of the existing record after
expanding it if necessary. */
- if (collection_rec.next_mw > num_deleted)
+ if (mw_len > num_deleted)
{
- insn_info->mw_hardregs =
- xrealloc (insn_info->mw_hardregs,
- (count + 1 + collection_rec.next_mw)
- * sizeof (struct df_ref*));
+ insn_info->mw_hardregs =
+ XRESIZEVEC (struct df_mw_hardreg *,
+ insn_info->mw_hardregs,
+ count + 1 + mw_len);
}
- memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
- (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
- qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
+ memcpy (&insn_info->mw_hardregs[count],
+ VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
+ mw_len * sizeof (struct df_mw_hardreg *));
+ insn_info->mw_hardregs[count + mw_len] = NULL;
+ qsort (insn_info->mw_hardregs, count + mw_len,
sizeof (struct df_mw_hardreg *), df_mw_compare);
}
else
{
- /* No vector there. */
- insn_info->mw_hardregs
- = XNEWVEC (struct df_mw_hardreg*,
- count + 1 + collection_rec.next_mw);
- memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
- (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
+ /* No vector there. */
+ insn_info->mw_hardregs
+ = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
+ memcpy (insn_info->mw_hardregs,
+ VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
+ mw_len * sizeof (struct df_mw_hardreg *));
+ insn_info->mw_hardregs[mw_len] = NULL;
}
}
/* Get rid of the mw_rec so that df_refs_add_to_chains will
ignore it. */
- collection_rec.mw_vec = NULL;
- collection_rec.next_mw = 0;
+ VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
df_refs_add_to_chains (&collection_rec, bb, insn);
+ VEC_free (df_ref, stack, collection_rec.eq_use_vec);
}
else
df_insn_rescan (insn);
----------------------------------------------------------------------------*/
-/* Return true if the contents of two df_ref's are identical.
+/* Return true if the contents of two df_refs are identical.
It ignores DF_REF_MARKER. */
static bool
-df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
+df_ref_equal_p (df_ref ref1, df_ref ref2)
{
if (!ref2)
return false;
- /* The two flag tests here are only to make sure we do not look at
- the offset and width if they are not there. The flags are
- compared in the next set of tests. */
- if ((DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- && (DF_REF_FLAGS_IS_SET (ref2, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- && ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
- || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
- || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))))
+ if (ref1 == ref2)
+ return true;
+
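+  /* Compare the fields present in every ref class first; the
+     class-specific LOC field is checked below.  */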
+ if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
+ || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
+ || DF_REF_REG (ref1) != DF_REF_REG (ref2)
+ || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
+ || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
+ != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
+ || DF_REF_BB (ref1) != DF_REF_BB (ref2)
+ || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
return false;
- return (ref1 == ref2) ||
- (DF_REF_REG (ref1) == DF_REF_REG (ref2)
- && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
- && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
- && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
- && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
- && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
- == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
- && DF_REF_BB (ref1) == DF_REF_BB (ref2));
+ switch (DF_REF_CLASS (ref1))
+ {
+ case DF_REF_ARTIFICIAL:
+ case DF_REF_BASE:
+ return true;
+
+ case DF_REF_REGULAR:
+ return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
+
+ default:
+ gcc_unreachable ();
+ }
+ return false;
}
static int
df_ref_compare (const void *r1, const void *r2)
{
- const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
- const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
+ const df_ref ref1 = *(const df_ref *)r1;
+ const df_ref ref2 = *(const df_ref *)r2;
if (ref1 == ref2)
return 0;
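+  /* Sort first by ref class; the comparisons below, such as the LOC
+     check, are only meaningful between refs of the same class.  */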
+ if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
+ return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
+
if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
-
+
if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
- if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
- || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
+ if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
+ return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
+
+ /* Cannot look at the LOC field on artificial refs. */
+ if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
+ && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
return 1;
}
- /* The flags are the same at this point so it is safe to only look
- at ref1. */
- if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- {
- if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
- return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
- if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
- return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
- if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
- return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
- }
- return 0;
+ return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
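+/* Swap the entries at positions I and J of *REF_VEC.  */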
static void
-df_swap_refs (struct df_ref **ref_vec, int i, int j)
+df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
{
- struct df_ref *tmp = ref_vec[i];
- ref_vec[i] = ref_vec[j];
- ref_vec[j] = tmp;
+ df_ref tmp = VEC_index (df_ref, *ref_vec, i);
+ VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
+ VEC_replace (df_ref, *ref_vec, j, tmp);
}
/* Sort and compress a set of refs. */
-static unsigned int
-df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
+static void
+df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
+ unsigned int count;
unsigned int i;
unsigned int dist = 0;
- ref_vec[count] = NULL;
+ count = VEC_length (df_ref, *ref_vec);
+
/* If there are 1 or 0 elements, there is nothing to do. */
if (count < 2)
- return count;
+ return;
else if (count == 2)
{
- if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
+ df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
+ df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
+ if (df_ref_compare (&r0, &r1) > 0)
df_swap_refs (ref_vec, 0, 1);
}
else
{
for (i = 0; i < count - 1; i++)
- if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
- break;
+ {
+ df_ref r0 = VEC_index (df_ref, *ref_vec, i);
+ df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
+ if (df_ref_compare (&r0, &r1) >= 0)
+ break;
+ }
      /* If the array is already strictly ordered,
	 which is the most common case for large COUNT
	 (as happens for CALL insns),
	 there is no need to sort and filter out duplicates.
-	 Simply return the count.
+	 Simply return.
	 Make sure DF_GET_ADD_REFS adds refs in the increasing order
	 of DF_REF_COMPARE. */
if (i == count - 1)
- return count;
- qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
+ return;
+ VEC_qsort (df_ref, *ref_vec, df_ref_compare);
}
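+  /* Compact out duplicates in place.  DIST counts how many duplicate
+     refs have been freed so far, so the next surviving element for
+     slot I + 1 lives at I + DIST + 1.  For example, a sorted vector
+     [a, a, b, b, c] frees one a and one b and is truncated to
+     [a, b, c].  */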
  for (i = 0; i < count - dist; i++)
{
/* Find the next ref that is not equal to the current ref. */
- while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
+ while (i + dist + 1 < count
+ && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
+ VEC_index (df_ref, *ref_vec, i + dist + 1)))
{
- df_free_ref (ref_vec[i + dist + 1]);
+ df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
dist++;
}
/* Copy it down to the next position. */
- if (dist)
- ref_vec[i+1] = ref_vec[i + dist + 1];
+ if (dist && i + dist + 1 < count)
+ VEC_replace (df_ref, *ref_vec, i + 1,
+ VEC_index (df_ref, *ref_vec, i + dist + 1));
}
count -= dist;
- ref_vec[count] = NULL;
- return count;
+ VEC_truncate (df_ref, *ref_vec, count);
}
-/* Return true if the contents of two df_mw_hardregs are identical. 
+/* Return true if the contents of two df_mw_hardregs are identical. */
static bool
/* Sort and compress a set of multiword hardregs. */
-static unsigned int
-df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
+static void
+df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
- struct df_scan_problem_data *problem_data
+ unsigned int count;
+ struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
unsigned int i;
unsigned int dist = 0;
- mw_vec[count] = NULL;
+ count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
if (count < 2)
- return count;
+ return;
else if (count == 2)
{
- if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
+ struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
+ struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
+ if (df_mw_compare (&m0, &m1) > 0)
{
- struct df_mw_hardreg *tmp = mw_vec[0];
- mw_vec[0] = mw_vec[1];
- mw_vec[1] = tmp;
+ struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
+ *mw_vec, 0);
+ VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
+ VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
+ VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
}
}
else
- qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
+ VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);
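+  /* Compact out duplicates in place, exactly as in
+     df_sort_and_compress_refs, except that freed entries are returned
+     to the mw_reg_pool rather than to the ref pools.  */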
  for (i = 0; i < count - dist; i++)
{
/* Find the next ref that is not equal to the current ref. */
- while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
+ while (i + dist + 1 < count
+ && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
+ VEC_index (df_mw_hardreg_ptr, *mw_vec,
+ i + dist + 1)))
{
- pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
+ pool_free (problem_data->mw_reg_pool,
+ VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
dist++;
}
/* Copy it down to the next position. */
- if (dist)
- mw_vec[i+1] = mw_vec[i + dist + 1];
+ if (dist && i + dist + 1 < count)
+ VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
+ VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
}
count -= dist;
- mw_vec[count] = NULL;
- return count;
+ VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}
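+/* Sort and remove the duplicates from each of the ref and mw vectors
+   in COLLECTION_REC, leaving them in the canonical order that the
+   chain-building and verification code expects.  */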
static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
- if (collection_rec->def_vec)
- collection_rec->next_def
- = df_sort_and_compress_refs (collection_rec->def_vec,
- collection_rec->next_def);
- if (collection_rec->use_vec)
- collection_rec->next_use
- = df_sort_and_compress_refs (collection_rec->use_vec,
- collection_rec->next_use);
- if (collection_rec->eq_use_vec)
- collection_rec->next_eq_use
- = df_sort_and_compress_refs (collection_rec->eq_use_vec,
- collection_rec->next_eq_use);
- if (collection_rec->mw_vec)
- collection_rec->next_mw
- = df_sort_and_compress_mws (collection_rec->mw_vec,
- collection_rec->next_mw);
+ df_sort_and_compress_refs (&collection_rec->def_vec);
+ df_sort_and_compress_refs (&collection_rec->use_vec);
+ df_sort_and_compress_refs (&collection_rec->eq_use_vec);
+ df_sort_and_compress_mws (&collection_rec->mw_vec);
}
/* Add the new df_ref to appropriate reg_info/ref_info chains. */
static void
-df_install_ref (struct df_ref *this_ref,
- struct df_reg_info *reg_info,
+df_install_ref (df_ref this_ref,
+ struct df_reg_info *reg_info,
struct df_ref_info *ref_info,
bool add_to_table)
{
unsigned int regno = DF_REF_REGNO (this_ref);
/* Add the ref to the reg_{def,use,eq_use} chain. */
- struct df_ref *head = reg_info->reg_chain;
+ df_ref head = reg_info->reg_chain;
reg_info->reg_chain = this_ref;
reg_info->n_refs++;
df->hard_regs_live_count[regno]++;
}
- gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
- gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
+ gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
+ && DF_REF_PREV_REG (this_ref) == NULL);
DF_REF_NEXT_REG (this_ref) = head;
if (head)
DF_REF_PREV_REG (head) = this_ref;
-
+
if (add_to_table)
{
gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
/* Add the ref to the big array of defs. */
ref_info->refs[ref_info->table_size] = this_ref;
ref_info->table_size++;
- }
+ }
else
DF_REF_ID (this_ref) = -1;
-
+
ref_info->total_size++;
}
eq_uses) and installs the entire group into the insn. It also adds
each of these refs into the appropriate chains. */
-static struct df_ref **
+static df_ref *
df_install_refs (basic_block bb,
- struct df_ref **old_vec, unsigned int count,
- struct df_reg_info **reg_info,
+ VEC(df_ref,stack)* old_vec,
+ struct df_reg_info **reg_info,
struct df_ref_info *ref_info,
bool is_notes)
{
+ unsigned int count;
+
+ count = VEC_length (df_ref, old_vec);
if (count)
{
- unsigned int i;
- struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
+ df_ref *new_vec = XNEWVEC (df_ref, count + 1);
bool add_to_table;
+ df_ref this_ref;
+ unsigned int ix;
switch (ref_info->ref_order)
{
if (add_to_table && df->analyze_subset)
add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
- for (i = 0; i < count; i++)
+ FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
{
- struct df_ref *this_ref = old_vec[i];
- new_vec[i] = this_ref;
- df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
+ new_vec[ix] = this_ref;
+ df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
ref_info, add_to_table);
}
-
+
new_vec[count] = NULL;
return new_vec;
}
insn. */
static struct df_mw_hardreg **
-df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
+df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
{
+ unsigned int count;
+
+ count = VEC_length (df_mw_hardreg_ptr, old_vec);
if (count)
{
- struct df_mw_hardreg **new_vec
+ struct df_mw_hardreg **new_vec
= XNEWVEC (struct df_mw_hardreg*, count + 1);
- memcpy (new_vec, old_vec,
- sizeof (struct df_mw_hardreg*) * (count + 1));
+ memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
+ sizeof (struct df_mw_hardreg*) * count);
+ new_vec[count] = NULL;
return new_vec;
}
else
chains and update other necessary information. */
static void
-df_refs_add_to_chains (struct df_collection_rec *collection_rec,
+df_refs_add_to_chains (struct df_collection_rec *collection_rec,
basic_block bb, rtx insn)
{
if (insn)
{
- struct df_insn_info *insn_rec = DF_INSN_GET (insn);
+ struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
/* If there is a vector in the collection rec, add it to the
insn. A null rec is a signal that the caller will handle the
chain specially. */
if (collection_rec->def_vec)
{
- if (insn_rec->defs && *insn_rec->defs)
- free (insn_rec->defs);
- insn_rec->defs
- = df_install_refs (bb, collection_rec->def_vec,
- collection_rec->next_def,
+ df_scan_free_ref_vec (insn_rec->defs);
+ insn_rec->defs
+ = df_install_refs (bb, collection_rec->def_vec,
df->def_regs,
&df->def_info, false);
}
if (collection_rec->use_vec)
{
- if (insn_rec->uses && *insn_rec->uses)
- free (insn_rec->uses);
- insn_rec->uses
- = df_install_refs (bb, collection_rec->use_vec,
- collection_rec->next_use,
+ df_scan_free_ref_vec (insn_rec->uses);
+ insn_rec->uses
+ = df_install_refs (bb, collection_rec->use_vec,
df->use_regs,
&df->use_info, false);
}
if (collection_rec->eq_use_vec)
{
- if (insn_rec->eq_uses && *insn_rec->eq_uses)
- free (insn_rec->eq_uses);
- insn_rec->eq_uses
- = df_install_refs (bb, collection_rec->eq_use_vec,
- collection_rec->next_eq_use,
+ df_scan_free_ref_vec (insn_rec->eq_uses);
+ insn_rec->eq_uses
+ = df_install_refs (bb, collection_rec->eq_use_vec,
df->eq_use_regs,
&df->use_info, true);
}
if (collection_rec->mw_vec)
{
- if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
- free (insn_rec->mw_hardregs);
- insn_rec->mw_hardregs
- = df_install_mws (collection_rec->mw_vec,
- collection_rec->next_mw);
+ df_scan_free_mws_vec (insn_rec->mw_hardregs);
+ insn_rec->mw_hardregs
+ = df_install_mws (collection_rec->mw_vec);
}
}
else
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
- if (bb_info->artificial_defs && *bb_info->artificial_defs)
- free (bb_info->artificial_defs);
- bb_info->artificial_defs
- = df_install_refs (bb, collection_rec->def_vec,
- collection_rec->next_def,
+ df_scan_free_ref_vec (bb_info->artificial_defs);
+ bb_info->artificial_defs
+ = df_install_refs (bb, collection_rec->def_vec,
df->def_regs,
&df->def_info, false);
- if (bb_info->artificial_uses && *bb_info->artificial_uses)
- free (bb_info->artificial_uses);
- bb_info->artificial_uses
- = df_install_refs (bb, collection_rec->use_vec,
- collection_rec->next_use,
+ df_scan_free_ref_vec (bb_info->artificial_uses);
+ bb_info->artificial_uses
+ = df_install_refs (bb, collection_rec->use_vec,
df->use_regs,
&df->use_info, false);
}
}
-/* Allocate a ref and initialize its fields.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the fields
- if they were constants. Otherwise they should be -1 if those flags
- were set. */
+/* Allocate a ref and initialize its fields. */
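+/* The class determines which of the three alloc pools the ref comes
+   from: a DF_REF_BASE ref carries neither a location nor a block, a
+   DF_REF_ARTIFICIAL ref additionally records its basic block, and a
+   DF_REF_REGULAR ref records the address LOC of the operand within
+   its insn.  Presumably the split keeps the common refs small.  */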
-static struct df_ref *
-df_ref_create_structure (struct df_collection_rec *collection_rec,
- rtx reg, rtx *loc,
- basic_block bb, rtx insn,
- enum df_ref_type ref_type,
- enum df_ref_flags ref_flags,
- int width, int offset, enum machine_mode mode)
+static df_ref
+df_ref_create_structure (enum df_ref_class cl,
+ struct df_collection_rec *collection_rec,
+ rtx reg, rtx *loc,
+ basic_block bb, struct df_insn_info *info,
+ enum df_ref_type ref_type,
+ int ref_flags)
{
- struct df_ref *this_ref;
+ df_ref this_ref = NULL;
int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- if (ref_flags & (DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
+ switch (cl)
{
- this_ref = pool_alloc (problem_data->ref_extract_pool);
- DF_REF_EXTRACT_WIDTH (this_ref) = width;
- DF_REF_EXTRACT_OFFSET (this_ref) = offset;
- DF_REF_EXTRACT_MODE (this_ref) = mode;
+ case DF_REF_BASE:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
+ gcc_checking_assert (loc == NULL);
+ break;
+
+ case DF_REF_ARTIFICIAL:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
+ this_ref->artificial_ref.bb = bb;
+ gcc_checking_assert (loc == NULL);
+ break;
+
+ case DF_REF_REGULAR:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
+ this_ref->regular_ref.loc = loc;
+ gcc_checking_assert (loc);
+ break;
}
- else
- this_ref = pool_alloc (problem_data->ref_pool);
+
+ DF_REF_CLASS (this_ref) = cl;
DF_REF_ID (this_ref) = -1;
DF_REF_REG (this_ref) = reg;
DF_REF_REGNO (this_ref) = regno;
- DF_REF_LOC (this_ref) = loc;
- DF_REF_INSN (this_ref) = insn;
- DF_REF_CHAIN (this_ref) = NULL;
DF_REF_TYPE (this_ref) = ref_type;
+ DF_REF_INSN_INFO (this_ref) = info;
+ DF_REF_CHAIN (this_ref) = NULL;
DF_REF_FLAGS (this_ref) = ref_flags;
- DF_REF_BB (this_ref) = bb;
DF_REF_NEXT_REG (this_ref) = NULL;
DF_REF_PREV_REG (this_ref) = NULL;
DF_REF_ORDER (this_ref) = df->ref_order++;
DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
/* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
- if ((regno < FIRST_PSEUDO_REGISTER)
+ if ((regno < FIRST_PSEUDO_REGISTER)
&& (!DF_REF_IS_ARTIFICIAL (this_ref)))
{
- if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (this_ref))
{
if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
if (collection_rec)
{
- if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
- collection_rec->def_vec[collection_rec->next_def++] = this_ref;
+ if (DF_REF_REG_DEF_P (this_ref))
+ VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
- collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
+ VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
else
- collection_rec->use_vec[collection_rec->next_use++] = this_ref;
+ VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
}
return this_ref;
/* Create new references of type DF_REF_TYPE for each part of register REG
- at address LOC within INSN of BB.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
- fields if they were constants. Otherwise they should be -1 if
- those flags were set. */
+ at address LOC within INSN of BB. */
static void
-df_ref_record (struct df_collection_rec *collection_rec,
- rtx reg, rtx *loc,
- basic_block bb, rtx insn,
- enum df_ref_type ref_type,
- enum df_ref_flags ref_flags,
- int width, int offset, enum machine_mode mode)
+df_ref_record (enum df_ref_class cl,
+ struct df_collection_rec *collection_rec,
+ rtx reg, rtx *loc,
+ basic_block bb, struct df_insn_info *insn_info,
+ enum df_ref_type ref_type,
+ int ref_flags)
{
unsigned int regno;
- gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
+ gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
if (regno < FIRST_PSEUDO_REGISTER)
= (struct df_scan_problem_data *) df_scan->problem_data;
unsigned int i;
unsigned int endregno;
- struct df_ref *ref;
+ df_ref ref;
if (GET_CODE (reg) == SUBREG)
{
/* If this is a multiword hardreg, we create some extra
datastructures that will enable us to easily build REG_DEAD
and REG_UNUSED notes. */
- if ((endregno != regno + 1) && insn)
+ if ((endregno != regno + 1) && insn_info)
{
- /* Sets to a subreg of a multiword register are partial.
+ /* Sets to a subreg of a multiword register are partial.
Sets to a non-subreg of a multiword register are not. */
if (GET_CODE (reg) == SUBREG)
ref_flags |= DF_REF_PARTIAL;
ref_flags |= DF_REF_MW_HARDREG;
- hardreg = pool_alloc (problem_data->mw_reg_pool);
+ hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
hardreg->type = ref_type;
hardreg->flags = ref_flags;
hardreg->mw_reg = reg;
hardreg->start_regno = regno;
hardreg->end_regno = endregno - 1;
hardreg->mw_order = df->ref_order++;
- collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
+ VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
+ hardreg);
}
for (i = regno; i < endregno; i++)
{
- ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
- bb, insn, ref_type, ref_flags,
- width, offset, mode);
+ ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
+ bb, insn_info, ref_type, ref_flags);
gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
}
}
else
{
- struct df_ref *ref;
- ref = df_ref_create_structure (collection_rec, reg, loc, bb, insn,
- ref_type, ref_flags, width, offset, mode);
+ df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
+ ref_type, ref_flags);
}
}
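+/* Record the def of the destination of the SET or CLOBBER X.  Defs
+   arising from auto-increment addresses are instead created by
+   df_uses_record when it walks the PRE_/POST_ modify codes.  */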
static void
df_def_record_1 (struct df_collection_rec *collection_rec,
- rtx x, basic_block bb, rtx insn,
- enum df_ref_flags flags)
+ rtx x, basic_block bb, struct df_insn_info *insn_info,
+ int flags)
{
rtx *loc;
rtx dst;
- int offset = -1;
- int width = -1;
- enum machine_mode mode = 0;
/* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
construct. */
if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
|| GET_CODE (temp) == SET)
df_def_record_1 (collection_rec,
- temp, bb, insn,
- GET_CODE (temp) == CLOBBER
+ temp, bb, insn_info,
+ GET_CODE (temp) == CLOBBER
? flags | DF_REF_MUST_CLOBBER : flags);
}
return;
if (GET_CODE (dst) == ZERO_EXTRACT)
{
flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
-
- if (GET_CODE (XEXP (dst, 1)) == CONST_INT
- && GET_CODE (XEXP (dst, 2)) == CONST_INT)
- {
- width = INTVAL (XEXP (dst, 1));
- offset = INTVAL (XEXP (dst, 2));
- mode = GET_MODE (dst);
- }
loc = &XEXP (dst, 0);
dst = *loc;
/* At this point if we do not have a reg or a subreg, just return. */
if (REG_P (dst))
{
- df_ref_record (collection_rec,
- dst, loc, bb, insn, DF_REF_REG_DEF, flags,
- width, offset, mode);
+ df_ref_record (DF_REF_REGULAR, collection_rec,
+ dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
/* We want to keep sp alive everywhere - by making all
writes to sp also use of sp. */
if (REGNO (dst) == STACK_POINTER_REGNUM)
- df_ref_record (collection_rec,
- dst, NULL, bb, insn, DF_REF_REG_USE, flags,
- width, offset, mode);
+ df_ref_record (DF_REF_BASE, collection_rec,
+ dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
}
else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
{
flags |= DF_REF_SUBREG;
- df_ref_record (collection_rec,
- dst, loc, bb, insn, DF_REF_REG_DEF, flags,
- width, offset, mode);
+ df_ref_record (DF_REF_REGULAR, collection_rec,
+ dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
}
}
/* Process all the registers defined in the pattern rtx, X. */
static void
-df_defs_record (struct df_collection_rec *collection_rec,
- rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
+df_defs_record (struct df_collection_rec *collection_rec,
+ rtx x, basic_block bb, struct df_insn_info *insn_info,
+ int flags)
{
RTX_CODE code = GET_CODE (x);
if (code == SET || code == CLOBBER)
{
/* Mark the single def within the pattern. */
- enum df_ref_flags clobber_flags = flags;
+ int clobber_flags = flags;
clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
- df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
+ df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
}
else if (code == COND_EXEC)
{
- df_defs_record (collection_rec, COND_EXEC_CODE (x),
- bb, insn, DF_REF_CONDITIONAL);
+ df_defs_record (collection_rec, COND_EXEC_CODE (x),
+ bb, insn_info, DF_REF_CONDITIONAL);
}
else if (code == PARALLEL)
{
/* Mark the multiple defs within the pattern. */
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
- df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
+ df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
}
}
-/* Process all the registers used in the rtx at address LOC.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
- fields if they were constants. Otherwise they should be -1 if
- those flags were set. */
+/* Process all the registers used in the rtx at address LOC. */
static void
df_uses_record (struct df_collection_rec *collection_rec,
rtx *loc, enum df_ref_type ref_type,
- basic_block bb, rtx insn, enum df_ref_flags flags,
- int width, int offset, enum machine_mode mode)
+ basic_block bb, struct df_insn_info *insn_info,
+ int flags)
{
RTX_CODE code;
rtx x;
if (MEM_P (XEXP (x, 0)))
df_uses_record (collection_rec,
&XEXP (XEXP (x, 0), 0),
- DF_REF_REG_MEM_STORE, bb, insn, flags,
- width, offset, mode);
+ DF_REF_REG_MEM_STORE,
+ bb, insn_info,
+ flags);
/* If we're clobbering a REG then we have a def so ignore. */
return;
case MEM:
df_uses_record (collection_rec,
- &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
- bb, insn, flags & DF_REF_IN_NOTE,
- width, offset, mode);
+ &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
+ bb, insn_info, flags & DF_REF_IN_NOTE);
return;
case SUBREG:
if (!REG_P (SUBREG_REG (x)))
{
loc = &SUBREG_REG (x);
- df_uses_record (collection_rec, loc, ref_type, bb, insn, flags,
- width, offset, mode);
+ df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
return;
}
/* ... Fall through ... */
case REG:
- df_ref_record (collection_rec,
- x, loc, bb, insn, ref_type, flags,
- width, offset, mode);
+ df_ref_record (DF_REF_REGULAR, collection_rec,
+ x, loc, bb, insn_info,
+ ref_type, flags);
return;
case SIGN_EXTRACT:
case ZERO_EXTRACT:
{
- /* If the parameters to the zero or sign extract are
- constants, strip them off and recurse, otherwise there is
- no information that we can gain from this operation. */
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
- && GET_CODE (XEXP (x, 2)) == CONST_INT)
- {
- width = INTVAL (XEXP (x, 1));
- offset = INTVAL (XEXP (x, 2));
- mode = GET_MODE (x);
-
- if (code == ZERO_EXTRACT)
- flags |= DF_REF_ZERO_EXTRACT;
- else
- flags |= DF_REF_SIGN_EXTRACT;
-
- df_uses_record (collection_rec,
- &XEXP (x, 0), ref_type, bb, insn, flags,
- width, offset, mode);
- return;
- }
+ df_uses_record (collection_rec,
+ &XEXP (x, 1), ref_type, bb, insn_info, flags);
+ df_uses_record (collection_rec,
+ &XEXP (x, 2), ref_type, bb, insn_info, flags);
+
+	/* The width and offset operands were recorded above as plain
+	   uses; what remains is to flag the reference to the operand
+	   being extracted itself.  */
+ if (code == ZERO_EXTRACT)
+ flags |= DF_REF_ZERO_EXTRACT;
+ else
+ flags |= DF_REF_SIGN_EXTRACT;
+
+ df_uses_record (collection_rec,
+ &XEXP (x, 0), ref_type, bb, insn_info, flags);
+ return;
}
break;
rtx dst = SET_DEST (x);
gcc_assert (!(flags & DF_REF_IN_NOTE));
df_uses_record (collection_rec,
- &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags,
- width, offset, mode);
+ &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
switch (GET_CODE (dst))
{
case SUBREG:
if (df_read_modify_subreg_p (dst))
{
- df_uses_record (collection_rec, &SUBREG_REG (dst),
- DF_REF_REG_USE, bb, insn,
- flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
- width, offset, mode);
+ df_uses_record (collection_rec, &SUBREG_REG (dst),
+ DF_REF_REG_USE, bb, insn_info,
+ flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
break;
}
/* Fall through. */
break;
case MEM:
df_uses_record (collection_rec, &XEXP (dst, 0),
- DF_REF_REG_MEM_STORE, bb, insn, flags,
- width, offset, mode);
+ DF_REF_REG_MEM_STORE, bb, insn_info, flags);
break;
case STRICT_LOW_PART:
{
/* A strict_low_part uses the whole REG and not just the
SUBREG. */
dst = XEXP (dst, 0);
- df_uses_record (collection_rec,
- (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
- DF_REF_REG_USE, bb, insn,
- DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
- width, offset, mode);
+ df_uses_record (collection_rec,
+ (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
}
break;
case ZERO_EXTRACT:
{
- if (GET_CODE (XEXP (dst, 1)) == CONST_INT
- && GET_CODE (XEXP (dst, 2)) == CONST_INT)
- {
- width = INTVAL (XEXP (dst, 1));
- offset = INTVAL (XEXP (dst, 2));
- mode = GET_MODE (dst);
- }
- else
- {
- df_uses_record (collection_rec, &XEXP (dst, 1),
- DF_REF_REG_USE, bb, insn, flags,
- width, offset, mode);
- df_uses_record (collection_rec, &XEXP (dst, 2),
- DF_REF_REG_USE, bb, insn, flags,
- width, offset, mode);
- }
-
- df_uses_record (collection_rec, &XEXP (dst, 0),
- DF_REF_REG_USE, bb, insn,
- DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
- width, offset, mode);
+ df_uses_record (collection_rec, &XEXP (dst, 1),
+ DF_REF_REG_USE, bb, insn_info, flags);
+ df_uses_record (collection_rec, &XEXP (dst, 2),
+ DF_REF_REG_USE, bb, insn_info, flags);
+	    if (GET_CODE (XEXP (dst, 0)) == MEM)
+ df_uses_record (collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ flags);
+ else
+ df_uses_record (collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
}
break;
for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
- DF_REF_REG_USE, bb, insn, flags,
- width, offset, mode);
+ DF_REF_REG_USE, bb, insn_info, flags);
return;
}
break;
}
+ case VAR_LOCATION:
+ df_uses_record (collection_rec,
+ &PAT_VAR_LOCATION_LOC (x),
+ DF_REF_REG_USE, bb, insn_info, flags);
+ return;
+
case PRE_DEC:
case POST_DEC:
case PRE_INC:
case POST_INC:
case PRE_MODIFY:
case POST_MODIFY:
+ gcc_assert (!DEBUG_INSN_P (insn_info->insn));
/* Catch the def of the register being modified. */
- df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
+ df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
+ bb, insn_info,
DF_REF_REG_DEF,
- flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
- width, offset, mode);
+ flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
/* ... Fall through to handle uses ... */
loc = &XEXP (x, 0);
goto retry;
}
- df_uses_record (collection_rec, &XEXP (x, i), ref_type,
- bb, insn, flags,
- width, offset, mode);
+ df_uses_record (collection_rec, &XEXP (x, i), ref_type,
+ bb, insn_info, flags);
}
else if (fmt[i] == 'E')
{
int j;
for (j = 0; j < XVECLEN (x, i); j++)
df_uses_record (collection_rec,
- &XVECEXP (x, i, j), ref_type,
- bb, insn, flags,
- width, offset, mode);
+ &XVECEXP (x, i, j), ref_type,
+ bb, insn_info, flags);
}
}
}
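+/* A DF_REF_CONDITIONAL def does not unconditionally kill its
+   register, so each such def in COLLECTION_REC also needs a matching
+   use of the same register; create those companion uses here.  */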
static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
- unsigned int i;
- for (i = 0; i < collection_rec->next_def; i++)
+ unsigned int ix;
+ df_ref ref;
+
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
{
- struct df_ref *ref = collection_rec->def_vec[i];
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
{
- int width = -1;
- int offset = -1;
- enum machine_mode mode = 0;
- struct df_ref *use;
-
- if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- {
- width = DF_REF_EXTRACT_WIDTH (ref);
- offset = DF_REF_EXTRACT_OFFSET (ref);
- mode = DF_REF_EXTRACT_MODE (ref);
- }
+ df_ref use;
- use = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
+	  use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec,
+					 DF_REF_REG (ref),
DF_REF_LOC (ref), DF_REF_BB (ref),
- DF_REF_INSN (ref), DF_REF_REG_USE,
- DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
- width, offset, mode);
+ DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
+ DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
DF_REF_REGNO (use) = DF_REF_REGNO (ref);
}
}
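+/* Collect the refs that are implicit in a CALL_INSN: the uses and
+   clobbers listed in CALL_INSN_FUNCTION_USAGE, the (honorary) use of
+   the stack pointer, defs and uses of global registers, and clobbers
+   of the registers invalidated across the call.  */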
static void
df_get_call_refs (struct df_collection_rec * collection_rec,
- basic_block bb,
- rtx insn,
- enum df_ref_flags flags)
+ basic_block bb,
+ struct df_insn_info *insn_info,
+ int flags)
{
rtx note;
bitmap_iterator bi;
unsigned int ui;
bool is_sibling_call;
unsigned int i;
- bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
+ df_ref def;
+ bitmap_head defs_generated;
+
+ bitmap_initialize (&defs_generated, &df_bitmap_obstack);
/* Do not generate clobbers for registers that are the result of the
call. This causes ordering problems in the chain building code
depending on which def is seen first. */
- for (i=0; i<collection_rec->next_def; i++)
- {
- struct df_ref *def = collection_rec->def_vec[i];
- bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
- }
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, i, def)
+ bitmap_set_bit (&defs_generated, DF_REF_REGNO (def));
  /* Record the registers used to pass arguments, and those explicitly
     noted as clobbered.  */
- for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
+ for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
note = XEXP (note, 1))
{
if (GET_CODE (XEXP (note, 0)) == USE)
df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
- DF_REF_REG_USE, bb, insn, flags, -1, -1, 0);
+ DF_REF_REG_USE, bb, insn_info, flags);
else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
{
if (REG_P (XEXP (XEXP (note, 0), 0)))
{
unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
- if (!bitmap_bit_p (defs_generated, regno))
+ if (!bitmap_bit_p (&defs_generated, regno))
df_defs_record (collection_rec, XEXP (note, 0), bb,
- insn, flags);
+ insn_info, flags);
}
else
df_uses_record (collection_rec, &XEXP (note, 0),
- DF_REF_REG_USE, bb, insn, flags, -1, -1, 0);
+ DF_REF_REG_USE, bb, insn_info, flags);
}
}
/* The stack ptr is used (honorarily) by a CALL insn. */
- df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
- NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags,
- -1, -1, 0);
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
+ NULL, bb, insn_info, DF_REF_REG_USE,
+ DF_REF_CALL_STACK_USAGE | flags);
/* Calls may also reference any of the global registers,
so they are recorded as used. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
{
- df_ref_record (collection_rec, regno_reg_rtx[i],
- NULL, bb, insn, DF_REF_REG_USE, flags, -1, -1, 0);
- df_ref_record (collection_rec, regno_reg_rtx[i],
- NULL, bb, insn, DF_REF_REG_DEF, flags, -1, -1, 0);
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_USE, flags);
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_DEF, flags);
}
- is_sibling_call = SIBLING_CALL_P (insn);
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
+ is_sibling_call = SIBLING_CALL_P (insn_info->insn);
+ EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
{
if (!global_regs[ui]
- && (!bitmap_bit_p (defs_generated, ui))
+ && (!bitmap_bit_p (&defs_generated, ui))
&& (!is_sibling_call
|| !bitmap_bit_p (df->exit_block_uses, ui)
- || refers_to_regno_p (ui, ui+1,
+ || refers_to_regno_p (ui, ui+1,
crtl->return_rtx, NULL)))
- df_ref_record (collection_rec, regno_reg_rtx[ui],
- NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags,
- -1, -1, 0);
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
+ NULL, bb, insn_info, DF_REF_REG_DEF,
+ DF_REF_MAY_CLOBBER | flags);
}
- BITMAP_FREE (defs_generated);
+ bitmap_clear (&defs_generated);
return;
}
and reg chains. */
static void
-df_insn_refs_collect (struct df_collection_rec* collection_rec,
- basic_block bb, rtx insn)
+df_insn_refs_collect (struct df_collection_rec* collection_rec,
+ basic_block bb, struct df_insn_info *insn_info)
{
rtx note;
- bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
+ bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
/* Clear out the collection record. */
- collection_rec->next_def = 0;
- collection_rec->next_use = 0;
- collection_rec->next_eq_use = 0;
- collection_rec->next_mw = 0;
+ VEC_truncate (df_ref, collection_rec->def_vec, 0);
+ VEC_truncate (df_ref, collection_rec->use_vec, 0);
+ VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
+ VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
/* Record register defs. */
- df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
+ df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
- /* Process REG_EQUIV/REG_EQUAL notes */
- for (note = REG_NOTES (insn); note;
+ /* Process REG_EQUIV/REG_EQUAL notes. */
+ for (note = REG_NOTES (insn_info->insn); note;
note = XEXP (note, 1))
{
switch (REG_NOTE_KIND (note))
case REG_EQUAL:
df_uses_record (collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
- bb, insn, DF_REF_IN_NOTE, -1, -1, 0);
+ bb, insn_info, DF_REF_IN_NOTE);
break;
case REG_NON_LOCAL_GOTO:
/* The frame ptr is used by a non-local goto. */
- df_ref_record (collection_rec,
+ df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[FRAME_POINTER_REGNUM],
- NULL,
- bb, insn,
- DF_REF_REG_USE, 0, -1, -1, 0);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- df_ref_record (collection_rec,
+ NULL, bb, insn_info,
+ DF_REF_REG_USE, 0);
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
+ df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
- NULL,
- bb, insn,
- DF_REF_REG_USE, 0, -1, -1, 0);
+ NULL, bb, insn_info,
+ DF_REF_REG_USE, 0);
#endif
break;
default:
}
}
- if (CALL_P (insn))
- df_get_call_refs (collection_rec, bb, insn,
+ if (CALL_P (insn_info->insn))
+ df_get_call_refs (collection_rec, bb, insn_info,
(is_cond_exec) ? DF_REF_CONDITIONAL : 0);
/* Record the register uses. */
df_uses_record (collection_rec,
- &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0,
- -1, -1, 0);
+ &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
/* DF_REF_CONDITIONAL needs corresponding USES. */
if (is_cond_exec)
/* Scan the block an insn at a time from beginning to end. */
FOR_BB_INSNS (bb, insn)
{
- struct df_insn_info *insn_info = DF_INSN_GET (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
/* Inserting labels does not always trigger the incremental
rescanning. */
if (!insn_info)
{
gcc_assert (!INSN_P (insn));
- df_insn_create_insn_record (insn);
+ insn_info = df_insn_create_insn_record (insn);
}
- DF_INSN_LUID (insn) = luid;
+ DF_INSN_INFO_LUID (insn_info) = luid;
if (INSN_P (insn))
luid++;
}
}
-/* Returns true if the function entry needs to
- define the static chain register. */
-
-static bool
-df_need_static_chain_reg (struct function *fun)
-{
- tree fun_context = decl_function_context (fun->decl);
- return fun_context
- && DECL_NO_STATIC_CHAIN (fun_context) == false;
-}
-
-
/* Collect all artificial refs at the block level for BB and add them
to COLLECTION_REC. */
static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
- collection_rec->next_def = 0;
- collection_rec->next_use = 0;
- collection_rec->next_eq_use = 0;
- collection_rec->next_mw = 0;
+ VEC_truncate (df_ref, collection_rec->def_vec, 0);
+ VEC_truncate (df_ref, collection_rec->use_vec, 0);
+ VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
+ VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
if (bb->index == ENTRY_BLOCK)
{
unsigned regno = EH_RETURN_DATA_REGNO (i);
if (regno == INVALID_REGNUM)
break;
- df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
- bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
+ bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
}
}
#endif
-
-#ifdef EH_USES
- if (bb_has_eh_pred (bb))
- {
- unsigned int i;
- /* This code is putting in an artificial ref for the use at the
- TOP of the block that receives the exception. It is too
- cumbersome to actually put the ref on the edge. We could
- either model this at the top of the receiver block or the
- bottom of the sender block.
-
- The bottom of the sender block is problematic because not all
- out-edges of the a block are eh-edges. However, it is true
- that all edges into a block are either eh-edges or none of
- them are eh-edges. Thus, we can model this at the top of the
- eh-receiver for all of the edges at once. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (EH_USES (i))
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
- bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP, -1, -1, 0);
- }
-#endif
-
/* Add the hard_frame_pointer if this block is the target of a
non-local goto. */
if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
- df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
- bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
-
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
+ bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
+
/* Add the artificial uses. */
if (bb->index >= NUM_FIXED_BLOCKS)
{
bitmap_iterator bi;
unsigned int regno;
- bitmap au = bb_has_eh_pred (bb)
- ? df->eh_block_artificial_uses
- : df->regular_block_artificial_uses;
+ bitmap au = bb_has_eh_pred (bb)
+ ? &df->eh_block_artificial_uses
+ : &df->regular_block_artificial_uses;
EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
{
- df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
- bb, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
+ bb, NULL, DF_REF_REG_USE, 0);
}
}
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
int luid = 0;
- struct df_scan_bb_info *bb_info;
struct df_collection_rec collection_rec;
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
if (!df)
return;
- bb_info = df_scan_get_bb_info (bb_index);
-
- /* Need to make sure that there is a record in the basic block info. */
- if (!bb_info)
- {
- bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
- df_scan_set_bb_info (bb_index, bb_info);
- bb_info->artificial_defs = NULL;
- bb_info->artificial_uses = NULL;
- }
+ df_grow_bb_info (df_scan);
+ collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
+ collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
if (scan_insns)
/* Scan the block an insn at a time from beginning to end. */
FOR_BB_INSNS (bb, insn)
{
- struct df_insn_info *insn_info = DF_INSN_GET (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
gcc_assert (!insn_info);
- df_insn_create_insn_record (insn);
+ insn_info = df_insn_create_insn_record (insn);
if (INSN_P (insn))
{
/* Record refs within INSN. */
- DF_INSN_LUID (insn) = luid++;
- df_insn_refs_collect (&collection_rec, bb, insn);
+ DF_INSN_INFO_LUID (insn_info) = luid++;
+ df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
df_refs_add_to_chains (&collection_rec, bb, insn);
}
- DF_INSN_LUID (insn) = luid;
+ DF_INSN_INFO_LUID (insn_info) = luid;
}
/* Other block level artificial refs */
df_bb_refs_collect (&collection_rec, bb);
df_refs_add_to_chains (&collection_rec, bb, NULL);
+ VEC_free (df_ref, stack, collection_rec.def_vec);
+ VEC_free (df_ref, stack, collection_rec.use_vec);
+ VEC_free (df_ref, stack, collection_rec.eq_use_vec);
+ VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
+
/* Now that the block has been processed, set the block as dirty so
LR and LIVE will get it processed. */
df_set_bb_dirty (bb);
static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
+#ifdef EH_USES
+ unsigned int i;
+#endif
+
bitmap_clear (regular_block_artificial_uses);
if (reload_completed)
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
-
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
if (fixed_regs[ARG_POINTER_REGNUM])
bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
-
+
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
}
/* The all-important stack pointer must always be live. */
bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
+
+#ifdef EH_USES
+ /* EH_USES registers are used:
+ 1) at all insns that might throw (calls or with -fnon-call-exceptions
+ trapping insns)
+ 2) in all EH edges
+     3) to support backtraces and/or debugging, anywhere between their
+	initialization and the point where the saved registers are
+	restored from them, including the cases where we don't reach
+	the epilogue (noreturn call or infinite loop). */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (EH_USES (i))
+ bitmap_set_bit (regular_block_artificial_uses, i);
+#endif
}
if (frame_pointer_needed)
{
bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
}
gcc_assert (GET_MODE (reg) != BLKmode);
- bitmap_set_bit (set, regno);
if (regno < FIRST_PSEUDO_REGISTER)
{
int n = hard_regno_nregs[regno][GET_MODE (reg)];
- while (--n > 0)
- bitmap_set_bit (set, regno + n);
+ bitmap_set_range (set, regno, n);
}
+ else
+ bitmap_set_bit (set, regno);
}
bitmap_set_bit (entry_block_defs, i);
#endif
}
-
+
+ /* The always important stack pointer. */
+ bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
+
/* Once the prologue has been generated, all of these registers
should just show up in the first regular block. */
if (HAVE_prologue && epilogue_completed)
if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
bitmap_set_bit (entry_block_defs, i);
}
- else
- {
- /* The always important stack pointer. */
- bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
-
- /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
- only STATIC_CHAIN_REGNUM is defined. If they are different,
- we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
-#ifdef STATIC_CHAIN_INCOMING_REGNUM
- bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
-#else
-#ifdef STATIC_CHAIN_REGNUM
- bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
-#endif
-#endif
- }
r = targetm.calls.struct_value_rtx (current_function_decl, true);
if (r && REG_P (r))
bitmap_set_bit (entry_block_defs, REGNO (r));
+ /* If the function has an incoming STATIC_CHAIN, it has to show up
+ in the entry def set. */
+ r = targetm.calls.static_chain (current_function_decl, true);
+ if (r && REG_P (r))
+ bitmap_set_bit (entry_block_defs, REGNO (r));
+
if ((!reload_completed) || frame_pointer_needed)
{
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
/* If they are different, also mark the hard frame pointer as live. */
if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
/* These registers are live everywhere. */
if (!reload_completed)
{
-#ifdef EH_USES
- /* The ia-64, the only machine that uses this, does not define these
- until after reload. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (EH_USES (i))
- {
- bitmap_set_bit (entry_block_defs, i);
- }
-#endif
-
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
if (fixed_regs[ARG_POINTER_REGNUM])
bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
#endif
-
+
#ifdef PIC_OFFSET_TABLE_REGNUM
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
if (REG_P (INCOMING_RETURN_ADDR_RTX))
bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif
-
- targetm.live_on_entry (entry_block_defs);
-
- /* If the function has an incoming STATIC_CHAIN,
- it has to show up in the entry def set. */
- if (df_need_static_chain_reg (cfun))
- {
-#ifdef STATIC_CHAIN_INCOMING_REGNUM
- bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
-#else
-#ifdef STATIC_CHAIN_REGNUM
- bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
-#endif
-#endif
- }
+
+ targetm.extra_live_on_entry (entry_block_defs);
}
/* Return the (conservative) set of hard registers that are defined on
- entry to the function.
- It uses df->entry_block_defs to determine which register
+ entry to the function.
+ It uses df->entry_block_defs to determine which register
reference to include. */
static void
-df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
+df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
bitmap entry_block_defs)
{
- unsigned int i;
+ unsigned int i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
{
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
- ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
+ ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
}
df_canonize_collection_rec (collection_rec);
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
-
+ collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
/* Process bb_refs chain */
df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
+ VEC_free (df_ref, stack, collection_rec.def_vec);
}
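+/* Update the defs in the entry block.  */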
void
df_update_entry_block_defs (void)
{
- bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head refs;
bool changed = false;
- df_get_entry_block_def_set (refs);
+ bitmap_initialize (&refs, &df_bitmap_obstack);
+ df_get_entry_block_def_set (&refs);
if (df->entry_block_defs)
{
- if (!bitmap_equal_p (df->entry_block_defs, refs))
+ if (!bitmap_equal_p (df->entry_block_defs, &refs))
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
df_ref_chain_delete_du_chain (bb_info->artificial_defs);
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
+ gcc_unreachable ();
df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
changed = true;
}
if (changed)
{
- df_record_entry_block_defs (refs);
- bitmap_copy (df->entry_block_defs, refs);
+ df_record_entry_block_defs (&refs);
+ bitmap_copy (df->entry_block_defs, &refs);
df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
}
- BITMAP_FREE (refs);
+ bitmap_clear (&refs);
}
static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
- unsigned int i;
+ unsigned int i;
bitmap_clear (exit_block_uses);
/* Stack pointer is always live at the exit. */
bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
-
+
/* Mark the frame pointer if needed at the end of the function.
If we end up eliminating it, it will be removed from the live
list of each basic block by reload. */
-
+
if ((!reload_completed) || frame_pointer_needed)
{
bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
/* If they are different, also mark the hard frame pointer as live. */
if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
}
-#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
/* Many architectures have a GP register even without flag_pic.
Assume the pic register is not in use, or will be handled by
other means, if it is not fixed. */
- if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
+ && (unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
&& fixed_regs[PIC_OFFSET_TABLE_REGNUM])
bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
-#endif
-
+
/* Mark all global registers, and all registers used by the
epilogue as being live at the end of the function since they
may be referenced by our caller. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i] || EPILOGUE_USES (i))
bitmap_set_bit (exit_block_uses, i);
-
+
if (HAVE_epilogue && epilogue_completed)
{
/* Mark all call-saved registers that we actually used. */
&& !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
bitmap_set_bit (exit_block_uses, i);
}
-
+
#ifdef EH_RETURN_DATA_REGNO
/* Mark the registers that will contain data for the handler. */
if (reload_completed && crtl->calls_eh_return)
if (tmp && REG_P (tmp))
df_mark_reg (tmp, exit_block_uses);
}
-#endif
+#endif
/* Mark function return value. */
diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
-/* Return the refs of hard registers that are used in the exit block.
+/* Return the refs of hard registers that are used in the exit block.
It uses df->exit_block_uses to determine register to include. */
static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
{
- unsigned int i;
+ unsigned int i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
- EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
+ EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* It is deliberate that this is not put in the exit block uses but
I do not know why. */
- if (reload_completed
+ if (reload_completed
&& !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
&& bb_has_eh_pred (EXIT_BLOCK_PTR)
&& fixed_regs[ARG_POINTER_REGNUM])
- df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
- EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
+ EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
#endif
df_canonize_collection_rec (collection_rec);
}
-/* Record the set of hard registers that are used in the exit block.
+/* Record the set of hard registers that are used in the exit block.
It uses df->exit_block_uses to determine which bit to include. */
static void
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
+ collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
/* Process bb_refs chain */
df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
+ VEC_free (df_ref, stack, collection_rec.use_vec);
}
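+/* Update the uses in the exit block.  */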
void
df_update_exit_block_uses (void)
{
- bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head refs;
bool changed = false;
- df_get_exit_block_use_set (refs);
+ bitmap_initialize (&refs, &df_bitmap_obstack);
+ df_get_exit_block_use_set (&refs);
if (df->exit_block_uses)
{
- if (!bitmap_equal_p (df->exit_block_uses, refs))
+ if (!bitmap_equal_p (df->exit_block_uses, &refs))
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
df_ref_chain_delete_du_chain (bb_info->artificial_uses);
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
+ gcc_unreachable ();
df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
changed = true;
}
if (changed)
{
- df_record_exit_block_uses (refs);
- bitmap_copy (df->exit_block_uses, refs);
+ df_record_exit_block_uses (&refs);
+      bitmap_copy (df->exit_block_uses, &refs);
df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
}
- BITMAP_FREE (refs);
+ bitmap_clear (&refs);
}
static bool initialized = false;
/* Initialize some platform specific structures. */
-void
+void
df_hard_reg_init (void)
{
- int i;
#ifdef ELIMINABLE_REGS
+ int i;
static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
if (initialized)
return;
- bitmap_obstack_initialize (&persistent_obstack);
-
/* Record which registers will be eliminated. We use this in
mark_used_regs. */
CLEAR_HARD_REG_SET (elim_reg_set);
-
+
#ifdef ELIMINABLE_REGS
for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif
-
- df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
-
- /* Inconveniently, this is only readily available in hard reg set
- form. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
- if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
- bitmap_set_bit (df_invalidated_by_call, i);
-
+
initialized = true;
}
/* Recompute the parts of scanning that are based on regs_ever_live
- because something changed in that array. */
+ because something changed in that array. */
-void
+void
df_update_entry_exit_and_calls (void)
{
basic_block bb;
/* The call insns need to be rescanned because there may be changes
in the set of registers clobbered across the call. */
- FOR_EACH_BB (bb)
+ FOR_EACH_BB (bb)
{
rtx insn;
FOR_BB_INSNS (bb, insn)
this array. See the comment in df.h for df->hard_regs_live_count
for the conditions that this array is set. */
-bool
+bool
df_hard_reg_used_p (unsigned int reg)
{
- gcc_assert (df);
return df->hard_regs_live_count[reg] != 0;
}
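+/* Return the number of refs currently recorded for hard register
+   REG; see the comment for df->hard_regs_live_count in df.h.  */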
unsigned int
df_hard_reg_used_count (unsigned int reg)
{
- gcc_assert (df);
return df->hard_regs_live_count[reg];
}
/* Get the value of regs_ever_live[REGNO]. */
-bool
+bool
df_regs_ever_live_p (unsigned int regno)
{
return regs_ever_live[regno];
/* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
to change, schedule that change for the next update. */
-void
+void
df_set_regs_ever_live (unsigned int regno, bool value)
{
if (regs_ever_live[regno] == value)
{
unsigned int i;
bool changed = df->redo_entry_and_exit;
-
+
if (reset)
memset (regs_ever_live, 0, sizeof (regs_ever_live));
df_reg_chain_mark (refs, regno, is_def, is_eq_use)
df_reg_chain_verify_unmarked (refs)
- df_refs_verify (ref*, ref*, bool)
+      df_refs_verify (VEC(df_ref,stack) *, ref*, bool)
df_mws_verify (mw*, mw*, bool)
df_insn_refs_verify (collection_rec, bb, insn, bool)
df_bb_refs_verify (bb, refs, bool)
/* Mark all refs in the reg chain. Verify that all of the registers
-are in the correct chain. */
+   are in the correct chain.  */
static unsigned int
-df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
+df_reg_chain_mark (df_ref refs, unsigned int regno,
bool is_def, bool is_eq_use)
{
unsigned int count = 0;
- struct df_ref *ref;
+ df_ref ref;
for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
{
gcc_assert (!DF_REF_IS_REG_MARKED (ref));
/* Check to make sure the ref is in the correct chain. */
gcc_assert (DF_REF_REGNO (ref) == regno);
if (is_def)
- gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
+ gcc_assert (DF_REF_REG_DEF_P (ref));
else
- gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
+ gcc_assert (!DF_REF_REG_DEF_P (ref));
if (is_eq_use)
gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
else
gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
- if (ref->next_reg)
- gcc_assert (ref->next_reg->prev_reg == ref);
+ if (DF_REF_NEXT_REG (ref))
+ gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
count++;
DF_REF_REG_MARK (ref);
}
}
-/* Verify that all of the registers in the chain are unmarked. */
+/* Verify that all of the registers in the chain are unmarked. */
static void
-df_reg_chain_verify_unmarked (struct df_ref *refs)
+df_reg_chain_verify_unmarked (df_ref refs)
{
- struct df_ref *ref;
+ df_ref ref;
for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}
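/* For reference, a sketch of the same chain walk from the client side
   (hypothetical helper): any chain reached through DF_REG_USE_CHAIN
   can be counted this way, and df_reg_chain_mark above checks the
   result against the cached DF_REG_USE_COUNT.  */

static unsigned int
count_reg_uses (unsigned int regno)
{
  df_ref ref;
  unsigned int n = 0;

  for (ref = DF_REG_USE_CHAIN (regno); ref; ref = DF_REF_NEXT_REG (ref))
    n++;
  return n;
}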
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
-df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
+df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
bool abort_if_fail)
{
- while ((*new_rec) && (*old_rec))
+ unsigned int ix;
+ df_ref new_ref;
+
+ FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
{
- if (!df_ref_equal_p (*new_rec, *old_rec))
+ if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }

      /* In the whole-function verifier (ABORT_IF_FAIL), clear the mark
         left by df_reg_chain_mark; any ref still marked at the end of
         the whole-function verification was never matched.  */
      if (abort_if_fail)
        DF_REF_REG_UNMARK (*old_rec);
- new_rec++;
old_rec++;
}
if (abort_if_fail)
- gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
+ gcc_assert (*old_rec == NULL);
else
- return ((*new_rec == NULL) && (*old_rec == NULL));
+ return *old_rec == NULL;
return false;
}
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
-df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
+df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
+ struct df_mw_hardreg **old_rec,
bool abort_if_fail)
{
- while ((*new_rec) && (*old_rec))
+ unsigned int ix;
+ struct df_mw_hardreg *new_reg;
+
+ FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
{
- if (!df_mw_equal_p (*new_rec, *old_rec))
+ if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
{
if (abort_if_fail)
gcc_assert (0);
else
return false;
}
- new_rec++;
old_rec++;
}
if (abort_if_fail)
- gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
+ gcc_assert (*old_rec == NULL);
else
- return ((*new_rec == NULL) && (*old_rec == NULL));
+ return *old_rec == NULL;
return false;
}
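/* The two verify routines above compare the same refs in two
   representations: the cached per-insn records are NULL-terminated
   arrays, while freshly collected refs sit in stack VECs.  A sketch
   (hypothetical helpers) of walking each form:  */

static unsigned int
count_old_style_refs (df_ref *old_rec)
{
  unsigned int n = 0;

  while (*old_rec++)
    n++;
  return n;
}

static unsigned int
count_new_style_refs (VEC(df_ref,stack) *new_rec)
{
  return VEC_length (df_ref, new_rec);
}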
/* Return true if the existing insn refs information is complete and
   correct.  Otherwise (i.e. if there are any missing or extra refs),
-   return the correct df_ref chain in REFS_RETURN.
+   return the correct df_ref chain in COLLECTION_REC.
If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED().  If it's false, then it's a per-insn
   verification mode instead of a whole-function one, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */
static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
- basic_block bb,
+ basic_block bb,
rtx insn,
bool abort_if_fail)
{
bool ret1, ret2, ret3, ret4;
unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_insn_refs_collect (collection_rec, bb, insn);
+ df_insn_refs_collect (collection_rec, bb, insn_info);
if (!DF_INSN_UID_DEFS (uid))
{
/* The insn_rec was created but it was never filled out. */
if (abort_if_fail)
gcc_assert (0);
- else
+ else
return false;
}
/* Unfortunately we cannot opt out early if one of these is not
right because the marks will not get cleared. */
- ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
+ ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
abort_if_fail);
- ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
+ ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
abort_if_fail);
- ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
+ ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
abort_if_fail);
- ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
+ ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
abort_if_fail);
return (ret1 && ret2 && ret3 && ret4);
}
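/* A sketch of the two calling modes described above (assumed usage,
   not code from the patch): the whole-function verifier passes
   ABORT_IF_FAIL = true and relies on the marks; a per-insn check
   passes false and simply gets a yes/no answer.  */

static bool
insn_refs_ok_p (basic_block bb, rtx insn)
{
  struct df_collection_rec collection_rec;
  bool ok;

  memset (&collection_rec, 0, sizeof (collection_rec));
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  /* false => per-insn mode: a mismatch returns false instead of
     asserting, and verified refs are left unmarked.  */
  ok = df_insn_refs_verify (&collection_rec, bb, insn, false);
  df_free_collection_rec (&collection_rec);
  return ok;
}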
rtx insn;
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
struct df_collection_rec collection_rec;
-
+
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
+ collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
+ collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
gcc_assert (bb_info);
- /* Scan the block an insn at a time from beginning to end. */
+  /* Scan the block, one insn at a time, from end to beginning.  */
FOR_BB_INSNS_REVERSE (bb, insn)
{
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
df_free_collection_rec (&collection_rec);
-
+
return true;
}
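/* The alloca buffers that this function used to carve out were
   replaced above by stack VECs.  A minimal sketch of that allocation
   discipline in isolation (hypothetical function):  */

static void
stack_vec_lifecycle_sketch (void)
{
  VEC(df_ref,stack) *refs = VEC_alloc (df_ref, stack, 32);
  df_ref r = NULL;   /* Stand-in for a ref being collected.  */

  /* Unlike the old fixed-size alloca arrays, VEC_safe_push grows the
     vector on demand, so the initial reservation is only a hint.  */
  VEC_safe_push (df_ref, stack, refs, r);

  VEC_free (df_ref, stack, refs);
}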
-/* Returns true if the entry block has correct and complete df_ref set.
+/* Returns true if the entry block has a correct and complete df_ref set.
   If not, it either aborts (if ABORT_IF_FAIL is true) or returns false.  */
static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
- bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head entry_block_defs;
bool is_eq;
- df_get_entry_block_def_set (entry_block_defs);
+ bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
+ df_get_entry_block_def_set (&entry_block_defs);
- is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
+ is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
if (!is_eq && abort_if_fail)
{
print_current_pass (stderr);
fprintf (stderr, "entry_block_defs = ");
- df_print_regset (stderr, entry_block_defs);
+ df_print_regset (stderr, &entry_block_defs);
fprintf (stderr, "df->entry_block_defs = ");
df_print_regset (stderr, df->entry_block_defs);
gcc_assert (0);
}
- BITMAP_FREE (entry_block_defs);
+ bitmap_clear (&entry_block_defs);
return is_eq;
}
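/* The pattern introduced here, a bitmap_head on the stack initialized
   against an obstack instead of a BITMAP_ALLOC/BITMAP_FREE pair, in
   isolation (hypothetical helper):  */

static bool
recomputed_set_matches_p (bitmap cached)
{
  bitmap_head tmp;
  bool ok;

  bitmap_initialize (&tmp, &df_bitmap_obstack);
  /* ... recompute the set into &tmp here ...  */
  ok = bitmap_equal_p (&tmp, cached);
  bitmap_clear (&tmp);   /* Releases the elements; there is no head to free.  */
  return ok;
}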
-/* Returns true if the exit block has correct and complete df_ref set.
+/* Returns true if the exit block has a correct and complete df_ref set.
   If not, it either aborts (if ABORT_IF_FAIL is true) or returns false.  */
static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
- bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head exit_block_uses;
bool is_eq;
- df_get_exit_block_use_set (exit_block_uses);
+ bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
+ df_get_exit_block_use_set (&exit_block_uses);
- is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
+ is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
if (!is_eq && abort_if_fail)
{
print_current_pass (stderr);
fprintf (stderr, "exit_block_uses = ");
- df_print_regset (stderr, exit_block_uses);
+ df_print_regset (stderr, &exit_block_uses);
fprintf (stderr, "df->exit_block_uses = ");
df_print_regset (stderr, df->exit_block_uses);
gcc_assert (0);
}
- BITMAP_FREE (exit_block_uses);
+ bitmap_clear (&exit_block_uses);
return is_eq;
}
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete.  */

void
df_scan_verify (void)
{
unsigned int i;
basic_block bb;
- bitmap regular_block_artificial_uses;
- bitmap eh_block_artificial_uses;
+ bitmap_head regular_block_artificial_uses;
+ bitmap_head eh_block_artificial_uses;
if (!df)
return;
  /* (1) All of the refs are marked by going through the reg chains.  */
for (i = 0; i < DF_REG_SIZE (df); i++)
{
- gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
+ gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
== DF_REG_DEF_COUNT(i));
- gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
+ gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
== DF_REG_USE_COUNT(i));
- gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
+ gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
== DF_REG_EQ_USE_COUNT(i));
}
/* (2) There are various bitmaps whose value may change over the
course of the compilation. This step recomputes them to make
sure that they have not slipped out of date. */
- regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
- eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
+  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
+ bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
- df_get_regular_block_artificial_uses (regular_block_artificial_uses);
- df_get_eh_block_artificial_uses (eh_block_artificial_uses);
+  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
+ df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
- bitmap_ior_into (eh_block_artificial_uses,
- regular_block_artificial_uses);
+ bitmap_ior_into (&eh_block_artificial_uses,
+                   &regular_block_artificial_uses);
  /* Check that the artificial_uses bitmaps didn't change.  */
- gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
- df->regular_block_artificial_uses));
- gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
- df->eh_block_artificial_uses));
+  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
+ &df->regular_block_artificial_uses));
+ gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
+ &df->eh_block_artificial_uses));
- BITMAP_FREE (regular_block_artificial_uses);
- BITMAP_FREE (eh_block_artificial_uses);
+  bitmap_clear (&regular_block_artificial_uses);
+ bitmap_clear (&eh_block_artificial_uses);
  /* Verify entry block and exit block.  These only verify the bitmaps;
     the refs are verified in df_bb_verify.  */
df_entry_block_bitmap_verify (true);
df_exit_block_bitmap_verify (true);
-
+
/* (3) All of the insns in all of the blocks are traversed and the
marks are cleared both in the artificial refs attached to the
blocks and the real refs inside the insns. It is a failure to