/* Standard problems for dataflow support routines.
- Copyright (C) 1999-2014 Free Software Foundation, Inc.
+ Copyright (C) 1999-2015 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
+#include "hashtab.h"
+#include "hash-set.h"
+#include "vec.h"
+#include "machmode.h"
+#include "hard-reg-set.h"
+#include "input.h"
#include "function.h"
#include "regs.h"
#include "alloc-pool.h"
#include "flags.h"
-#include "hard-reg-set.h"
+#include "predict.h"
+#include "dominance.h"
+#include "cfg.h"
+#include "cfganal.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "dce.h"
#include "valtrack.h"
#include "dumpfile.h"
+#include "rtl-iter.h"
/* Note that turning REG_DEAD_DEBUGGING on will cause
gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
{
int bb_index = bb->index;
- df_ref *def_rec;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_range (local_rd,
- DF_DEFS_BEGIN (dregno),
- DF_DEFS_COUNT (dregno));
- bitmap_set_bit (local_rd, DF_REF_ID (def));
- }
- }
+ df_ref def;
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_range (local_rd,
+ DF_DEFS_BEGIN (dregno),
+ DF_DEFS_COUNT (dregno));
+ bitmap_set_bit (local_rd, DF_REF_ID (def));
+ }
}
/* Add the effect of the defs of INSN to the reaching definitions bitmap
LOCAL_RD. */
void
-df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
+df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
bitmap local_rd)
{
- unsigned uid = INSN_UID (insn);
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
static void
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
- df_ref *def_rec,
+ df_ref def,
int top_flag)
{
- while (*def_rec)
+ for (; def; def = DF_REF_NEXT_LOC (def))
{
- df_ref def = *def_rec;
if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);
}
}
}
- def_rec++;
}
}
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
- rtx insn;
+ rtx_insn *insn;
bitmap_clear (&seen_in_block);
bitmap_clear (&seen_in_insn);
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
- rtx insn;
- df_ref *def_rec;
- df_ref *use_rec;
+ rtx_insn *insn;
+ df_ref def, use;
/* Process the registers set in an exception handler. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
- }
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (&bb_info->def, dregno);
+ bitmap_clear_bit (&bb_info->use, dregno);
+ }
/* Process the hardware registers that are always live. */
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- /* Add use to set of uses in this BB. */
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
- }
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ /* Add use to set of uses in this BB. */
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
-
if (!NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
- }
- }
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (&bb_info->def, dregno);
+ bitmap_clear_bit (&bb_info->use, dregno);
+ }
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- /* Add use to set of uses in this BB. */
- bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ /* Add use to set of uses in this BB. */
+ bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
}
/* Process the registers set in an exception handler or the hard
frame pointer if this block is the target of a non local
goto. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
- }
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (&bb_info->def, dregno);
+ bitmap_clear_bit (&bb_info->use, dregno);
+ }
#ifdef EH_USES
/* Process the uses that are live into an exception handler. */
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- /* Add use to set of uses in this BB. */
- if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
- }
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ /* Add use to set of uses in this BB. */
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
+ bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
#endif
/* If the df_live problem is not defined, such as at -O0 and -O1, we
reference of the frame pointer. */
bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
- if (fixed_regs[ARG_POINTER_REGNUM])
+ if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
+ && fixed_regs[ARG_POINTER_REGNUM])
bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
-#endif
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
- rtx insn;
- df_ref *def_rec;
+ rtx_insn *insn;
+ df_ref def;
int luid = 0;
FOR_BB_INSNS (bb, insn)
continue;
luid++;
- for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
}
}
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
}
EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
{
- rtx insn;
- df_ref *def_rec;
- df_ref *use_rec;
+ rtx_insn *insn;
+ df_ref def, use;
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
- for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
- DF_REF_CHAIN (*def_rec) = NULL;
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ DF_REF_CHAIN (def) = NULL;
if (df_chain_problem_p (DF_UD_CHAIN))
- for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ DF_REF_CHAIN (use) = NULL;
FOR_BB_INSNS (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
-
- if (INSN_P (insn))
- {
- if (df_chain_problem_p (DF_DU_CHAIN))
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- DF_REF_CHAIN (*def_rec) = NULL;
- if (df_chain_problem_p (DF_UD_CHAIN))
- {
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
- }
- }
- }
+ if (INSN_P (insn))
+ {
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ DF_REF_CHAIN (def) = NULL;
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ {
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ DF_REF_CHAIN (use) = NULL;
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ DF_REF_CHAIN (use) = NULL;
+ }
+ }
}
bitmap_clear (df_chain->out_of_date_transfer_functions);
static void
df_chain_create_bb_process_use (bitmap local_rd,
- df_ref *use_rec,
+ df_ref use,
int top_flag)
{
bitmap_iterator bi;
unsigned int def_index;
- while (*use_rec)
+ for (; use; use = DF_REF_NEXT_LOC (use))
{
- df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (uregno >= FIRST_PSEUDO_REGISTER))
}
}
}
-
- use_rec++;
}
}
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
- rtx insn;
+ rtx_insn *insn;
bitmap_head cpy;
bitmap_initialize (&cpy, &bitmap_default_obstack);
return;
if (df_chain_problem_p (DF_UD_CHAIN))
{
+ df_ref use;
+
fprintf (file,
";; UD chains for artificial uses at %s\n",
top ? "top" : "bottom");
- df_ref *use_rec = df_get_artificial_uses (bb->index);
- if (*use_rec)
- {
- while (*use_rec)
- {
- df_ref use = *use_rec;
- if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
- || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
- {
- fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- }
- use_rec++;
- }
- }
+ FOR_EACH_ARTIFICIAL_USE (use, bb->index)
+ if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
+ || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
+ {
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ }
}
if (df_chain_problem_p (DF_DU_CHAIN))
{
+ df_ref def;
+
fprintf (file,
";; DU chains for artificial defs at %s\n",
top ? "top" : "bottom");
- df_ref *def_rec = df_get_artificial_defs (bb->index);
- if (*def_rec)
- {
- while (*def_rec)
- {
- df_ref def = *def_rec;
-
- if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
- || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
- {
- fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- df_chain_dump (DF_REF_CHAIN (def), file);
- fprintf (file, "\n");
- }
- def_rec++;
- }
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
+ if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
+ || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
+ {
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
+ df_chain_dump (DF_REF_CHAIN (def), file);
+ fprintf (file, "\n");
+ }
}
}
}
static void
-df_chain_insn_top_dump (const_rtx insn, FILE *file)
+df_chain_insn_top_dump (const rtx_insn *insn, FILE *file)
{
if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *use_rec = DF_INSN_INFO_USES (insn_info);
- df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
+ df_ref use;
+
fprintf (file, ";; UD chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
- if (*use_rec || *eq_use_rec)
- {
- while (*use_rec)
- {
- df_ref use = *use_rec;
- if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
- || !(df->changeable_flags & DF_NO_HARD_REGS))
- {
- fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- }
- use_rec++;
- }
- while (*eq_use_rec)
- {
- df_ref use = *eq_use_rec;
- if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
- || !(df->changeable_flags & DF_NO_HARD_REGS))
- {
- fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- }
- eq_use_rec++;
- }
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
+ || !(df->changeable_flags & DF_NO_HARD_REGS))
+ {
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ }
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
+ || !(df->changeable_flags & DF_NO_HARD_REGS))
+ {
+ fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ }
}
}
static void
-df_chain_insn_bottom_dump (const_rtx insn, FILE *file)
+df_chain_insn_bottom_dump (const rtx_insn *insn, FILE *file)
{
if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *def_rec = DF_INSN_INFO_DEFS (insn_info);
+ df_ref def;
fprintf (file, ";; DU chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
- if (*def_rec)
- {
- while (*def_rec)
- {
- df_ref def = *def_rec;
- if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
- || !(df->changeable_flags & DF_NO_HARD_REGS))
- {
- fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- df_chain_dump (DF_REF_CHAIN (def), file);
- fprintf (file, "\n");
- }
- def_rec++;
- }
- }
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
+ || !(df->changeable_flags & DF_NO_HARD_REGS))
+ {
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
+ if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (def), file);
+ fprintf (file, "\n");
+ }
fprintf (file, "\n");
}
}
{
rtx orig_reg = DF_REF_REG (ref);
rtx reg = orig_reg;
- enum machine_mode reg_mode;
+ machine_mode reg_mode;
unsigned regno;
/* Left at -1 for whole accesses. */
int which_subword = -1;
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
- rtx insn;
- df_ref *def_rec;
- df_ref *use_rec;
+ rtx_insn *insn;
+ df_ref def, use;
/* Ensure that artificial refs don't contain references to pseudos. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
- }
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
-
if (!NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
- {
- df_word_lr_mark_ref (def, true, &bb_info->def);
- df_word_lr_mark_ref (def, false, &bb_info->use);
- }
- }
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- df_word_lr_mark_ref (use, true, &bb_info->use);
- }
+
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
+ {
+ df_word_lr_mark_ref (def, true, &bb_info->def);
+ df_word_lr_mark_ref (def, false, &bb_info->use);
+ }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ df_word_lr_mark_ref (use, true, &bb_info->use);
}
}
an insn. */
bool
-df_word_lr_simulate_defs (rtx insn, bitmap live)
+df_word_lr_simulate_defs (rtx_insn *insn, bitmap live)
{
bool changed = false;
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
- changed = true;
- else
- changed |= df_word_lr_mark_ref (*def_rec, false, live);
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
+ changed = true;
+ else
+ changed |= df_word_lr_mark_ref (def, false, live);
return changed;
}
/* Simulate the effects of the uses of INSN on LIVE. */
void
-df_word_lr_simulate_uses (rtx insn, bitmap live)
+df_word_lr_simulate_uses (rtx_insn *insn, bitmap live)
{
- df_ref *use_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref use;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- df_word_lr_mark_ref (*use_rec, true, live);
+ FOR_EACH_INSN_USE (use, insn)
+ df_word_lr_mark_ref (use, true, live);
}
\f
/*----------------------------------------------------------------------------
/* This is only used if REG_DEAD_DEBUGGING is in effect. */
static void
-df_print_note (const char *prefix, rtx insn, rtx note)
+df_print_note (const char *prefix, rtx_insn *insn, rtx note)
{
if (dump_file)
{
/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
static void
-df_remove_dead_and_unused_notes (rtx insn)
+df_remove_dead_and_unused_notes (rtx_insn *insn)
{
rtx *pprev = &REG_NOTES (insn);
rtx link = *pprev;
as the bitmap of currently live registers. */
static void
-df_remove_dead_eq_notes (rtx insn, bitmap live)
+df_remove_dead_eq_notes (rtx_insn *insn, bitmap live)
{
rtx *pprev = &REG_NOTES (insn);
rtx link = *pprev;
one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
so we need to purge the complete EQ_USES vector when removing
the note using df_notes_rescan. */
- df_ref *use_rec;
+ df_ref use;
bool deleted = false;
- for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
- && DF_REF_LOC (use)
- && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
- && ! bitmap_bit_p (live, DF_REF_REGNO (use))
- && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
- {
- deleted = true;
- break;
- }
- }
+ FOR_EACH_INSN_EQ_USE (use, insn)
+ if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
+ && DF_REF_LOC (use)
+ && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
+ && !bitmap_bit_p (live, DF_REF_REGNO (use))
+ && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
+ {
+ deleted = true;
+ break;
+ }
if (deleted)
{
rtx next;
/* Set a NOTE_TYPE note for REG in INSN. */
static inline void
-df_set_note (enum reg_note note_type, rtx insn, rtx reg)
+df_set_note (enum reg_note note_type, rtx_insn *insn, rtx reg)
{
gcc_checking_assert (!DEBUG_INSN_P (insn));
add_reg_note (insn, note_type, reg);
*/
static void
-df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
+df_set_unused_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
bitmap live, bitmap do_not_gen,
bitmap artificial_uses,
struct dead_debug_local *debug)
register. */
static void
-df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
+df_set_dead_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
bitmap live, bitmap do_not_gen,
bitmap artificial_uses, bool *added_notes_p)
{
LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
static void
-df_create_unused_note (rtx insn, df_ref def,
+df_create_unused_note (rtx_insn *insn, df_ref def,
bitmap live, bitmap artificial_uses,
struct dead_debug_local *debug)
{
bitmap live, bitmap do_not_gen, bitmap artificial_uses)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
- rtx insn;
- df_ref *def_rec;
- df_ref *use_rec;
+ rtx_insn *insn;
+ df_ref def, use;
struct dead_debug_local debug;
dead_debug_local_init (&debug, NULL, NULL);
/* Process the artificial defs and uses at the bottom of the block
to begin processing. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
{
- df_ref def = *def_rec;
-
if (REG_DEAD_DEBUGGING && dump_file)
fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- {
- unsigned int regno = DF_REF_REGNO (use);
- bitmap_set_bit (live, regno);
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int regno = DF_REF_REGNO (use);
+ bitmap_set_bit (live, regno);
- /* Notes are not generated for any of the artificial registers
- at the bottom of the block. */
- bitmap_set_bit (artificial_uses, regno);
- }
- }
+ /* Notes are not generated for any of the artificial registers
+ at the bottom of the block. */
+ bitmap_set_bit (artificial_uses, regno);
+ }
if (REG_DEAD_DEBUGGING && dump_file)
{
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
- struct df_mw_hardreg **mws_rec;
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_mw_hardreg *mw;
int debug_insn;
if (!INSN_P (insn))
{
if (REG_DEAD_DEBUGGING && dump_file)
{
- fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
+ fprintf (dump_file, "processing call %d\n live =",
+ INSN_UID (insn));
df_print_regset (dump_file, live);
}
/* We only care about real sets for calls. Clobbers cannot
be depended on to really die. */
- mws_rec = DF_INSN_UID_MWS (uid);
- while (*mws_rec)
- {
- struct df_mw_hardreg *mws = *mws_rec;
- if ((DF_MWS_REG_DEF_P (mws))
- && !df_ignore_stack_reg (mws->start_regno))
- df_set_unused_notes_for_mw (insn,
- mws, live, do_not_gen,
+ FOR_EACH_INSN_INFO_MW (mw, insn_info)
+ if ((DF_MWS_REG_DEF_P (mw))
+ && !df_ignore_stack_reg (mw->start_regno))
+ df_set_unused_notes_for_mw (insn, mw, live, do_not_gen,
artificial_uses, &debug);
- mws_rec++;
- }
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
else
{
/* Regular insn. */
- mws_rec = DF_INSN_UID_MWS (uid);
- while (*mws_rec)
- {
- struct df_mw_hardreg *mws = *mws_rec;
- if (DF_MWS_REG_DEF_P (mws))
- df_set_unused_notes_for_mw (insn,
- mws, live, do_not_gen,
- artificial_uses, &debug);
- mws_rec++;
- }
+ FOR_EACH_INSN_INFO_MW (mw, insn_info)
+ if (DF_MWS_REG_DEF_P (mw))
+ df_set_unused_notes_for_mw (insn, mw, live, do_not_gen,
+ artificial_uses, &debug);
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
df_create_unused_note (insn,
def, live, artificial_uses, &debug);
}
/* Process the uses. */
- mws_rec = DF_INSN_UID_MWS (uid);
- while (*mws_rec)
- {
- struct df_mw_hardreg *mws = *mws_rec;
- if (DF_MWS_REG_USE_P (mws)
- && !df_ignore_stack_reg (mws->start_regno))
- {
- bool really_add_notes = debug_insn != 0;
+ FOR_EACH_INSN_INFO_MW (mw, insn_info)
+ if (DF_MWS_REG_USE_P (mw)
+ && !df_ignore_stack_reg (mw->start_regno))
+ {
+ bool really_add_notes = debug_insn != 0;
- df_set_dead_notes_for_mw (insn,
- mws, live, do_not_gen,
- artificial_uses,
- &really_add_notes);
+ df_set_dead_notes_for_mw (insn, mw, live, do_not_gen,
+ artificial_uses,
+ &really_add_notes);
- if (really_add_notes)
- debug_insn = -1;
- }
- mws_rec++;
- }
+ if (really_add_notes)
+ debug_insn = -1;
+ }
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
/* Find the set of DEFs for INSN. */
void
-df_simulate_find_defs (rtx insn, bitmap defs)
+df_simulate_find_defs (rtx_insn *insn, bitmap defs)
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- bitmap_set_bit (defs, DF_REF_REGNO (def));
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
}
/* Find the set of uses for INSN. This includes partial defs. */
static void
-df_simulate_find_uses (rtx insn, bitmap uses)
+df_simulate_find_uses (rtx_insn *insn, bitmap uses)
{
- df_ref *rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def, use;
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
- {
- df_ref def = *rec;
- if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
- bitmap_set_bit (uses, DF_REF_REGNO (def));
- }
- for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
- {
- df_ref use = *rec;
- bitmap_set_bit (uses, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
+ bitmap_set_bit (uses, DF_REF_REGNO (def));
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ bitmap_set_bit (uses, DF_REF_REGNO (use));
}
/* Find the set of real DEFs, which are not clobbers, for INSN. */
void
-df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
+df_simulate_find_noclobber_defs (rtx_insn *insn, bitmap defs)
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (defs, DF_REF_REGNO (def));
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
}
/* Simulate the effects of the defs of INSN on LIVE. */
void
-df_simulate_defs (rtx insn, bitmap live)
+df_simulate_defs (rtx_insn *insn, bitmap live)
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
/* If the def is to only part of the reg, it does
/* Simulate the effects of the uses of INSN on LIVE. */
void
-df_simulate_uses (rtx insn, bitmap live)
+df_simulate_uses (rtx_insn *insn, bitmap live)
{
- df_ref *use_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref use;
if (DEBUG_INSN_P (insn))
return;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- /* Add use to set of uses in this BB. */
- bitmap_set_bit (live, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_USE (use, insn)
+ /* Add use to set of uses in this BB. */
+ bitmap_set_bit (live, DF_REF_REGNO (use));
}
void
df_simulate_initialize_backwards (basic_block bb, bitmap live)
{
- df_ref *def_rec;
- df_ref *use_rec;
+ df_ref def, use;
int bb_index = bb->index;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (live, DF_REF_REGNO (use));
- }
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (live, DF_REF_REGNO (use));
}
/* Simulate the backwards effects of INSN on the bitmap LIVE. */
void
-df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
+df_simulate_one_insn_backwards (basic_block bb, rtx_insn *insn, bitmap live)
{
if (!NONDEBUG_INSN_P (insn))
return;
void
df_simulate_finalize_backwards (basic_block bb, bitmap live)
{
- df_ref *def_rec;
+ df_ref def;
#ifdef EH_USES
- df_ref *use_rec;
+ df_ref use;
#endif
int bb_index = bb->index;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
#ifdef EH_USES
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- bitmap_set_bit (live, DF_REF_REGNO (use));
- }
+ FOR_EACH_ARTIFICIAL_USE (use, bb_index)
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
+ bitmap_set_bit (live, DF_REF_REGNO (use));
#endif
}
/*----------------------------------------------------------------------------
void
df_simulate_initialize_forwards (basic_block bb, bitmap live)
{
- df_ref *def_rec;
+ df_ref def;
int bb_index = bb->index;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_set_bit (live, DF_REF_REGNO (def));
- }
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ bitmap_set_bit (live, DF_REF_REGNO (def));
}
/* Simulate the forwards effects of INSN on the bitmap LIVE. */
void
-df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
+df_simulate_one_insn_forwards (basic_block bb, rtx_insn *insn, bitmap live)
{
rtx link;
if (! INSN_P (insn))
case REG_UNUSED:
{
rtx reg = XEXP (link, 0);
- int regno = REGNO (reg);
- if (HARD_REGISTER_NUM_P (regno))
- bitmap_clear_range (live, regno,
- hard_regno_nregs[regno][GET_MODE (reg)]);
- else
- bitmap_clear_bit (live, regno);
+ bitmap_clear_range (live, REGNO (reg), REG_NREGS (reg));
}
break;
default:
#define MEMREF_NORMAL 1
#define MEMREF_VOLATILE 2
-/* A subroutine of can_move_insns_across_p called through for_each_rtx.
- Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory is found. */
+/* Return an OR of MEMREF_NORMAL or MEMREF_VOLATILE for the MEMs in X. */
static int
-find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
+find_memory (rtx_insn *insn)
{
- rtx x = *px;
-
- if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
- return MEMREF_VOLATILE;
-
- if (!MEM_P (x))
- return 0;
- if (MEM_VOLATILE_P (x))
- return MEMREF_VOLATILE;
- if (MEM_READONLY_P (x))
- return 0;
-
- return MEMREF_NORMAL;
+ int flags = 0;
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
+ flags |= MEMREF_VOLATILE;
+ else if (MEM_P (x))
+ {
+ if (MEM_VOLATILE_P (x))
+ flags |= MEMREF_VOLATILE;
+ else if (!MEM_READONLY_P (x))
+ flags |= MEMREF_NORMAL;
+ }
+ }
+ return flags;
}
/* A subroutine of can_move_insns_across_p called through note_stores.
void
simulate_backwards_to_point (basic_block bb, regset live, rtx point)
{
- rtx insn;
+ rtx_insn *insn;
bitmap_copy (live, df_get_live_out (bb));
df_simulate_initialize_backwards (bb, live);
is set to point at the last moveable insn in such a case. */
bool
-can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
+can_move_insns_across (rtx_insn *from, rtx_insn *to,
+ rtx_insn *across_from, rtx_insn *across_to,
basic_block merge_bb, regset merge_live,
- regset other_branch_live, rtx *pmove_upto)
+ regset other_branch_live, rtx_insn **pmove_upto)
{
- rtx insn, next, max_to;
+ rtx_insn *insn, *next, *max_to;
bitmap merge_set, merge_use, local_merge_live;
bitmap test_set, test_use;
unsigned i, fail = 0;
bool trapping_insns_in_across = false;
if (pmove_upto != NULL)
- *pmove_upto = NULL_RTX;
+ *pmove_upto = NULL;
/* Find real bounds, ignoring debug insns. */
while (!NONDEBUG_INSN_P (from) && from != to)
{
if (volatile_insn_p (PATTERN (insn)))
return false;
- memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
- NULL);
+ memrefs_in_across |= find_memory (insn);
note_stores (PATTERN (insn), find_memory_stores,
&mem_sets_in_across);
/* This is used just to find sets of the stack pointer. */
the first insn in MERGE that sets a register in TEST_USE, or uses
a register in TEST_SET. We also check for calls, trapping operations,
and memory references. */
- max_to = NULL_RTX;
+ max_to = NULL;
for (insn = from; ; insn = next)
{
if (CALL_P (insn))
int mem_ref_flags = 0;
int mem_set_flags = 0;
note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
- mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
- NULL);
+ mem_ref_flags = find_memory (insn);
/* Catch sets of the stack pointer. */
mem_ref_flags |= mem_set_flags;
if (bitmap_intersect_p (merge_set, test_use)
|| bitmap_intersect_p (merge_use, test_set))
break;
-#ifdef HAVE_cc0
- if (!sets_cc0_p (insn))
-#endif
+ if (!HAVE_cc0 || !sets_cc0_p (insn))
max_to = insn;
}
next = NEXT_INSN (insn);
if (NONDEBUG_INSN_P (insn))
{
if (!bitmap_intersect_p (test_set, local_merge_live)
-#ifdef HAVE_cc0
- && !sets_cc0_p (insn)
-#endif
- )
+ && (!HAVE_cc0 || !sets_cc0_p (insn)))
{
max_to = insn;
break;
df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
{
int bb_index = bb->index;
- df_ref *def_rec;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- if (DF_REF_FLAGS (def)
- & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
- bitmap_set_bit (local_md, dregno);
- else
- bitmap_clear_bit (local_md, dregno);
- }
- }
+ df_ref def;
+ FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ if (DF_REF_FLAGS (def)
+ & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
+ bitmap_set_bit (local_md, dregno);
+ else
+ bitmap_clear_bit (local_md, dregno);
+ }
}
LOCAL_MD. */
void
-df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
- bitmap local_md)
+df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
+ bitmap local_md)
{
- unsigned uid = INSN_UID (insn);
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
static void
df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
- df_ref *def_rec,
+ df_ref def,
int top_flag)
{
- df_ref def;
bitmap_clear (&seen_in_insn);
- while ((def = *def_rec++) != NULL)
+ for (; def; def = DF_REF_NEXT_LOC (def))
{
unsigned int dregno = DF_REF_REGNO (def);
if (((!(df->changeable_flags & DF_NO_HARD_REGS))
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
- rtx insn;
+ rtx_insn *insn;
/* Artificials are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))