#include "rtl-iter.h"
#include "fibonacci_heap.h"
+struct btr_def;
+
/* Target register optimizations - these are performed after reload. */
-typedef struct btr_def_group_s
+struct btr_def_group
{
- struct btr_def_group_s *next;
+ btr_def_group *next;
rtx src;
- struct btr_def_s *members;
-} *btr_def_group;
+ btr_def *members;
+};
-typedef struct btr_user_s
+struct btr_user
{
- struct btr_user_s *next;
+ btr_user *next;
basic_block bb;
int luid;
rtx_insn *insn;
int n_reaching_defs;
int first_reaching_def;
char other_use_this_block;
-} *btr_user;
+};
/* btr_def structs appear on three lists:
1. A list of all btr_def structures (head is
group (head is in a BTR_DEF_GROUP struct, linked by
NEXT_THIS_GROUP field). */
-typedef struct btr_def_s
+struct btr_def
{
- struct btr_def_s *next_this_bb;
- struct btr_def_s *next_this_group;
+ btr_def *next_this_bb;
+ btr_def *next_this_group;
basic_block bb;
int luid;
rtx_insn *insn;
source (i.e. a label), group links together all the
insns with the same source. For other branch register
setting insns, group is NULL. */
- btr_def_group group;
- btr_user uses;
+ btr_def_group *group;
+ btr_user *uses;
/* If this def has a reaching use which is not a simple use
in a branch instruction, then has_ambiguous_use will be true,
and we will not attempt to migrate this definition. */
to clear out trs_live_at_end again. */
char own_end;
bitmap live_range;
-} *btr_def;
+};
-typedef fibonacci_heap <long, btr_def_s> btr_heap_t;
-typedef fibonacci_node <long, btr_def_s> btr_heap_node_t;
+typedef fibonacci_heap <long, btr_def> btr_heap_t;
+typedef fibonacci_node <long, btr_def> btr_heap_node_t;
static int issue_rate;
static int basic_block_freq (const_basic_block);
static int insn_sets_btr_p (const rtx_insn *, int, int *);
-static void find_btr_def_group (btr_def_group *, btr_def);
-static btr_def add_btr_def (btr_heap_t *, basic_block, int, rtx_insn *,
- unsigned int, int, btr_def_group *);
-static btr_user new_btr_user (basic_block, int, rtx_insn *);
+static void find_btr_def_group (btr_def_group **, btr_def *);
+static btr_def *add_btr_def (btr_heap_t *, basic_block, int, rtx_insn *,
+ unsigned int, int, btr_def_group **);
+static btr_user *new_btr_user (basic_block, int, rtx_insn *);
static void dump_hard_reg_set (HARD_REG_SET);
static void dump_btrs_live (int);
-static void note_other_use_this_block (unsigned int, btr_user);
-static void compute_defs_uses_and_gen (btr_heap_t *, btr_def *,btr_user *,
+static void note_other_use_this_block (unsigned int, btr_user *);
+static void compute_defs_uses_and_gen (btr_heap_t *, btr_def **, btr_user **,
sbitmap *, sbitmap *, HARD_REG_SET *);
static void compute_kill (sbitmap *, sbitmap *, HARD_REG_SET *);
static void compute_out (sbitmap *bb_out, sbitmap *, sbitmap *, int);
-static void link_btr_uses (btr_def *, btr_user *, sbitmap *, sbitmap *, int);
+static void link_btr_uses (btr_def **, btr_user **, sbitmap *, sbitmap *, int);
static void build_btr_def_use_webs (btr_heap_t *);
-static int block_at_edge_of_live_range_p (int, btr_def);
-static void clear_btr_from_live_range (btr_def def);
-static void add_btr_to_live_range (btr_def, int);
+static int block_at_edge_of_live_range_p (int, btr_def *);
+static void clear_btr_from_live_range (btr_def *def);
+static void add_btr_to_live_range (btr_def *, int);
static void augment_live_range (bitmap, HARD_REG_SET *, basic_block,
basic_block, int);
static int choose_btr (HARD_REG_SET);
-static void combine_btr_defs (btr_def, HARD_REG_SET *);
-static void btr_def_live_range (btr_def, HARD_REG_SET *);
-static void move_btr_def (basic_block, int, btr_def, bitmap, HARD_REG_SET *);
-static int migrate_btr_def (btr_def, int);
+static void combine_btr_defs (btr_def *, HARD_REG_SET *);
+static void btr_def_live_range (btr_def *, HARD_REG_SET *);
+static void move_btr_def (basic_block, int, btr_def *, bitmap, HARD_REG_SET *);
+static int migrate_btr_def (btr_def *, int);
static void migrate_btr_defs (enum reg_class, int);
static int can_move_up (const_basic_block, const rtx_insn *, int);
static void note_btr_set (rtx, const_rtx, void *);
to in the list starting with *ALL_BTR_DEF_GROUPS. If no such
group exists, create one. Add def to the group. */
static void
-find_btr_def_group (btr_def_group *all_btr_def_groups, btr_def def)
+find_btr_def_group (btr_def_group **all_btr_def_groups, btr_def *def)
{
if (insn_sets_btr_p (def->insn, 1, NULL))
{
- btr_def_group this_group;
+ btr_def_group *this_group;
rtx def_src = SET_SRC (single_set (def->insn));
/* ?? This linear search is an efficiency concern, particularly
if (!this_group)
{
- this_group = XOBNEW (&migrate_btrl_obstack, struct btr_def_group_s);
+ this_group = XOBNEW (&migrate_btrl_obstack, btr_def_group);
this_group->src = def_src;
this_group->members = NULL;
this_group->next = *all_btr_def_groups;
/* Create a new target register definition structure, for a definition in
block BB, instruction INSN, and insert it into ALL_BTR_DEFS. Return
the new definition. */
-static btr_def
+static btr_def *
add_btr_def (btr_heap_t *all_btr_defs, basic_block bb, int insn_luid,
rtx_insn *insn,
unsigned int dest_reg, int other_btr_uses_before_def,
- btr_def_group *all_btr_def_groups)
+ btr_def_group **all_btr_def_groups)
{
- btr_def this_def = XOBNEW (&migrate_btrl_obstack, struct btr_def_s);
+ btr_def *this_def = XOBNEW (&migrate_btrl_obstack, btr_def);
this_def->bb = bb;
this_def->luid = insn_luid;
this_def->insn = insn;
/* Create a new target register user structure, for a use in block BB,
instruction INSN. Return the new user. */
-static btr_user
+static btr_user *
new_btr_user (basic_block bb, int insn_luid, rtx_insn *insn)
{
/* This instruction reads target registers. We need
*/
rtx *usep = find_btr_use (PATTERN (insn));
rtx use;
- btr_user user = NULL;
+ btr_user *user = NULL;
if (usep)
{
usep = NULL;
}
use = usep ? *usep : NULL_RTX;
- user = XOBNEW (&migrate_btrl_obstack, struct btr_user_s);
+ user = XOBNEW (&migrate_btrl_obstack, btr_user);
user->bb = bb;
user->luid = insn_luid;
user->insn = insn;
If any of them use the same register, set their other_use_this_block
flag. */
static void
-note_other_use_this_block (unsigned int regno, btr_user users_this_bb)
+note_other_use_this_block (unsigned int regno, btr_user *users_this_bb)
{
- btr_user user;
+ btr_user *user;
for (user = users_this_bb; user != NULL; user = user->next)
if (user->use && REGNO (user->use) == regno)
}
struct defs_uses_info {
- btr_user users_this_bb;
+ btr_user *users_this_bb;
HARD_REG_SET btrs_written_in_block;
HARD_REG_SET btrs_live_in_block;
sbitmap bb_gen;
}
static void
-compute_defs_uses_and_gen (btr_heap_t *all_btr_defs, btr_def *def_array,
- btr_user *use_array, sbitmap *btr_defset,
+compute_defs_uses_and_gen (btr_heap_t *all_btr_defs, btr_def **def_array,
+ btr_user **use_array, sbitmap *btr_defset,
sbitmap *bb_gen, HARD_REG_SET *btrs_written)
{
/* Scan the code building up the set of all defs and all uses.
*/
int i;
int insn_luid = 0;
- btr_def_group all_btr_def_groups = NULL;
+ btr_def_group *all_btr_def_groups = NULL;
defs_uses_info info;
bitmap_vector_clear (bb_gen, last_basic_block_for_fn (cfun));
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
int reg;
- btr_def defs_this_bb = NULL;
+ btr_def *defs_this_bb = NULL;
rtx_insn *insn;
rtx_insn *last;
int can_throw = 0;
	  if (insn_sets_btr_p (insn, 0, &regno))
{
- btr_def def = add_btr_def (
+ btr_def *def = add_btr_def (
all_btr_defs, bb, insn_luid, insn, regno,
TEST_HARD_REG_BIT (info.btrs_live_in_block, regno),
&all_btr_def_groups);
else if (cfun->has_nonlocal_label
&& GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE)
{
- btr_user user;
+ btr_user *user;
/* Do the equivalent of calling note_other_use_this_block
for every target register. */
{
if (find_btr_use (PATTERN (insn)))
{
- btr_user user = new_btr_user (bb, insn_luid, insn);
+ btr_user *user = new_btr_user (bb, insn_luid, insn);
use_array[insn_uid] = user;
if (user->use)
}
static void
-link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
+link_btr_uses (btr_def **def_array, btr_user **use_array, sbitmap *bb_out,
sbitmap *btr_defset, int max_uid)
{
int i;
{
int insn_uid = INSN_UID (insn);
- btr_def def = def_array[insn_uid];
- btr_user user = use_array[insn_uid];
+ btr_def *def = def_array[insn_uid];
+ btr_user *user = use_array[insn_uid];
if (def != NULL)
{
/* Remove all reaching defs of regno except
}
EXECUTE_IF_SET_IN_BITMAP (reaching_defs_of_reg, 0, uid, sbi)
{
- btr_def def = def_array[uid];
+ btr_def *def = def_array[uid];
/* We now know that def reaches user. */
build_btr_def_use_webs (btr_heap_t *all_btr_defs)
{
const int max_uid = get_max_uid ();
- btr_def *def_array = XCNEWVEC (btr_def, max_uid);
- btr_user *use_array = XCNEWVEC (btr_user, max_uid);
+ btr_def **def_array = XCNEWVEC (btr_def *, max_uid);
+ btr_user **use_array = XCNEWVEC (btr_user *, max_uid);
sbitmap *btr_defset = sbitmap_vector_alloc (
(last_btr - first_btr) + 1, max_uid);
sbitmap *bb_gen = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
live range of the definition DEF, AND there are other live
ranges of the same target register that include BB. */
static int
-block_at_edge_of_live_range_p (int bb, btr_def def)
+block_at_edge_of_live_range_p (int bb, btr_def *def)
{
if (def->other_btr_uses_before_def
&& BASIC_BLOCK_FOR_FN (cfun, bb) == def->bb)
return 1;
else if (def->other_btr_uses_after_use)
{
- btr_user user;
+ btr_user *user;
for (user = def->uses; user != NULL; user = user->next)
if (BASIC_BLOCK_FOR_FN (cfun, bb) == user->bb)
return 1;
to remove the target register from the live set of these blocks
only if they do not contain other live ranges for the same register. */
static void
-clear_btr_from_live_range (btr_def def)
+clear_btr_from_live_range (btr_def *def)
{
unsigned bb;
bitmap_iterator bi;
If OWN_END is set, also show that the register is live from our
definitions at the end of the basic block where it is defined. */
static void
-add_btr_to_live_range (btr_def def, int own_end)
+add_btr_to_live_range (btr_def *def, int own_end)
{
unsigned bb;
bitmap_iterator bi;
in this live range, but ignore the live range represented by DEF
when calculating this set. */
static void
-btr_def_live_range (btr_def def, HARD_REG_SET *btrs_live_in_range)
+btr_def_live_range (btr_def *def, HARD_REG_SET *btrs_live_in_range)
{
if (!def->live_range)
{
- btr_user user;
+ btr_user *user;
def->live_range = BITMAP_ALLOC (NULL);
group that are dominated by DEF, provided that there is a target
register available to allocate to the merged web. */
static void
-combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
+combine_btr_defs (btr_def *def, HARD_REG_SET *btrs_live_in_range)
{
- btr_def other_def;
+ btr_def *other_def;
for (other_def = def->group->members;
other_def != NULL;
int btr;
HARD_REG_SET combined_btrs_live;
bitmap combined_live_range = BITMAP_ALLOC (NULL);
- btr_user user;
+ btr_user *user;
if (other_def->live_range == NULL)
{
user = other_def->uses;
while (user != NULL)
{
- btr_user next = user->next;
+ btr_user *next = user->next;
user->next = def->uses;
def->uses = user;
If this new position means that other defs in the
same group can be combined with DEF then combine them. */
static void
-move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
+move_btr_def (basic_block new_def_bb, int btr, btr_def *def, bitmap live_range,
HARD_REG_SET *btrs_live_in_range)
{
/* We can move the instruction.
rtx btr_rtx;
rtx_insn *new_insn;
machine_mode btr_mode;
- btr_user user;
+ btr_user *user;
rtx set;
if (dump_file)
MIN_COST, but we may be able to reduce it further).
Return zero if no further migration is possible. */
static int
-migrate_btr_def (btr_def def, int min_cost)
+migrate_btr_def (btr_def *def, int min_cost)
{
bitmap live_range;
HARD_REG_SET btrs_live_in_range;
basic_block attempt;
int give_up = 0;
int def_moved = 0;
- btr_user user;
+ btr_user *user;
int def_latency;
if (dump_file)
while (!all_btr_defs.empty ())
{
int min_cost = -all_btr_defs.min_key ();
- btr_def def = all_btr_defs.extract_min ();
+ btr_def *def = all_btr_defs.extract_min ();
if (migrate_btr_def (def, min_cost))
{
all_btr_defs.insert (-def->cost, def);
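
For reference, here is a minimal sketch of the conversion pattern the patch applies throughout (the type name "node" is hypothetical, not a type in bt-load.c): the pointer-typedef idiom is replaced by a plain struct whose tag doubles as a type name, and every former use of the typedef becomes an explicit pointer. The forward declaration of btr_def added at the top of the file serves the same purpose for btr_def_group's members field, which must name btr_def * before btr_def itself is defined.

/* Minimal sketch of the conversion pattern (hypothetical type "node").

   Before, the typedef named a pointer to the struct, hiding one level
   of indirection and forcing "struct node_s" for self-references:

       typedef struct node_s
       {
         struct node_s *next;
         int value;
       } *node;

   After, a plain C++ struct: the tag is itself a type name, so members
   and callers spell the pointer explicitly.  */

struct node
{
  node *next;   /* self-reference needs no "struct" keyword in C++.  */
  int value;
};

int
main ()
{
  node *head = new node;   /* call sites change from "node x" to "node *x".  */
  head->next = nullptr;
  head->value = 42;
  delete head;
  return 0;
}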