/* Description of a memory reference location. */
-typedef struct mem_ref_loc
+struct mem_ref_loc
{
tree *ref; /* The reference itself. */
gimple stmt; /* The statement in which it occurs. */
-} *mem_ref_loc_p;
+};
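/* (Illustrative sketch, not a hunk of this patch.)  The pattern applied
   throughout: the "typedef struct foo { ... } *foo_p;" pointer typedef
   is dropped and pointers are spelled explicitly at use sites, e.g.

     before:  mem_ref_loc_p loc = &ref->accesses_in_loop[i];
     after:   mem_ref_loc *loc = &ref->accesses_in_loop[i];

   which keeps the indirection visible in every declaration.  */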
/* Description of a memory reference. */
-typedef struct im_mem_ref
+struct im_mem_ref
{
unsigned id; /* ID assigned to the memory reference
(its index in memory_accesses.refs_list) */
If it is only loaded, then it is independent
of all stores in the loop. */
bitmap_head dep_loop; /* The complement of INDEP_LOOP. */
-} *mem_ref_p;
+};
/* We use two bits per loop in the ref->{in,}dep_loop bitmaps, the first
to record (in)dependence against stores in the loop and its subloops, the
second to record (in)dependence against all references in the loop and
its subloops. */
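/* A minimal sketch of that encoding; the macro itself is not part of
   this excerpt, so its name and exact form are assumptions:

     #define LOOP_DEP_BIT(loopnum, storedp) \
       (2 * (loopnum) + ((storedp) ? 1 : 0))

   Bit 2*N then records (in)dependence against stores in loop N and its
   subloops, bit 2*N + 1 against all references in loop N and its
   subloops.  */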
hash_table<mem_ref_hasher> *refs;
/* The list of memory references. */
- vec<mem_ref_p> refs_list;
+ vec<im_mem_ref *> refs_list;
/* The set of memory references accessed in each loop. */
vec<bitmap_head> refs_in_loop;
static bitmap_obstack lim_bitmap_obstack;
static obstack mem_ref_obstack;
-static bool ref_indep_loop_p (struct loop *, mem_ref_p);
+static bool ref_indep_loop_p (struct loop *, im_mem_ref *);
/* Minimum cost of an expensive expression. */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))
instead. */
static struct loop *
-outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
+outermost_indep_loop (struct loop *outer, struct loop *loop, im_mem_ref *ref)
{
struct loop *aloop;
/* Returns the memory reference contained in STMT. */
-static mem_ref_p
+static im_mem_ref *
mem_ref_in_stmt (gimple stmt)
{
bool store;
tree *mem = simple_mem_ref_in_stmt (stmt, &store);
hashval_t hash;
- mem_ref_p ref;
+ im_mem_ref *ref;
if (!mem)
return NULL;
if (gimple_vuse (stmt))
{
- mem_ref_p ref = mem_ref_in_stmt (stmt);
+ im_mem_ref *ref = mem_ref_in_stmt (stmt);
if (ref)
{
/* Allocates and returns a memory reference description for MEM whose hash
value is HASH and id is ID. */
-static mem_ref_p
+static im_mem_ref *
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
- mem_ref_p ref = XOBNEW (&mem_ref_obstack, struct im_mem_ref);
+ im_mem_ref *ref = XOBNEW (&mem_ref_obstack, struct im_mem_ref);
ao_ref_init (&ref->mem, mem);
ref->id = id;
ref->hash = hash;
description REF. The reference occurs in statement STMT. */
static void
-record_mem_ref_loc (mem_ref_p ref, gimple stmt, tree *loc)
+record_mem_ref_loc (im_mem_ref *ref, gimple stmt, tree *loc)
{
mem_ref_loc aref;
aref.stmt = stmt;
necessary. Return whether a bit was changed. */
static bool
-set_ref_stored_in_loop (mem_ref_p ref, struct loop *loop)
+set_ref_stored_in_loop (im_mem_ref *ref, struct loop *loop)
{
if (!ref->stored)
ref->stored = BITMAP_ALLOC (&lim_bitmap_obstack);
/* Marks reference REF as stored in LOOP. */
static void
-mark_ref_stored (mem_ref_p ref, struct loop *loop)
+mark_ref_stored (im_mem_ref *ref, struct loop *loop)
{
while (loop != current_loops->tree_root
&& set_ref_stored_in_loop (ref, loop))
tree *mem = NULL;
hashval_t hash;
im_mem_ref **slot;
- mem_ref_p ref;
+ im_mem_ref *ref;
bool is_stored;
unsigned id;
slot = memory_accesses.refs->find_slot_with_hash (*mem, hash, INSERT);
if (*slot)
{
- ref = (mem_ref_p) *slot;
+ ref = *slot;
id = ref->id;
}
else
tree_to_aff_combination_expand. */
static bool
-mem_refs_may_alias_p (mem_ref_p mem1, mem_ref_p mem2,
+mem_refs_may_alias_p (im_mem_ref *mem1, im_mem_ref *mem2,
hash_map<tree, name_expansion *> **ttae_cache)
{
/* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
template <typename FN>
static bool
-for_all_locs_in_loop (struct loop *loop, mem_ref_p ref, FN fn)
+for_all_locs_in_loop (struct loop *loop, im_mem_ref *ref, FN fn)
{
unsigned i;
- mem_ref_loc_p loc;
+ mem_ref_loc *loc;
/* Search for the cluster of locs in the accesses_in_loop vector
which is sorted by the postorder index of the loop father. */
while (i > 0)
{
--i;
- mem_ref_loc_p l = &ref->accesses_in_loop[i];
+ mem_ref_loc *l = &ref->accesses_in_loop[i];
if (!flow_bb_inside_loop_p (loop, gimple_bb (l->stmt)))
break;
if (fn (l))
for (i = loc - ref->accesses_in_loop.address ();
i < ref->accesses_in_loop.length (); ++i)
{
- mem_ref_loc_p l = &ref->accesses_in_loop[i];
+ mem_ref_loc *l = &ref->accesses_in_loop[i];
if (!flow_bb_inside_loop_p (loop, gimple_bb (l->stmt)))
break;
if (fn (l))
struct rewrite_mem_ref_loc
{
rewrite_mem_ref_loc (tree tmp_var_) : tmp_var (tmp_var_) {}
- bool operator () (mem_ref_loc_p loc);
+ bool operator () (mem_ref_loc *loc);
tree tmp_var;
};
bool
-rewrite_mem_ref_loc::operator () (mem_ref_loc_p loc)
+rewrite_mem_ref_loc::operator () (mem_ref_loc *loc)
{
*loc->ref = tmp_var;
update_stmt (loc->stmt);
/* Rewrites all references to REF in LOOP by variable TMP_VAR. */
static void
-rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
+rewrite_mem_refs (struct loop *loop, im_mem_ref *ref, tree tmp_var)
{
for_all_locs_in_loop (loop, ref, rewrite_mem_ref_loc (tmp_var));
}
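/* The traversal protocol above can carry any functor: operator () is
   invoked on each location of REF in LOOP, and returning true stops
   the walk early (compare first_mem_ref_loc_1 below).  A hypothetical
   functor, not part of this patch, that counts all locations:

     struct count_mem_ref_locs
     {
       count_mem_ref_locs (unsigned *n_) : n (n_) {}
       bool operator () (mem_ref_loc *) { ++*n; return false; }
       unsigned *n;
     };

     unsigned n = 0;
     for_all_locs_in_loop (loop, ref, count_mem_ref_locs (&n));  */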
struct first_mem_ref_loc_1
{
- first_mem_ref_loc_1 (mem_ref_loc_p *locp_) : locp (locp_) {}
- bool operator () (mem_ref_loc_p loc);
- mem_ref_loc_p *locp;
+ first_mem_ref_loc_1 (mem_ref_loc **locp_) : locp (locp_) {}
+ bool operator () (mem_ref_loc *loc);
+ mem_ref_loc **locp;
};
bool
-first_mem_ref_loc_1::operator () (mem_ref_loc_p loc)
+first_mem_ref_loc_1::operator () (mem_ref_loc *loc)
{
*locp = loc;
return true;
/* Returns the first reference location to REF in LOOP. */
-static mem_ref_loc_p
-first_mem_ref_loc (struct loop *loop, mem_ref_p ref)
+static mem_ref_loc *
+first_mem_ref_loc (struct loop *loop, im_mem_ref *ref)
{
- mem_ref_loc_p locp = NULL;
+ mem_ref_loc *locp = NULL;
for_all_locs_in_loop (loop, ref, first_mem_ref_loc_1 (&locp));
return locp;
}
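/* The helpers below implement the "if-changed" flavor of store motion.
   As an illustrative shape only (block and edge placement details are
   elided from this excerpt):

     tmp = MEM;                  // load hoisted to the preheader
     flag = false;
     loop:
       ... MEM rewritten to tmp ...
       flag = true;              // set next to every rewritten store
     if (flag)                   // on each exit edge
       MEM = tmp;                                                    */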
struct sm_set_flag_if_changed
{
sm_set_flag_if_changed (tree flag_) : flag (flag_) {}
- bool operator () (mem_ref_loc_p loc);
+ bool operator () (mem_ref_loc *loc);
tree flag;
};
bool
-sm_set_flag_if_changed::operator () (mem_ref_loc_p loc)
+sm_set_flag_if_changed::operator () (mem_ref_loc *loc)
{
/* Only set the flag for writes. */
if (is_gimple_assign (loc->stmt)
set, set an appropriate flag indicating the store. */
static tree
-execute_sm_if_changed_flag_set (struct loop *loop, mem_ref_p ref)
+execute_sm_if_changed_flag_set (struct loop *loop, im_mem_ref *ref)
{
tree flag;
char *str = get_lsm_tmp_name (ref->mem.ref, ~0, "_flag");
to the reference from the temporary variable are emitted to exits. */
static void
-execute_sm (struct loop *loop, vec<edge> exits, mem_ref_p ref)
+execute_sm (struct loop *loop, vec<edge> exits, im_mem_ref *ref)
{
tree tmp_var, store_flag = NULL_TREE;
unsigned i;
hoist_memory_references (struct loop *loop, bitmap mem_refs,
vec<edge> exits)
{
- mem_ref_p ref;
+ im_mem_ref *ref;
unsigned i;
bitmap_iterator bi;
{
ref_always_accessed (struct loop *loop_, bool stored_p_)
: loop (loop_), stored_p (stored_p_) {}
- bool operator () (mem_ref_loc_p loc);
+ bool operator () (mem_ref_loc *loc);
struct loop *loop;
bool stored_p;
};
bool
-ref_always_accessed::operator () (mem_ref_loc_p loc)
+ref_always_accessed::operator () (mem_ref_loc *loc)
{
struct loop *must_exec;
make sure REF is always stored to in LOOP. */
static bool
-ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
+ref_always_accessed_p (struct loop *loop, im_mem_ref *ref, bool stored_p)
{
return for_all_locs_in_loop (loop, ref,
ref_always_accessed (loop, stored_p));
/* Returns true if REF1 and REF2 are independent. */
static bool
-refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
+refs_independent_p (im_mem_ref *ref1, im_mem_ref *ref2)
{
if (ref1 == ref2)
return true;
and its super-loops. */
static void
-record_dep_loop (struct loop *loop, mem_ref_p ref, bool stored_p)
+record_dep_loop (struct loop *loop, im_mem_ref *ref, bool stored_p)
{
/* We can propagate dependent-in-loop bits up the loop
hierarchy to all outer loops. */
LOOP. */
static bool
-ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref, bool stored_p)
+ref_indep_loop_p_1 (struct loop *loop, im_mem_ref *ref, bool stored_p)
{
bitmap refs_to_check;
unsigned i;
bitmap_iterator bi;
- mem_ref_p aref;
+ im_mem_ref *aref;
if (stored_p)
refs_to_check = &memory_accesses.refs_in_loop[loop->num];
LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */
static bool
-ref_indep_loop_p_2 (struct loop *loop, mem_ref_p ref, bool stored_p)
+ref_indep_loop_p_2 (struct loop *loop, im_mem_ref *ref, bool stored_p)
{
stored_p |= (ref->stored && bitmap_bit_p (ref->stored, loop->num));
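/* The cache lookup this wrapper performs next (body elided here) would,
   under the two-bit scheme sketched above and with the LOOP_DEP_BIT
   name still an assumption, look like:

     if (bitmap_bit_p (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
       return true;
     if (bitmap_bit_p (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
       return false;

   with indep_loop being the bitmap the dep_loop comment above calls
   INDEP_LOOP.  */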
LOOP. */
static bool
-ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
+ref_indep_loop_p (struct loop *loop, im_mem_ref *ref)
{
gcc_checking_assert (MEM_ANALYZABLE (ref));
/* Returns true if we can perform store motion of REF from LOOP. */
static bool
-can_sm_ref_p (struct loop *loop, mem_ref_p ref)
+can_sm_ref_p (struct loop *loop, im_mem_ref *ref)
{
tree base;
bitmap refs = &memory_accesses.all_refs_stored_in_loop[loop->num];
unsigned i;
bitmap_iterator bi;
- mem_ref_p ref;
+ im_mem_ref *ref;
EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
{
{
basic_block bb;
unsigned i;
- mem_ref_p ref;
+ im_mem_ref *ref;
free_aux_for_edges ();