enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
- (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
+ (tree, alias_set_type, alias_set_type, tree,
+ vec<vn_reference_op_s, va_heap>, tree);
/* Return whether there is value numbering information for a given SSA name. */
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
- alias_set_type set, tree type,
- vec<vn_reference_op_s> ops)
+ alias_set_type set, alias_set_type base_set,
+ tree type, vec<vn_reference_op_s> ops)
{
vn_reference_op_t op;
unsigned i;
poly_offset_int max_size;
poly_offset_int size = -1;
tree size_tree = NULL_TREE;
- alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
op = &ops[0];
/* Record the base objects. */
case MEM_REF:
- base_alias_set = get_deref_alias_set (op->op0);
*op0_p = build2 (MEM_REF, op->type,
NULL_TREE, op->op0);
MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
ref->ref = NULL_TREE;
ref->base = base;
ref->ref_alias_set = set;
- if (base_alias_set != -1)
- ref->base_alias_set = base_alias_set;
- else
- ref->base_alias_set = get_alias_set (base);
+ ref->base_alias_set = base_set;
/* We discount volatiles from value-numbering elsewhere. */
ref->volatile_p = false;
vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
: vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_),
- saved_operands (vNULL), first_set (-2), known_ranges (NULL)
+ saved_operands (vNULL), first_set (-2), first_base_set (-2),
+ known_ranges (NULL)
{
if (!last_vuse_ptr)
last_vuse_ptr = &last_vuse;
ao_ref_init (&orig_ref, orig_ref_);
}
~vn_walk_cb_data ();
- void *finish (alias_set_type, tree);
- void *push_partial_def (const pd_data& pd, alias_set_type, HOST_WIDE_INT);
+ void *finish (alias_set_type, alias_set_type, tree);
+ void *push_partial_def (const pd_data& pd,
+ alias_set_type, alias_set_type, HOST_WIDE_INT);
vn_reference_t vr;
ao_ref orig_ref;
/* The first defs range to avoid splay tree setup in most cases. */
pd_range first_range;
alias_set_type first_set;
+ alias_set_type first_base_set;
splay_tree known_ranges;
obstack ranges_obstack;
};
}
void *
-vn_walk_cb_data::finish (alias_set_type set, tree val)
+vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
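+ /* Prefer the alias sets recorded for the first store or translated
+    reference we encountered; -2 means nothing has been recorded yet.  */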
if (first_set != -2)
- set = first_set;
+ {
+ set = first_set;
+ base_set = first_base_set;
+ }
return vn_reference_lookup_or_insert_for_pieces
- (last_vuse, set, vr->type,
+ (last_vuse, set, base_set, vr->type,
saved_operands.exists () ? saved_operands : vr->operands, val);
}
void *
vn_walk_cb_data::push_partial_def (const pd_data &pd,
- alias_set_type set, HOST_WIDE_INT maxsizei)
+ alias_set_type set, alias_set_type base_set,
+ HOST_WIDE_INT maxsizei)
{
const HOST_WIDE_INT bufsize = 64;
/* We're using a fixed buffer for encoding so fail early if the object
first_range.offset = pd.offset;
first_range.size = pd.size;
first_set = set;
+ first_base_set = base_set;
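+ /* From here on the value is composed of multiple defs, so stop
+    updating the VUSE the combined result will be recorded at.  */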
last_vuse_ptr = NULL;
/* Continue looking for partial defs. */
return NULL;
"Successfully combined %u partial definitions\n", ndefs);
/* We are using the alias-set of the first store we encounter which
should be appropriate here. */
- return finish (first_set, val);
+ return finish (first_set, first_base_set, val);
}
else
{
if (slot)
{
if ((*slot)->result && data->saved_operands.exists ())
- return data->finish (vr->set, (*slot)->result);
+ return data->finish (vr->set, vr->base_set, (*slot)->result);
return *slot;
}
static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
alias_set_type set,
+ alias_set_type base_set,
tree type,
vec<vn_reference_op_s,
va_heap> operands,
vr1.operands = operands;
vr1.type = type;
vr1.set = set;
+ vr1.base_set = base_set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
if (vn_reference_lookup_1 (&vr1, &result))
return result;
value_id = VN_INFO (value)->value_id;
else
value_id = get_or_alloc_constant_value_id (value);
- return vn_reference_insert_pieces (vuse, set, type,
+ return vn_reference_insert_pieces (vuse, set, base_set, type,
operands.copy (), value, value_id);
}
lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
}
vn_context_bb = saved_rpo_bb;
- if (valueized_anything)
+ ao_ref_init (&lhs_ref, lhs);
+ lhs_ref_ok = true;
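+ /* Try to re-build LHS_REF from the valueized operands, keeping the
+    alias sets of the original LHS, and disambiguate against REF.  */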
+ if (valueized_anything
+ && ao_ref_init_from_vn_reference
+ (&lhs_ref, ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
+ && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
{
- lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
- get_alias_set (lhs),
- TREE_TYPE (lhs), lhs_ops);
- if (lhs_ref_ok
- && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
- {
- *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
- return NULL;
- }
- }
- else
- {
- ao_ref_init (&lhs_ref, lhs);
- lhs_ref_ok = true;
+ *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
+ return NULL;
}
/* Besides valueizing the LHS we can also use access-path based
if (!val)
return (void *)-1;
}
- return data->finish (0, val);
+ return data->finish (0, 0, val);
}
/* For now handle clearing memory with partial defs. */
else if (known_eq (ref->size, maxsize)
pd.rhs = build_constructor (NULL_TREE, NULL);
pd.offset = offset2i - offseti;
pd.size = leni << LOG2_BITS_PER_UNIT;
- return data->push_partial_def (pd, 0, maxsizei);
+ return data->push_partial_def (pd, 0, 0, maxsizei);
}
}
&& gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
&& CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
{
- tree lhs = gimple_assign_lhs (def_stmt);
tree base2;
poly_int64 offset2, size2, maxsize2;
HOST_WIDE_INT offset2i, size2i;
- bool reverse;
- if (lhs_ref_ok)
- {
- base2 = ao_ref_base (&lhs_ref);
- offset2 = lhs_ref.offset;
- size2 = lhs_ref.size;
- maxsize2 = lhs_ref.max_size;
- reverse = reverse_storage_order_for_component_p (lhs);
- }
- else
- base2 = get_ref_base_and_extent (lhs,
- &offset2, &size2, &maxsize2, &reverse);
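+ /* LHS_REF was initialized above for any assignment definition, so it
+    is always valid at this point.  */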
+ gcc_assert (lhs_ref_ok);
+ base2 = ao_ref_base (&lhs_ref);
+ offset2 = lhs_ref.offset;
+ size2 = lhs_ref.size;
+ maxsize2 = lhs_ref.max_size;
if (known_size_p (maxsize2)
&& known_eq (maxsize2, size2)
&& adjust_offsets_for_equal_base_address (base, &offset,
if (gimple_clobber_p (def_stmt))
return (void *)-1;
tree val = build_zero_cst (vr->type);
- return data->finish (get_alias_set (lhs), val);
+ return data->finish (ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref), val);
}
else if (known_eq (ref->size, maxsize)
&& maxsize.is_constant (&maxsizei)
pd.rhs = gimple_assign_rhs1 (def_stmt);
pd.offset = offset2i - offseti;
pd.size = size2i;
- return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
+ return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref),
+ maxsizei);
}
}
}
poly_int64 offset2, size2, maxsize2;
HOST_WIDE_INT offset2i, size2i;
bool reverse;
- if (lhs_ref_ok)
- {
- base2 = ao_ref_base (&lhs_ref);
- offset2 = lhs_ref.offset;
- size2 = lhs_ref.size;
- maxsize2 = lhs_ref.max_size;
- reverse = reverse_storage_order_for_component_p (lhs);
- }
- else
- base2 = get_ref_base_and_extent (lhs,
- &offset2, &size2, &maxsize2, &reverse);
+ gcc_assert (lhs_ref_ok);
+ base2 = ao_ref_base (&lhs_ref);
+ offset2 = lhs_ref.offset;
+ size2 = lhs_ref.size;
+ maxsize2 = lhs_ref.max_size;
+ reverse = reverse_storage_order_for_component_p (lhs);
if (base2
&& !reverse
&& !storage_order_barrier_p (lhs)
}
if (val)
- return data->finish (get_alias_set (lhs), val);
+ return data->finish (ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref), val);
}
}
else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
pd.rhs = rhs;
pd.offset = offset2i - offseti;
pd.size = size2i;
- return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
+ return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref),
+ maxsizei);
}
}
}
poly_int64 offset2, size2, maxsize2;
HOST_WIDE_INT offset2i, size2i, offseti;
bool reverse;
- if (lhs_ref_ok)
- {
- base2 = ao_ref_base (&lhs_ref);
- offset2 = lhs_ref.offset;
- size2 = lhs_ref.size;
- maxsize2 = lhs_ref.max_size;
- reverse = reverse_storage_order_for_component_p (lhs);
- }
- else
- base2 = get_ref_base_and_extent (lhs,
- &offset2, &size2, &maxsize2, &reverse);
+ gcc_assert (lhs_ref_ok);
+ base2 = ao_ref_base (&lhs_ref);
+ offset2 = lhs_ref.offset;
+ size2 = lhs_ref.size;
+ maxsize2 = lhs_ref.max_size;
+ reverse = reverse_storage_order_for_component_p (lhs);
tree def_rhs = gimple_assign_rhs1 (def_stmt);
if (!reverse
&& !storage_order_barrier_p (lhs)
if (val
&& (TREE_CODE (val) != SSA_NAME
|| ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
- return data->finish (get_alias_set (lhs), val);
+ return data->finish (ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref), val);
}
else if (maxsize.is_constant (&maxsizei)
&& offset.is_constant (&offseti)
pd.rhs = SSA_VAL (def_rhs);
pd.offset = offset2i - offseti;
pd.size = size2i;
- return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
+ return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref),
+ maxsizei);
}
}
}
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
- tree lhs = gimple_assign_lhs (def_stmt);
tree base2;
int i, j, k;
auto_vec<vn_reference_op_s> rhs;
vn_reference_op_t vro;
ao_ref r;
- if (!lhs_ref_ok)
- return (void *)-1;
+ gcc_assert (lhs_ref_ok);
/* See if the assignment kills REF. */
base2 = ao_ref_base (&lhs_ref);
if (val)
{
if (data->partial_defs.is_empty ())
- return data->finish (get_alias_set (lhs), val);
+ return data->finish (ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref), val);
/* This is the only interesting case for partial-def handling
coming from targets that like to gimplify init-ctors as
aggregate copies from constant data like aarch64 for
pd.rhs = val;
pd.offset = 0;
pd.size = maxsizei;
- return data->push_partial_def (pd, get_alias_set (lhs),
+ return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
+ ao_ref_base_alias_set (&lhs_ref),
maxsizei);
}
}
return (void *)-1;
/* Adjust *ref from the new operands. */
- if (!ao_ref_init_from_vn_reference (&r, get_alias_set (rhs1),
+ ao_ref rhs1_ref;
+ ao_ref_init (&rhs1_ref, rhs1);
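+ /* The translated reference now reads from RHS1, so take both alias
+    sets from it rather than from the original reference.  */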
+ if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
+ ao_ref_base_alias_set (&rhs1_ref),
vr->type, vr->operands))
return (void *)-1;
/* This can happen with bitfields. */
data->orig_ref.ref = NULL_TREE;
/* Use the alias-set of this LHS for recording an eventual result. */
if (data->first_set == -2)
- data->first_set = get_alias_set (lhs);
+ {
+ data->first_set = ao_ref_alias_set (&lhs_ref);
+ data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
+ }
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
/* Try folding the new reference to a constant. */
tree val = fully_constant_vn_reference_p (vr);
if (val)
- return data->finish (0, val);
+ return data->finish (0, 0, val);
/* Adjust *ref from the new operands. */
- if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands))
+ if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
return (void *)-1;
/* This can happen with bitfields. */
if (maybe_ne (ref->size, r.size))
data->orig_ref.ref = NULL_TREE;
/* Use the alias-set of this stmt for recording an eventual result. */
if (data->first_set == -2)
- data->first_set = 0;
+ {
+ data->first_set = 0;
+ data->first_base_set = 0;
+ }
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
vn_reference_t stored in the hashtable if something is found. */
tree
-vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
+vn_reference_lookup_pieces (tree vuse, alias_set_type set,
+ alias_set_type base_set, tree type,
vec<vn_reference_op_s> operands,
vn_reference_t *vnresult, vn_lookup_kind kind)
{
= valueize_refs (shared_lookup_references);
vr1.type = type;
vr1.set = set;
+ vr1.base_set = base_set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst;
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
- if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
+ if (ao_ref_init_from_vn_reference (&r, set, base_set, type, vr1.operands))
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
vn_reference_lookup_2,
vr1.operands = operands
= valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
vr1.type = TREE_TYPE (op);
- vr1.set = get_alias_set (op);
+ ao_ref op_ref;
+ ao_ref_init (&op_ref, op);
+ vr1.set = ao_ref_alias_set (&op_ref);
+ vr1.base_set = ao_ref_base_alias_set (&op_ref);
vr1.hashcode = vn_reference_compute_hash (&vr1);
if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst;
/* Make sure to use a valueized reference if we valueized anything.
Otherwise preserve the full reference for advanced TBAA. */
if (!valuezied_anything
- || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
- vr1.operands))
+ || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
+ vr1.type, vr1.operands))
ao_ref_init (&r, op);
vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
last_vuse_ptr, kind, tbaa_p);
vr->operands = valueize_shared_reference_ops_from_call (call);
vr->type = gimple_expr_type (call);
vr->set = 0;
+ vr->base_set = 0;
vr->hashcode = vn_reference_compute_hash (vr);
vn_reference_lookup_1 (vr, vnresult);
}
vr1->vuse = vuse_ssa_val (vuse);
vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
vr1->type = TREE_TYPE (op);
- vr1->set = get_alias_set (op);
+ ao_ref op_ref;
+ ao_ref_init (&op_ref, op);
+ vr1->set = ao_ref_alias_set (&op_ref);
+ vr1->base_set = ao_ref_base_alias_set (&op_ref);
vr1->hashcode = vn_reference_compute_hash (vr1);
vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
vr1->result_vdef = vdef;
structure we created. */
vn_reference_t
-vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
+vn_reference_insert_pieces (tree vuse, alias_set_type set,
+ alias_set_type base_set, tree type,
vec<vn_reference_op_s> operands,
tree result, unsigned int value_id)
vr1->operands = valueize_refs (operands);
vr1->type = type;
vr1->set = set;
+ vr1->base_set = base_set;
vr1->hashcode = vn_reference_compute_hash (vr1);
if (result && TREE_CODE (result) == SSA_NAME)
result = SSA_VAL (result);
vr2->operands = vr1.operands.copy ();
vr2->type = vr1.type;
vr2->set = vr1.set;
+ vr2->base_set = vr1.base_set;
vr2->hashcode = vr1.hashcode;
vr2->result = lhs;
vr2->result_vdef = vdef_val;
{
/* If the TBAA state isn't compatible for downstream reads
we cannot value-number the VDEFs the same. */
- alias_set_type set = get_alias_set (lhs);
- if (vnresult->set != set
- && ! alias_set_subset_of (set, vnresult->set))
+ ao_ref lhs_ref;
+ ao_ref_init (&lhs_ref, lhs);
+ alias_set_type set = ao_ref_alias_set (&lhs_ref);
+ alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
+ if ((vnresult->set != set
+ && ! alias_set_subset_of (set, vnresult->set))
+ || (vnresult->base_set != base_set
+ && ! alias_set_subset_of (base_set, vnresult->base_set)))
resultsame = false;
}
}
/* We can only remove the later store if the former aliases
at least all accesses the later one does or if the store
was to readonly memory storing the same value. */
- alias_set_type set = get_alias_set (lhs);
+ ao_ref lhs_ref;
+ ao_ref_init (&lhs_ref, lhs);
+ alias_set_type set = ao_ref_alias_set (&lhs_ref);
+ alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
if (! vnresult
- || vnresult->set == set
- || alias_set_subset_of (set, vnresult->set))
+ || ((vnresult->set == set
+ || alias_set_subset_of (set, vnresult->set))
+ && (vnresult->base_set == base_set
+ || alias_set_subset_of (base_set, vnresult->base_set))))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{