/* There's no BB_EXECUTABLE but we can use BB_VISITED. */
#define BB_EXECUTABLE BB_VISITED
-static tree *last_vuse_ptr;
-static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers. */
return NULL_TREE;
}
+/* Bundle of state passed as the DATA_ argument through
+   walk_non_aliased_vuses to the vn_reference_lookup_2 and
+   vn_reference_lookup_3 callbacks, replacing the former file-scope
+   globals last_vuse_ptr and vn_walk_kind.  */
+struct vn_walk_cb_data
+{
+ /* The reference being looked up.  */
+ vn_reference_t vr;
+ /* If non-NULL, vn_reference_lookup_2 stores each visited VUSE here;
+    temporarily cleared by vn_reference_lookup_3 to suppress updates
+    across nested lookups and after translating the reference.  */
+ tree *last_vuse_ptr;
+ /* The lookup kind for this walk (e.g. VN_WALKREWRITE enables the
+    store-forwarding and aggregate/memcpy translation cases).  */
+ vn_lookup_kind vn_walk_kind;
+};
+
/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
with the current VUSE and performs the expression lookup. */
static void *
-vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
{
- vn_reference_t vr = (vn_reference_t)vr_;
+ vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
+ vn_reference_t vr = data->vr;
vn_reference_s **slot;
hashval_t hash;
- if (last_vuse_ptr)
- *last_vuse_ptr = vuse;
+ if (data->last_vuse_ptr)
+ *data->last_vuse_ptr = vuse;
/* Fixup vuse and hash. */
if (vr->vuse)
*DISAMBIGUATE_ONLY is set to true. */
static void *
-vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
bool *disambiguate_only)
{
- vn_reference_t vr = (vn_reference_t)vr_;
+ vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
+ vn_reference_t vr = data->vr;
gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base = ao_ref_base (ref);
HOST_WIDE_INT offseti, maxsizei;
accesses as we then can use TBAA to disambiguate against the
clobbering statement when looking up a load (thus the
VN_WALKREWRITE guard). */
- if (vn_walk_kind == VN_WALKREWRITE
+ if (data->vn_walk_kind == VN_WALKREWRITE
&& is_gimple_reg_type (TREE_TYPE (lhs))
&& types_compatible_p (TREE_TYPE (lhs), vr->type)
/* The overlap restriction breaks down when either access
|| (get_alias_set (lhs) != 0
&& ao_ref_alias_set (ref) != 0)))
{
- tree *saved_last_vuse_ptr = last_vuse_ptr;
+ tree *saved_last_vuse_ptr = data->last_vuse_ptr;
/* Do not update last_vuse_ptr in vn_reference_lookup_2. */
- last_vuse_ptr = NULL;
+ data->last_vuse_ptr = NULL;
tree saved_vuse = vr->vuse;
hashval_t saved_hashcode = vr->hashcode;
- void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), vr);
+ void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
/* Need to restore vr->vuse and vr->hashcode. */
vr->vuse = saved_vuse;
vr->hashcode = saved_hashcode;
- last_vuse_ptr = saved_last_vuse_ptr;
+ data->last_vuse_ptr = saved_last_vuse_ptr;
if (res && res != (void *)-1)
{
vn_reference_t vnresult = (vn_reference_t) res;
/* 5) For aggregate copies translate the reference through them if
the copy kills ref. */
- else if (vn_walk_kind == VN_WALKREWRITE
+ else if (data->vn_walk_kind == VN_WALKREWRITE
&& gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
*ref = r;
/* Do not update last seen VUSE after translating. */
- last_vuse_ptr = NULL;
+ data->last_vuse_ptr = NULL;
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
/* 6) For memcpy copies translate the reference through them if
the copy kills ref. */
- else if (vn_walk_kind == VN_WALKREWRITE
+ else if (data->vn_walk_kind == VN_WALKREWRITE
&& is_gimple_reg_type (vr->type)
/* ??? Handle BCOPY as well. */
&& (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
*ref = r;
/* Do not update last seen VUSE after translating. */
- last_vuse_ptr = NULL;
+ data->last_vuse_ptr = NULL;
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
{
ao_ref r;
unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
- vn_walk_kind = kind;
+ vn_walk_cb_data data = { &vr1, NULL, kind };
if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
vn_reference_lookup_2,
vn_reference_lookup_3,
- vuse_valueize, limit, &vr1);
+ vuse_valueize, limit, &data);
gcc_checking_assert (vr1.operands == shared_lookup_references);
}
not exist in the hash table or if the result field of the structure
was NULL.. VNRESULT will be filled in with the vn_reference_t
stored in the hashtable if one exists. When TBAA_P is false assume
- we are looking up a store and treat it as having alias-set zero. */
+ we are looking up a store and treat it as having alias-set zero.
+ *LAST_VUSE_PTR, if non-NULL, will be updated with the VUSE for which
+ the value lookup succeeded. */
tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
- vn_reference_t *vnresult, bool tbaa_p)
+ vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
{
vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
ao_ref_init (&r, op);
if (! tbaa_p)
r.ref_alias_set = r.base_alias_set = 0;
- vn_walk_kind = kind;
+ vn_walk_cb_data data = { &vr1, last_vuse_ptr, kind };
wvnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
vn_reference_lookup_2,
vn_reference_lookup_3,
- vuse_valueize, limit, &vr1);
+ vuse_valueize, limit, &data);
gcc_checking_assert (vr1.operands == shared_lookup_references);
if (wvnresult)
{
tree result;
last_vuse = gimple_vuse (stmt);
- last_vuse_ptr = &last_vuse;
result = vn_reference_lookup (op, gimple_vuse (stmt),
- default_vn_walk_kind, NULL, true);
- last_vuse_ptr = NULL;
+ default_vn_walk_kind, NULL, true, &last_vuse);
/* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a