ao_ref_init (&orig_ref, orig_ref_);
}
~vn_walk_cb_data ();
- void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
+ void *push_partial_def (const pd_data& pd, tree,
+ alias_set_type, HOST_WIDE_INT);
vn_reference_t vr;
ao_ref orig_ref;
/* The first def's range, to avoid splay tree setup in most cases.  */
pd_range first_range;
tree first_vuse;
+ alias_set_type first_set;
splay_tree known_ranges;
obstack ranges_obstack;
};
}
/* Push PD to the vector of partial definitions, returning a
- value when we are ready to combine things with VUSE and MAXSIZEI,
+ value when we are ready to combine things with VUSE, SET and MAXSIZEI,
NULL when we want to continue looking for partial defs, or -1
on failure.  */
void *
vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
- HOST_WIDE_INT maxsizei)
+ alias_set_type set, HOST_WIDE_INT maxsizei)
{
const HOST_WIDE_INT bufsize = 64;
/* We're using a fixed buffer for encoding so fail early if the object
first_range.offset = pd.offset;
first_range.size = pd.size;
first_vuse = vuse;
+ first_set = set;
last_vuse_ptr = NULL;
/* Continue looking for partial defs.  */
return NULL;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
"Successfully combined %u partial definitions\n", ndefs);
- /* ??? If we track partial defs alias-set we could use that if it
- is the same for all. Use zero for now. */
+ /* We are using the alias-set of the first store we encounter, which
+ should be appropriate here.  */
return vn_reference_lookup_or_insert_for_pieces
- (first_vuse, 0, vr->type, vr->operands, val);
+ (first_vuse, first_set, vr->type, vr->operands, val);
}
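
A minimal caller-side sketch of the three-way contract documented above; res and
the control flow around it are illustrative, not from the patch (the call sites
below simply return the result to the alias walker, which interprets it the same
way):

    void *res = data->push_partial_def (pd, vuse, set, maxsizei);
    if (res == (void *) -1)
      return res;    /* Failure: abort the walk.  */
    else if (res == NULL)
      ;              /* Keep walking for more partial defs.  */
    else
      return res;    /* The partial defs combined to a value.  */
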
else
{
pd.rhs = build_constructor (NULL_TREE, NULL);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = leni;
- return data->push_partial_def (pd, vuse, maxsizei);
+ return data->push_partial_def (pd, vuse, 0, maxsizei);
}
}
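
The zero alias set passed in the hunk above is deliberate: this is the
memset-style case, where the store writes untyped bytes and there is no typed
lhs to take a TBAA set from, so the conservative alias set 0 (which conflicts
with everything) is the only safe choice. A hypothetical testcase sketching the
shape being handled:

    #include <string.h>

    struct S { int a; int b; };

    int
    foo (struct S *p)
    {
      memset (p, 0, sizeof (struct S));  /* untyped partial def, alias-set 0 */
      p->b = 1;                          /* typed partial def */
      return p->a;                       /* combinable to the constant 0 */
    }
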
pd.rhs = gimple_assign_rhs1 (def_stmt);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = size2i / BITS_PER_UNIT;
- return data->push_partial_def (pd, vuse, maxsizei);
+ return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+ maxsizei);
}
}
}
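
This hunk and the ones that follow make the same one-line change: a typed store
contributing a partial def now forwards the alias set of its lhs reference
instead of discarding it, so the combined value can be recorded under a
meaningful TBAA set rather than the catch-all zero. Illustration (not from the
patch):

    /* For a store like  p->f = x;  the lhs is the COMPONENT_REF p->f,
       and get_alias_set (lhs) yields the TBAA set under which a later
       read of the combined value will be disambiguated.  */
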
pd.rhs = rhs;
pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = size2i / BITS_PER_UNIT;
- return data->push_partial_def (pd, vuse, maxsizei);
+ return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+ maxsizei);
}
}
}
pd.rhs = SSA_VAL (def_rhs);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = size2i / BITS_PER_UNIT;
- return data->push_partial_def (pd, vuse, maxsizei);
+ return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+ maxsizei);
}
}
}
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
+ tree lhs = gimple_assign_lhs (def_stmt);
tree base2;
int i, j, k;
auto_vec<vn_reference_op_s> rhs;
{
if (data->partial_defs.is_empty ())
return vn_reference_lookup_or_insert_for_pieces
- (vuse, get_alias_set (rhs1), vr->type, vr->operands, val);
+ (vuse, get_alias_set (lhs), vr->type, vr->operands, val);
/* This is the only interesting case for partial-def handling
coming from targets that like to gimplify init-ctors as
aggregate copies from constant data like aarch64 for
pd.rhs = val;
pd.offset = 0;
pd.size = maxsizei / BITS_PER_UNIT;
- return data->push_partial_def (pd, vuse, maxsizei);
+ return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+ maxsizei);
}
}
return NULL_TREE;
}
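
On the switch from rhs1 to lhs above: the value is recorded for later lookups of
the store's destination, so the destination's alias set is the appropriate key;
the source of such an aggregate copy is typically constant-pool data whose type,
and hence alias set, need not match. A hedged sketch of the shape the in-code
comment describes (an init-ctor gimplified into an aggregate copy from constant
data, as on aarch64); the testcase is hypothetical:

    struct S { short a; short b; };

    int
    bar (void)
    {
      /* The initializer may be gimplified as  s = *.LC0;  where .LC0
         is constant-pool data.  The lookup that benefits is a later
         read of s (the lhs), not of the constant (the rhs).  */
      struct S s = { 1, 2 };
      return s.a + s.b;
    }
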
+ if (last_vuse_ptr)
+ *last_vuse_ptr = vr1.vuse;
return vn_reference_lookup_1 (&vr1, vnresult);
}
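
The final hunk feeds the vuse the lookup was keyed on back to the caller through
last_vuse_ptr. An annotated restatement of the two added lines, assuming (per
the elided setup of vr1) that vr1.vuse holds the valueized vuse:

    if (last_vuse_ptr)
      /* Report the (valueized) vuse the lookup used so the caller can
         record its result against it.  */
      *last_vuse_ptr = vr1.vuse;
    return vn_reference_lookup_1 (&vr1, vnresult);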