+2019-12-03 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/92751
+ * tree-ssa-sccvn.c (vn_walk_cb_data::push_partial_def): Fail
+ when a clobber ends up in the partial-def vector.
+ (vn_reference_lookup_3): Let clobbers be handled by the
+ assignment from CTOR handling.
+
2019-12-03 Jakub Jelinek <jakub@redhat.com>
PR tree-optimization/92734
--- /dev/null
+// { dg-do compile }
+// { dg-options "-O -fdump-tree-fre1" }
+
+inline void* operator new(__SIZE_TYPE__, void* p) { return p; }
+template<int N>
+struct Vec {
+ Vec(int v) : lo(v), hi(v) {}
+ Vec<N/2> lo, hi;
+};
+template<>
+struct Vec<1> {
+ Vec(int v) : val(v) {}
+ int val;
+};
+
+typedef int v4si __attribute__((vector_size(16)));
+void foo (v4si *dst)
+{
+ Vec<4> v(1);
+ v4si tem;
+ __builtin_memcpy (&tem, &v, sizeof (tem));
+ *dst = tem;
+}
+
+// FRE should be able to value-number 'tem' to a constant.
+// { dg-final { scan-tree-dump "\\*dst_\[0-9\]*\\\(D\\\) = { 1, 1, 1, 1 };" "fre1" } }
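For reference, the dg-final scan above checks that fre1 leaves foo storing the constant vector directly. A minimal sketch of the expected optimized shape (editorial illustration only; foo_expected is an invented name, not compiler output or part of the test):

typedef int v4si __attribute__((vector_size(16)));

// After fre1 the load of 'tem' should be value-numbered to { 1, 1, 1, 1 },
// so the store to *dst effectively becomes a constant store:
void foo_expected (v4si *dst)
{
  v4si cst = { 1, 1, 1, 1 };
  *dst = cst;
}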
if (partial_defs.is_empty ())
{
+ /* If we get a clobber upfront, fail. */
+ if (TREE_CLOBBER_P (pd.rhs))
+ return (void *)-1;
partial_defs.safe_push (pd);
first_range.offset = pd.offset;
first_range.size = pd.size;
&& ranges_known_overlap_p (r->offset, r->size + 1,
newr.offset, newr.size))
{
- /* Ignore partial defs already covered. */
+ /* Ignore partial defs already covered. Here we also drop shadowed
+ clobbers on the floor. */
if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
return NULL;
r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
r->size = MAX (r->offset + r->size,
               rafter->offset + rafter->size) - r->offset;
splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
}
+ /* If we get a clobber, fail. */
+ if (TREE_CLOBBER_P (pd.rhs))
+ return (void *)-1;
partial_defs.safe_push (pd);
/* Now we have merged newr into the range tree. When we have covered
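As a rough standalone model of the decision implemented above (editorial sketch, not GCC code: subrange_p and handle_def are invented stand-ins for known_subrange_p and the splay-tree bookkeeping, and plain longs replace the poly_int offsets): once earlier-pushed defs already cover a byte range, a def falling entirely inside that range is dropped even when it is a clobber, while a clobber that still covers live bytes aborts the walk.

#include <cstdio>

struct range { long offset, size; };

// True if [off, off+size) lies entirely within 'covered'.
static bool subrange_p (long off, long size, range covered)
{
  return off >= covered.offset
         && off + size <= covered.offset + covered.size;
}

// Mirrors the order of the checks above: fully shadowed defs (clobbers
// included) are ignored, an unshadowed clobber makes the lookup fail.
static const char *handle_def (range covered, range def, bool is_clobber)
{
  if (subrange_p (def.offset, def.size, covered))
    return "ignored (already covered)";
  if (is_clobber)
    return "fail (live clobber)";
  return "recorded as partial def";
}

int main ()
{
  range covered = { 0, 16 };
  puts (handle_def (covered, { 4, 8 }, true));   // shadowed clobber: dropped
  puts (handle_def (covered, { 8, 16 }, true));  // live clobber: fail
  puts (handle_def (covered, { 16, 8 }, false)); // ordinary def: recorded
}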
poly_int64 offset = ref->offset;
poly_int64 maxsize = ref->max_size;
- /* We can't deduce anything useful from clobbers. */
- if (gimple_clobber_p (def_stmt))
- return (void *)-1;
-
/* def_stmt may-defs *ref. See if we can derive a value for *ref
from that definition.
1) Memset. */
if (data->partial_defs.is_empty ()
&& known_subrange_p (offset, maxsize, offset2, size2))
{
+ /* While technically undefined behavior, do not optimize
+ a full read from a clobber. */
+ if (gimple_clobber_p (def_stmt))
+ return (void *)-1;
tree val = build_zero_cst (vr->type);
return vn_reference_lookup_or_insert_for_pieces
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
&& size2.is_constant (&size2i)
&& size2i % BITS_PER_UNIT == 0)
{
+ /* Let clobbers be consumed by the partial-def tracker,
+ which can choose to ignore them if they are shadowed
+ by a later def. */
pd_data pd;
pd.rhs = gimple_assign_rhs1 (def_stmt);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
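To make the relative-offset bookkeeping concrete (editorial example with invented numbers; bits_per_unit stands in for GCC's BITS_PER_UNIT, which is 8 here): if the read being looked up starts at bit offseti = 32 of the base object and the def stores starting at bit offset2i = 64, the pushed partial def begins at byte (64 - 32) / 8 = 4 relative to the read.

#include <cstdio>

int main ()
{
  const long bits_per_unit = 8;   // stand-in for GCC's BITS_PER_UNIT
  long offseti = 32, offset2i = 64;
  // Same conversion as pd.offset above: bits relative to the base object
  // become bytes relative to the start of the read.
  long pd_offset = (offset2i - offseti) / bits_per_unit;
  printf ("pd.offset = %ld\n", pd_offset);   // prints 4
}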