}
static bool
-refs_may_alias_p (tree ref1, ao_ref *ref2)
+refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
ao_ref r1;
ao_ref_init (&r1, ref1);
- return refs_may_alias_p_1 (&r1, ref2, true);
+ return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}
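
The tbaa_p flag added here is only threaded through the wrappers; the
actual type-based disambiguation lives in refs_may_alias_p_1.  As a
hedged sketch (paraphrased from the oracle, not part of this hunk),
the flag gates the TBAA early-out roughly like so:

  /* First defer to TBAA if allowed; all structural (base/offset)
     disambiguations still run when tbaa_p is false.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
                                 ao_ref_alias_set (ref2)))
    return false;
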
bool
-refs_may_alias_p (tree ref1, tree ref2)
+refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
ao_ref r1, r2;
bool res;
ao_ref_init (&r1, ref1);
ao_ref_init (&r2, ref2);
- res = refs_may_alias_p_1 (&r1, &r2, true);
+ res = refs_may_alias_p_1 (&r1, &r2, tbaa_p);
if (res)
++alias_stats.refs_may_alias_p_may_alias;
else
otherwise return false. */
static bool
-ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
+ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
tree base, callee;
unsigned i;
{
ao_ref r;
ao_ref_init (&r, op);
- if (refs_may_alias_p_1 (&r, ref, true))
+ if (refs_may_alias_p_1 (&r, ref, tbaa_p))
return true;
}
}
}
static bool
-ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
+ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
{
bool res;
- res = ref_maybe_used_by_call_p_1 (call, ref);
+ res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
if (res)
++alias_stats.ref_maybe_used_by_call_p_may_alias;
else
true, otherwise return false. */
bool
-ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
if (is_gimple_assign (stmt))
{
|| gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
return false;
- return refs_may_alias_p (rhs, ref);
+ return refs_may_alias_p (rhs, ref, tbaa_p);
}
else if (is_gimple_call (stmt))
- return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
+ return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
tree retval = gimple_return_retval (return_stmt);
if (retval
&& TREE_CODE (retval) != SSA_NAME
&& !is_gimple_min_invariant (retval)
- && refs_may_alias_p (retval, ref))
+ && refs_may_alias_p (retval, ref, tbaa_p))
return true;
/* If ref escapes the function then the return acts as a use. */
tree base = ao_ref_base (ref);
}
bool
-ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
{
ao_ref r;
ao_ref_init (&r, ref);
- return ref_maybe_used_by_stmt_p (stmt, &r);
+ return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
}
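
To see what the flag changes at the source level, consider a small
example (illustrative only, not from the patch):

  void f (int *p, float *q)
  {
    *p = 1;
    *q = 2.0f;
  }

With tbaa_p = true the oracle may answer "no alias" for *p against *q
because int and float accesses have non-conflicting alias sets; with
tbaa_p = false it ignores the types and conservatively reports that
the two references may alias.
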
/* If the call in statement CALL may clobber the memory reference REF
otherwise return false. */
bool
-stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
+stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
if (is_gimple_call (stmt))
{
{
ao_ref r;
ao_ref_init (&r, lhs);
- if (refs_may_alias_p_1 (ref, &r, true))
+ if (refs_may_alias_p_1 (ref, &r, tbaa_p))
return true;
}
{
ao_ref r;
ao_ref_init (&r, lhs);
- return refs_may_alias_p_1 (ref, &r, true);
+ return refs_may_alias_p_1 (ref, &r, tbaa_p);
}
}
else if (gimple_code (stmt) == GIMPLE_ASM)
}
bool
-stmt_may_clobber_ref_p (gimple *stmt, tree ref)
+stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
ao_ref r;
ao_ref_init (&r, ref);
- return stmt_may_clobber_ref_p_1 (stmt, &r);
+ return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
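
A pass that wants to move a statement across a memory reference has
to check both directions, clobbers and uses, and must do so without
TBAA.  A minimal helper sketch (can_move_across_p is a hypothetical
name, not part of the patch):

  /* Return true if STMT neither writes nor reads memory that may
     overlap REF, ignoring TBAA, so STMT can be moved across REF.  */
  static bool
  can_move_across_p (gimple *stmt, ao_ref *ref)
  {
    return !stmt_may_clobber_ref_p_1 (stmt, ref, false)
           && !ref_maybe_used_by_stmt_p (stmt, ref, false);
  }

This is exactly the pair of queries the vectorizer hunk below issues
when it lacks a data reference for a statement.
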
/* Return true if store1 and store2 described by corresponding tuples
extern bool ptrs_compare_unequal (tree, tree);
extern bool ref_may_alias_global_p (tree);
extern bool ref_may_alias_global_p (ao_ref *);
-extern bool refs_may_alias_p (tree, tree);
+extern bool refs_may_alias_p (tree, tree, bool = true);
extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool);
extern bool refs_anti_dependent_p (tree, tree);
extern bool refs_output_dependent_p (tree, tree);
-extern bool ref_maybe_used_by_stmt_p (gimple *, tree);
-extern bool ref_maybe_used_by_stmt_p (gimple *, ao_ref *);
+extern bool ref_maybe_used_by_stmt_p (gimple *, tree, bool = true);
+extern bool ref_maybe_used_by_stmt_p (gimple *, ao_ref *, bool = true);
extern bool stmt_may_clobber_global_p (gimple *);
-extern bool stmt_may_clobber_ref_p (gimple *, tree);
-extern bool stmt_may_clobber_ref_p_1 (gimple *, ao_ref *);
+extern bool stmt_may_clobber_ref_p (gimple *, tree, bool = true);
+extern bool stmt_may_clobber_ref_p_1 (gimple *, ao_ref *, bool = true);
extern bool call_may_clobber_ref_p (gcall *, tree);
extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
extern bool stmt_kills_ref_p (gimple *, tree);
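
Defaulting the new parameter to true keeps every existing call site
unchanged and preserves the old behavior; only refs_may_alias_p_1,
whose bool parameter already existed, keeps it mandatory.  For
example:

  ao_ref r;
  ao_ref_init (&r, lhs);
  stmt_may_clobber_ref_p_1 (stmt, &r);        /* TBAA on, as before */
  stmt_may_clobber_ref_p_1 (stmt, &r, false); /* explicit opt-out   */
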
if (access == last_access)
continue;
data_reference *dr_a = STMT_VINFO_DATA_REF (vinfo_for_stmt (access));
+ ao_ref ref;
+ bool ref_initialized_p = false;
for (gimple_stmt_iterator gsi = gsi_for_stmt (access);
gsi_stmt (gsi) != last_access; gsi_next (&gsi))
{
continue;
/* If we couldn't record a (single) data reference for this
- stmt we have to give up. */
- /* ??? Here and below if dependence analysis fails we can resort
- to the alias oracle which can handle more kinds of stmts. */
+ stmt, we have to resort to the alias oracle. */
data_reference *dr_b = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt));
if (!dr_b)
- return false;
+ {
+ /* We are moving a store or sinking a load, which means we
+ cannot use TBAA for disambiguation. */
+ if (!ref_initialized_p)
+ {
+ ao_ref_init (&ref, DR_REF (dr_a));
+ ref_initialized_p = true;
+ }
+ if (stmt_may_clobber_ref_p_1 (stmt, &ref, false)
+ || ref_maybe_used_by_stmt_p (stmt, &ref, false))
+ return false;
+ continue;
+ }
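
The reason TBAA must be off here: for dynamically allocated storage
the effective type is set by the last store, so a type-based
"no alias" answer is only valid at the accesses' original program
points.  A hedged illustration (not from the patch):

  #include <stdlib.h>

  int g (void)
  {
    void *p = malloc (4);
    *(int *) p = 1;       /* effective type of the storage: int  */
    int i = *(int *) p;   /* valid int load, reads 1             */
    *(float *) p = 2.0f;  /* effective type changes to float     */
    return i;
  }

TBAA says the int load and the float store do not alias, yet sinking
the load past the store (or hoisting the store above the load) would
be wrong code, which is why the queries above pass false for tbaa_p.
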
bool dependent = false;
/* If we run into a store of this same instance (we've just
"failed ");
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
-
if (is_a <bb_vec_info> (vinfo))
- break;
-
+ {
+ /* In BB vectorization the ref can still participate
+ in dependence analysis; we just can't vectorize it. */
+ STMT_VINFO_VECTORIZABLE (stmt_info) = false;
+ continue;
+ }
return false;
}
}
}
}
- /* If we stopped analysis at the first dataref we could not analyze
- when trying to vectorize a basic-block mark the rest of the datarefs
- as not vectorizable and truncate the vector of datarefs. That
- avoids spending useless time in analyzing their dependence. */
- if (i != datarefs.length ())
- {
- gcc_assert (is_a <bb_vec_info> (vinfo));
- for (unsigned j = i; j < datarefs.length (); ++j)
- {
- data_reference_p dr = datarefs[j];
- STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
- free_data_ref (dr);
- }
- datarefs.truncate (i);
- }
+ /* We used to stop processing and prune the list here. Verify we no
+ longer need to. */
+ gcc_assert (i == datarefs.length ());
return true;
}