+2014-11-24 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/55334
+ * function.h (struct function): Add last_clique member.
+ * tree-inline.c (remap_dependence_clique): New function.
+ (remap_gimple_op_r): Remap dependence cliques in MEM_REFs.
+ (copy_tree_body_r): Likewise.
+ (copy_cfg_body): Free dependence map.
+ (copy_gimple_seq_and_replace_locals): Likewise.
+ * tree-pretty-print.c (dump_generic_node): Dump
+ dependence info.
+ * tree-ssa-alias.c (refs_may_alias_p_1): Use dependence info
+ to answer alias query.
+ * tree-ssa-structalias.c: Include tree-phinodes.h, ssa-iterators.h,
+ tree-pretty-print.h and gimple-walk.h.
+ (struct variable_info): Add is_restrict_var flag and ruid
+ member.
+ (new_var_info): Initialize is_restrict_var.
+ (make_constraint_from_restrict): Likewise.
+ (create_variable_info_for): Exclude restricts from global vars
+ from new handling.
+ (intra_create_variable_infos): But not those from parameters.
+ (visit_loadstore): New function.
+ (maybe_set_dependence_info): Likewise.
+ (compute_dependence_clique): Likewise.
+ (compute_may_aliases): Call compute_dependence_clique.
+ * tree-data-ref.c (dr_analyze_indices): Copy dependence info
+ to fake MEM_REF.
+ (dr_may_alias_p): Use recorded dependence info to answer
+ alias query.
+ * tree-core.h (struct tree_base): Add clique, base struct in
+ union.
+ * tree.h (MR_DEPENDENCE_CLIQUE): New macro.
+ (MR_DEPENDENCE_BASE): Likewise.
+ * tree-inline.h (dependence_hasher): New hash-map kind.
+ (struct copy_body_data): Add dependence_map pointer.
+ * gimple-fold.c (maybe_canonicalize_mem_ref_addr): Avoid
+ throwing away dependence info.
+ * tree-streamer-in.c (unpack_value_fields): Stream dependence info.
+ * tree-streamer-out.c (streamer_pack_tree_bitfields): Likewise.
+
2014-11-23 Oleg Endo <olegendo@gcc.gnu.org>
PR target/53976
a string describing the reason for failure. */
const char * GTY((skip)) cannot_be_copied_reason;
+ /* Last assigned dependence info clique. */
+ unsigned short last_clique;
+
/* Collected bit flags. */
/* Number of units of general registers that need saving in stdarg
accessed is a decl that has the same access semantics as the MEM_REF. */
if (TREE_CODE (*t) == MEM_REF
&& TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
- && integer_zerop (TREE_OPERAND (*t, 1)))
+ && integer_zerop (TREE_OPERAND (*t, 1))
+ && MR_DEPENDENCE_CLIQUE (*t) == 0)
{
tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
+2014-11-24 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/55334
+ * gcc.dg/tree-ssa/restrict-5.c: New testcase.
+
2014-11-24 Eric Botcazou <ebotcazou@adacore.com>
* gnat.dg/opt45.adb: New test.
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O -fno-strict-aliasing -fdump-tree-lim1-details" } */
+
+/* PR tree-optimization/55334: the always_inline attribute forces F to
+   be inlined into G, so the restrict-derived dependence info must be
+   preserved across inlining for LIM to disambiguate *r from the
+   accesses through A and B — TODO confirm against the lim1 dump.  */
+
+static inline __attribute__((always_inline))
+void f(int * __restrict__ r,
+ int a[__restrict__ 16][16],
+ int b[__restrict__ 16][16],
+ int i, int j)
+{
+ int x;
+ *r = 0;
+ for (x = 1; x < 16; ++x)
+ *r = *r + a[i][x] * b[x][j];
+}
+
+void g(int *r, int a[16][16], int b[16][16], int i, int j)
+{
+ f (r, a, b, i ,j);
+}
+
+/* We should apply store motion to the store to *r. */
+
+/* { dg-final { scan-tree-dump "Executing store motion of \\\*r" "lim1" } } */
+/* { dg-final { cleanup-tree-dump "lim1" } } */
/* Internal function code. */
enum internal_fn ifn;
+
+ /* The following two fields are used for MEM_REF and TARGET_MEM_REF
+ expression trees and specify known data non-dependences. For
+ two memory references in a function they are known to not
+ alias if dependence_info.clique are equal and dependence_info.base
+ are distinct. */
+ struct {
+ unsigned short clique;
+ unsigned short base;
+ } dependence_info;
} GTY((skip(""))) u;
};
guaranteed.
As a band-aid, mark the access so we can special-case
it in dr_may_alias_p. */
+ tree old = ref;
ref = fold_build2_loc (EXPR_LOCATION (ref),
MEM_REF, TREE_TYPE (ref),
base, memoff);
+ MR_DEPENDENCE_CLIQUE (ref) = MR_DEPENDENCE_CLIQUE (old);
+ MR_DEPENDENCE_BASE (ref) = MR_DEPENDENCE_BASE (old);
access_fns.safe_push (access_fn);
}
}
return false;
}
+ if ((TREE_CODE (addr_a) == MEM_REF || TREE_CODE (addr_a) == TARGET_MEM_REF)
+ && (TREE_CODE (addr_b) == MEM_REF || TREE_CODE (addr_b) == TARGET_MEM_REF)
+ && MR_DEPENDENCE_CLIQUE (addr_a) == MR_DEPENDENCE_CLIQUE (addr_b)
+ && MR_DEPENDENCE_BASE (addr_a) != MR_DEPENDENCE_BASE (addr_b))
+ return false;
+
/* If we had an evolution in a pointer-based MEM_REF BASE_OBJECT we
do not know the size of the base-object. So we cannot do any
offset/overlap based analysis but have to rely on points-to
return (TREE_CODE (decl) == PARM_DECL);
}
+/* Remap the dependence CLIQUE from the source to the destination function
+   as specified in ID. */
+
+static unsigned short
+remap_dependence_clique (copy_body_data *id, unsigned short clique)
+{
+ /* Clique 0 means "no dependence info"; it maps to itself.  */
+ if (clique == 0)
+ return 0;
+ /* Lazily allocate the source-to-destination clique map; it is freed
+    after body copying (copy_cfg_body and
+    copy_gimple_seq_and_replace_locals delete it).  */
+ if (!id->dependence_map)
+ id->dependence_map
+ = new hash_map<unsigned short, unsigned short, dependence_hasher>;
+ bool existed;
+ unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
+ if (!existed)
+ /* First occurrence of this source clique: assign a fresh clique
+    number in the destination function.  */
+ newc = ++cfun->last_clique;
+ return newc;
+}
+
/* Remap the GIMPLE operand pointed to by *TP. DATA is really a
'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+ if (MR_DEPENDENCE_CLIQUE (old) != 0)
+ {
+ MR_DEPENDENCE_CLIQUE (*tp)
+ = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
+ MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
+ }
/* We cannot propagate the TREE_THIS_NOTRAP flag if we have
remapped a parameter as the property might be valid only
for the parameter itself. */
TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+ if (MR_DEPENDENCE_CLIQUE (old) != 0)
+ {
+ MR_DEPENDENCE_CLIQUE (*tp)
+ = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
+ MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
+ }
/* We cannot propagate the TREE_THIS_NOTRAP flag if we have
remapped a parameter as the property might be valid only
for the parameter itself. */
delete id->eh_map;
id->eh_map = NULL;
}
+ if (id->dependence_map)
+ {
+ delete id->dependence_map;
+ id->dependence_map = NULL;
+ }
return new_fndecl;
}
delete id.decl_map;
if (id.debug_map)
delete id.debug_map;
+ if (id.dependence_map)
+ {
+ delete id.dependence_map;
+ id.dependence_map = NULL;
+ }
return copy;
}
CB_CGE_MOVE_CLONES
};
+/* Hash-map traits for the dependence-clique remap table.  Clique
+   number 0 is never inserted (remap_dependence_clique returns early
+   for it), so it can safely double as the empty-slot marker.  Entries
+   are never removed from the map, hence deletion support is
+   unreachable.  */
+
+struct dependence_hasher : default_hashmap_traits
+{
+ template<typename T>
+ static void
+ mark_deleted (T &e)
+ { gcc_unreachable (); }
+
+ template<typename T>
+ static void
+ mark_empty (T &e)
+ { e.m_key = 0; }
+
+ template<typename T>
+ static bool
+ is_deleted (T &)
+ { return false; }
+
+ template<typename T> static bool is_empty (T &e) { return e.m_key == 0; }
+};
+
/* Data required for function body duplication. */
struct copy_body_data
/* Cilk keywords currently need to replace some variables that
ordinary nested functions do not. */
bool remap_var_for_cilk;
+
+ /* A map from the inlined functions dependence info cliques to
+ equivalents in the function into which it is being inlined. */
+ hash_map<unsigned short, unsigned short, dependence_hasher> *dependence_map;
};
/* Weights of constructions for estimate_num_insns. */
/* Same value types ignoring qualifiers. */
&& (TYPE_MAIN_VARIANT (TREE_TYPE (node))
== TYPE_MAIN_VARIANT
- (TREE_TYPE (TREE_TYPE (TREE_OPERAND (node, 1))))))
+ (TREE_TYPE (TREE_TYPE (TREE_OPERAND (node, 1)))))
+ && (!(flags & TDF_ALIAS)
+ || MR_DEPENDENCE_CLIQUE (node) == 0))
{
if (TREE_CODE (TREE_OPERAND (node, 0)) != ADDR_EXPR)
{
dump_generic_node (buffer, TREE_OPERAND (node, 1),
spc, flags, false);
}
+ if ((flags & TDF_ALIAS)
+ && MR_DEPENDENCE_CLIQUE (node) != 0)
+ {
+ pp_string (buffer, " clique ");
+ pp_unsigned_wide_integer (buffer, MR_DEPENDENCE_CLIQUE (node));
+ pp_string (buffer, " base ");
+ pp_unsigned_wide_integer (buffer, MR_DEPENDENCE_BASE (node));
+ }
pp_right_bracket (buffer);
}
break;
/* Same value types ignoring qualifiers. */
&& (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
== TYPE_MAIN_VARIANT
- (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 1))))))))
+ (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 1)))))
+ && MR_DEPENDENCE_CLIQUE (op0) == 0)))
{
op0 = TREE_OPERAND (op0, 0);
str = "->";
return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
ref2->ref, base2, offset2, max_size2);
+ /* Handle restrict based accesses.
+ ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
+ here. */
+ tree rbase1 = base1;
+ tree rbase2 = base2;
+ if (var1_p)
+ {
+ rbase1 = ref1->ref;
+ if (rbase1)
+ while (handled_component_p (rbase1))
+ rbase1 = TREE_OPERAND (rbase1, 0);
+ }
+ if (var2_p)
+ {
+ rbase2 = ref2->ref;
+ if (rbase2)
+ while (handled_component_p (rbase2))
+ rbase2 = TREE_OPERAND (rbase2, 0);
+ }
+ if (rbase1 && rbase2
+ && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
+ && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
+ /* If the accesses are in the same restrict clique... */
+ && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
+ /* But based on different pointers they do not alias. */
+ && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
+ return false;
+
ind1_p = (TREE_CODE (base1) == MEM_REF
|| TREE_CODE (base1) == TARGET_MEM_REF);
ind2_p = (TREE_CODE (base2) == MEM_REF
unpack_ts_type_common_value_fields (bp, expr);
if (CODE_CONTAINS_STRUCT (code, TS_EXP))
- SET_EXPR_LOCATION (expr, stream_input_location (bp, data_in));
+ {
+ SET_EXPR_LOCATION (expr, stream_input_location (bp, data_in));
+ if (code == MEM_REF
+ || code == TARGET_MEM_REF)
+ {
+ MR_DEPENDENCE_CLIQUE (expr)
+ = (unsigned)bp_unpack_value (bp, sizeof (short) * 8);
+ if (MR_DEPENDENCE_CLIQUE (expr) != 0)
+ MR_DEPENDENCE_BASE (expr)
+ = (unsigned)bp_unpack_value (bp, sizeof (short) * 8);
+ }
+ }
if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
unpack_ts_block_value_fields (data_in, bp, expr);
pack_ts_type_common_value_fields (bp, expr);
if (CODE_CONTAINS_STRUCT (code, TS_EXP))
- stream_output_location (ob, bp, EXPR_LOCATION (expr));
+ {
+ stream_output_location (ob, bp, EXPR_LOCATION (expr));
+ if (code == MEM_REF
+ || code == TARGET_MEM_REF)
+ {
+ bp_pack_value (bp, MR_DEPENDENCE_CLIQUE (expr), sizeof (short) * 8);
+ if (MR_DEPENDENCE_CLIQUE (expr) != 0)
+ bp_pack_value (bp, MR_DEPENDENCE_BASE (expr), sizeof (short) * 8);
+ }
+ }
if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
pack_ts_block_value_fields (ob, bp, expr);
#define TMR_STEP(NODE) (TREE_OPERAND (TARGET_MEM_REF_CHECK (NODE), 3))
#define TMR_INDEX2(NODE) (TREE_OPERAND (TARGET_MEM_REF_CHECK (NODE), 4))
+#define MR_DEPENDENCE_CLIQUE(NODE) \
+ (TREE_CHECK2 (NODE, MEM_REF, TARGET_MEM_REF)->base.u.dependence_info.clique)
+#define MR_DEPENDENCE_BASE(NODE) \
+ (TREE_CHECK2 (NODE, MEM_REF, TARGET_MEM_REF)->base.u.dependence_info.base)
+
/* The operands of a BIND_EXPR. */
#define BIND_EXPR_VARS(NODE) (TREE_OPERAND (BIND_EXPR_CHECK (NODE), 0))
#define BIND_EXPR_BODY(NODE) (TREE_OPERAND (BIND_EXPR_CHECK (NODE), 1))