From a8aa828b6e94aecb188786f49e8b1b7a7cfb5391 Mon Sep 17 00:00:00 2001
From: Richard Biener
Date: Wed, 22 May 2019 07:44:24 +0000
Subject: [PATCH] re PR tree-optimization/90450 (Hash function in
 gather_mem_refs_stmt does not match with mem_ref_hasher::equal)

2019-05-22  Richard Biener

	PR tree-optimization/90450
	* tree-ssa-loop-im.c (struct im_mem_ref): Add ref_decomposed.
	(mem_ref_hasher::equal): Check it.
	(mem_ref_alloc): Initialize it.
	(gather_mem_refs_stmt): Set it.

From-SVN: r271503
---
 gcc/ChangeLog          |  8 ++++++++
 gcc/tree-ssa-loop-im.c | 11 +++++++++--
 2 files changed, 17 insertions(+), 2 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 310f62882a3..ff5ffdd2f76 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2019-05-22  Richard Biener
+
+	PR tree-optimization/90450
+	* tree-ssa-loop-im.c (struct im_mem_ref): Add ref_decomposed.
+	(mem_ref_hasher::equal): Check it.
+	(mem_ref_alloc): Initialize it.
+	(gather_mem_refs_stmt): Set it.
+
 2019-05-22  Richard Biener
 
 	* gimple-fold.c (arith_code_with_undefined_signed_overflow):
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index 56d8e8e4330..2064c2900fb 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -115,9 +115,10 @@ struct mem_ref_loc
 
 struct im_mem_ref
 {
-  unsigned id : 31;             /* ID assigned to the memory reference
+  unsigned id : 30;             /* ID assigned to the memory reference
                                    (its index in memory_accesses.refs_list)  */
   unsigned ref_canonical : 1;   /* Whether mem.ref was canonicalized.  */
+  unsigned ref_decomposed : 1;  /* Whether the ref was hashed from mem.  */
   hashval_t hash;               /* Its hash value.  */
 
   /* The memory access itself and associated caching of alias-oracle
@@ -173,7 +174,8 @@ inline bool
 mem_ref_hasher::equal (const im_mem_ref *mem1, const ao_ref *obj2)
 {
   if (obj2->max_size_known_p ())
-    return (operand_equal_p (mem1->mem.base, obj2->base, 0)
+    return (mem1->ref_decomposed
+            && operand_equal_p (mem1->mem.base, obj2->base, 0)
             && known_eq (mem1->mem.offset, obj2->offset)
             && known_eq (mem1->mem.size, obj2->size)
             && known_eq (mem1->mem.max_size, obj2->max_size)
@@ -1389,6 +1391,7 @@ mem_ref_alloc (ao_ref *mem, unsigned hash, unsigned id)
     ao_ref_init (&ref->mem, error_mark_node);
   ref->id = id;
   ref->ref_canonical = false;
+  ref->ref_decomposed = false;
   ref->hash = hash;
   ref->stored = NULL;
   bitmap_initialize (&ref->indep_loop, &lim_bitmap_obstack);
@@ -1476,6 +1479,7 @@ gather_mem_refs_stmt (struct loop *loop, gimple *stmt)
       HOST_WIDE_INT offset, size, max_size;
       poly_int64 saved_maxsize = aor.max_size, mem_off;
       tree mem_base;
+      bool ref_decomposed;
       if (aor.max_size_known_p ()
           && aor.offset.is_constant (&offset)
           && aor.size.is_constant (&size)
@@ -1489,12 +1493,14 @@ gather_mem_refs_stmt (struct loop *loop, gimple *stmt)
                                   aor.size)
           && (mem_base = get_addr_base_and_unit_offset (aor.ref, &mem_off)))
         {
+          ref_decomposed = true;
           hash = iterative_hash_expr (ao_ref_base (&aor), 0);
           hash = iterative_hash_host_wide_int (offset, hash);
           hash = iterative_hash_host_wide_int (size, hash);
         }
       else
         {
+          ref_decomposed = false;
          hash = iterative_hash_expr (aor.ref, 0);
          aor.max_size = -1;
        }
@@ -1543,6 +1549,7 @@ gather_mem_refs_stmt (struct loop *loop, gimple *stmt)
        {
          id = memory_accesses.refs_list.length ();
          ref = mem_ref_alloc (&aor, hash, id);
+         ref->ref_decomposed = ref_decomposed;
          memory_accesses.refs_list.safe_push (ref);
          *slot = ref;
 
-- 
2.30.2
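
Background note (not part of the upstream commit): the hash table behind
memory_accesses.refs_list requires that whenever mem_ref_hasher::equal
reports a match between a stored im_mem_ref and a lookup ao_ref, both must
also have been hashed the same way.  Before this change gather_mem_refs_stmt
hashed some references from their decomposed base/offset/size and others
from the whole mem.ref, while equal compared base/offset/size whenever the
lookup key had a known max_size, regardless of how the stored entry had been
hashed, so hash and equal could disagree (PR90450).  The ref_decomposed bit
records which hashing path produced an entry, and equal only does the
field-wise comparison for entries that were really decomposed.  The sketch
below is a minimal standalone C++ illustration of that invariant; the type,
field and function names are simplified stand-ins, not the GCC ones.

/* Illustrative sketch only -- NOT GCC code.  mem_ref, hash_ref and
   equal_ref are simplified stand-ins for im_mem_ref, the hashing in
   gather_mem_refs_stmt and mem_ref_hasher::equal.  */

#include <cstdint>
#include <functional>
#include <iostream>
#include <string>

struct mem_ref
{
  std::string whole_ref;   /* stand-in for mem.ref (the full tree)  */
  std::string base;        /* stand-in for mem.base  */
  int64_t offset;
  int64_t size;
  bool ref_decomposed;     /* the bit the patch adds  */
  size_t hash;
};

/* Mirrors the two hashing paths in gather_mem_refs_stmt: decomposed refs
   hash their base/offset/size, everything else hashes the whole ref.  */
static size_t
hash_ref (const mem_ref &r)
{
  if (r.ref_decomposed)
    return (std::hash<std::string> () (r.base)
            ^ (std::hash<int64_t> () (r.offset) * 31)
            ^ (std::hash<int64_t> () (r.size) * 131));
  return std::hash<std::string> () (r.whole_ref);
}

/* Mirrors mem_ref_hasher::equal after the fix: a decomposed lookup key
   only matches entries that were themselves hashed from their
   decomposition.  Dropping the entry.ref_decomposed check reintroduces
   the bug: equal () could accept an entry hashed the other way.  */
static bool
equal_ref (const mem_ref &entry, const mem_ref &key)
{
  if (key.ref_decomposed)
    return (entry.ref_decomposed
            && entry.base == key.base
            && entry.offset == key.offset
            && entry.size == key.size);
  return entry.whole_ref == key.whole_ref;
}

int
main ()
{
  /* An entry inserted via the "hash the whole ref" path ...  */
  mem_ref stored = { "MEM[p].a", "p", 0, 4, false, 0 };
  stored.hash = hash_ref (stored);

  /* ... and a later lookup of the same access via the decomposed path.  */
  mem_ref key = { "MEM[p].a", "p", 0, 4, true, 0 };
  key.hash = hash_ref (key);

  /* Their hashes are computed differently, so equal () must not claim
     they are the same table entry; with the ref_decomposed check it
     does not.  */
  std::cout << "hashes equal: " << (stored.hash == key.hash)
            << ", equal (): " << equal_ref (stored, key) << "\n";
  return 0;
}

Built with any C++11 compiler, the sketch reports equal (): 0 for the mixed
pair (and their hashes will in general differ too): an entry hashed one way
is never considered equal to a key hashed the other way, which is the
hash/equal consistency the patch restores in tree-ssa-loop-im.c.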