+2020-05-08 Richard Biener <rguenther@suse.de>
+
+ * tree-ssa-sccvn.c (rpo_avail): Change type to
+ eliminate_dom_walker *.
+ (eliminate_with_rpo_vn): Adjust rpo_avail to make vn_valueize
+ use the DOM walker availability.
+ (vn_reference_fold_indirect): Use get_addr_base_and_unit_offset_1
+ with vn_valueize as valueization callback.
+ (vn_reference_maybe_forwprop_address): Likewise.
+ * tree-dfa.c (get_addr_base_and_unit_offset_1): Also valueize
+ array_ref_low_bound.
+
2020-05-08 Jakub Jelinek <jakub@redhat.com>
PR tree-optimization/94786
+2020-05-08 Richard Biener <rguenther@suse.de>
+
+ * gnat.dg/opt83.adb: New testcase.
+
2020-05-08 Jakub Jelinek <jakub@redhat.com>
PR tree-optimization/94786
--- /dev/null
+-- { dg-do compile }
+-- { dg-options "-O2" }
+
+-- rpo fre3 used to loop indefinitely replacing _2 with _8 and back,
+-- given MEM[(struct test__e &)_2][0]{lb: _7 sz: 16}._tag = A23s_29;
+-- and an earlier _8 = &*_2[0]{lb: _7 sz: 16}.
+
+procedure Opt83 is
+
+   --  Tagged record: the compiler adds a hidden _tag component, which is
+   --  the field written by the store quoted in the header comment above.
+   type E is tagged record
+      I : Natural := 0;
+   end record;
+
+   --  Unconstrained array of aliased elements, so element references
+   --  carry explicit bounds ({lb: ...}) in the GIMPLE quoted above.
+   type A is array (Natural range <>) of aliased E;
+
+   F : E;
+
+   R : access A;
+
+   --  Allocates R lazily; done in a separate procedure so the bounds of
+   --  R.all are not statically known at the assignment in the main body.
+   procedure N is
+   begin
+      if R = null then
+         R := new A (0 .. 4);
+      end if;
+   end N;
+
+begin
+
+   N;
+
+   R (0) := F;
+
+end Opt83;
if (valueize
&& TREE_CODE (index) == SSA_NAME)
index = (*valueize) (index);
+ if (!poly_int_tree_p (index))
+ return NULL_TREE;
+ low_bound = array_ref_low_bound (exp);
+ if (valueize
+ && TREE_CODE (low_bound) == SSA_NAME)
+ low_bound = (*valueize) (low_bound);
+ if (!poly_int_tree_p (low_bound))
+ return NULL_TREE;
+ unit_size = array_ref_element_size (exp);
+ if (TREE_CODE (unit_size) != INTEGER_CST)
+ return NULL_TREE;
/* If the resulting bit-offset is constant, track it. */
- if (poly_int_tree_p (index)
- && (low_bound = array_ref_low_bound (exp),
- poly_int_tree_p (low_bound))
- && (unit_size = array_ref_element_size (exp),
- TREE_CODE (unit_size) == INTEGER_CST))
- {
- poly_offset_int woffset
- = wi::sext (wi::to_poly_offset (index)
- - wi::to_poly_offset (low_bound),
- TYPE_PRECISION (TREE_TYPE (index)));
- woffset *= wi::to_offset (unit_size);
- byte_offset += woffset.force_shwi ();
- }
- else
- return NULL_TREE;
+ poly_offset_int woffset
+ = wi::sext (wi::to_poly_offset (index)
+ - wi::to_poly_offset (low_bound),
+ TYPE_PRECISION (TREE_TYPE (index)));
+ woffset *= wi::to_offset (unit_size);
+ byte_offset += woffset.force_shwi ();
}
break;
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
address with &OBJ. */
- addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
- &addr_offset);
+ addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
+ &addr_offset, vn_valueize);
gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
if (addr_base != TREE_OPERAND (op->op0, 0))
{
poly_int64 addr_offset;
addr = gimple_assign_rhs1 (def_stmt);
- addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
- &addr_offset);
+ addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
+ &addr_offset,
+ vn_valueize);
/* If that didn't work because the address isn't invariant propagate
the reference tree from the address operation in case the current
dereference isn't offsetted. */
};
/* Global RPO state for access from hooks. */
-static rpo_elim *rpo_avail;
+static eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;
/* Return true if BASE1 and BASE2 can be adjusted so they have the
{
eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
+ eliminate_dom_walker *saved_rpo_avail = rpo_avail;
+ rpo_avail = &walker;
walker.walk (cfun->cfg->x_entry_block_ptr);
+ rpo_avail = saved_rpo_avail;
+
return walker.eliminate_cleanup ();
}