{
vn_reference_t vr = (vn_reference_t)vr_;
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
- tree fndecl;
tree base;
HOST_WIDE_INT offset, maxsize;
static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
from that definition.
1) Memset. */
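+  /* Illustrative example (hypothetical variables): after
+       memset (&a, 0, sizeof (a));
+     a subsequent scalar load from A can be resolved to zero; the
+     integer_zerop check below restricts this to zeroing memsets.  */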
if (is_gimple_reg_type (vr->type)
- && is_gimple_call (def_stmt)
- && (fndecl = gimple_call_fndecl (def_stmt))
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+ && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
&& integer_zerop (gimple_call_arg (def_stmt, 1))
&& host_integerp (gimple_call_arg (def_stmt, 2), 1)
&& TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
}
}
- /* For aggregate copies translate the reference through them if
+ /* 3) For aggregate copies translate the reference through them if
the copy kills ref. */
else if (vn_walk_kind == VN_WALKREWRITE
&& gimple_assign_single_p (def_stmt)
return NULL;
}
+ /* 4) For memcpy copies translate the reference through them if
+ the copy kills ref. */
+ else if (vn_walk_kind == VN_WALKREWRITE
+ && is_gimple_reg_type (vr->type)
+ /* ??? Handle BCOPY as well. */
+ && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
+ || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
+ || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
+ && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
+ || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
+ && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
+ || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
+ && host_integerp (gimple_call_arg (def_stmt, 2), 1))
+ {
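+      /* Illustrative example (hypothetical variables): for
+           memcpy (&a, &b, sizeof (a));
+           x = a;
+         the lookup of A is rewritten into a lookup of B, so X can
+         reuse a value already known for B.  */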
+ tree lhs, rhs;
+ ao_ref r;
+ HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
+ vn_reference_op_s op;
+ HOST_WIDE_INT at;
+
+ /* Only handle non-variable, addressable refs. */
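+      /* The rewrite below works in whole bytes, so the access must have
+         a known constant size and start and end on byte boundaries.  */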
+ if (ref->size != maxsize
+ || offset % BITS_PER_UNIT != 0
+ || ref->size % BITS_PER_UNIT != 0)
+ return (void *)-1;
+
+ /* Extract a pointer base and an offset for the destination. */
+ lhs = gimple_call_arg (def_stmt, 0);
+ lhs_offset = 0;
+ if (TREE_CODE (lhs) == SSA_NAME)
+ lhs = SSA_VAL (lhs);
+ if (TREE_CODE (lhs) == ADDR_EXPR)
+ {
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
+ &lhs_offset);
+ if (!tem)
+ return (void *)-1;
+ if (TREE_CODE (tem) == MEM_REF
+ && host_integerp (TREE_OPERAND (tem, 1), 1))
+ {
+ lhs = TREE_OPERAND (tem, 0);
+ lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ }
+ else if (DECL_P (tem))
+ lhs = build_fold_addr_expr (tem);
+ else
+ return (void *)-1;
+ }
+ if (TREE_CODE (lhs) != SSA_NAME
+ && TREE_CODE (lhs) != ADDR_EXPR)
+ return (void *)-1;
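+      /* Here LHS is either an SSA name pointer or the address of a decl,
+         and LHS_OFFSET is the constant byte offset of the destination
+         from that base.  The source is handled symmetrically below.  */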
+
+ /* Extract a pointer base and an offset for the source. */
+ rhs = gimple_call_arg (def_stmt, 1);
+ rhs_offset = 0;
+ if (TREE_CODE (rhs) == SSA_NAME)
+ rhs = SSA_VAL (rhs);
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ {
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
+ &rhs_offset);
+ if (!tem)
+ return (void *)-1;
+ if (TREE_CODE (tem) == MEM_REF
+ && host_integerp (TREE_OPERAND (tem, 1), 1))
+ {
+ rhs = TREE_OPERAND (tem, 0);
+ rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ }
+ else if (DECL_P (tem))
+ rhs = build_fold_addr_expr (tem);
+ else
+ return (void *)-1;
+ }
+ if (TREE_CODE (rhs) != SSA_NAME
+ && TREE_CODE (rhs) != ADDR_EXPR)
+ return (void *)-1;
+
+ copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
+
+      /* The bases of the destination and the reference have to agree. */
+ if ((TREE_CODE (base) != MEM_REF
+ && !DECL_P (base))
+ || (TREE_CODE (base) == MEM_REF
+ && (TREE_OPERAND (base, 0) != lhs
+ || !host_integerp (TREE_OPERAND (base, 1), 1)))
+ || (DECL_P (base)
+ && (TREE_CODE (lhs) != ADDR_EXPR
+ || TREE_OPERAND (lhs, 0) != base)))
+ return (void *)-1;
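+      /* I.e. the reference base must be MEM [lhs + CST] or a decl whose
+         address LHS is.  */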
+
+ /* And the access has to be contained within the memcpy destination. */
+ at = offset / BITS_PER_UNIT;
+ if (TREE_CODE (base) == MEM_REF)
+ at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
+ if (lhs_offset > at
+ || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
+ return (void *)-1;
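+      /* Illustrative numbers: with LHS_OFFSET 0 and COPY_SIZE 16, an
+         access of 8 bytes at byte 4 is covered (0 <= 4 && 0 + 16 >= 4 + 8),
+         while one of 8 bytes at byte 12 is not (16 < 20) and bails out.  */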
+
+ /* Make room for 2 operands in the new reference. */
+ if (VEC_length (vn_reference_op_s, vr->operands) < 2)
+ {
+ VEC (vn_reference_op_s, heap) *old = vr->operands;
+ VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
+ if (old == shared_lookup_references
+ && vr->operands != old)
+ shared_lookup_references = NULL;
+ }
+ else
+ VEC_truncate (vn_reference_op_s, vr->operands, 2);
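+      /* VEC_safe_grow may reallocate the vector; if VR->operands aliased
+         SHARED_LOOKUP_REFERENCES the stale pointer was cleared above so
+         it cannot be reused.  */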
+
+ /* The looked-through reference is a simple MEM_REF. */
+ memset (&op, 0, sizeof (op));
+ op.type = vr->type;
+ op.opcode = MEM_REF;
+      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
+ op.off = at - lhs_offset + rhs_offset;
+ VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
+ op.type = TYPE_MAIN_VARIANT (TREE_TYPE (rhs));
+ op.opcode = TREE_CODE (rhs);
+ op.op0 = rhs;
+ op.off = -1;
+ VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
+ vr->hashcode = vn_reference_compute_hash (vr);
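+      /* The rewritten operands describe the equivalent of
+         MEM [(TYPE *) RHS + (at - lhs_offset + rhs_offset)], i.e. the
+         same bytes read from the memcpy source.  */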
+
+ /* Adjust *ref from the new operands. */
+ if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+ return (void *)-1;
+ /* This can happen with bitfields. */
+ if (ref->size != r.size)
+ return (void *)-1;
+ *ref = r;
+
+ /* Do not update last seen VUSE after translating. */
+ last_vuse_ptr = NULL;
+
+ /* Keep looking for the adjusted *REF / VR pair. */
+ return NULL;
+ }
+
/* Bail out and stop walking. */
return (void *)-1;
}