+2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * tree-dfa.h (get_ref_base_and_extent): Return the base, size and
+ max_size as poly_int64_pods rather than HOST_WIDE_INTs.
+ (get_ref_base_and_extent_hwi): Declare.
+ * tree-dfa.c (get_ref_base_and_extent): Return the base, size and
+ max_size as poly_int64_pods rather than HOST_WIDE_INTs.
+ (get_ref_base_and_extent_hwi): New function.
+ * cfgexpand.c (expand_debug_expr): Update call to
+ get_ref_base_and_extent.
+ * dwarf2out.c (add_var_loc_to_decl): Likewise.
+ * gimple-fold.c (get_base_constructor): Return the offset as a
+ poly_int64_pod rather than a HOST_WIDE_INT.
+ (fold_const_aggregate_ref_1): Track polynomial sizes and offsets.
+ * ipa-polymorphic-call.c
+ (ipa_polymorphic_call_context::set_by_invariant)
+ (extr_type_from_vtbl_ptr_store): Track polynomial offsets.
+ (ipa_polymorphic_call_context::ipa_polymorphic_call_context)
+ (check_stmt_for_type_change): Use get_ref_base_and_extent_hwi
+ rather than get_ref_base_and_extent.
+ (ipa_polymorphic_call_context::get_dynamic_type): Likewise.
+ * ipa-prop.c (ipa_load_from_parm_agg, compute_complex_assign_jump_func)
+ (get_ancestor_addr_info, determine_locally_known_aggregate_parts):
+ Likewise.
+ * ipa-param-manipulation.c (ipa_get_adjustment_candidate): Update
+ call to get_ref_base_and_extent.
+ * tree-sra.c (create_access, get_access_for_expr): Likewise.
+ * tree-ssa-alias.c (ao_ref_base, aliasing_component_refs_p)
+ (stmt_kills_ref_p): Likewise.
+ * tree-ssa-dce.c (mark_aliased_reaching_defs_necessary_1): Likewise.
+ * tree-ssa-scopedtables.c (avail_expr_hash, equal_mem_array_ref_p):
+ Likewise.
+ * tree-ssa-sccvn.c (vn_reference_lookup_3): Likewise.
+ Use get_ref_base_and_extent_hwi rather than get_ref_base_and_extent
+ when calling native_encode_expr.
+ * tree-ssa-structalias.c (get_constraint_for_component_ref): Update
+ call to get_ref_base_and_extent.
+ (do_structure_copy): Use get_ref_base_and_extent_hwi rather than
+ get_ref_base_and_extent.
+ * var-tracking.c (track_expr_p): Likewise.
+
2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
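
Most call sites in the hunks below follow one rewrite: the scalar test
"max_size != -1 && size == max_size" becomes
"known_size_p (max_size) && known_eq (size, max_size)".  A minimal sketch
of the idiom, using only predicates that appear in the patch (a size of
-1 still encodes "unknown"; known_* must hold for all runtime values of
a poly_int, maybe_* for at least one):

/* Sketch only, not part of the patch: true iff the access has a known,
   fixed extent.  known_eq is the negation of maybe_ne.  */
static bool
constant_extent_p (poly_int64 size, poly_int64 max_size)
{
  /* Old form: max_size != -1 && size == max_size.  */
  return known_size_p (max_size) && known_eq (size, max_size);
}
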
if (handled_component_p (TREE_OPERAND (exp, 0)))
{
- HOST_WIDE_INT bitoffset, bitsize, maxsize;
+ poly_int64 bitoffset, bitsize, maxsize, byteoffset;
bool reverse;
tree decl
= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
|| TREE_CODE (decl) == RESULT_DECL)
&& (!TREE_ADDRESSABLE (decl)
|| target_for_debug_bind (decl))
- && (bitoffset % BITS_PER_UNIT) == 0
- && bitsize > 0
- && bitsize == maxsize)
+ && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
+ && known_gt (bitsize, 0)
+ && known_eq (bitsize, maxsize))
{
rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
- return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
+ return plus_constant (mode, base, byteoffset);
}
}
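
In the hunk above, multiple_p replaces the separate "% BITS_PER_UNIT"
test and "/ BITS_PER_UNIT" division: it checks divisibility for all
runtime values and stores the quotient on success.  A sketch, assuming
a poly_int64 bit count:

/* Sketch only: true iff BITS is a known multiple of BITS_PER_UNIT,
   in which case *BYTES is set to the exact quotient.  */
static bool
bits_to_bytes (poly_int64 bits, poly_int64 *bytes)
{
  return multiple_p (bits, BITS_PER_UNIT, bytes);
}
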
|| (TREE_CODE (realdecl) == MEM_REF
&& TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
{
- HOST_WIDE_INT maxsize;
bool reverse;
- tree innerdecl
- = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
- &reverse);
- if (!DECL_P (innerdecl)
+ tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
+ &bitsize, &reverse);
+ if (!innerdecl
+ || !DECL_P (innerdecl)
|| DECL_IGNORED_P (innerdecl)
|| TREE_STATIC (innerdecl)
- || bitsize <= 0
- || bitpos + bitsize > 256
- || bitsize != maxsize)
+ || bitsize == 0
+ || bitpos + bitsize > 256)
return NULL;
decl = innerdecl;
}
is not explicitly available, but it is known to be zero
such as 'static const int a;'. */
static tree
-get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
+get_base_constructor (tree base, poly_int64_pod *bit_offset,
tree (*valueize)(tree))
{
- HOST_WIDE_INT bit_offset2, size, max_size;
+ poly_int64 bit_offset2, size, max_size;
bool reverse;
if (TREE_CODE (base) == MEM_REF)
case COMPONENT_REF:
base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
&reverse);
- if (max_size == -1 || size != max_size)
+ if (!known_size_p (max_size) || maybe_ne (size, max_size))
return NULL_TREE;
*bit_offset += bit_offset2;
return get_base_constructor (base, bit_offset, valueize);
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
tree ctor, idx, base;
- HOST_WIDE_INT offset, size, max_size;
+ poly_int64 offset, size, max_size;
tree tem;
bool reverse;
if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
&& valueize
&& (idx = (*valueize) (TREE_OPERAND (t, 1)))
- && TREE_CODE (idx) == INTEGER_CST)
+ && poly_int_tree_p (idx))
{
tree low_bound, unit_size;
/* If the resulting bit-offset is constant, track it. */
if ((low_bound = array_ref_low_bound (t),
- TREE_CODE (low_bound) == INTEGER_CST)
+ poly_int_tree_p (low_bound))
&& (unit_size = array_ref_element_size (t),
tree_fits_uhwi_p (unit_size)))
{
- offset_int woffset
- = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
+ poly_offset_int woffset
+ = wi::sext (wi::to_poly_offset (idx)
+ - wi::to_poly_offset (low_bound),
TYPE_PRECISION (TREE_TYPE (idx)));
- if (wi::fits_shwi_p (woffset))
+ if (woffset.to_shwi (&offset))
{
- offset = woffset.to_shwi ();
/* TODO: This code seems wrong, multiply then check
to see if it fits. */
offset *= tree_to_uhwi (unit_size);
return build_zero_cst (TREE_TYPE (t));
/* Out of bound array access. Value is undefined,
but don't fold. */
- if (offset < 0)
+ if (maybe_lt (offset, 0))
return NULL_TREE;
/* We can not determine ctor. */
if (!ctor)
if (ctor == error_mark_node)
return build_zero_cst (TREE_TYPE (t));
/* We do not know precise address. */
- if (max_size == -1 || max_size != size)
+ if (!known_size_p (max_size) || maybe_ne (max_size, size))
return NULL_TREE;
/* We can not determine ctor. */
if (!ctor)
return NULL_TREE;
/* Out of bound array access. Value is undefined, but don't fold. */
- if (offset < 0)
+ if (maybe_lt (offset, 0))
return NULL_TREE;
return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
*convert = true;
}
- HOST_WIDE_INT offset, size, max_size;
+ poly_int64 offset, size, max_size;
bool reverse;
tree base
= get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
- if (!base || size == -1 || max_size == -1)
+ if (!base || !known_size_p (size) || !known_size_p (max_size))
return NULL;
if (TREE_CODE (base) == MEM_REF)
tree otr_type,
HOST_WIDE_INT off)
{
- HOST_WIDE_INT offset2, size, max_size;
+ poly_int64 offset2, size, max_size;
bool reverse;
tree base;
cst = TREE_OPERAND (cst, 0);
base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
- if (!DECL_P (base) || max_size == -1 || max_size != size)
+ if (!DECL_P (base) || !known_size_p (max_size) || maybe_ne (max_size, size))
return false;
/* Only type inconsistent programs can have otr_type that is
base_pointer = walk_ssa_copies (base_pointer, &visited);
if (TREE_CODE (base_pointer) == ADDR_EXPR)
{
- HOST_WIDE_INT size, max_size;
- HOST_WIDE_INT offset2;
+ HOST_WIDE_INT offset2, size;
bool reverse;
tree base
- = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
- &offset2, &size, &max_size, &reverse);
+ = get_ref_base_and_extent_hwi (TREE_OPERAND (base_pointer, 0),
+ &offset2, &size, &reverse);
+ if (!base)
+ break;
- if (max_size != -1 && max_size == size)
- combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
- offset + offset2,
- true,
- NULL /* Do not change outer type. */);
+ combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
+ offset + offset2,
+ true,
+ NULL /* Do not change outer type. */);
/* If this is a varying address, punt. */
- if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
- && max_size != -1
- && max_size == size)
+ if (TREE_CODE (base) == MEM_REF || DECL_P (base))
{
/* We found dereference of a pointer. Type of the pointer
	 and MEM_REF is meaningless, but we can look further.  */
extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
HOST_WIDE_INT *type_offset)
{
- HOST_WIDE_INT offset, size, max_size;
+ poly_int64 offset, size, max_size;
tree lhs, rhs, base;
bool reverse;
}
return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
}
- if (offset != tci->offset
- || size != POINTER_SIZE
- || max_size != POINTER_SIZE)
+ if (maybe_ne (offset, tci->offset)
+ || maybe_ne (size, POINTER_SIZE)
+ || maybe_ne (max_size, POINTER_SIZE))
{
if (dump_file)
- fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
- (int)offset, (int)tci->offset, (int)size);
- return offset + POINTER_SIZE <= tci->offset
- || (max_size != -1
- && tci->offset + POINTER_SIZE > offset + max_size)
- ? error_mark_node : NULL;
+ {
+ fprintf (dump_file, " wrong offset ");
+ print_dec (offset, dump_file);
+ fprintf (dump_file, "!=%i or size ", (int) tci->offset);
+ print_dec (size, dump_file);
+ fprintf (dump_file, "\n");
+ }
+ return (known_le (offset + POINTER_SIZE, tci->offset)
+ || (known_size_p (max_size)
+ && known_gt (tci->offset + POINTER_SIZE,
+ offset + max_size))
+ ? error_mark_node : NULL);
}
}
{
tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
- HOST_WIDE_INT offset = 0, size, max_size;
+ HOST_WIDE_INT offset = 0;
bool reverse;
if (dump_file)
/* See if THIS parameter seems like instance pointer. */
if (TREE_CODE (op) == ADDR_EXPR)
{
- op = get_ref_base_and_extent (TREE_OPERAND (op, 0), &offset,
- &size, &max_size, &reverse);
- if (size != max_size || max_size == -1)
+ HOST_WIDE_INT size;
+ op = get_ref_base_and_extent_hwi (TREE_OPERAND (op, 0),
+ &offset, &size, &reverse);
+ if (!op)
{
tci->speculative++;
return csftc_abort_walking_p (tci->speculative);
}
- if (op && TREE_CODE (op) == MEM_REF)
+ if (TREE_CODE (op) == MEM_REF)
{
if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
{
if (gimple_code (call) == GIMPLE_CALL)
{
tree ref = gimple_call_fn (call);
- HOST_WIDE_INT offset2, size, max_size;
bool reverse;
if (TREE_CODE (ref) == OBJ_TYPE_REF)
&& !SSA_NAME_IS_DEFAULT_DEF (ref)
&& gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
{
+ HOST_WIDE_INT offset2, size;
tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
tree base_ref
- = get_ref_base_and_extent (ref_exp, &offset2, &size,
- &max_size, &reverse);
+ = get_ref_base_and_extent_hwi (ref_exp, &offset2,
+ &size, &reverse);
/* Finally verify that what we found looks like read from
OTR_OBJECT or from INSTANCE with offset OFFSET. */
bool *by_ref_p, bool *guaranteed_unmodified)
{
int index;
- HOST_WIDE_INT size, max_size;
+ HOST_WIDE_INT size;
bool reverse;
- tree base
- = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
+ tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
- if (max_size == -1 || max_size != size || *offset_p < 0)
+ if (!base)
return false;
if (DECL_P (base))
gcall *call, gimple *stmt, tree name,
tree param_type)
{
- HOST_WIDE_INT offset, size, max_size;
+ HOST_WIDE_INT offset, size;
tree op1, tc_ssa, base, ssa;
bool reverse;
int index;
op1 = TREE_OPERAND (op1, 0);
if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
return;
- base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
- if (TREE_CODE (base) != MEM_REF
- /* If this is a varying address, punt. */
- || max_size == -1
- || max_size != size)
+ base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
+ if (!base || TREE_CODE (base) != MEM_REF)
return;
offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
ssa = TREE_OPERAND (base, 0);
static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
- HOST_WIDE_INT size, max_size;
+ HOST_WIDE_INT size;
tree expr, parm, obj;
bool reverse;
return NULL_TREE;
expr = TREE_OPERAND (expr, 0);
obj = expr;
- expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
+ expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
- if (TREE_CODE (expr) != MEM_REF
- /* If this is a varying address, punt. */
- || max_size == -1
- || max_size != size
- || *offset < 0)
+ if (!expr || TREE_CODE (expr) != MEM_REF)
return NULL_TREE;
parm = TREE_OPERAND (expr, 0);
if (TREE_CODE (parm) != SSA_NAME
}
else if (TREE_CODE (arg) == ADDR_EXPR)
{
- HOST_WIDE_INT arg_max_size;
bool reverse;
arg = TREE_OPERAND (arg, 0);
- arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
- &arg_max_size, &reverse);
- if (arg_max_size == -1
- || arg_max_size != arg_size
- || arg_offset < 0)
+ arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
+ &arg_size, &reverse);
+ if (!arg_base)
return;
if (DECL_P (arg_base))
{
}
else
{
- HOST_WIDE_INT arg_max_size;
bool reverse;
gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
by_ref = false;
check_ref = false;
- arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
- &arg_max_size, &reverse);
- if (arg_max_size == -1
- || arg_max_size != arg_size
- || arg_offset < 0)
+ arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
+ &arg_size, &reverse);
+ if (!arg_base)
return;
ao_ref_init (&r, arg);
{
struct ipa_known_agg_contents_list *n, **p;
gimple *stmt = gsi_stmt (gsi);
- HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
+ HOST_WIDE_INT lhs_offset, lhs_size;
tree lhs, rhs, lhs_base;
bool reverse;
|| contains_bitfld_component_ref_p (lhs))
break;
- lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
- &lhs_max_size, &reverse);
- if (lhs_max_size == -1
- || lhs_max_size != lhs_size)
+ lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
+ &lhs_size, &reverse);
+ if (!lhs_base)
break;
if (check_ref)
true, the storage order of the reference is reversed. */
tree
-get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
- HOST_WIDE_INT *psize,
- HOST_WIDE_INT *pmax_size,
+get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
+ poly_int64_pod *psize,
+ poly_int64_pod *pmax_size,
bool *preverse)
{
- offset_int bitsize = -1;
- offset_int maxsize;
+ poly_offset_int bitsize = -1;
+ poly_offset_int maxsize;
tree size_tree = NULL_TREE;
- offset_int bit_offset = 0;
+ poly_offset_int bit_offset = 0;
bool seen_variable_array_ref = false;
/* First get the final access size and the storage order from just the
if (mode == BLKmode)
size_tree = TYPE_SIZE (TREE_TYPE (exp));
else
- bitsize = int (GET_MODE_BITSIZE (mode));
+ bitsize = GET_MODE_BITSIZE (mode);
}
if (size_tree != NULL_TREE
- && TREE_CODE (size_tree) == INTEGER_CST)
- bitsize = wi::to_offset (size_tree);
+ && poly_int_tree_p (size_tree))
+ bitsize = wi::to_poly_offset (size_tree);
*preverse = reverse_storage_order_for_component_p (exp);
switch (TREE_CODE (exp))
{
case BIT_FIELD_REF:
- bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
+ bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
break;
case COMPONENT_REF:
tree field = TREE_OPERAND (exp, 1);
tree this_offset = component_ref_field_offset (exp);
- if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
+ if (this_offset && poly_int_tree_p (this_offset))
{
- offset_int woffset = (wi::to_offset (this_offset)
- << LOG2_BITS_PER_UNIT);
+ poly_offset_int woffset = (wi::to_poly_offset (this_offset)
+ << LOG2_BITS_PER_UNIT);
woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
bit_offset += woffset;
referenced the last field of a struct or a union member
then we have to adjust maxsize by the padding at the end
of our field. */
- if (seen_variable_array_ref && maxsize != -1)
+ if (seen_variable_array_ref && known_size_p (maxsize))
{
tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
tree next = DECL_CHAIN (field);
tree fsize = DECL_SIZE_UNIT (field);
tree ssize = TYPE_SIZE_UNIT (stype);
if (fsize == NULL
- || TREE_CODE (fsize) != INTEGER_CST
+ || !poly_int_tree_p (fsize)
|| ssize == NULL
- || TREE_CODE (ssize) != INTEGER_CST)
+ || !poly_int_tree_p (ssize))
maxsize = -1;
else
{
- offset_int tem = (wi::to_offset (ssize)
- - wi::to_offset (fsize));
+ poly_offset_int tem
+ = (wi::to_poly_offset (ssize)
+ - wi::to_poly_offset (fsize));
tem <<= LOG2_BITS_PER_UNIT;
tem -= woffset;
maxsize += tem;
/* We need to adjust maxsize to the whole structure bitsize.
But we can subtract any constant offset seen so far,
because that would get us out of the structure otherwise. */
- if (maxsize != -1
+ if (known_size_p (maxsize)
&& csize
- && TREE_CODE (csize) == INTEGER_CST)
- maxsize = wi::to_offset (csize) - bit_offset;
+ && poly_int_tree_p (csize))
+ maxsize = wi::to_poly_offset (csize) - bit_offset;
else
maxsize = -1;
}
tree low_bound, unit_size;
/* If the resulting bit-offset is constant, track it. */
- if (TREE_CODE (index) == INTEGER_CST
+ if (poly_int_tree_p (index)
&& (low_bound = array_ref_low_bound (exp),
- TREE_CODE (low_bound) == INTEGER_CST)
+ poly_int_tree_p (low_bound))
&& (unit_size = array_ref_element_size (exp),
TREE_CODE (unit_size) == INTEGER_CST))
{
- offset_int woffset
- = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
+ poly_offset_int woffset
+ = wi::sext (wi::to_poly_offset (index)
+ - wi::to_poly_offset (low_bound),
TYPE_PRECISION (TREE_TYPE (index)));
woffset *= wi::to_offset (unit_size);
woffset <<= LOG2_BITS_PER_UNIT;
/* We need to adjust maxsize to the whole array bitsize.
But we can subtract any constant offset seen so far,
because that would get us outside of the array otherwise. */
- if (maxsize != -1
+ if (known_size_p (maxsize)
&& asize
- && TREE_CODE (asize) == INTEGER_CST)
- maxsize = wi::to_offset (asize) - bit_offset;
+ && poly_int_tree_p (asize))
+ maxsize = wi::to_poly_offset (asize) - bit_offset;
else
maxsize = -1;
base type boundary. This needs to include possible trailing
padding that is there for alignment purposes. */
if (seen_variable_array_ref
- && maxsize != -1
+ && known_size_p (maxsize)
&& (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
- || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
- || (bit_offset + maxsize
- == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
+ || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
+ || (maybe_eq
+ (bit_offset + maxsize,
+ wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
maxsize = -1;
/* Hand back the decl for MEM[&decl, off]. */
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
else
{
- offset_int off = mem_ref_offset (exp);
+ poly_offset_int off = mem_ref_offset (exp);
off <<= LOG2_BITS_PER_UNIT;
off += bit_offset;
- if (wi::fits_shwi_p (off))
+ poly_int64 off_hwi;
+ if (off.to_shwi (&off_hwi))
{
- bit_offset = off;
+ bit_offset = off_hwi;
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
}
}
}
done:
- if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize))
+ if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
{
*poffset = 0;
*psize = -1;
return exp;
}
- *psize = bitsize.to_shwi ();
-
- if (!wi::fits_shwi_p (bit_offset))
+ /* ??? Due to negative offsets in ARRAY_REF we can end up with
+ negative bit_offset here. We might want to store a zero offset
+ in this case. */
+ if (!bit_offset.to_shwi (poffset))
{
*poffset = 0;
*pmax_size = -1;
if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
|| (seen_variable_array_ref
&& (sz_tree == NULL_TREE
- || TREE_CODE (sz_tree) != INTEGER_CST
- || (bit_offset + maxsize == wi::to_offset (sz_tree)))))
+ || !poly_int_tree_p (sz_tree)
+ || maybe_eq (bit_offset + maxsize,
+ wi::to_poly_offset (sz_tree)))))
maxsize = -1;
}
/* If maxsize is unknown adjust it according to the size of the
base decl. */
- else if (maxsize == -1
- && DECL_SIZE (exp)
- && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST)
- maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset;
+ else if (!known_size_p (maxsize)
+ && DECL_SIZE (exp)
+ && poly_int_tree_p (DECL_SIZE (exp)))
+ maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
}
else if (CONSTANT_CLASS_P (exp))
{
/* If maxsize is unknown adjust it according to the size of the
base type constant. */
- if (maxsize == -1
+ if (!known_size_p (maxsize)
&& TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
- maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))
+ && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
+ maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
- bit_offset);
}
- /* ??? Due to negative offsets in ARRAY_REF we can end up with
- negative bit_offset here. We might want to store a zero offset
- in this case. */
- *poffset = bit_offset.to_shwi ();
- if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize))
+ if (!maxsize.to_shwi (pmax_size)
+ || maybe_lt (*pmax_size, 0)
+ || !endpoint_representable_p (*poffset, *pmax_size))
*pmax_size = -1;
- else
- {
- *pmax_size = maxsize.to_shwi ();
- if (*poffset > HOST_WIDE_INT_MAX - *pmax_size)
- *pmax_size = -1;
- }
/* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
check for such overflows individually and assume it works. */
- if (*psize != -1 && *poffset > HOST_WIDE_INT_MAX - *psize)
+ if (!endpoint_representable_p (*poffset, *psize))
{
*poffset = 0;
*psize = -1;
return exp;
}
+/* Like get_ref_base_and_extent, but for cases in which we only care
+ about constant-width accesses at constant offsets. Return null
+ if the access is anything else. */
+
+tree
+get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
+ HOST_WIDE_INT *psize, bool *preverse)
+{
+ poly_int64 offset, size, max_size;
+ HOST_WIDE_INT const_offset, const_size;
+ bool reverse;
+ tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
+ &reverse);
+ if (!offset.is_constant (&const_offset)
+ || !size.is_constant (&const_size)
+ || const_offset < 0
+ || !known_size_p (max_size)
+ || maybe_ne (max_size, const_size))
+ return NULL_TREE;
+
+ *poffset = const_offset;
+ *psize = const_size;
+ *preverse = reverse;
+ return decl;
+}
+
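
A hypothetical caller of the new helper, for illustration only: the
wrapper folds the constant-offset, constant-size, non-negative-offset
and size == max_size checks that callers previously open-coded, so a
null return is the single failure test.

/* Illustrative sketch, not from the patch.  */
static bool
constant_byte_access_p (tree exp)
{
  HOST_WIDE_INT offset, size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (exp, &offset, &size, &reverse);
  /* BASE is null unless EXP accesses exactly SIZE bits at a constant,
     non-negative bit offset OFFSET from BASE.  */
  return base && offset % BITS_PER_UNIT == 0 && size % BITS_PER_UNIT == 0;
}
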
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
denotes the starting address of the memory access EXP.
Returns NULL_TREE if the offset is not constant or any component
extern tree ssa_default_def (struct function *, tree);
extern void set_ssa_default_def (struct function *, tree, tree);
extern tree get_or_create_ssa_default_def (struct function *, tree);
-extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
- HOST_WIDE_INT *, HOST_WIDE_INT *, bool *);
+extern tree get_ref_base_and_extent (tree, poly_int64_pod *, poly_int64_pod *,
+ poly_int64_pod *, bool *);
+extern tree get_ref_base_and_extent_hwi (tree, HOST_WIDE_INT *,
+ HOST_WIDE_INT *, bool *);
extern tree get_addr_base_and_unit_offset_1 (tree, HOST_WIDE_INT *,
tree (*) (tree));
extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
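
A note on the calling convention (an observation, not part of the
patch): poly_int64 derives from poly_int64_pod, so callers keep
ordinary poly_int64 locals and the addresses convert implicitly.

/* Sketch, assuming the declarations above.  */
static tree
example_base (tree exp)
{
  poly_int64 offset, size, max_size;
  bool reverse;
  return get_ref_base_and_extent (exp, &offset, &size, &max_size, &reverse);
}
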
create_access (tree expr, gimple *stmt, bool write)
{
struct access *access;
+ poly_int64 poffset, psize, pmax_size;
HOST_WIDE_INT offset, size, max_size;
tree base = expr;
bool reverse, ptr, unscalarizable_region = false;
- base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
+ base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
+ &reverse);
+ if (!poffset.is_constant (&offset)
+ || !psize.is_constant (&size)
+ || !pmax_size.is_constant (&max_size))
+ {
+ disqualify_candidate (base, "Encountered a polynomial-sized access.");
+ return NULL;
+ }
if (sra_mode == SRA_MODE_EARLY_IPA
&& TREE_CODE (base) == MEM_REF)
static struct access *
get_access_for_expr (tree expr)
{
- HOST_WIDE_INT offset, size, max_size;
+ poly_int64 poffset, psize, pmax_size;
+ HOST_WIDE_INT offset, max_size;
tree base;
bool reverse;
if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
expr = TREE_OPERAND (expr, 0);
- base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
- if (max_size == -1 || !DECL_P (base))
+ base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
+ &reverse);
+ if (!known_size_p (pmax_size)
+ || !pmax_size.is_constant (&max_size)
+ || !poffset.is_constant (&offset)
+ || !DECL_P (base))
return NULL;
if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
ao_ref_base (ao_ref *ref)
{
bool reverse;
- HOST_WIDE_INT offset, size, max_size;
if (ref->base)
return ref->base;
- ref->base = get_ref_base_and_extent (ref->ref, &offset, &size,
- &max_size, &reverse);
- ref->offset = offset;
- ref->size = size;
- ref->max_size = max_size;
+ ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
+ &ref->max_size, &reverse);
return ref->base;
}
return true;
else if (same_p == 1)
{
- HOST_WIDE_INT offadj, sztmp, msztmp;
+ poly_int64 offadj, sztmp, msztmp;
bool reverse;
get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
offset2 -= offadj;
return true;
else if (same_p == 1)
{
- HOST_WIDE_INT offadj, sztmp, msztmp;
+ poly_int64 offadj, sztmp, msztmp;
bool reverse;
get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
offset1 -= offadj;
the access properly. */
if (!ref->max_size_known_p ())
return false;
- HOST_WIDE_INT size, max_size, const_offset;
- poly_int64 ref_offset = ref->offset;
+ poly_int64 size, offset, max_size, ref_offset = ref->offset;
bool reverse;
- tree base
- = get_ref_base_and_extent (lhs, &const_offset, &size, &max_size,
- &reverse);
+ tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
+ &reverse);
/* We can get MEM[symbol: sZ, index: D.8862_1] here,
so base == ref->base does not always hold. */
- poly_int64 offset = const_offset;
if (base != ref->base)
{
/* Try using points-to info. */
}
/* For a must-alias check we need to be able to constrain
the access properly. */
- if (size == max_size
+ if (known_eq (size, max_size)
&& known_subrange_p (ref_offset, ref->max_size, offset, size))
return true;
}
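
The must-kill tests above pair known_eq with known_subrange_p, which
has to prove containment for every runtime value of the polynomial
invariant.  A sketch under that reading:

/* Illustrative only: true iff [REF_POS, REF_POS + REF_SIZE) is known
   to lie within [STORE_POS, STORE_POS + STORE_SIZE), i.e. the store
   certainly covers the reference.  */
static bool
store_must_cover_ref_p (poly_int64 ref_pos, poly_int64 ref_size,
			poly_int64 store_pos, poly_int64 store_size)
{
  return known_subrange_p (ref_pos, ref_size, store_pos, store_size);
}
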
&& !stmt_can_throw_internal (def_stmt))
{
tree base, lhs = gimple_get_lhs (def_stmt);
- HOST_WIDE_INT size, offset, max_size;
+ poly_int64 size, offset, max_size;
bool reverse;
ao_ref_base (ref);
base
{
/* For a must-alias check we need to be able to constrain
the accesses properly. */
- if (size == max_size
+ if (known_eq (size, max_size)
&& known_subrange_p (ref->offset, ref->max_size, offset, size))
return true;
/* Or they need to be exactly the same. */
{
tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
tree base2;
- HOST_WIDE_INT offset2, size2, maxsize2;
+ poly_int64 offset2, size2, maxsize2;
bool reverse;
base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
&reverse);
&& CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
{
tree base2;
- HOST_WIDE_INT offset2, size2, maxsize2;
+ poly_int64 offset2, size2, maxsize2;
bool reverse;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
&offset2, &size2, &maxsize2, &reverse);
&& is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
{
tree base2;
- HOST_WIDE_INT offset2, size2, maxsize2;
+ HOST_WIDE_INT offset2, size2;
bool reverse;
- base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
- &offset2, &size2, &maxsize2, &reverse);
- if (!reverse
- && maxsize2 != -1
- && maxsize2 == size2
+ base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &reverse);
+ if (base2
+ && !reverse
&& size2 % BITS_PER_UNIT == 0
&& offset2 % BITS_PER_UNIT == 0
&& operand_equal_p (base, base2, 0)
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
{
tree base2;
- HOST_WIDE_INT offset2, size2, maxsize2;
+ poly_int64 offset2, size2, maxsize2;
bool reverse;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
&offset2, &size2, &maxsize2,
&reverse);
if (!reverse
- && maxsize2 != -1
- && maxsize2 == size2
+ && known_size_p (maxsize2)
+ && known_eq (maxsize2, size2)
&& operand_equal_p (base, base2, 0)
&& known_subrange_p (offset, maxsize, offset2, size2)
/* ??? We can't handle bitfield precision extracts without
Dealing with both MEM_REF and ARRAY_REF allows us not to care
about equivalence with other statements not considered here. */
bool reverse;
- HOST_WIDE_INT offset, size, max_size;
+ poly_int64 offset, size, max_size;
tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
&reverse);
/* Strictly, we could try to normalize variable-sized accesses too,
but here we just deal with the common case. */
- if (size != -1
- && size == max_size)
+ if (known_size_p (max_size)
+ && known_eq (size, max_size))
{
enum tree_code code = MEM_REF;
hstate.add_object (code);
if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
return false;
bool rev0;
- HOST_WIDE_INT off0, sz0, max0;
+ poly_int64 off0, sz0, max0;
tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
- if (sz0 == -1
- || sz0 != max0)
+ if (!known_size_p (max0)
+ || maybe_ne (sz0, max0))
return false;
bool rev1;
- HOST_WIDE_INT off1, sz1, max1;
+ poly_int64 off1, sz1, max1;
tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
- if (sz1 == -1
- || sz1 != max1)
+ if (!known_size_p (max1)
+ || maybe_ne (sz1, max1))
return false;
if (rev0 != rev1)
return false;
/* Types were compatible, so this is a sanity check. */
- gcc_assert (sz0 == sz1);
+ gcc_assert (known_eq (sz0, sz1));
- return (off0 == off1) && operand_equal_p (base0, base1, 0);
+ return known_eq (off0, off1) && operand_equal_p (base0, base1, 0);
}
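
Hashing and equality code must stay on the "known" side of the
predicates: treating two references as equal on a maybe_eq answer
would merge possibly-distinct expressions.  A minimal sketch:

/* Sketch only: definitive equality of two polynomial offsets, as
   required when the answer feeds a hash-table equality function.  */
static bool
same_ref_position_p (poly_int64 off0, poly_int64 off1)
{
  return known_eq (off0, off1);
}
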
/* Compare two hashable_expr structures for equivalence. They are
bool address_p, bool lhs_p)
{
tree orig_t = t;
- HOST_WIDE_INT bitsize = -1;
- HOST_WIDE_INT bitmaxsize = -1;
- HOST_WIDE_INT bitpos;
+ poly_int64 bitsize = -1;
+ poly_int64 bitmaxsize = -1;
+ poly_int64 bitpos;
bool reverse;
tree forzero;
ignore this constraint. When we handle pointer subtraction,
we may have to do something cute here. */
- if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
- && bitmaxsize != 0)
+ if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
+ && maybe_ne (bitmaxsize, 0))
{
/* It's also not true that the constraint will actually start at the
right offset, it may start in some padding. We only care about
cexpr.offset = 0;
for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
{
- if (ranges_overlap_p (curr->offset, curr->size,
- bitpos, bitmaxsize))
+ if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
+ curr->size, bitpos, bitmaxsize))
{
cexpr.var = curr->id;
results->safe_push (cexpr);
results->safe_push (cexpr);
}
}
- else if (bitmaxsize == 0)
+ else if (known_eq (bitmaxsize, 0))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Access to zero-sized part of variable, "
/* If we do not know exactly where the access goes say so. Note
that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
- if (bitpos == -1
- || bitsize != bitmaxsize
+ HOST_WIDE_INT const_bitpos;
+ if (!bitpos.is_constant (&const_bitpos)
+ || const_bitpos == -1
+ || maybe_ne (bitsize, bitmaxsize)
|| AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
|| result.offset == UNKNOWN_OFFSET)
result.offset = UNKNOWN_OFFSET;
else
- result.offset += bitpos;
+ result.offset += const_bitpos;
}
else if (result.type == ADDRESSOF)
{
&& (rhsp->type == SCALAR
|| rhsp->type == ADDRESSOF))
{
- HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
- HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
+ HOST_WIDE_INT lhssize, lhsoffset;
+ HOST_WIDE_INT rhssize, rhsoffset;
bool reverse;
unsigned k = 0;
- get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
- &reverse);
- get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
- &reverse);
+ if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
+ || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
+ &reverse))
+ {
+ process_all_all_constraints (lhsc, rhsc);
+ return;
+ }
for (j = 0; lhsc.iterate (j, &lhsp);)
{
varinfo_t lhsv, rhsv;
|| (TREE_CODE (realdecl) == MEM_REF
&& TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
{
- HOST_WIDE_INT bitsize, bitpos, maxsize;
+ HOST_WIDE_INT bitsize, bitpos;
bool reverse;
tree innerdecl
- = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
- &maxsize, &reverse);
- if (!DECL_P (innerdecl)
+ = get_ref_base_and_extent_hwi (realdecl, &bitpos,
+ &bitsize, &reverse);
+ if (!innerdecl
+ || !DECL_P (innerdecl)
|| DECL_IGNORED_P (innerdecl)
/* Do not track declarations for parts of tracked record
parameters since we want to track them as a whole. */
|| tracked_record_parameter_p (innerdecl)
|| TREE_STATIC (innerdecl)
- || bitsize <= 0
- || bitpos + bitsize > 256
- || bitsize != maxsize)
+ || bitsize == 0
+ || bitpos + bitsize > 256)
return 0;
else
realdecl = expr;