+2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * tree-dfa.h (get_addr_base_and_unit_offset_1): Return the offset
+ as a poly_int64_pod rather than a HOST_WIDE_INT.
+ (get_addr_base_and_unit_offset): Likewise.
+ * tree-dfa.c (get_addr_base_and_unit_offset_1): Likewise.
+ (get_addr_base_and_unit_offset): Likewise.
+ * doc/match-and-simplify.texi: Change off from HOST_WIDE_INT
+ to poly_int64 in example.
+ * fold-const.c (fold_binary_loc): Update call to
+ get_addr_base_and_unit_offset.
+ * gimple-fold.c (gimple_fold_builtin_memory_op): Likewise.
+ (maybe_canonicalize_mem_ref_addr): Likewise.
+ (gimple_fold_stmt_to_constant_1): Likewise.
+ * gimple-ssa-warn-restrict.c (builtin_memref::builtin_memref):
+ Likewise.
+ * ipa-param-manipulation.c (ipa_modify_call_arguments): Likewise.
+ * match.pd: Likewise.
+ * omp-low.c (lower_omp_target): Likewise.
+ * tree-sra.c (build_ref_for_offset): Likewise.
+ (build_debug_ref_for_model): Likewise.
+ * tree-ssa-address.c (maybe_fold_tmr): Likewise.
+ * tree-ssa-alias.c (ao_ref_init_from_ptr_and_size): Likewise.
+ * tree-ssa-ccp.c (optimize_memcpy): Likewise.
+ * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Likewise.
+ (constant_pointer_difference): Likewise.
+ * tree-ssa-loop-niter.c (expand_simple_operations): Likewise.
+ * tree-ssa-phiopt.c (jump_function_from_stmt): Likewise.
+ * tree-ssa-pre.c (create_component_ref_by_pieces_1): Likewise.
+ * tree-ssa-sccvn.c (vn_reference_fold_indirect): Likewise.
+ (vn_reference_maybe_forwprop_address, vn_reference_lookup_3): Likewise.
+ (set_ssa_val_to): Likewise.
+ * tree-ssa-strlen.c (get_addr_stridx, addr_stridxptr)
+ (maybe_diag_stxncpy_trunc): Likewise.
+ * tree-vrp.c (vrp_prop::check_array_ref): Likewise.
+ * tree.c (build_simple_mem_ref_loc): Likewise.
+ (array_at_struct_end_p): Likewise.
+
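For readers coming to this patch cold: a poly_int64 describes an offset of the form c0 + c1 * x, where x is a quantity fixed only at run time (for example the number of extra scalable-vector chunks on AArch64 SVE). The two-coefficient sketch below is purely illustrative, not GCC code (the real type lives in gcc/poly-int.h and is target-parameterized); it shows why a single HOST_WIDE_INT cannot carry such offsets and what "constant" means for them.

    #include <cstdint>

    /* Toy model of poly_int64: value = coeffs[0] + coeffs[1] * x for an
       unknown runtime value x >= 0.  A compile-time constant is the
       special case coeffs[1] == 0.  */
    struct toy_poly_int64
    {
      int64_t coeffs[2];

      /* Mirrors the shape of poly_int64::is_constant: succeed and
         extract the value only when there is no x term.  */
      bool is_constant (int64_t *value) const
      {
        if (coeffs[1] != 0)
          return false;
        *value = coeffs[0];
        return true;
      }
    };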
(pointer_plus (addr@@2 @@0) INTEGER_CST_P@@1)
(if (is_gimple_min_invariant (@@2)))
@{
- HOST_WIDE_INT off;
+ poly_int64 off;
tree base = get_addr_base_and_unit_offset (@@0, &off);
off += tree_to_uhwi (@@1);
/* Now with that we should be able to simply write
&& handled_component_p (TREE_OPERAND (arg0, 0)))
{
tree base;
- HOST_WIDE_INT coffset;
+ poly_int64 coffset;
base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
&coffset);
if (!base)
&& TREE_CODE (dest) == ADDR_EXPR)
{
tree src_base, dest_base, fn;
- HOST_WIDE_INT src_offset = 0, dest_offset = 0;
- HOST_WIDE_INT maxsize;
+ poly_int64 src_offset = 0, dest_offset = 0;
+ poly_uint64 maxsize;
srcvar = TREE_OPERAND (src, 0);
src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
&dest_offset);
if (dest_base == NULL)
dest_base = destvar;
- if (tree_fits_uhwi_p (len))
- maxsize = tree_to_uhwi (len);
- else
+ if (!poly_int_tree_p (len, &maxsize))
maxsize = -1;
if (SSA_VAR_P (src_base)
&& SSA_VAR_P (dest_base))
{
if (operand_equal_p (src_base, dest_base, 0)
- && ranges_overlap_p (src_offset, maxsize,
- dest_offset, maxsize))
+ && ranges_maybe_overlap_p (src_offset, maxsize,
+ dest_offset, maxsize))
return false;
}
else if (TREE_CODE (src_base) == MEM_REF
if (! operand_equal_p (TREE_OPERAND (src_base, 0),
TREE_OPERAND (dest_base, 0), 0))
return false;
- offset_int off = mem_ref_offset (src_base) + src_offset;
- if (!wi::fits_shwi_p (off))
- return false;
- src_offset = off.to_shwi ();
-
- off = mem_ref_offset (dest_base) + dest_offset;
- if (!wi::fits_shwi_p (off))
- return false;
- dest_offset = off.to_shwi ();
- if (ranges_overlap_p (src_offset, maxsize,
- dest_offset, maxsize))
+ poly_offset_int full_src_offset
+ = mem_ref_offset (src_base) + src_offset;
+ poly_offset_int full_dest_offset
+ = mem_ref_offset (dest_base) + dest_offset;
+ if (ranges_maybe_overlap_p (full_src_offset, maxsize,
+ full_dest_offset, maxsize))
return false;
}
else
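Replacing ranges_overlap_p with ranges_maybe_overlap_p above is the recurring pattern of this series: an exact yes/no question becomes "known" versus "maybe". Continuing the toy two-coefficient model (the names below are illustrative, not GCC's implementation), a conservative overlap test can only declare two ranges disjoint when that holds for every x:

    #include <cstdint>

    struct toy_poly_int64 { int64_t coeffs[2]; };  /* c0 + c1 * x, x >= 0 */

    /* a <= b for every x >= 0 iff it holds coefficient-wise.  */
    static bool known_le (toy_poly_int64 a, toy_poly_int64 b)
    { return a.coeffs[0] <= b.coeffs[0] && a.coeffs[1] <= b.coeffs[1]; }

    static toy_poly_int64 add (toy_poly_int64 a, toy_poly_int64 b)
    { return {{ a.coeffs[0] + b.coeffs[0], a.coeffs[1] + b.coeffs[1] }}; }

    /* True unless [pos1, pos1 + size1) and [pos2, pos2 + size2) are
       provably disjoint for all x; errs on the side of "may overlap",
       which is the safe direction for the memcpy fold above.  */
    static bool toy_ranges_maybe_overlap_p (toy_poly_int64 pos1,
                                            toy_poly_int64 size1,
                                            toy_poly_int64 pos2,
                                            toy_poly_int64 size2)
    {
      return !(known_le (add (pos1, size1), pos2)
               || known_le (add (pos2, size2), pos1));
    }

Note the maxsize = -1 fallback above: with an effectively unbounded size, disjointness can never be proven, so the fold conservatively stays disabled.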
|| handled_component_p (TREE_OPERAND (addr, 0))))
{
tree base;
- HOST_WIDE_INT coffset;
+ poly_int64 coffset;
base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
&coffset);
if (!base)
else if (TREE_CODE (rhs) == ADDR_EXPR
&& !is_gimple_min_invariant (rhs))
{
- HOST_WIDE_INT offset = 0;
+ poly_int64 offset = 0;
tree base;
base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
&offset,
if (TREE_CODE (expr) == ADDR_EXPR)
{
- HOST_WIDE_INT off;
+ poly_int64 off;
tree oper = TREE_OPERAND (expr, 0);
/* Determine the base object or pointer of the reference
and its constant offset from the beginning of the base. */
base = get_addr_base_and_unit_offset (oper, &off);
- if (base)
+ HOST_WIDE_INT const_off;
+ if (base && off.is_constant (&const_off))
{
- offrange[0] += off;
- offrange[1] += off;
+ offrange[0] += const_off;
+ offrange[1] += const_off;
/* Stash the reference for offset validation. */
ref = oper;
tree field = TREE_OPERAND (ref, 1);
tree fldoff = DECL_FIELD_OFFSET (field);
if (TREE_CODE (fldoff) == INTEGER_CST)
- refoff = off + wi::to_offset (fldoff);
+ refoff = const_off + wi::to_offset (fldoff);
}
}
else
off = build_int_cst (adj->alias_ptr_type, byte_offset);
else
{
- HOST_WIDE_INT base_offset;
+ poly_int64 base_offset;
tree prev_base;
bool addrof;
(cmp (convert1?@2 addr@0) (convert2? addr@1))
(with
{
- HOST_WIDE_INT off0, off1;
+ poly_int64 off0, off1;
tree base0 = get_addr_base_and_unit_offset (TREE_OPERAND (@0, 0), &off0);
tree base1 = get_addr_base_and_unit_offset (TREE_OPERAND (@1, 0), &off1);
if (base0 && TREE_CODE (base0) == MEM_REF)
}
(if (equal == 1)
(switch
- (if (cmp == EQ_EXPR)
- { constant_boolean_node (off0 == off1, type); })
- (if (cmp == NE_EXPR)
- { constant_boolean_node (off0 != off1, type); })
- (if (cmp == LT_EXPR)
- { constant_boolean_node (off0 < off1, type); })
- (if (cmp == LE_EXPR)
- { constant_boolean_node (off0 <= off1, type); })
- (if (cmp == GE_EXPR)
- { constant_boolean_node (off0 >= off1, type); })
- (if (cmp == GT_EXPR)
- { constant_boolean_node (off0 > off1, type); }))
+ (if (cmp == EQ_EXPR && (known_eq (off0, off1) || known_ne (off0, off1)))
+ { constant_boolean_node (known_eq (off0, off1), type); })
+ (if (cmp == NE_EXPR && (known_eq (off0, off1) || known_ne (off0, off1)))
+ { constant_boolean_node (known_ne (off0, off1), type); })
+ (if (cmp == LT_EXPR && (known_lt (off0, off1) || known_ge (off0, off1)))
+ { constant_boolean_node (known_lt (off0, off1), type); })
+ (if (cmp == LE_EXPR && (known_le (off0, off1) || known_gt (off0, off1)))
+ { constant_boolean_node (known_le (off0, off1), type); })
+ (if (cmp == GE_EXPR && (known_ge (off0, off1) || known_lt (off0, off1)))
+ { constant_boolean_node (known_ge (off0, off1), type); })
+ (if (cmp == GT_EXPR && (known_gt (off0, off1) || known_le (off0, off1)))
+ { constant_boolean_node (known_gt (off0, off1), type); }))
(if (equal == 0
&& DECL_P (base0) && DECL_P (base1)
/* If we compare this as integers require equal offset. */
&& (!INTEGRAL_TYPE_P (TREE_TYPE (@2))
- || off0 == off1))
+ || known_eq (off0, off1)))
(switch
(if (cmp == EQ_EXPR)
{ constant_boolean_node (false, type); })
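Each arm of the switch above now folds only when the comparison is decidable for every x: the guard asks for the relation or its complement to be known, and anything undecidable is left alone. A minimal model of that decidability test (again toy code, not the poly-int.h implementation):

    #include <cstdint>

    struct toy_poly_int64 { int64_t coeffs[2]; };  /* c0 + c1 * x, x >= 0 */

    /* a < b for every x >= 0: strict at x == 0 and a slope that never
       closes the gap.  */
    static bool known_lt (toy_poly_int64 a, toy_poly_int64 b)
    { return a.coeffs[0] < b.coeffs[0] && a.coeffs[1] <= b.coeffs[1]; }

    /* a >= b for every x >= 0.  */
    static bool known_ge (toy_poly_int64 a, toy_poly_int64 b)
    { return a.coeffs[0] >= b.coeffs[0] && a.coeffs[1] >= b.coeffs[1]; }

    int main ()
    {
      toy_poly_int64 off0 = {{ 4, 4 }};  /* 4 + 4x */
      toy_poly_int64 off1 = {{ 8, 0 }};  /* 8 */
      /* 4 + 4x is below 8 at x == 0 but above it at x >= 2, so neither
         known_lt nor known_ge holds and the pattern does not fold.  */
      return known_lt (off0, off1) || known_ge (off0, off1);  /* 0 */
    }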
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
{
location_t clause_loc = OMP_CLAUSE_LOCATION (c);
- HOST_WIDE_INT offset = 0;
+ poly_int64 offset = 0;
gcc_assert (prev);
var = OMP_CLAUSE_DECL (c);
if (DECL_P (var)
its argument or a constant if the argument is known to be constant. */
tree
-get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
+get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
tree (*valueize) (tree))
{
- HOST_WIDE_INT byte_offset = 0;
+ poly_int64 byte_offset = 0;
/* Compute cumulative byte-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
{
case BIT_FIELD_REF:
{
- HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
- if (this_off % BITS_PER_UNIT)
+ poly_int64 this_byte_offset;
+ poly_uint64 this_bit_offset;
+ if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
+ || !multiple_p (this_bit_offset, BITS_PER_UNIT,
+ &this_byte_offset))
return NULL_TREE;
- byte_offset += this_off / BITS_PER_UNIT;
+ byte_offset += this_byte_offset;
}
break;
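multiple_p (a, n, &q) succeeds only when a is divisible by n for every value of x, which for a degree-1 offset means both coefficients must be divisible; on success it stores the quotient. The BIT_FIELD_REF case above therefore now rejects any bit offset that is not a whole number of bytes for all x. A toy version under the same two-coefficient assumption:

    #include <cstdint>

    struct toy_poly_int64 { int64_t coeffs[2]; };  /* c0 + c1 * x, x >= 0 */

    /* c0 + c1 * x is a multiple of n for every x iff each coefficient
       is; mirrors the shape of GCC's quotient-returning multiple_p.  */
    static bool
    toy_multiple_p (toy_poly_int64 a, int64_t n, toy_poly_int64 *quot)
    {
      if (a.coeffs[0] % n != 0 || a.coeffs[1] % n != 0)
        return false;
      *quot = {{ a.coeffs[0] / n, a.coeffs[1] / n }};
      return true;
    }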
{
tree field = TREE_OPERAND (exp, 1);
tree this_offset = component_ref_field_offset (exp);
- HOST_WIDE_INT hthis_offset;
+ poly_int64 hthis_offset;
if (!this_offset
- || TREE_CODE (this_offset) != INTEGER_CST
+ || !poly_int_tree_p (this_offset, &hthis_offset)
|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
% BITS_PER_UNIT))
return NULL_TREE;
- hthis_offset = TREE_INT_CST_LOW (this_offset);
hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
/ BITS_PER_UNIT);
byte_offset += hthis_offset;
index = (*valueize) (index);
/* If the resulting bit-offset is constant, track it. */
- if (TREE_CODE (index) == INTEGER_CST
+ if (poly_int_tree_p (index)
&& (low_bound = array_ref_low_bound (exp),
- TREE_CODE (low_bound) == INTEGER_CST)
+ poly_int_tree_p (low_bound))
&& (unit_size = array_ref_element_size (exp),
TREE_CODE (unit_size) == INTEGER_CST))
{
- offset_int woffset
- = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
+ poly_offset_int woffset
+ = wi::sext (wi::to_poly_offset (index)
+ - wi::to_poly_offset (low_bound),
TYPE_PRECISION (TREE_TYPE (index)));
woffset *= wi::to_offset (unit_size);
- byte_offset += woffset.to_shwi ();
+ byte_offset += woffset.force_shwi ();
}
else
return NULL_TREE;
is not BITS_PER_UNIT-aligned. */
tree
-get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
+get_addr_base_and_unit_offset (tree exp, poly_int64_pod *poffset)
{
return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
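With the new signature, callers that can only handle constant offsets keep a HOST_WIDE_INT locally and reject the variable case, as the tree-ssa-strlen.c hunks below do with poff.is_constant (&off). A sketch of that idiom; the helper name is made up for illustration and the code assumes GCC's internal headers:

    /* Hypothetical helper showing the new calling convention.  */
    static bool
    constant_base_and_offset (tree exp, tree *base, HOST_WIDE_INT *off)
    {
      poly_int64 poff;
      *base = get_addr_base_and_unit_offset (exp, &poff);
      /* Fail both when there is no analyzable base and when the offset
         still depends on the runtime vector length.  */
      return *base != NULL_TREE && poff.is_constant (off);
    }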
poly_int64_pod *, bool *);
extern tree get_ref_base_and_extent_hwi (tree, HOST_WIDE_INT *,
HOST_WIDE_INT *, bool *);
-extern tree get_addr_base_and_unit_offset_1 (tree, HOST_WIDE_INT *,
+extern tree get_addr_base_and_unit_offset_1 (tree, poly_int64_pod *,
tree (*) (tree));
-extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
+extern tree get_addr_base_and_unit_offset (tree, poly_int64_pod *);
extern bool stmt_references_abnormal_ssa_name (gimple *);
extern void replace_abnormal_ssa_names (gimple *);
extern void dump_enumerated_decls (FILE *, dump_flags_t);
tree prev_base = base;
tree off;
tree mem_ref;
- HOST_WIDE_INT base_offset;
+ poly_int64 base_offset;
unsigned HOST_WIDE_INT misalign;
unsigned int align;
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
struct access *model)
{
- HOST_WIDE_INT base_offset;
+ poly_int64 base_offset;
tree off;
if (TREE_CODE (model->expr) == COMPONENT_REF
else if (addr.symbol
&& handled_component_p (TREE_OPERAND (addr.symbol, 0)))
{
- HOST_WIDE_INT offset;
+ poly_int64 offset;
addr.symbol = build_fold_addr_expr
(get_addr_base_and_unit_offset
(TREE_OPERAND (addr.symbol, 0), &offset));
void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
- HOST_WIDE_INT t;
- poly_int64 size_hwi, extra_offset = 0;
+ poly_int64 t, size_hwi, extra_offset = 0;
ref->ref = NULL_TREE;
if (TREE_CODE (ptr) == SSA_NAME)
{
gimple *defstmt = SSA_NAME_DEF_STMT (vuse);
tree src2 = NULL_TREE, len2 = NULL_TREE;
- HOST_WIDE_INT offset, offset2;
+ poly_int64 offset, offset2;
tree val = integer_zero_node;
if (gimple_store_p (defstmt)
&& gimple_assign_single_p (defstmt)
? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
: TYPE_SIZE_UNIT (TREE_TYPE (src2)));
if (len == NULL_TREE
- || TREE_CODE (len) != INTEGER_CST
+ || !poly_int_tree_p (len)
|| len2 == NULL_TREE
- || TREE_CODE (len2) != INTEGER_CST)
+ || !poly_int_tree_p (len2))
return;
src = get_addr_base_and_unit_offset (src, &offset);
src2 = get_addr_base_and_unit_offset (src2, &offset2);
if (src == NULL_TREE
|| src2 == NULL_TREE
- || offset < offset2)
+ || maybe_lt (offset, offset2))
return;
if (!operand_equal_p (src, src2, 0))
/* [ src + offset2, src + offset2 + len2 - 1 ] is set to val.
Make sure that
[ src + offset, src + offset + len - 1 ] is a subset of that. */
- if (wi::to_offset (len) + (offset - offset2) > wi::to_offset (len2))
+ if (maybe_gt (wi::to_poly_offset (len) + (offset - offset2),
+ wi::to_poly_offset (len2)))
return;
if (dump_file && (dump_flags & TDF_DETAILS))
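maybe_lt and maybe_gt above are the duals of the known_* predicates: they return true whenever the relation holds for some x, so bailing out on them keeps optimize_memcpy conservative. In the same toy model (illustrative names, not GCC's implementation):

    #include <cstdint>

    struct toy_poly_int64 { int64_t coeffs[2]; };  /* c0 + c1 * x, x >= 0 */

    /* a < b for SOME x >= 0, i.e. unless a >= b is known: either the
       value is already smaller at x == 0, or the slope eventually
       makes it smaller.  */
    static bool toy_maybe_lt (toy_poly_int64 a, toy_poly_int64 b)
    { return a.coeffs[0] < b.coeffs[0] || a.coeffs[1] < b.coeffs[1]; }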
&& TREE_OPERAND (lhs, 0) == name)
{
tree def_rhs_base;
- HOST_WIDE_INT def_rhs_offset;
+ poly_int64 def_rhs_offset;
/* If the address is invariant we can always fold it. */
if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
&def_rhs_offset)))
{
- offset_int off = mem_ref_offset (lhs);
+ poly_offset_int off = mem_ref_offset (lhs);
tree new_ptr;
off += def_rhs_offset;
if (TREE_CODE (def_rhs_base) == MEM_REF)
&& TREE_OPERAND (rhs, 0) == name)
{
tree def_rhs_base;
- HOST_WIDE_INT def_rhs_offset;
+ poly_int64 def_rhs_offset;
if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
&def_rhs_offset)))
{
- offset_int off = mem_ref_offset (rhs);
+ poly_offset_int off = mem_ref_offset (rhs);
tree new_ptr;
off += def_rhs_offset;
if (TREE_CODE (def_rhs_base) == MEM_REF)
if (TREE_CODE (p) == ADDR_EXPR)
{
tree q = TREE_OPERAND (p, 0);
- HOST_WIDE_INT offset;
+ poly_int64 offset;
tree base = get_addr_base_and_unit_offset (q, &offset);
if (base)
{
q = base;
- if (offset)
+ if (maybe_ne (offset, 0))
off = size_binop (PLUS_EXPR, off, size_int (offset));
}
if (TREE_CODE (q) == MEM_REF
return expand_simple_operations (e, stop);
else if (code == ADDR_EXPR)
{
- HOST_WIDE_INT offset;
+ poly_int64 offset;
tree base = get_addr_base_and_unit_offset (TREE_OPERAND (e, 0),
&offset);
if (base
{
/* For arg = &p->i transform it to p, if possible. */
tree rhs1 = gimple_assign_rhs1 (stmt);
- HOST_WIDE_INT offset;
+ poly_int64 offset;
tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs1, 0),
&offset);
if (tem
&& TREE_CODE (tem) == MEM_REF
- && (mem_ref_offset (tem) + offset) == 0)
+ && known_eq (mem_ref_offset (tem) + offset, 0))
{
*arg = TREE_OPERAND (tem, 0);
return true;
if (TREE_CODE (baseop) == ADDR_EXPR
&& handled_component_p (TREE_OPERAND (baseop, 0)))
{
- HOST_WIDE_INT off;
+ poly_int64 off;
tree base;
base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
&off);
vn_reference_op_t op = &(*ops)[i];
vn_reference_op_t mem_op = &(*ops)[i - 1];
tree addr_base;
- HOST_WIDE_INT addr_offset = 0;
+ poly_int64 addr_offset = 0;
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
if (addr_base != TREE_OPERAND (op->op0, 0))
{
- offset_int off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
- off += addr_offset;
+ poly_offset_int off
+ = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
+ SIGNED)
+ + addr_offset);
mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
op->op0 = build_fold_addr_expr (addr_base);
if (tree_fits_shwi_p (mem_op->op0))
vn_reference_op_t mem_op = &(*ops)[i - 1];
gimple *def_stmt;
enum tree_code code;
- offset_int off;
+ poly_offset_int off;
def_stmt = SSA_NAME_DEF_STMT (op->op0);
if (!is_gimple_assign (def_stmt))
&& code != POINTER_PLUS_EXPR)
return false;
- off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
+ off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
if (code == ADDR_EXPR)
{
tree addr, addr_base;
- HOST_WIDE_INT addr_offset;
+ poly_int64 addr_offset;
addr = gimple_assign_rhs1 (def_stmt);
addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
dereference isn't offsetted. */
if (!addr_base
&& *i_p == ops->length () - 1
- && off == 0
+ && known_eq (off, 0)
/* This makes us disable this transform for PRE where the
reference ops might be also used for code insertion which
is invalid. */
vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
new_mem_op->op0
= wide_int_to_tree (TREE_TYPE (mem_op->op0),
- wi::to_wide (new_mem_op->op0));
+ wi::to_poly_wide (new_mem_op->op0));
}
else
gcc_assert (tem.last ().opcode == STRING_CST);
}
if (TREE_CODE (lhs) == ADDR_EXPR)
{
- HOST_WIDE_INT tmp_lhs_offset;
tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
- &tmp_lhs_offset);
- lhs_offset = tmp_lhs_offset;
+ &lhs_offset);
if (!tem)
return (void *)-1;
if (TREE_CODE (tem) == MEM_REF
rhs = SSA_VAL (rhs);
if (TREE_CODE (rhs) == ADDR_EXPR)
{
- HOST_WIDE_INT tmp_rhs_offset;
tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
- &tmp_rhs_offset);
- rhs_offset = tmp_rhs_offset;
+ &rhs_offset);
if (!tem)
return (void *)-1;
if (TREE_CODE (tem) == MEM_REF
set_ssa_val_to (tree from, tree to)
{
tree currval = SSA_VAL (from);
- HOST_WIDE_INT toff, coff;
+ poly_int64 toff, coff;
/* The only thing we allow as value numbers are ssa_names
and invariants. So assert that here. We don't allow VN_TOP
&& TREE_CODE (to) == ADDR_EXPR
&& (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
== get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
- && coff == toff))
+ && known_eq (coff, toff)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " (changed)\n");
if (!decl_to_stridxlist_htab)
return 0;
- base = get_addr_base_and_unit_offset (exp, &off);
- if (base == NULL || !DECL_P (base))
+ poly_int64 poff;
+ base = get_addr_base_and_unit_offset (exp, &poff);
+ if (base == NULL || !DECL_P (base) || !poff.is_constant (&off))
return 0;
list = decl_to_stridxlist_htab->get (base);
{
HOST_WIDE_INT off;
- tree base = get_addr_base_and_unit_offset (exp, &off);
- if (base == NULL_TREE || !DECL_P (base))
+ poly_int64 poff;
+ tree base = get_addr_base_and_unit_offset (exp, &poff);
+ if (base == NULL_TREE || !DECL_P (base) || !poff.is_constant (&off))
return NULL;
if (!decl_to_stridxlist_htab)
if (TREE_CODE (ref) == ARRAY_REF)
ref = TREE_OPERAND (ref, 0);
- HOST_WIDE_INT dstoff;
+ poly_int64 dstoff;
tree dstbase = get_addr_base_and_unit_offset (ref, &dstoff);
- HOST_WIDE_INT lhsoff;
+ poly_int64 lhsoff;
tree lhsbase = get_addr_base_and_unit_offset (lhs, &lhsoff);
if (lhsbase
- && dstoff == lhsoff
+ && known_eq (dstoff, lhsoff)
&& operand_equal_p (dstbase, lhsbase, 0))
return false;
}
{
tree maxbound = TYPE_MAX_VALUE (ptrdiff_type_node);
tree arg = TREE_OPERAND (ref, 0);
- HOST_WIDE_INT off;
+ poly_int64 off;
- if (get_addr_base_and_unit_offset (arg, &off) && off > 0)
+ if (get_addr_base_and_unit_offset (arg, &off) && known_gt (off, 0))
maxbound = wide_int_to_tree (sizetype,
wi::sub (wi::to_wide (maxbound),
off));
tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
- HOST_WIDE_INT offset = 0;
+ poly_int64 offset = 0;
tree ptype = TREE_TYPE (ptr);
tree tem;
/* For convenience allow addresses that collapse to a simple base
{
/* Check whether the array domain covers all of the available
padding. */
- HOST_WIDE_INT offset;
+ poly_int64 offset;
if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
|| TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
|| TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
return true;
/* If at least one extra element fits it is a flexarray. */
- if (wi::les_p ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
- - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
- + 2)
- * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
- wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
+ if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
+ - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
+ + 2)
+ * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
+ wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
return true;
return false;