+2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * tree.h (get_inner_reference): Return the bitsize and bitpos
+ as poly_int64_pods rather than HOST_WIDE_INT.
+ * fold-const.h (ptr_difference_const): Return the pointer difference
+ as a poly_int64_pod rather than a HOST_WIDE_INT.
+ * expr.c (get_inner_reference): Return the bitsize and bitpos
+ as poly_int64_pods rather than HOST_WIDE_INT.
+ (expand_expr_addr_expr_1, expand_expr_real_1): Track polynomial
+ offsets and sizes.
+ * fold-const.c (make_bit_field_ref): Take the bitpos as a poly_int64
+ rather than a HOST_WIDE_INT. Update call to get_inner_reference.
+ (optimize_bit_field_compare): Update call to get_inner_reference.
+ (decode_field_reference): Likewise.
+ (fold_unary_loc): Track polynomial offsets and sizes.
+ (split_address_to_core_and_offset): Return the bitpos as a
+ poly_int64_pod rather than a HOST_WIDE_INT.
+ (ptr_difference_const): Likewise for the pointer difference.
+ * asan.c (instrument_derefs): Track polynomial offsets and sizes.
+ * config/mips/mips.c (r10k_safe_mem_expr_p): Likewise.
+ * dbxout.c (dbxout_expand_expr): Likewise.
+ * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref)
+ (loc_list_from_tree_1, fortran_common): Likewise.
+ * gimple-laddress.c (pass_laddress::execute): Likewise.
+ * gimple-ssa-store-merging.c (find_bswap_or_nop_load): Likewise.
+ * gimplify.c (gimplify_scan_omp_clauses): Likewise.
+ * simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
+ * tree-affine.c (tree_to_aff_combination): Likewise.
+ (get_inner_reference_aff): Likewise.
+ * tree-data-ref.c (split_constant_offset_1): Likewise.
+ (dr_analyze_innermost): Likewise.
+ * tree-scalar-evolution.c (interpret_rhs_expr): Likewise.
+ * tree-sra.c (ipa_sra_check_caller): Likewise.
+ * tree-vect-data-refs.c (vect_check_gather_scatter): Likewise.
+ * ubsan.c (maybe_instrument_pointer_overflow): Likewise.
+ (instrument_bool_enum_load, instrument_object_size): Likewise.
+ * gimple-ssa-strength-reduction.c (slsr_process_ref): Update call
+ to get_inner_reference.
+ * hsa-gen.c (gen_hsa_addr): Likewise.
+ * sanopt.c (maybe_optimize_ubsan_ptr_ifn): Likewise.
+ * tsan.c (instrument_expr): Likewise.
+ * match.pd: Update call to ptr_difference_const.
+
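The ChangeLog above boils down to one calling-convention change: bit sizes and positions that used to be plain HOST_WIDE_INTs are now degree-1 polynomials in a runtime parameter (the SVE vector length), so call sites replace ==, % and / with known_eq/maybe_ne, multiple_p and exact_div. The following standalone sketch models the idea; the toy_* names are illustrative stand-ins, not GCC's real poly-int.h API:

#include <stdbool.h>
#include <stdio.h>

/* Toy stand-in for poly_int64: a value A + B*N, where N is a
   nonnegative runtime-only parameter (e.g. the SVE vector length).  */
struct toy_poly { long a, b; };

/* Equal to the constant C for every N.  */
static bool
toy_known_eq (struct toy_poly x, long c)
{
  return x.b == 0 && x.a == c;
}

/* Possibly unequal to C for some N.  */
static bool
toy_maybe_ne (struct toy_poly x, long c)
{
  return !toy_known_eq (x, c);
}

/* Divisible by F for every N; on success store the quotient.  */
static bool
toy_multiple_p (struct toy_poly x, long f, struct toy_poly *quot)
{
  if (x.a % f != 0 || x.b % f != 0)
    return false;
  quot->a = x.a / f;
  quot->b = x.b / f;
  return true;
}

int
main (void)
{
  /* The old "bitpos % BITS_PER_UNIT == 0" test becomes a multiple_p
     query that must hold for every N and yields the byte position.  */
  struct toy_poly bitpos = { 32, 128 };  /* 32 + 128N bits */
  struct toy_poly bytepos;
  if (toy_multiple_p (bitpos, 8, &bytepos))
    printf ("bytepos = %ld + %ldN bytes\n", bytepos.a, bytepos.b);
  if (toy_maybe_ne (bitpos, 0))
    printf ("bitpos may be nonzero for some N\n");
  return 0;
}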
2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
+2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * gcc-interface/trans.c (Attribute_to_gnu): Track polynomial
+ offsets and sizes.
+ * gcc-interface/utils2.c (build_unary_op): Likewise.
+
2017-12-20 Eric Botcazou <ebotcazou@adacore.com>
* gcc-interface/trans.c (Loop_Statement_to_gnu): Use IN_RANGE macro.
case Attr_Last_Bit:
case Attr_Bit:
{
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
+ poly_int64 bitsize;
+ poly_int64 bitpos;
tree gnu_offset;
tree gnu_field_bitpos;
tree gnu_field_offset;
case Attr_First_Bit:
case Attr_Bit:
- gnu_result = size_int (bitpos % BITS_PER_UNIT);
+ gnu_result = size_int (num_trailing_bits (bitpos));
break;
case Attr_Last_Bit:
- gnu_result = bitsize_int (bitpos % BITS_PER_UNIT);
+ gnu_result = bitsize_int (num_trailing_bits (bitpos));
gnu_result = size_binop (PLUS_EXPR, gnu_result,
TYPE_SIZE (TREE_TYPE (gnu_prefix)));
/* ??? Avoid a large unsigned result that will overflow when
the offset to the field. Otherwise, do this the normal way. */
if (op_code == ATTR_ADDR_EXPR)
{
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
+ poly_int64 bitsize;
+ poly_int64 bitpos;
tree offset, inner;
machine_mode mode;
int unsignedp, reversep, volatilep;
if (!offset)
offset = size_zero_node;
- offset = size_binop (PLUS_EXPR, offset,
- size_int (bitpos / BITS_PER_UNIT));
+ offset
+ = size_binop (PLUS_EXPR, offset,
+ size_int (bits_to_bytes_round_down (bitpos)));
/* Take the address of INNER, convert it to a pointer to our type
and add the offset. */
if (size_in_bytes <= 0)
return;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
return;
}
- if (bitpos % BITS_PER_UNIT
- || bitsize != size_in_bytes * BITS_PER_UNIT)
+ if (!multiple_p (bitpos, BITS_PER_UNIT)
+ || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
return;
if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
return;
+ poly_int64 decl_size;
if (VAR_P (inner)
&& offset == NULL_TREE
- && bitpos >= 0
&& DECL_SIZE (inner)
- && tree_fits_shwi_p (DECL_SIZE (inner))
- && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
+ && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
+ && known_subrange_p (bitpos, bitsize, 0, decl_size))
{
if (DECL_THREAD_LOCAL_P (inner))
return;
static bool
r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset)
{
- HOST_WIDE_INT bitoffset, bitsize;
+ poly_int64 bitoffset, bitsize;
tree inner, var_offset;
machine_mode mode;
int unsigned_p, reverse_p, volatile_p;
+2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * constexpr.c (check_automatic_or_tls): Track polynomial
+ offsets and sizes.
+
2017-12-19 Paolo Carlini <paolo.carlini@oracle.com>
PR c++/82593
check_automatic_or_tls (tree ref)
{
machine_mode mode;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
int volatilep = 0, unsignedp = 0;
tree decl = get_inner_reference (ref, &bitsize, &bitpos, &offset,
case BIT_FIELD_REF:
{
machine_mode mode;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset, tem;
int unsignedp, reversep, volatilep = 0;
rtx x;
return NULL;
x = adjust_address_nv (x, mode, tree_to_shwi (offset));
}
- if (bitpos != 0)
- x = adjust_address_nv (x, mode, bitpos / BITS_PER_UNIT);
+ if (maybe_ne (bitpos, 0))
+ x = adjust_address_nv (x, mode, bits_to_bytes_round_down (bitpos));
return x;
}
loc_descr_context *context)
{
tree obj, offset;
- HOST_WIDE_INT bitsize, bitpos, bytepos;
+ poly_int64 bitsize, bitpos, bytepos;
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
&bitsize, &bitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
STRIP_NOPS (obj);
- if (bitpos % BITS_PER_UNIT)
+ if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
{
expansion_failed (loc, NULL_RTX, "bitfield access");
return 0;
NULL_RTX, "no indirect ref in inner refrence");
return 0;
}
- if (!offset && !bitpos)
+ if (!offset && known_eq (bitpos, 0))
list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
context);
else if (toplev
add_loc_descr_to_each (list_ret,
new_loc_descr (DW_OP_plus, 0, 0));
}
- bytepos = bitpos / BITS_PER_UNIT;
- if (bytepos > 0)
+ HOST_WIDE_INT value;
+ if (bytepos.is_constant (&value) && value > 0)
add_loc_descr_to_each (list_ret,
- new_loc_descr (DW_OP_plus_uconst,
- bytepos, 0));
- else if (bytepos < 0)
+ new_loc_descr (DW_OP_plus_uconst, value, 0));
+ else if (maybe_ne (bytepos, 0))
loc_list_plus_const (list_ret, bytepos);
add_loc_descr_to_each (list_ret,
new_loc_descr (DW_OP_stack_value, 0, 0));
case IMAGPART_EXPR:
{
tree obj, offset;
- HOST_WIDE_INT bitsize, bitpos, bytepos;
+ poly_int64 bitsize, bitpos, bytepos;
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
list_ret = loc_list_from_tree_1 (obj,
want_address == 2
- && !bitpos && !offset ? 2 : 1,
+ && known_eq (bitpos, 0)
+ && !offset ? 2 : 1,
context);
/* TODO: We can extract value of the small expression via shifting even
for nonzero bitpos. */
if (list_ret == 0)
return 0;
- if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
+ if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
+ || !multiple_p (bitsize, BITS_PER_UNIT))
{
expansion_failed (loc, NULL_RTX,
"bitfield access");
add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
}
- bytepos = bitpos / BITS_PER_UNIT;
- if (bytepos > 0)
- add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
- else if (bytepos < 0)
+ HOST_WIDE_INT value;
+ if (bytepos.is_constant (&value) && value > 0)
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
+ value, 0));
+ else if (maybe_ne (bytepos, 0))
loc_list_plus_const (list_ret, bytepos);
have_address = 1;
{
tree val_expr, cvar;
machine_mode mode;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
+ HOST_WIDE_INT cbitpos;
int unsignedp, reversep, volatilep = 0;
/* If the decl isn't a VAR_DECL, or if it isn't static, or if
if (cvar == NULL_TREE
|| !VAR_P (cvar)
|| DECL_ARTIFICIAL (cvar)
- || !TREE_PUBLIC (cvar))
+ || !TREE_PUBLIC (cvar)
+ /* We don't expect to have to cope with variable offsets,
+ since at present all static data must have a constant size. */
+ || !bitpos.is_constant (&cbitpos))
return NULL_TREE;
*value = 0;
return NULL_TREE;
*value = tree_to_shwi (offset);
}
- if (bitpos != 0)
- *value += bitpos / BITS_PER_UNIT;
+ if (cbitpos != 0)
+ *value += cbitpos / BITS_PER_UNIT;
return cvar;
}
this case, but the address of the object can be found. */
tree
-get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
- HOST_WIDE_INT *pbitpos, tree *poffset,
+get_inner_reference (tree exp, poly_int64_pod *pbitsize,
+ poly_int64_pod *pbitpos, tree *poffset,
machine_mode *pmode, int *punsignedp,
int *preversep, int *pvolatilep)
{
machine_mode mode = VOIDmode;
bool blkmode_bitfield = false;
tree offset = size_zero_node;
- offset_int bit_offset = 0;
+ poly_offset_int bit_offset = 0;
/* First get the mode, signedness, storage order and size. We do this from
just the outermost expression. */
switch (TREE_CODE (exp))
{
case BIT_FIELD_REF:
- bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
+ bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
break;
case COMPONENT_REF:
break;
offset = size_binop (PLUS_EXPR, offset, this_offset);
- bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
+ bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
/* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
}
/* If OFFSET is constant, see if we can return the whole thing as a
constant bit position. Make sure to handle overflow during
this conversion. */
- if (TREE_CODE (offset) == INTEGER_CST)
+ if (poly_int_tree_p (offset))
{
- offset_int tem = wi::sext (wi::to_offset (offset),
- TYPE_PRECISION (sizetype));
+ poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
+ TYPE_PRECISION (sizetype));
tem <<= LOG2_BITS_PER_UNIT;
tem += bit_offset;
- if (wi::fits_shwi_p (tem))
- {
- *pbitpos = tem.to_shwi ();
- *poffset = offset = NULL_TREE;
- }
+ if (tem.to_shwi (pbitpos))
+ *poffset = offset = NULL_TREE;
}
/* Otherwise, split it up. */
if (offset)
{
/* Avoid returning a negative bitpos as this may wreak havoc later. */
- if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
+ if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
{
- offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
- offset_int tem = wi::bit_and_not (bit_offset, mask);
- /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
- Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
- bit_offset -= tem;
- tem >>= LOG2_BITS_PER_UNIT;
+ *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
+ poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
offset = size_binop (PLUS_EXPR, offset,
- wide_int_to_tree (sizetype, tem));
+ build_int_cst (sizetype, bytes.force_shwi ()));
}
- *pbitpos = bit_offset.to_shwi ();
*poffset = offset;
}
/* We can use BLKmode for a byte-aligned BLKmode bitfield. */
if (mode == VOIDmode
&& blkmode_bitfield
- && (*pbitpos % BITS_PER_UNIT) == 0
- && (*pbitsize % BITS_PER_UNIT) == 0)
+ && multiple_p (*pbitpos, BITS_PER_UNIT)
+ && multiple_p (*pbitsize, BITS_PER_UNIT))
*pmode = BLKmode;
else
*pmode = mode;
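The interesting case in the hunk above is a bit offset that is variable or negative: get_inner_reference now pushes whole bytes (rounded towards -Inf) into the tree OFFSET and keeps only the sub-byte remainder in *PBITPOS. Below is a small self-contained illustration of that split, using plain longs where the patch uses poly_int64; the toy_* helpers mirror, but are not, GCC's bits_to_bytes_round_down and num_trailing_bits:

#include <assert.h>
#include <stdio.h>

/* Scalar models of the rounding helpers: C division truncates towards
   zero, so it is corrected to round towards -Inf, which keeps the
   remainder in the range 0..7 even for negative offsets.  */
static long
toy_bits_to_bytes_round_down (long bits)
{
  long q = bits / 8;
  if (bits % 8 != 0 && bits < 0)
    q--;
  return q;
}

static long
toy_num_trailing_bits (long bits)
{
  long r = bits % 8;
  return r < 0 ? r + 8 : r;
}

int
main (void)
{
  /* A negative bit offset splits into whole bytes for OFFSET and a
     nonnegative sub-byte remainder for *PBITPOS.  */
  long bit_offset = -13;
  long bytes = toy_bits_to_bytes_round_down (bit_offset);  /* -2 */
  long rem = toy_num_trailing_bits (bit_offset);           /* 3 */
  assert (bit_offset == bytes * 8 + rem);
  printf ("%ld bits = %ld bytes + %ld bits\n", bit_offset, bytes, rem);
  return 0;
}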
{
rtx result, subtarget;
tree inner, offset;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
int unsignedp, reversep, volatilep = 0;
machine_mode mode1;
/* We must have made progress. */
gcc_assert (inner != exp);
- subtarget = offset || bitpos ? NULL_RTX : target;
+ subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
/* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
inner alignment, force the inner to be sufficiently aligned. */
if (CONSTANT_CLASS_P (inner)
result = simplify_gen_binary (PLUS, tmode, result, tmp);
else
{
- subtarget = bitpos ? NULL_RTX : target;
+ subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
1, OPTAB_LIB_WIDEN);
}
}
- if (bitpos)
+ if (maybe_ne (bitpos, 0))
{
/* Someone beforehand should have rejected taking the address
- of such an object. */
- gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
-
+ of an object that isn't byte-aligned. */
+ poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
result = convert_memory_address_addr_space (tmode, result, as);
- result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
+ result = plus_constant (tmode, result, bytepos);
if (modifier < EXPAND_SUM)
result = force_operand (result, target);
}
normal_inner_ref:
{
machine_mode mode1, mode2;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos, bytepos;
tree offset;
int reversep, volatilep = 0, must_force_mem;
tree tem
to a larger size. */
must_force_mem = (offset
|| mode1 == BLKmode
- || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
+ || maybe_gt (bitpos + bitsize,
+ GET_MODE_BITSIZE (mode2)));
/* Handle CONCAT first. */
if (GET_CODE (op0) == CONCAT && !must_force_mem)
{
- if (bitpos == 0
- && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
+ if (known_eq (bitpos, 0)
+ && known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
&& COMPLEX_MODE_P (mode1)
&& COMPLEX_MODE_P (GET_MODE (op0))
&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
}
return op0;
}
- if (bitpos == 0
- && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
- && bitsize)
+ if (known_eq (bitpos, 0)
+ && known_eq (bitsize,
+ GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
+ && maybe_ne (bitsize, 0))
{
op0 = XEXP (op0, 0);
mode2 = GET_MODE (op0);
}
- else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
- && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
- && bitpos
- && bitsize)
+ else if (known_eq (bitpos,
+ GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
+ && known_eq (bitsize,
+ GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
+ && maybe_ne (bitpos, 0)
+ && maybe_ne (bitsize, 0))
{
op0 = XEXP (op0, 1);
bitpos = 0;
/* See the comment in expand_assignment for the rationale. */
if (mode1 != VOIDmode
- && bitpos != 0
- && bitsize > 0
- && (bitpos % bitsize) == 0
- && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
+ && maybe_ne (bitpos, 0)
+ && maybe_gt (bitsize, 0)
+ && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
+ && multiple_p (bitpos, bitsize)
+ && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
{
- op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
+ op0 = adjust_address (op0, mode1, bytepos);
bitpos = 0;
}
/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
record its alignment as BIGGEST_ALIGNMENT. */
- if (MEM_P (op0) && bitpos == 0 && offset != 0
+ if (MEM_P (op0)
+ && known_eq (bitpos, 0)
+ && offset != 0
&& is_aligning_offset (offset, tem))
set_mem_align (op0, BIGGEST_ALIGNMENT);
|| (volatilep && TREE_CODE (exp) == COMPONENT_REF
&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
&& mode1 != BLKmode
- && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
+ && maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
|| (mode1 != BLKmode
&& (((MEM_P (op0)
? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
- || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)
+ || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
: TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
- || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+ || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
&& modifier != EXPAND_MEMORY
&& ((modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_INITIALIZER)
? STRICT_ALIGNMENT
: targetm.slow_unaligned_access (mode1,
MEM_ALIGN (op0))))
- || (bitpos % BITS_PER_UNIT != 0)))
+ || !multiple_p (bitpos, BITS_PER_UNIT)))
/* If the type and the field are a constant size and the
size of the type isn't the same size as the bitfield,
we must use bitfield operations. */
- || (bitsize >= 0
+ || (known_size_p (bitsize)
&& TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
- && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
- bitsize) != 0))
+ && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
+ && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
+ bitsize)))
{
machine_mode ext_mode = mode;
if (ext_mode == BLKmode
&& ! (target != 0 && MEM_P (op0)
&& MEM_P (target)
- && bitpos % BITS_PER_UNIT == 0))
+ && multiple_p (bitpos, BITS_PER_UNIT)))
ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
if (ext_mode == BLKmode)
/* ??? Unlike the similar test a few lines below, this one is
very likely obsolete. */
- if (bitsize == 0)
+ if (known_eq (bitsize, 0))
return target;
/* In this case, BITPOS must start at a byte boundary and
TARGET, if specified, must be a MEM. */
gcc_assert (MEM_P (op0)
- && (!target || MEM_P (target))
- && !(bitpos % BITS_PER_UNIT));
+ && (!target || MEM_P (target)));
+ bytepos = exact_div (bitpos, BITS_PER_UNIT);
+ poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
emit_block_move (target,
- adjust_address (op0, VOIDmode,
- bitpos / BITS_PER_UNIT),
- GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT),
+ adjust_address (op0, VOIDmode, bytepos),
+ gen_int_mode (bytesize, Pmode),
(modifier == EXPAND_STACK_PARM
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
return 0 for the sake of consistency, as reading a zero-sized
bitfield is valid in Ada and the value is fully specified. */
- if (bitsize == 0)
+ if (known_eq (bitsize, 0))
return const0_rtx;
op0 = validize_mem (op0);
{
HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
- if (bitsize < size
+ gcc_checking_assert (known_le (bitsize, size));
+ if (maybe_lt (bitsize, size)
&& reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
size - bitsize, op0, 1);
mode1 = BLKmode;
/* Get a reference to just this component. */
+ bytepos = bits_to_bytes_round_down (bitpos);
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
+ op0 = adjust_address_nv (op0, mode1, bytepos);
else
- op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
+ op0 = adjust_address (op0, mode1, bytepos);
if (op0 == orig_op0)
op0 = copy_rtx (op0);
/* If we are converting to BLKmode, try to avoid an intermediate
temporary by fetching an inner memory reference. */
if (mode == BLKmode
- && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+ && poly_int_tree_p (TYPE_SIZE (type))
&& TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
&& handled_component_p (treeop0))
{
machine_mode mode1;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos, bytepos;
tree offset;
int unsignedp, reversep, volatilep = 0;
tree tem
/* ??? We should work harder and deal with non-zero offsets. */
if (!offset
- && (bitpos % BITS_PER_UNIT) == 0
+ && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
&& !reversep
- && bitsize >= 0
- && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
+ && known_size_p (bitsize)
+ && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
{
/* See the normal_inner_ref case for the rationale. */
orig_op0
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM
|| modifier == EXPAND_INITIALIZER)
- op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
+ op0 = adjust_address_nv (op0, mode, bytepos);
else
- op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
+ op0 = adjust_address (op0, mode, bytepos);
if (op0 == orig_op0)
op0 = copy_rtx (op0);
static tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
- HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
+ HOST_WIDE_INT bitsize, poly_int64 bitpos,
int unsignedp, int reversep)
{
tree result, bftype;
{
tree ninner = TREE_OPERAND (orig_inner, 0);
machine_mode nmode;
- HOST_WIDE_INT nbitsize, nbitpos;
+ poly_int64 nbitsize, nbitpos;
tree noffset;
int nunsignedp, nreversep, nvolatilep = 0;
tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
&nreversep, &nvolatilep);
if (base == inner
&& noffset == NULL_TREE
- && nbitsize >= bitsize
- && nbitpos <= bitpos
- && bitpos + bitsize <= nbitpos + nbitsize
+ && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
&& !reversep
&& !nreversep
&& !nvolatilep)
build_fold_addr_expr (inner),
build_int_cst (ptr_type_node, 0));
- if (bitpos == 0 && !reversep)
+ if (known_eq (bitpos, 0) && !reversep)
{
tree size = TYPE_SIZE (TREE_TYPE (inner));
if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
optimize_bit_field_compare (location_t loc, enum tree_code code,
tree compare_type, tree lhs, tree rhs)
{
- HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
+ poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
+ HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
tree type = TREE_TYPE (lhs);
tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
tree offset;
/* Get all the information about the extractions being done. If the bit size
- if the same as the size of the underlying object, we aren't doing an
+ is the same as the size of the underlying object, we aren't doing an
extraction at all and so can do nothing. We also don't want to
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
- linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
+ linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
&lunsignedp, &lreversep, &lvolatilep);
- if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
- || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
+ if (linner == lhs
+ || !known_size_p (plbitsize)
+ || !plbitsize.is_constant (&lbitsize)
+ || !plbitpos.is_constant (&lbitpos)
+ || lbitsize == GET_MODE_BITSIZE (lmode)
+ || offset != 0
+ || TREE_CODE (linner) == PLACEHOLDER_EXPR
+ || lvolatilep)
return 0;
if (const_p)
= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
&runsignedp, &rreversep, &rvolatilep);
- if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
- || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
- || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
+ if (rinner == rhs
+ || maybe_ne (lbitpos, rbitpos)
+ || maybe_ne (lbitsize, rbitsize)
+ || lunsignedp != runsignedp
+ || lreversep != rreversep
+ || offset != 0
+ || TREE_CODE (rinner) == PLACEHOLDER_EXPR
+ || rvolatilep)
return 0;
}
poly_uint64 bitend = 0;
if (TREE_CODE (lhs) == COMPONENT_REF)
{
- poly_int64 plbitpos;
get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
return 0;
return 0;
}
- inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, preversep, pvolatilep);
+ poly_int64 poly_bitsize, poly_bitpos;
+ inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
+ pmode, punsignedp, preversep, pvolatilep);
if ((inner == exp && and_mask == 0)
- || *pbitsize < 0 || offset != 0
+ || !poly_bitsize.is_constant (pbitsize)
+ || !poly_bitpos.is_constant (pbitpos)
+ || *pbitsize < 0
+ || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR
/* Reject out-of-bound accesses (PR79731). */
|| (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
&& POINTER_TYPE_P (type)
&& handled_component_p (TREE_OPERAND (op0, 0)))
{
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
int unsignedp, reversep, volatilep;
/* If the reference was to a (constant) zero offset, we can use
the address of the base if it has the same base type
as the result type and the pointer type is unqualified. */
- if (! offset && bitpos == 0
+ if (!offset
+ && known_eq (bitpos, 0)
&& (TYPE_MAIN_VARIANT (TREE_TYPE (type))
== TYPE_MAIN_VARIANT (TREE_TYPE (base)))
&& TYPE_QUALS (type) == TYPE_UNQUALIFIED)
static tree
split_address_to_core_and_offset (tree exp,
- HOST_WIDE_INT *pbitpos, tree *poffset)
+ poly_int64_pod *pbitpos, tree *poffset)
{
tree core;
machine_mode mode;
int unsignedp, reversep, volatilep;
- HOST_WIDE_INT bitsize;
+ poly_int64 bitsize;
location_t loc = EXPR_LOCATION (exp);
if (TREE_CODE (exp) == ADDR_EXPR)
STRIP_NOPS (core);
*pbitpos = 0;
*poffset = TREE_OPERAND (exp, 1);
- if (TREE_CODE (*poffset) == INTEGER_CST)
+ if (poly_int_tree_p (*poffset))
{
- offset_int tem = wi::sext (wi::to_offset (*poffset),
- TYPE_PRECISION (TREE_TYPE (*poffset)));
+ poly_offset_int tem
+ = wi::sext (wi::to_poly_offset (*poffset),
+ TYPE_PRECISION (TREE_TYPE (*poffset)));
tem <<= LOG2_BITS_PER_UNIT;
- if (wi::fits_shwi_p (tem))
- {
- *pbitpos = tem.to_shwi ();
- *poffset = NULL_TREE;
- }
+ if (tem.to_shwi (pbitpos))
+ *poffset = NULL_TREE;
}
}
else
otherwise. If they do, E1 - E2 is stored in *DIFF. */
bool
-ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
+ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
tree core1, core2;
- HOST_WIDE_INT bitpos1, bitpos2;
+ poly_int64 bitpos1, bitpos2;
tree toffset1, toffset2, tdiff, type;
core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
- if (bitpos1 % BITS_PER_UNIT != 0
- || bitpos2 % BITS_PER_UNIT != 0
+ poly_int64 bytepos1, bytepos2;
+ if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
+ || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
|| !operand_equal_p (core1, core2, 0))
return false;
else
*diff = 0;
- *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
+ *diff += bytepos1 - bytepos2;
return true;
}
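After this change, ptr_difference_const succeeds only when both addresses decompose to byte-aligned positions over the same core, and it returns the difference as a possibly polynomial byte count. A standalone sketch of that contract on plain integers (the toy_* function is illustrative, not the GCC entry point):

#include <stdbool.h>
#include <stdio.h>

/* Plain-integer model of the new contract: both bit positions must be
   exact byte multiples, and the result is their byte difference.  */
static bool
toy_ptr_difference_const (long bitpos1, long bitpos2, long *diff)
{
  if (bitpos1 % 8 != 0 || bitpos2 % 8 != 0)
    return false;
  *diff = bitpos1 / 8 - bitpos2 / 8;
  return true;
}

int
main (void)
{
  long diff;
  if (toy_ptr_difference_const (48, 16, &diff))
    printf ("difference: %ld bytes\n", diff);  /* 4 bytes */
  return 0;
}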
extern bool tree_swap_operands_p (const_tree, const_tree);
extern enum tree_code swap_tree_comparison (enum tree_code);
-extern bool ptr_difference_const (tree, tree, HOST_WIDE_INT *);
+extern bool ptr_difference_const (tree, tree, poly_int64_pod *);
extern enum tree_code invert_tree_comparison (enum tree_code, bool);
extern bool tree_unary_nonzero_warnv_p (enum tree_code, tree, tree, bool *);
*/
tree expr = gimple_assign_rhs1 (stmt);
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree base, offset;
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
base = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize,
&bitpos, &offset, &mode, &unsignedp,
&reversep, &volatilep);
- gcc_assert (base != NULL_TREE && (bitpos % BITS_PER_UNIT) == 0);
+ gcc_assert (base != NULL_TREE);
+ poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
if (offset != NULL_TREE)
{
- if (bitpos != 0)
- offset = size_binop (PLUS_EXPR, offset,
- size_int (bitpos / BITS_PER_UNIT));
+ if (maybe_ne (bytepos, 0))
+ offset = size_binop (PLUS_EXPR, offset, size_int (bytepos));
offset = force_gimple_operand_gsi (&gsi, offset, true, NULL,
true, GSI_SAME_STMT);
base = build_fold_addr_expr (base);
{
/* Leaf node is an array or component ref. Memorize its base and
offset from base to compare to other such leaf node. */
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos, bytepos;
machine_mode mode;
int unsignedp, reversep, volatilep;
tree offset, base_addr;
else
base_addr = build_fold_addr_expr (base_addr);
- if (bitpos % BITS_PER_UNIT)
+ if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
return false;
- if (bitsize % BITS_PER_UNIT)
+ if (!multiple_p (bitsize, BITS_PER_UNIT))
return false;
if (reversep)
return false;
return false;
n->base_addr = base_addr;
n->offset = offset;
- n->bytepos = bitpos / BITS_PER_UNIT;
+ n->bytepos = bytepos;
n->alias_set = reference_alias_ptr_type (ref);
n->vuse = gimple_vuse (stmt);
return true;
slsr_process_ref (gimple *gs)
{
tree ref_expr, base, offset, type;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
machine_mode mode;
int unsignedp, reversep, volatilep;
slsr_cand_t c;
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
- if (reversep)
+ HOST_WIDE_INT cbitpos;
+ if (reversep || !bitpos.is_constant (&cbitpos))
return;
- widest_int index = bitpos;
+ widest_int index = cbitpos;
if (!restructure_reference (&base, &offset, &index, &type))
return;
}
tree offset;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
tree base = OMP_CLAUSE_DECL (c);
base = TREE_OPERAND (base, 0);
gcc_assert (base == decl
&& (offset == NULL_TREE
- || TREE_CODE (offset) == INTEGER_CST));
+ || poly_int_tree_p (offset)));
splay_tree_node n
= splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
tree *sc = NULL, *scp = NULL;
if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
n->value |= GOVD_SEEN;
- offset_int o1, o2;
+ poly_offset_int o1, o2;
if (offset)
- o1 = wi::to_offset (offset);
+ o1 = wi::to_poly_offset (offset);
else
o1 = 0;
- if (bitpos)
- o1 = o1 + bitpos / BITS_PER_UNIT;
+ if (maybe_ne (bitpos, 0))
+ o1 += bits_to_bytes_round_down (bitpos);
sc = &OMP_CLAUSE_CHAIN (*osc);
if (*sc != c
&& (OMP_CLAUSE_MAP_KIND (*sc)
else
{
tree offset2;
- HOST_WIDE_INT bitsize2, bitpos2;
+ poly_int64 bitsize2, bitpos2;
base = OMP_CLAUSE_DECL (*sc);
if (TREE_CODE (base) == ARRAY_REF)
{
if (scp)
continue;
gcc_assert (offset == NULL_TREE
- || TREE_CODE (offset) == INTEGER_CST);
+ || poly_int_tree_p (offset));
tree d1 = OMP_CLAUSE_DECL (*sc);
tree d2 = OMP_CLAUSE_DECL (c);
while (TREE_CODE (d1) == ARRAY_REF)
break;
}
if (offset2)
- o2 = wi::to_offset (offset2);
+ o2 = wi::to_poly_offset (offset2);
else
o2 = 0;
- if (bitpos2)
- o2 = o2 + bitpos2 / BITS_PER_UNIT;
- if (wi::ltu_p (o1, o2)
- || (wi::eq_p (o1, o2) && bitpos < bitpos2))
+ if (maybe_ne (bitpos2, 0))
+ o2 += bits_to_bytes_round_down (bitpos2);
+ if (maybe_lt (o1, o2)
+ || (known_eq (o1, o2)
+ && maybe_lt (bitpos, bitpos2)))
{
if (ptr)
scp = sc;
{
machine_mode mode;
int unsignedp, volatilep, preversep;
-
- ref = get_inner_reference (ref, &bitsize, &bitpos, &varoffset, &mode,
- &unsignedp, &preversep, &volatilep);
-
- offset = bitpos;
- offset = wi::rshift (offset, LOG2_BITS_PER_UNIT, SIGNED);
+ poly_int64 pbitsize, pbitpos;
+ tree new_ref;
+
+ new_ref = get_inner_reference (ref, &pbitsize, &pbitpos, &varoffset,
+ &mode, &unsignedp, &preversep,
+ &volatilep);
+ /* When this isn't true, the switch below will report an
+ appropriate error. */
+ if (pbitsize.is_constant () && pbitpos.is_constant ())
+ {
+ bitsize = pbitsize.to_constant ();
+ bitpos = pbitpos.to_constant ();
+ ref = new_ref;
+ offset = bitpos;
+ offset = wi::rshift (offset, LOG2_BITS_PER_UNIT, SIGNED);
+ }
}
switch (TREE_CODE (ref))
(simplify
(minus (convert ADDR_EXPR@0) (convert @1))
(if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
- (with { HOST_WIDE_INT diff; }
+ (with { poly_int64 diff; }
(if (ptr_difference_const (@0, @1, &diff))
{ build_int_cst_type (type, diff); }))))
(simplify
(minus (convert @0) (convert ADDR_EXPR@1))
(if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
- (with { HOST_WIDE_INT diff; }
+ (with { poly_int64 diff; }
(if (ptr_difference_const (@0, @1, &diff))
{ build_int_cst_type (type, diff); }))))
(simplify
(pointer_diff (convert?@2 ADDR_EXPR@0) (convert?@3 @1))
(if (tree_nop_conversion_p (TREE_TYPE(@2), TREE_TYPE (@0))
&& tree_nop_conversion_p (TREE_TYPE(@3), TREE_TYPE (@1)))
- (with { HOST_WIDE_INT diff; }
+ (with { poly_int64 diff; }
(if (ptr_difference_const (@0, @1, &diff))
{ build_int_cst_type (type, diff); }))))
(simplify
(pointer_diff (convert?@2 @0) (convert?@3 ADDR_EXPR@1))
(if (tree_nop_conversion_p (TREE_TYPE(@2), TREE_TYPE (@0))
&& tree_nop_conversion_p (TREE_TYPE(@3), TREE_TYPE (@1)))
- (with { HOST_WIDE_INT diff; }
+ (with { poly_int64 diff; }
(if (ptr_difference_const (@0, @1, &diff))
{ build_int_cst_type (type, diff); }))))
static bool
maybe_optimize_ubsan_ptr_ifn (sanopt_ctx *ctx, gimple *stmt)
{
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, pbitpos;
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
tree offset;
{
base = TREE_OPERAND (base, 0);
- base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
+ HOST_WIDE_INT bitpos;
+ base = get_inner_reference (base, &bitsize, &pbitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
- if (offset == NULL_TREE && DECL_P (base))
+ if (offset == NULL_TREE
+ && DECL_P (base)
+ && pbitpos.is_constant (&bitpos))
{
gcc_assert (!DECL_REGISTER (base));
offset_int expr_offset = bitpos / BITS_PER_UNIT;
case IMAGPART_EXPR:
case VIEW_CONVERT_EXPR:
{
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos, bytepos, toffset_val = 0;
tree toffset;
int unsignedp, reversep, volatilep = 0;
decl
= get_inner_reference (decl, &bitsize, &bitpos, &toffset, &mode,
&unsignedp, &reversep, &volatilep);
- if (bitsize != GET_MODE_BITSIZE (mode)
- || (bitpos % BITS_PER_UNIT)
- || (toffset && !tree_fits_shwi_p (toffset)))
+ if (maybe_ne (bitsize, GET_MODE_BITSIZE (mode))
+ || !multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
+ || (toffset && !poly_int_tree_p (toffset, &toffset_val)))
decl = NULL;
else
- {
- offset += bitpos / BITS_PER_UNIT;
- if (toffset)
- offset += tree_to_shwi (toffset);
- }
+ offset += bytepos + toffset_val;
break;
}
}
aff_tree tmp;
enum tree_code code;
tree cst, core, toffset;
- HOST_WIDE_INT bitpos, bitsize;
+ poly_int64 bitpos, bitsize, bytepos;
machine_mode mode;
int unsignedp, reversep, volatilep;
core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
&toffset, &mode, &unsignedp, &reversep,
&volatilep);
- if (bitpos % BITS_PER_UNIT != 0)
+ if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
break;
- aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
+ aff_combination_const (comb, type, bytepos);
if (TREE_CODE (core) == MEM_REF)
{
- aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
+ tree mem_offset = TREE_OPERAND (core, 1);
+ aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
core = TREE_OPERAND (core, 0);
}
else
tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree toff;
machine_mode mode;
int uns, rev, vol;
aff_combination_add (addr, &tmp);
}
- aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
+ aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
aff_combination_add (addr, &tmp);
- *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
+ *size = bits_to_bytes_round_up (bitsize);
return base;
}
case ADDR_EXPR:
{
tree base, poffset;
- HOST_WIDE_INT pbitsize, pbitpos;
+ poly_int64 pbitsize, pbitpos, pbytepos;
machine_mode pmode;
int punsignedp, preversep, pvolatilep;
= get_inner_reference (op0, &pbitsize, &pbitpos, &poffset, &pmode,
&punsignedp, &preversep, &pvolatilep);
- if (pbitpos % BITS_PER_UNIT != 0)
+ if (!multiple_p (pbitpos, BITS_PER_UNIT, &pbytepos))
return false;
base = build_fold_addr_expr (base);
- off0 = ssize_int (pbitpos / BITS_PER_UNIT);
+ off0 = ssize_int (pbytepos);
if (poffset)
{
dr_analyze_innermost (innermost_loop_behavior *drb, tree ref,
struct loop *loop)
{
- HOST_WIDE_INT pbitsize, pbitpos;
+ poly_int64 pbitsize, pbitpos;
tree base, poffset;
machine_mode pmode;
int punsignedp, preversep, pvolatilep;
&punsignedp, &preversep, &pvolatilep);
gcc_assert (base != NULL_TREE);
- if (pbitpos % BITS_PER_UNIT != 0)
+ poly_int64 pbytepos;
+ if (!multiple_p (pbitpos, BITS_PER_UNIT, &pbytepos))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "failed: bit offset alignment.\n");
}
}
- init = ssize_int (pbitpos / BITS_PER_UNIT);
+ init = ssize_int (pbytepos);
/* Subtract any constant component from the base and add it to INIT instead.
Adjust the misalignment to reflect the amount we subtracted. */
|| handled_component_p (TREE_OPERAND (rhs1, 0)))
{
machine_mode mode;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
int unsignedp, reversep;
int volatilep = 0;
tree base, offset;
res = chrec_fold_plus (type, res, chrec2);
}
- if (bitpos != 0)
+ if (maybe_ne (bitpos, 0))
{
- gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
-
- unitpos = size_int (bitpos / BITS_PER_UNIT);
+ unitpos = size_int (exact_div (bitpos, BITS_PER_UNIT));
chrec3 = analyze_scalar_evolution (loop, unitpos);
chrec3 = chrec_convert (TREE_TYPE (unitpos), chrec3, at_stmt);
chrec3 = instantiate_parameters (loop, chrec3);
continue;
tree offset;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
- if (bitpos % BITS_PER_UNIT)
+ if (!multiple_p (bitpos, BITS_PER_UNIT))
{
iscc->bad_arg_alignment = true;
return true;
vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
gather_scatter_info *info)
{
- HOST_WIDE_INT scale = 1, pbitpos, pbitsize;
+ HOST_WIDE_INT scale = 1;
+ poly_int64 pbitpos, pbitsize;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
that can be gimplified before the loop. */
base = get_inner_reference (base, &pbitsize, &pbitpos, &off, &pmode,
&punsignedp, &reversep, &pvolatilep);
- gcc_assert (base && (pbitpos % BITS_PER_UNIT) == 0 && !reversep);
+ gcc_assert (base && !reversep);
+ poly_int64 pbytepos = exact_div (pbitpos, BITS_PER_UNIT);
if (TREE_CODE (base) == MEM_REF)
{
if (!integer_zerop (off))
return false;
off = base;
- base = size_int (pbitpos / BITS_PER_UNIT);
+ base = size_int (pbytepos);
}
/* Otherwise put base + constant offset into the loop invariant BASE
and continue with OFF. */
else
{
base = fold_convert (sizetype, base);
- base = size_binop (PLUS_EXPR, base, size_int (pbitpos / BITS_PER_UNIT));
+ base = size_binop (PLUS_EXPR, base, size_int (pbytepos));
}
/* OFF at this point may be either a SSA_NAME or some tree expression
/* Given an expression EXP that is a handled_component_p,
look for the ultimate containing object, which is returned and specify
the access position and size. */
-extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
+extern tree get_inner_reference (tree, poly_int64_pod *, poly_int64_pod *,
tree *, machine_mode *, int *, int *, int *);
-/* Temporary. */
-inline tree
-get_inner_reference (tree exp, poly_int64_pod *pbitsize,
- poly_int64_pod *pbitpos, tree *poffset,
- machine_mode *pmode, int *punsignedp,
- int *preversep, int *pvolatilep)
-{
- return get_inner_reference (exp, &pbitsize->coeffs[0], &pbitpos->coeffs[0],
- poffset, pmode, punsignedp, preversep,
- pvolatilep);
-}
extern tree build_personality_function (const char *);
if (size <= 0)
return false;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 unused_bitsize, unused_bitpos;
tree offset;
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
- base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep);
+ base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
+ &mode, &unsignedp, &reversep, &volatilep);
/* No need to instrument accesses to decls that don't escape,
they can't escape to other threads then. */
&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
|| TREE_CODE (expr) == BIT_FIELD_REF)
{
+ HOST_WIDE_INT bitpos, bitsize;
base = TREE_OPERAND (expr, 0);
if (TREE_CODE (expr) == COMPONENT_REF)
{
if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
return;
- HOST_WIDE_INT bitsize, bitpos, bytepos;
+ poly_int64 bitsize, bitpos, bytepos;
tree offset;
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
/* If BASE is a fixed size automatic variable or
global variable defined in the current TU and bitpos
fits, don't instrument anything. */
+ poly_int64 base_size;
if (offset == NULL_TREE
- && bitpos > 0
+ && maybe_ne (bitpos, 0)
&& (VAR_P (base)
|| TREE_CODE (base) == PARM_DECL
|| TREE_CODE (base) == RESULT_DECL)
- && DECL_SIZE (base)
- && TREE_CODE (DECL_SIZE (base)) == INTEGER_CST
- && compare_tree_int (DECL_SIZE (base), bitpos) >= 0
+ && poly_int_tree_p (DECL_SIZE (base), &base_size)
+ && known_ge (base_size, bitpos)
&& (!is_global_var (base) || decl_binds_to_current_def_p (base)))
return;
}
if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
return;
- bytepos = bitpos / BITS_PER_UNIT;
- if (offset == NULL_TREE && bytepos == 0 && moff == NULL_TREE)
+ bytepos = bits_to_bytes_round_down (bitpos);
+ if (offset == NULL_TREE && known_eq (bytepos, 0) && moff == NULL_TREE)
return;
tree base_addr = base;
base_addr = build1 (ADDR_EXPR,
build_pointer_type (TREE_TYPE (base)), base);
t = offset;
- if (bytepos)
+ if (maybe_ne (bytepos, 0))
{
if (t)
t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
return;
int modebitsize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
tree utype = build_nonstandard_integer_type (modebitsize, 1);
if ((VAR_P (base) && DECL_HARD_REGISTER (base))
- || (bitpos % modebitsize) != 0
- || bitsize != modebitsize
+ || !multiple_p (bitpos, modebitsize)
+ || maybe_ne (bitsize, modebitsize)
|| GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (utype)) != modebitsize
|| TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
return;
if (size_in_bytes <= 0)
return;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
- if (bitpos % BITS_PER_UNIT != 0
- || bitsize != size_in_bytes * BITS_PER_UNIT)
+ if (!multiple_p (bitpos, BITS_PER_UNIT)
+ || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
return;
bool decl_p = DECL_P (inner);