From: Richard Sandiford
Date: Thu, 21 Dec 2017 06:57:18 +0000 (+0000)
Subject: poly_int: get_bit_range
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=39bb8924559d0487fb7cb6d4dc33d6b1e9c41004;p=gcc.git

poly_int: get_bit_range

This patch makes get_bit_range return the range and position as
poly_ints.

2017-12-21  Richard Sandiford
	    Alan Hayward
	    David Sherwood

gcc/
	* expr.h (get_bit_range): Return the bitstart and bitend as
	poly_uint64s rather than unsigned HOST_WIDE_INTs.  Return the bitpos
	as a poly_int64 rather than a HOST_WIDE_INT.
	* expr.c (get_bit_range): Likewise.
	(expand_assignment): Update call accordingly.
	* fold-const.c (optimize_bit_field_compare): Likewise.

Co-Authored-By: Alan Hayward
Co-Authored-By: David Sherwood

From-SVN: r255912
---
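A note on the poly_int comparison semantics this patch relies on: the
recurring change below is that exact comparisons and remainder tests on
HOST_WIDE_INTs become poly_int queries, because a poly_int (for example an
SVE offset) is a linear function of a runtime indeterminate and two such
values need not be ordered at compile time.  The following stand-alone
sketch models those semantics with a single indeterminate N >= 0 and plain
int64_t coefficients; it is an illustration only, not GCC's actual
poly-int.h implementation.

    /* Illustrative model only -- not GCC's poly-int.h.  A degree-1
       poly_int represents C0 + C1 * N for a single runtime indeterminate
       N >= 0 (e.g. vector chunks beyond the minimum vector length).  */

    #include <cstdint>
    #include <cstdio>

    struct poly1
    {
      int64_t c0;  /* constant coefficient */
      int64_t c1;  /* coefficient of the indeterminate N */
    };

    /* "a >= b" holds for every possible N >= 0; for linear functions of
       N this reduces to a coefficient-wise comparison.  */
    static bool
    known_ge (poly1 a, poly1 b)
    {
      return a.c0 >= b.c0 && a.c1 >= b.c1;
    }

    /* "a > b" holds for at least one N >= 0, i.e. "a <= b" is not
       known to hold for all N.  */
    static bool
    maybe_gt (poly1 a, poly1 b)
    {
      return a.c0 > b.c0 || a.c1 > b.c1;
    }

    /* A is a multiple of UNIT whatever N turns out to be, which requires
       each coefficient to be a multiple of UNIT.  */
    static bool
    multiple_p (poly1 a, int64_t unit)
    {
      return a.c0 % unit == 0 && a.c1 % unit == 0;
    }

    int
    main ()
    {
      const int64_t bits_per_unit = 8;
      poly1 rbitpos = {32, 4};    /* 32 + 4 * N bits: misaligned for odd N */
      poly1 bitoffset = {64, 0};  /* 64 bits, independent of N */
      poly1 bitpos = {0, 8};      /* 8 * N bits */

      /* Mirrors "if (!multiple_p (rbitpos, BITS_PER_UNIT))" in
         get_bit_range: byte alignment must hold for every N, so this
         prints 0 and the caller bails out.  */
      std::printf ("byte-aligned for all N: %d\n",
                   (int) multiple_p (rbitpos, bits_per_unit));

      /* Mirrors "if (maybe_gt (bitoffset, *bitpos))": at N = 0 the offset
         exceeds the position, so the adjustment path must be taken
         (prints 1), even though the relation does not hold for all N
         (prints 0).  */
      std::printf ("may exceed: %d, known to be >=: %d\n",
                   (int) maybe_gt (bitoffset, bitpos),
                   (int) known_ge (bitoffset, bitpos));
      return 0;
    }

The asymmetry is the point: maybe_* guards take the conservative path
whenever the relation cannot be disproved, while known_* guards are used
where correctness demands certainty for every possible N.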
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index adb02ee10e1..3d783ffe2d5 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,14 @@
+2017-12-21  Richard Sandiford
+	    Alan Hayward
+	    David Sherwood
+
+	* expr.h (get_bit_range): Return the bitstart and bitend as
+	poly_uint64s rather than unsigned HOST_WIDE_INTs.  Return the bitpos
+	as a poly_int64 rather than a HOST_WIDE_INT.
+	* expr.c (get_bit_range): Likewise.
+	(expand_assignment): Update call accordingly.
+	* fold-const.c (optimize_bit_field_compare): Likewise.
+
 2017-12-21  Richard Sandiford
 	    Alan Hayward
 	    David Sherwood
diff --git a/gcc/expr.c b/gcc/expr.c
index 9020327984a..372b5fc84a0 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -4809,13 +4809,10 @@ optimize_bitfield_assignment_op (poly_uint64 pbitsize,
    *BITSTART and *BITEND.  */
 
 void
-get_bit_range (unsigned HOST_WIDE_INT *bitstart,
-	       unsigned HOST_WIDE_INT *bitend,
-	       tree exp,
-	       HOST_WIDE_INT *bitpos,
-	       tree *offset)
+get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
+	       poly_int64_pod *bitpos, tree *offset)
 {
-  HOST_WIDE_INT bitoffset;
+  poly_int64 bitoffset;
   tree field, repr;
 
   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
@@ -4836,13 +4833,13 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
   if (handled_component_p (TREE_OPERAND (exp, 0)))
     {
       machine_mode rmode;
-      HOST_WIDE_INT rbitsize, rbitpos;
+      poly_int64 rbitsize, rbitpos;
       tree roffset;
       int unsignedp, reversep, volatilep = 0;
       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
 			   &roffset, &rmode, &unsignedp, &reversep,
 			   &volatilep);
-      if ((rbitpos % BITS_PER_UNIT) != 0)
+      if (!multiple_p (rbitpos, BITS_PER_UNIT))
 	{
 	  *bitstart = *bitend = 0;
 	  return;
@@ -4853,10 +4850,10 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
      relative to the representative.  DECL_FIELD_OFFSET of field and
      repr are the same by construction if they are not constants,
      see finish_bitfield_layout.  */
-  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
-      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
-    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
-		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
+  poly_uint64 field_offset, repr_offset;
+  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
+      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
+    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
   else
     bitoffset = 0;
   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
@@ -4865,17 +4862,16 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
   /* If the adjustment is larger than bitpos, we would have a negative bit
      position for the lower bound and this may wreak havoc later.  Adjust
      offset and bitpos to make the lower bound non-negative in that case.  */
-  if (bitoffset > *bitpos)
+  if (maybe_gt (bitoffset, *bitpos))
     {
-      HOST_WIDE_INT adjust = bitoffset - *bitpos;
-      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
+      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
+      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
 
-      *bitpos += adjust;
+      *bitpos += adjust_bits;
       if (*offset == NULL_TREE)
-	*offset = size_int (-adjust / BITS_PER_UNIT);
+	*offset = size_int (-adjust_bytes);
       else
-	*offset
-	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
+	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
       *bitstart = 0;
     }
   else
@@ -4988,9 +4984,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
     {
       machine_mode mode1;
-      HOST_WIDE_INT bitsize, bitpos;
-      unsigned HOST_WIDE_INT bitregion_start = 0;
-      unsigned HOST_WIDE_INT bitregion_end = 0;
+      poly_int64 bitsize, bitpos;
+      poly_uint64 bitregion_start = 0;
+      poly_uint64 bitregion_end = 0;
       tree offset;
       int unsignedp, reversep, volatilep = 0;
       tree tem;
@@ -5000,11 +4996,11 @@ expand_assignment (tree to, tree from, bool nontemporal)
 			   &unsignedp, &reversep, &volatilep);
 
       /* Make sure bitpos is not negative, it can wreak havoc later.  */
-      if (bitpos < 0)
+      if (maybe_lt (bitpos, 0))
 	{
 	  gcc_assert (offset == NULL_TREE);
-	  offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
-	  bitpos &= BITS_PER_UNIT - 1;
+	  offset = size_int (bits_to_bytes_round_down (bitpos));
+	  bitpos = num_trailing_bits (bitpos);
 	}
 
       if (TREE_CODE (to) == COMPONENT_REF
@@ -5014,9 +5010,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
 	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
-      else if (bitsize > 0
-	       && bitsize % BITS_PER_UNIT == 0
-	       && bitpos % BITS_PER_UNIT == 0)
+      else if (maybe_gt (bitsize, 0)
+	       && multiple_p (bitsize, BITS_PER_UNIT)
+	       && multiple_p (bitpos, BITS_PER_UNIT))
 	{
 	  bitregion_start = bitpos;
 	  bitregion_end = bitpos + bitsize - 1;
@@ -5078,16 +5074,18 @@ expand_assignment (tree to, tree from, bool nontemporal)
 	 This is only done for aligned data values, as these can be expected
 	 to result in single move instructions.  */
 
+      poly_int64 bytepos;
       if (mode1 != VOIDmode
-	  && bitpos != 0
-	  && bitsize > 0
-	  && (bitpos % bitsize) == 0
-	  && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
+	  && maybe_ne (bitpos, 0)
+	  && maybe_gt (bitsize, 0)
+	  && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
+	  && multiple_p (bitpos, bitsize)
+	  && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
 	  && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
 	{
-	  to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
+	  to_rtx = adjust_address (to_rtx, mode1, bytepos);
 	  bitregion_start = 0;
-	  if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
+	  if (known_ge (bitregion_end, poly_uint64 (bitpos)))
 	    bitregion_end -= bitpos;
 	  bitpos = 0;
 	}
@@ -5102,8 +5100,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 	 code contains an out-of-bounds access to a small array.  */
       if (!MEM_P (to_rtx)
 	  && GET_MODE (to_rtx) != BLKmode
-	  && (unsigned HOST_WIDE_INT) bitpos
-	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
+	  && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
 	{
 	  expand_normal (from);
 	  result = NULL;
@@ -5114,25 +5111,26 @@ expand_assignment (tree to, tree from, bool nontemporal)
 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
 	  if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
 	      && COMPLEX_MODE_P (GET_MODE (to_rtx))
-	      && bitpos == 0
-	      && bitsize == mode_bitsize)
+	      && known_eq (bitpos, 0)
+	      && known_eq (bitsize, mode_bitsize))
 	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
-	  else if (bitsize == mode_bitsize / 2
-		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
-	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
-				 nontemporal, reversep);
-	  else if (bitpos + bitsize <= mode_bitsize / 2)
+	  else if (known_eq (bitsize, mode_bitsize / 2)
+		   && (known_eq (bitpos, 0)
+		       || known_eq (bitpos, mode_bitsize / 2)))
+	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
+				 false, nontemporal, reversep);
+	  else if (known_le (bitpos + bitsize, mode_bitsize / 2))
 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
 				  bitregion_start, bitregion_end,
 				  mode1, from, get_alias_set (to),
 				  nontemporal, reversep);
-	  else if (bitpos >= mode_bitsize / 2)
+	  else if (known_ge (bitpos, mode_bitsize / 2))
 	    result = store_field (XEXP (to_rtx, 1), bitsize,
 				  bitpos - mode_bitsize / 2,
 				  bitregion_start, bitregion_end,
 				  mode1, from, get_alias_set (to),
 				  nontemporal, reversep);
-	  else if (bitpos == 0 && bitsize == mode_bitsize)
+	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
 	    {
 	      result = expand_normal (from);
 	      if (GET_CODE (result) == CONCAT)
diff --git a/gcc/expr.h b/gcc/expr.h
index 9b0927197d5..a260192788b 100644
--- a/gcc/expr.h
+++ b/gcc/expr.h
@@ -240,17 +240,8 @@ extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
 			    int, rtx, int, rtx, rtx, int, rtx, bool);
 
 /* Extract the accessible bit-range from a COMPONENT_REF.  */
-extern void get_bit_range (unsigned HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
-			   tree, HOST_WIDE_INT *, tree *);
-
-/* Temporary.  */
-inline void
-get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
-	       poly_int64_pod *bitpos, tree *offset)
-{
-  get_bit_range (&bitstart->coeffs[0], &bitend->coeffs[0], exp,
-		 &bitpos->coeffs[0], offset);
-}
+extern void get_bit_range (poly_uint64_pod *, poly_uint64_pod *, tree,
+			   poly_int64_pod *, tree *);
 
 /* Expand an assignment that stores the value of FROM into TO.  */
 extern void expand_assignment (tree, tree, bool);
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 75bc7627a07..0b3cba53ded 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -4076,12 +4076,13 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
     }
 
   /* Honor the C++ memory model and mimic what RTL expansion does.  */
-  unsigned HOST_WIDE_INT bitstart = 0;
-  unsigned HOST_WIDE_INT bitend = 0;
+  poly_uint64 bitstart = 0;
+  poly_uint64 bitend = 0;
   if (TREE_CODE (lhs) == COMPONENT_REF)
     {
-      get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
-      if (offset != NULL_TREE)
+      poly_int64 plbitpos;
+      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
+      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
 	return 0;
     }
 
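The fold-const.c hunk shows the narrowing idiom for callers that still
require compile-time constants: read the position into a poly_int64, then
bail out unless is_constant can extract a plain value.  Below is a
stand-alone sketch of that idiom in the same simplified single-indeterminate
model as above; poly1 and the free-function is_constant are illustrative
stand-ins, not GCC's real poly_int class or API.

    /* Sketch of the is_constant narrowing used in
       optimize_bit_field_compare; value = C0 + C1 * N with N >= 0.  */

    #include <cstdint>
    #include <cstdio>

    struct poly1
    {
      int64_t c0;  /* constant coefficient */
      int64_t c1;  /* coefficient of the runtime indeterminate N */
    };

    /* Succeed only if the value is invariant in N, i.e. truly constant.  */
    static bool
    is_constant (poly1 a, int64_t *out)
    {
      if (a.c1 != 0)
        return false;
      *out = a.c0;
      return true;
    }

    int
    main ()
    {
      poly1 fixed = {16, 0};    /* a compile-time-constant bit position */
      poly1 scalable = {0, 8};  /* 8 * N bits: depends on the runtime N */

      int64_t lbitpos;
      if (is_constant (fixed, &lbitpos))
        std::printf ("constant bit position: %lld\n", (long long) lbitpos);
      if (!is_constant (scalable, &lbitpos))
        std::printf ("scalable bit position: bail out, as the patch does\n");
      return 0;
    }

In the patch itself, plbitpos.is_constant (&lbitpos) plays this role, so
optimize_bit_field_compare simply gives up on scalable positions rather
than folding them incorrectly.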