From 5511bc5ada2bd2bc9ab835d0ed9fd96a83d3260d Mon Sep 17 00:00:00 2001 From: Bernd Schmidt Date: Wed, 6 Jul 2011 23:16:39 +0000 Subject: [PATCH] explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION instead of GET_MODE_BITSIZE where appropriate. * explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION instead of GET_MODE_BITSIZE where appropriate. * rtlanal.c (subreg_lsb_1, subreg_get_info, nonzero_bits1, num_sign_bit_copies1, canonicalize_condition, low_bitmask_len, init_num_sign_bit_copies_in_rep): Likewise. * cse.c (fold_rtx, cse_insn): Likewise. * loop-doloop.c (doloop_modify, doloop_optimize): Likewise. * simplify-rtx.c (simplify_unary_operation_1, simplify_const_unary_operation, simplify_binary_operation_1, simplify_const_binary_operation, simplify_ternary_operation, simplify_const_relational_operation, simplify_subreg): Likewise. * combine.c (try_combine, find_split_point, combine_simplify_rtx, simplify_if_then_else, simplify_set, expand_compound_operation, expand_field_assignment, make_extraction, if_then_else_cond, make_compound_operation, force_to_mode, make_field_assignment, reg_nonzero_bits_for_combine, reg_num_sign_bit_copies_for_combine, extended_count, try_widen_shift_mode, simplify_shift_const_1, simplify_comparison, record_promoted_value, simplify_compare_const, record_dead_and_set_regs_1): Likewise. From-SVN: r175946 --- gcc/ChangeLog | 20 ++++ gcc/combine.c | 257 +++++++++++++++++++++++---------------------- gcc/cse.c | 14 +-- gcc/explow.c | 2 +- gcc/loop-doloop.c | 8 +- gcc/rtlanal.c | 78 +++++++------- gcc/simplify-rtx.c | 132 +++++++++++------------ 7 files changed, 267 insertions(+), 244 deletions(-) diff --git a/gcc/ChangeLog b/gcc/ChangeLog index 97ee184e414..6c4d352053a 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -24,6 +24,26 @@ simplify_binary_operation_1, simplify_const_relational_operation): Likewise. + * explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION + instead of GET_MODE_BITSIZE where appropriate. + * rtlanal.c (subreg_lsb_1, subreg_get_info, nonzero_bits1, + num_sign_bit_copies1, canonicalize_condition, low_bitmask_len, + init_num_sign_bit_copies_in_rep): Likewise. + * cse.c (fold_rtx, cse_insn): Likewise. + * loop-doloop.c (doloop_modify, doloop_optimize): Likewise. + * simplify-rtx.c (simplify_unary_operation_1, + simplify_const_unary_operation, simplify_binary_operation_1, + simplify_const_binary_operation, simplify_ternary_operation, + simplify_const_relational_operation, simplify_subreg): Likewise. + * combine.c (try_combine, find_split_point, combine_simplify_rtx, + simplify_if_then_else, simplify_set, expand_compound_operation, + expand_field_assignment, make_extraction, if_then_else_cond, + make_compound_operation, force_to_mode, make_field_assignment, + reg_nonzero_bits_for_combine, reg_num_sign_bit_copies_for_combine, + extended_count, try_widen_shift_mode, simplify_shift_const_1, + simplify_comparison, record_promoted_value, simplify_compare_const, + record_dead_and_set_regs_1): Likewise. 
+ 2011-07-06 Michael Meissner * config/rs6000/rs6000-protos.h (rs6000_call_indirect_aix): New diff --git a/gcc/combine.c b/gcc/combine.c index 9ae5e9939ce..787b0db9820 100644 --- a/gcc/combine.c +++ b/gcc/combine.c @@ -2758,14 +2758,14 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, offset = INTVAL (XEXP (dest, 2)); dest = XEXP (dest, 0); if (BITS_BIG_ENDIAN) - offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset; + offset = GET_MODE_PRECISION (GET_MODE (dest)) - width - offset; } } else { if (GET_CODE (dest) == STRICT_LOW_PART) dest = XEXP (dest, 0); - width = GET_MODE_BITSIZE (GET_MODE (dest)); + width = GET_MODE_PRECISION (GET_MODE (dest)); offset = 0; } @@ -2775,16 +2775,16 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, if (subreg_lowpart_p (dest)) ; /* Handle the case where inner is twice the size of outer. */ - else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp))) - == 2 * GET_MODE_BITSIZE (GET_MODE (dest))) - offset += GET_MODE_BITSIZE (GET_MODE (dest)); + else if (GET_MODE_PRECISION (GET_MODE (SET_DEST (temp))) + == 2 * GET_MODE_PRECISION (GET_MODE (dest))) + offset += GET_MODE_PRECISION (GET_MODE (dest)); /* Otherwise give up for now. */ else offset = -1; } if (offset >= 0 - && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp))) + && (GET_MODE_PRECISION (GET_MODE (SET_DEST (temp))) <= HOST_BITS_PER_DOUBLE_INT)) { double_int m, o, i; @@ -3745,8 +3745,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, (REG_P (temp) && VEC_index (reg_stat_type, reg_stat, REGNO (temp))->nonzero_bits != 0 - && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD - && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT + && GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD + && GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT && (VEC_index (reg_stat_type, reg_stat, REGNO (temp))->nonzero_bits != GET_MODE_MASK (word_mode)))) @@ -3755,8 +3755,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, (REG_P (temp) && VEC_index (reg_stat_type, reg_stat, REGNO (temp))->nonzero_bits != 0 - && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD - && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT + && GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD + && GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT && (VEC_index (reg_stat_type, reg_stat, REGNO (temp))->nonzero_bits != GET_MODE_MASK (word_mode))))) @@ -4685,7 +4685,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) && CONST_INT_P (SET_SRC (x)) && ((INTVAL (XEXP (SET_DEST (x), 1)) + INTVAL (XEXP (SET_DEST (x), 2))) - <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) + <= GET_MODE_PRECISION (GET_MODE (XEXP (SET_DEST (x), 0)))) && ! 
side_effects_p (XEXP (SET_DEST (x), 0))) { HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2)); @@ -4698,7 +4698,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) rtx or_mask; if (BITS_BIG_ENDIAN) - pos = GET_MODE_BITSIZE (mode) - len - pos; + pos = GET_MODE_PRECISION (mode) - len - pos; or_mask = gen_int_mode (src << pos, mode); if (src == mask) @@ -4791,7 +4791,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) break; pos = 0; - len = GET_MODE_BITSIZE (GET_MODE (inner)); + len = GET_MODE_PRECISION (GET_MODE (inner)); unsignedp = 0; break; @@ -4805,7 +4805,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) pos = INTVAL (XEXP (SET_SRC (x), 2)); if (BITS_BIG_ENDIAN) - pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos; + pos = GET_MODE_PRECISION (GET_MODE (inner)) - len - pos; unsignedp = (code == ZERO_EXTRACT); } break; @@ -4814,7 +4814,8 @@ find_split_point (rtx *loc, rtx insn, bool set_src) break; } - if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner))) + if (len && pos >= 0 + && pos + len <= GET_MODE_PRECISION (GET_MODE (inner))) { enum machine_mode mode = GET_MODE (SET_SRC (x)); @@ -4845,9 +4846,9 @@ find_split_point (rtx *loc, rtx insn, bool set_src) (unsignedp ? LSHIFTRT : ASHIFTRT, mode, gen_rtx_ASHIFT (mode, gen_lowpart (mode, inner), - GEN_INT (GET_MODE_BITSIZE (mode) + GEN_INT (GET_MODE_PRECISION (mode) - len - pos)), - GEN_INT (GET_MODE_BITSIZE (mode) - len))); + GEN_INT (GET_MODE_PRECISION (mode) - len))); split = find_split_point (&SET_SRC (x), insn, true); if (split && split != &SET_SRC (x)) @@ -5544,7 +5545,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, if (GET_CODE (temp) == ASHIFTRT && CONST_INT_P (XEXP (temp, 1)) - && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) + && INTVAL (XEXP (temp, 1)) == GET_MODE_PRECISION (mode) - 1) return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0), INTVAL (XEXP (temp, 1))); @@ -5563,8 +5564,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, rtx temp1 = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, simplify_shift_const (NULL_RTX, ASHIFT, mode, temp, - GET_MODE_BITSIZE (mode) - 1 - i), - GET_MODE_BITSIZE (mode) - 1 - i); + GET_MODE_PRECISION (mode) - 1 - i), + GET_MODE_PRECISION (mode) - 1 - i); /* If all we did was surround TEMP with the two shifts, we haven't improved anything, so don't use it. 
Otherwise, @@ -5639,14 +5640,14 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) == ((unsigned HOST_WIDE_INT) 1 << (i + 1)) - 1)) || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND - && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) + && (GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) == (unsigned int) i + 1)))) return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (XEXP (XEXP (x, 0), 0), 0), - GET_MODE_BITSIZE (mode) - (i + 1)), - GET_MODE_BITSIZE (mode) - (i + 1)); + GET_MODE_PRECISION (mode) - (i + 1)), + GET_MODE_PRECISION (mode) - (i + 1)); /* If only the low-order bit of X is possibly nonzero, (plus x -1) can become (ashiftrt (ashift (xor x 1) C) C) where C is @@ -5660,8 +5661,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, simplify_shift_const (NULL_RTX, ASHIFT, mode, gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx), - GET_MODE_BITSIZE (mode) - 1), - GET_MODE_BITSIZE (mode) - 1); + GET_MODE_PRECISION (mode) - 1), + GET_MODE_PRECISION (mode) - 1); /* If we are adding two things that have no bits in common, convert the addition into an IOR. This will often be further simplified, @@ -5793,7 +5794,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, && op1 == const0_rtx && mode == GET_MODE (op0) && (num_sign_bit_copies (op0, mode) - == GET_MODE_BITSIZE (mode))) + == GET_MODE_PRECISION (mode))) { op0 = expand_compound_operation (op0); return simplify_gen_unary (NEG, mode, @@ -5818,7 +5819,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, && op1 == const0_rtx && mode == GET_MODE (op0) && (num_sign_bit_copies (op0, mode) - == GET_MODE_BITSIZE (mode))) + == GET_MODE_PRECISION (mode))) { op0 = expand_compound_operation (op0); return plus_constant (gen_lowpart (mode, op0), 1); @@ -5833,7 +5834,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT && op1 == const0_rtx && (num_sign_bit_copies (op0, mode) - == GET_MODE_BITSIZE (mode))) + == GET_MODE_PRECISION (mode))) return gen_lowpart (mode, expand_compound_operation (op0)); @@ -5854,7 +5855,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, && op1 == const0_rtx && mode == GET_MODE (op0) && (num_sign_bit_copies (op0, mode) - == GET_MODE_BITSIZE (mode))) + == GET_MODE_PRECISION (mode))) { op0 = expand_compound_operation (op0); return simplify_gen_unary (NOT, mode, @@ -5887,7 +5888,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, { x = simplify_shift_const (NULL_RTX, ASHIFT, mode, expand_compound_operation (op0), - GET_MODE_BITSIZE (mode) - 1 - i); + GET_MODE_PRECISION (mode) - 1 - i); if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx) return XEXP (x, 0); else @@ -6011,7 +6012,7 @@ simplify_if_then_else (rtx x) } else if (true_code == EQ && true_val == const0_rtx && (num_sign_bit_copies (from, GET_MODE (from)) - == GET_MODE_BITSIZE (GET_MODE (from)))) + == GET_MODE_PRECISION (GET_MODE (from)))) { false_code = EQ; false_val = constm1_rtx; @@ -6181,8 +6182,8 @@ simplify_if_then_else (rtx x) && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) && (num_sign_bit_copies (f, GET_MODE (f)) > (unsigned int) - (GET_MODE_BITSIZE (mode) - - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0)))))) + (GET_MODE_PRECISION (mode) + - GET_MODE_PRECISION 
(GET_MODE (XEXP (XEXP (t, 0), 0)))))) { c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); extend_op = SIGN_EXTEND; @@ -6197,8 +6198,8 @@ simplify_if_then_else (rtx x) && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) && (num_sign_bit_copies (f, GET_MODE (f)) > (unsigned int) - (GET_MODE_BITSIZE (mode) - - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1)))))) + (GET_MODE_PRECISION (mode) + - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 1)))))) { c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); extend_op = SIGN_EXTEND; @@ -6269,7 +6270,7 @@ simplify_if_then_else (rtx x) && ((1 == nonzero_bits (XEXP (cond, 0), mode) && (i = exact_log2 (UINTVAL (true_rtx))) >= 0) || ((num_sign_bit_copies (XEXP (cond, 0), mode) - == GET_MODE_BITSIZE (mode)) + == GET_MODE_PRECISION (mode)) && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0))) return simplify_shift_const (NULL_RTX, ASHIFT, mode, @@ -6535,8 +6536,8 @@ simplify_set (rtx x) if (dest == cc0_rtx && GET_CODE (src) == SUBREG && subreg_lowpart_p (src) - && (GET_MODE_BITSIZE (GET_MODE (src)) - < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src))))) + && (GET_MODE_PRECISION (GET_MODE (src)) + < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (src))))) { rtx inner = SUBREG_REG (src); enum machine_mode inner_mode = GET_MODE (inner); @@ -6588,7 +6589,7 @@ simplify_set (rtx x) #endif && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), GET_MODE (XEXP (XEXP (src, 0), 0))) - == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0)))) + == GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (src, 0), 0)))) && ! side_effects_p (src)) { rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE @@ -6764,7 +6765,7 @@ expand_compound_operation (rtx x) if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0)))) return x; - len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))); + len = GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))); /* If the inner object has VOIDmode (the only way this can happen is if it is an ASM_OPERANDS), we can't do anything since we don't know how much masking to do. */ @@ -6798,11 +6799,11 @@ expand_compound_operation (rtx x) pos = INTVAL (XEXP (x, 2)); /* This should stay within the object being extracted, fail otherwise. */ - if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) + if (len + pos > GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))) return x; if (BITS_BIG_ENDIAN) - pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos; + pos = GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) - len - pos; break; @@ -6863,7 +6864,7 @@ expand_compound_operation (rtx x) if (GET_CODE (XEXP (x, 0)) == TRUNCATE && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) && COMPARISON_P (XEXP (XEXP (x, 0), 0)) - && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) + && (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT) && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) return XEXP (XEXP (x, 0), 0); @@ -6873,7 +6874,7 @@ expand_compound_operation (rtx x) && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) && subreg_lowpart_p (XEXP (x, 0)) && COMPARISON_P (SUBREG_REG (XEXP (x, 0))) - && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) + && (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT) && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) return SUBREG_REG (XEXP (x, 0)); @@ -6895,7 +6896,7 @@ expand_compound_operation (rtx x) extraction. Then the constant of 31 would be substituted in to produce such a position. 
*/ - modewidth = GET_MODE_BITSIZE (GET_MODE (x)); + modewidth = GET_MODE_PRECISION (GET_MODE (x)); if (modewidth >= pos + len) { enum machine_mode mode = GET_MODE (x); @@ -6949,7 +6950,7 @@ expand_field_assignment (const_rtx x) && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG) { inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); - len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); + len = GET_MODE_PRECISION (GET_MODE (XEXP (SET_DEST (x), 0))); pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0))); } else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT @@ -6961,23 +6962,23 @@ expand_field_assignment (const_rtx x) /* A constant position should stay within the width of INNER. */ if (CONST_INT_P (pos) - && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) + && INTVAL (pos) + len > GET_MODE_PRECISION (GET_MODE (inner))) break; if (BITS_BIG_ENDIAN) { if (CONST_INT_P (pos)) - pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len + pos = GEN_INT (GET_MODE_PRECISION (GET_MODE (inner)) - len - INTVAL (pos)); else if (GET_CODE (pos) == MINUS && CONST_INT_P (XEXP (pos, 1)) && (INTVAL (XEXP (pos, 1)) - == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) + == GET_MODE_PRECISION (GET_MODE (inner)) - len)) /* If position is ADJUST - X, new position is X. */ pos = XEXP (pos, 0); else pos = simplify_gen_binary (MINUS, GET_MODE (pos), - GEN_INT (GET_MODE_BITSIZE ( + GEN_INT (GET_MODE_PRECISION ( GET_MODE (inner)) - len), pos); @@ -7152,7 +7153,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, : BITS_PER_UNIT)) == 0 /* We can't do this if we are widening INNER_MODE (it may not be aligned, for one thing). */ - && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode) + && GET_MODE_PRECISION (inner_mode) >= GET_MODE_PRECISION (tmode) && (inner_mode == tmode || (! mode_dependent_address_p (XEXP (inner, 0)) && ! MEM_VOLATILE_P (inner)))))) @@ -7170,7 +7171,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, /* POS counts from lsb, but make OFFSET count in memory order. */ if (BYTES_BIG_ENDIAN) - offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT; + offset = (GET_MODE_PRECISION (is_mode) - len - pos) / BITS_PER_UNIT; else offset = pos / BITS_PER_UNIT; @@ -7275,7 +7276,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, other cases, we would only be going outside our object in cases when an original shift would have been undefined. */ if (MEM_P (inner) - && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode)) + && ((pos_rtx == 0 && pos + len > GET_MODE_PRECISION (is_mode)) || (pos_rtx != 0 && len != 1))) return 0; @@ -7550,7 +7551,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) { enum rtx_code code = GET_CODE (x); enum machine_mode mode = GET_MODE (x); - int mode_width = GET_MODE_BITSIZE (mode); + int mode_width = GET_MODE_PRECISION (mode); rtx rhs, lhs; enum rtx_code next_code; int i, j; @@ -7709,7 +7710,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) { new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); new_rtx = make_extraction (mode, new_rtx, - (GET_MODE_BITSIZE (mode) + (GET_MODE_PRECISION (mode) - INTVAL (XEXP (XEXP (x, 0), 1))), NULL_RTX, i, 1, 0, in_code == COMPARE); } @@ -8100,7 +8101,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, /* It is not valid to do a right-shift in a narrower mode than the one it came in with. 
*/ if ((code == LSHIFTRT || code == ASHIFTRT) - && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x))) + && GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (x))) op_mode = GET_MODE (x); /* Truncate MASK to fit OP_MODE. */ @@ -8208,7 +8209,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, unsigned HOST_WIDE_INT cval = UINTVAL (XEXP (x, 1)) | (GET_MODE_MASK (GET_MODE (x)) & ~mask); - int width = GET_MODE_BITSIZE (GET_MODE (x)); + int width = GET_MODE_PRECISION (GET_MODE (x)); rtx y; /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative @@ -8236,7 +8237,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, This may eliminate that PLUS and, later, the AND. */ { - unsigned int width = GET_MODE_BITSIZE (mode); + unsigned int width = GET_MODE_PRECISION (mode); unsigned HOST_WIDE_INT smask = mask; /* If MODE is narrower than HOST_WIDE_INT and mask is a negative @@ -8304,7 +8305,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, && CONST_INT_P (XEXP (x, 1)) && ((INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (INTVAL (XEXP (x, 1)))) - < GET_MODE_BITSIZE (GET_MODE (x))) + < GET_MODE_PRECISION (GET_MODE (x))) && (UINTVAL (XEXP (x, 1)) & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) { @@ -8349,10 +8350,10 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, if (! (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0 - && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) + && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode)) && ! (GET_MODE (XEXP (x, 1)) != VOIDmode && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) - < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) + < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode)))) break; /* If the shift count is a constant and we can do arithmetic in @@ -8360,7 +8361,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, conservative form of the mask. */ if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0 - && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) + && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode) && HWI_COMPUTABLE_MODE_P (op_mode)) mask >>= INTVAL (XEXP (x, 1)); else @@ -8411,17 +8412,17 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, bit. */ && ((INTVAL (XEXP (x, 1)) + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) - >= GET_MODE_BITSIZE (GET_MODE (x))) + >= GET_MODE_PRECISION (GET_MODE (x))) && exact_log2 (mask + 1) >= 0 /* Number of bits left after the shift must be more than the mask needs. */ && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1)) - <= GET_MODE_BITSIZE (GET_MODE (x))) + <= GET_MODE_PRECISION (GET_MODE (x))) /* Must be more sign bit copies than the mask needs. */ && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) >= exact_log2 (mask + 1))) x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), - GEN_INT (GET_MODE_BITSIZE (GET_MODE (x)) + GEN_INT (GET_MODE_PRECISION (GET_MODE (x)) - exact_log2 (mask + 1))); goto shiftrt; @@ -8448,20 +8449,20 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, represent a mask for all its bits in a single scalar. But we only care about the lower bits, so calculate these. 
*/ - if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) + if (GET_MODE_PRECISION (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) { nonzero = ~(unsigned HOST_WIDE_INT) 0; - /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) + /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1)) is the number of bits a full-width mask would have set. We need only shift if these are fewer than nonzero can hold. If not, we must keep all bits set in nonzero. */ - if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) + if (GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) nonzero >>= INTVAL (XEXP (x, 1)) + HOST_BITS_PER_WIDE_INT - - GET_MODE_BITSIZE (GET_MODE (x)) ; + - GET_MODE_PRECISION (GET_MODE (x)) ; } else { @@ -8481,7 +8482,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, { x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0), - GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i); + GET_MODE_PRECISION (GET_MODE (x)) - 1 - i); if (GET_CODE (x) != ASHIFTRT) return force_to_mode (x, mode, mask, next_select); @@ -8504,7 +8505,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, && CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0 && (INTVAL (XEXP (x, 1)) - <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) + <= GET_MODE_PRECISION (GET_MODE (x)) - (floor_log2 (mask) + 1)) && GET_CODE (XEXP (x, 0)) == ASHIFT && XEXP (XEXP (x, 0), 1) == XEXP (x, 1)) return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, @@ -8552,7 +8553,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, && CONST_INT_P (XEXP (XEXP (x, 0), 1)) && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) - < GET_MODE_BITSIZE (GET_MODE (x))) + < GET_MODE_PRECISION (GET_MODE (x))) && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) { temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), @@ -8804,7 +8805,7 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse) false values when testing X. */ else if (x == constm1_rtx || x == const0_rtx || (mode != VOIDmode - && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode))) + && num_sign_bit_copies (x, mode) == GET_MODE_PRECISION (mode))) { *ptrue = constm1_rtx, *pfalse = const0_rtx; return x; @@ -9136,8 +9137,8 @@ make_field_assignment (rtx x) return x; pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len); - if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest)) - || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT + if (pos < 0 || pos + len > GET_MODE_PRECISION (GET_MODE (dest)) + || GET_MODE_PRECISION (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0) return x; @@ -9158,7 +9159,7 @@ make_field_assignment (rtx x) other, pos), dest); src = force_to_mode (src, mode, - GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT + GET_MODE_PRECISION (mode) >= HOST_BITS_PER_WIDE_INT ? ~(unsigned HOST_WIDE_INT) 0 : ((unsigned HOST_WIDE_INT) 1 << len) - 1, 0); @@ -9580,7 +9581,7 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode, { unsigned HOST_WIDE_INT mask = rsp->nonzero_bits; - if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)) + if (GET_MODE_PRECISION (GET_MODE (x)) < GET_MODE_PRECISION (mode)) /* We don't know anything about the upper bits. 
*/ mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x)); *nonzero &= mask; @@ -9626,7 +9627,7 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode, return tem; if (nonzero_sign_valid && rsp->sign_bit_copies != 0 - && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode)) + && GET_MODE_PRECISION (GET_MODE (x)) == GET_MODE_PRECISION (mode)) *result = rsp->sign_bit_copies; return NULL; @@ -9651,7 +9652,7 @@ extended_count (const_rtx x, enum machine_mode mode, int unsignedp) return (unsignedp ? (HWI_COMPUTABLE_MODE_P (mode) - ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1 + ? (unsigned int) (GET_MODE_PRECISION (mode) - 1 - floor_log2 (nonzero_bits (x, mode))) : 0) : num_sign_bit_copies (x, mode) - 1); @@ -9802,7 +9803,7 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, { if (orig_mode == mode) return mode; - gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode)); + gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode)); /* In general we can't perform in wider mode for right shift and rotate. */ switch (code) @@ -9811,8 +9812,8 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, /* We can still widen if the bits brought in from the left are identical to the sign bit of ORIG_MODE. */ if (num_sign_bit_copies (op, mode) - > (unsigned) (GET_MODE_BITSIZE (mode) - - GET_MODE_BITSIZE (orig_mode))) + > (unsigned) (GET_MODE_PRECISION (mode) + - GET_MODE_PRECISION (orig_mode))) return mode; return orig_mode; @@ -9829,7 +9830,7 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, int care_bits = low_bitmask_len (orig_mode, outer_const); if (care_bits >= 0 - && GET_MODE_BITSIZE (orig_mode) - care_bits >= count) + && GET_MODE_PRECISION (orig_mode) - care_bits >= count) return mode; } /* fall through */ @@ -9845,9 +9846,9 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, } } -/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. - The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot - simplify it. Otherwise, return a simplified value. +/* Simplify a shift of VAROP by ORIG_COUNT bits. CODE says what kind + of shift. The result of the shift is RESULT_MODE. Return NULL_RTX + if we cannot simplify it. Otherwise, return a simplified value. The shift is normally computed in the widest mode we find in VAROP, as long as it isn't a different number of words than RESULT_MODE. Exceptions @@ -9879,7 +9880,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, /* If we were given an invalid count, don't do anything except exactly what was requested. */ - if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode)) + if (orig_count < 0 || orig_count >= (int) GET_MODE_PRECISION (mode)) return NULL_RTX; count = orig_count; @@ -9896,7 +9897,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, /* Convert ROTATERT to ROTATE. */ if (code == ROTATERT) { - unsigned int bitsize = GET_MODE_BITSIZE (result_mode);; + unsigned int bitsize = GET_MODE_PRECISION (result_mode); code = ROTATE; if (VECTOR_MODE_P (result_mode)) count = bitsize / GET_MODE_NUNITS (result_mode) - count; @@ -9917,12 +9918,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, multiple operations, each of which are defined, we know what the result is supposed to be. 
*/ - if (count > (GET_MODE_BITSIZE (shift_mode) - 1)) + if (count > (GET_MODE_PRECISION (shift_mode) - 1)) { if (code == ASHIFTRT) - count = GET_MODE_BITSIZE (shift_mode) - 1; + count = GET_MODE_PRECISION (shift_mode) - 1; else if (code == ROTATE || code == ROTATERT) - count %= GET_MODE_BITSIZE (shift_mode); + count %= GET_MODE_PRECISION (shift_mode); else { /* We can't simply return zero because there may be an @@ -9942,7 +9943,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, is a no-op. */ if (code == ASHIFTRT && (num_sign_bit_copies (varop, shift_mode) - == GET_MODE_BITSIZE (shift_mode))) + == GET_MODE_PRECISION (shift_mode))) { count = 0; break; @@ -9955,8 +9956,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, if (code == ASHIFTRT && (count + num_sign_bit_copies (varop, shift_mode) - >= GET_MODE_BITSIZE (shift_mode))) - count = GET_MODE_BITSIZE (shift_mode) - 1; + >= GET_MODE_PRECISION (shift_mode))) + count = GET_MODE_PRECISION (shift_mode) - 1; /* We simplify the tests below and elsewhere by converting ASHIFTRT to LSHIFTRT if we know the sign bit is clear. @@ -10086,7 +10087,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, AND of a new shift with a mask. We compute the result below. */ if (CONST_INT_P (XEXP (varop, 1)) && INTVAL (XEXP (varop, 1)) >= 0 - && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) + && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (GET_MODE (varop)) && HWI_COMPUTABLE_MODE_P (result_mode) && HWI_COMPUTABLE_MODE_P (mode) && !VECTOR_MODE_P (result_mode)) @@ -10101,11 +10102,11 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2) with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2), we can convert it to - (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1). + (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1). This simplifies certain SIGN_EXTEND operations. */ if (code == ASHIFT && first_code == ASHIFTRT - && count == (GET_MODE_BITSIZE (result_mode) - - GET_MODE_BITSIZE (GET_MODE (varop)))) + && count == (GET_MODE_PRECISION (result_mode) + - GET_MODE_PRECISION (GET_MODE (varop)))) { /* C3 has the low-order C1 bits zero. */ @@ -10173,7 +10174,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, if (code == ASHIFTRT || (code == ROTATE && first_code == ASHIFTRT) - || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT + || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT || (GET_MODE (varop) != result_mode && (first_code == ASHIFTRT || first_code == LSHIFTRT || first_code == ROTATE @@ -10261,7 +10262,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, && XEXP (XEXP (varop, 0), 1) == constm1_rtx && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) && (code == LSHIFTRT || code == ASHIFTRT) - && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) + && count == (GET_MODE_PRECISION (GET_MODE (varop)) - 1) && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) { count = 0; @@ -10323,12 +10324,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, case EQ: /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE says that the sign bit can be tested, FOO has mode MODE, C is - GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit + GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit that may be nonzero. 
*/ if (code == LSHIFTRT && XEXP (varop, 1) == const0_rtx && GET_MODE (XEXP (varop, 0)) == result_mode - && count == (GET_MODE_BITSIZE (result_mode) - 1) + && count == (GET_MODE_PRECISION (result_mode) - 1) && HWI_COMPUTABLE_MODE_P (result_mode) && STORE_FLAG_VALUE == -1 && nonzero_bits (XEXP (varop, 0), result_mode) == 1 @@ -10345,7 +10346,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less than the number of bits in the mode is equivalent to A. */ if (code == LSHIFTRT - && count == (GET_MODE_BITSIZE (result_mode) - 1) + && count == (GET_MODE_PRECISION (result_mode) - 1) && nonzero_bits (XEXP (varop, 0), result_mode) == 1) { varop = XEXP (varop, 0); @@ -10369,7 +10370,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, is one less than the number of bits in the mode is equivalent to (xor A 1). */ if (code == LSHIFTRT - && count == (GET_MODE_BITSIZE (result_mode) - 1) + && count == (GET_MODE_PRECISION (result_mode) - 1) && XEXP (varop, 1) == constm1_rtx && nonzero_bits (XEXP (varop, 0), result_mode) == 1 && merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode, @@ -10453,7 +10454,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) && GET_CODE (XEXP (varop, 0)) == ASHIFTRT - && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) + && count == (GET_MODE_PRECISION (GET_MODE (varop)) - 1) && (code == LSHIFTRT || code == ASHIFTRT) && CONST_INT_P (XEXP (XEXP (varop, 0), 1)) && INTVAL (XEXP (XEXP (varop, 0), 1)) == count @@ -10477,8 +10478,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, && GET_CODE (XEXP (varop, 0)) == LSHIFTRT && CONST_INT_P (XEXP (XEXP (varop, 0), 1)) && (INTVAL (XEXP (XEXP (varop, 0), 1)) - >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) - - GET_MODE_BITSIZE (GET_MODE (varop))))) + >= (GET_MODE_PRECISION (GET_MODE (XEXP (varop, 0))) + - GET_MODE_PRECISION (GET_MODE (varop))))) { rtx varop_inner = XEXP (varop, 0); @@ -10550,7 +10551,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, if (outer_op != UNKNOWN) { if (GET_RTX_CLASS (outer_op) != RTX_UNARY - && GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT) + && GET_MODE_PRECISION (result_mode) < HOST_BITS_PER_WIDE_INT) outer_const = trunc_int_for_mode (outer_const, result_mode); if (outer_op == AND) @@ -10852,7 +10853,7 @@ static enum rtx_code simplify_compare_const (enum rtx_code code, rtx op0, rtx *pop1) { enum machine_mode mode = GET_MODE (op0); - unsigned int mode_width = GET_MODE_BITSIZE (mode); + unsigned int mode_width = GET_MODE_PRECISION (mode); HOST_WIDE_INT const_op = INTVAL (*pop1); /* Get the constant we are comparing against and turn off all bits @@ -11065,8 +11066,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1) && (INTVAL (XEXP (op0, 1)) - == (GET_MODE_BITSIZE (GET_MODE (op0)) - - (GET_MODE_BITSIZE + == (GET_MODE_PRECISION (GET_MODE (op0)) + - (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))))))) { op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0)); @@ -11134,7 +11135,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && GET_CODE (inner_op1) == SUBREG && (GET_MODE (SUBREG_REG (inner_op0)) == GET_MODE (SUBREG_REG (inner_op1))) - && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0))) + && 
(GET_MODE_PRECISION (GET_MODE (SUBREG_REG (inner_op0))) <= HOST_BITS_PER_WIDE_INT) && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0), GET_MODE (SUBREG_REG (inner_op0))))) @@ -11197,7 +11198,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) while (CONST_INT_P (op1)) { enum machine_mode mode = GET_MODE (op0); - unsigned int mode_width = GET_MODE_BITSIZE (mode); + unsigned int mode_width = GET_MODE_PRECISION (mode); unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); int equality_comparison_p; int sign_bit_comparison_p; @@ -11231,7 +11232,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) if (sign_bit_comparison_p && HWI_COMPUTABLE_MODE_P (mode)) op0 = force_to_mode (op0, mode, (unsigned HOST_WIDE_INT) 1 - << (GET_MODE_BITSIZE (mode) - 1), + << (GET_MODE_PRECISION (mode) - 1), 0); /* Now try cases based on the opcode of OP0. If none of the cases @@ -11262,7 +11263,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) else { mode = new_mode; - i = (GET_MODE_BITSIZE (mode) - 1 - i); + i = (GET_MODE_PRECISION (mode) - 1 - i); } } @@ -11426,7 +11427,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) if (mode_width <= HOST_BITS_PER_WIDE_INT && subreg_lowpart_p (op0) - && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width + && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) > mode_width && GET_CODE (SUBREG_REG (op0)) == PLUS && CONST_INT_P (XEXP (SUBREG_REG (op0), 1))) { @@ -11446,14 +11447,14 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) /* (A - C1) sign-extends if it is positive and 1-extends if it is negative, C2 both sign- and 1-extends. */ || (num_sign_bit_copies (a, inner_mode) - > (unsigned int) (GET_MODE_BITSIZE (inner_mode) + > (unsigned int) (GET_MODE_PRECISION (inner_mode) - mode_width) && const_op < 0))) || ((unsigned HOST_WIDE_INT) c1 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2) /* (A - C1) always sign-extends, like C2. */ && num_sign_bit_copies (a, inner_mode) - > (unsigned int) (GET_MODE_BITSIZE (inner_mode) + > (unsigned int) (GET_MODE_PRECISION (inner_mode) - (mode_width - 1)))) { op0 = SUBREG_REG (op0); @@ -11464,7 +11465,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) /* If the inner mode is narrower and we are extracting the low part, we can treat the SUBREG as if it were a ZERO_EXTEND. */ if (subreg_lowpart_p (op0) - && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width) + && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) < mode_width) /* Fall through */ ; else break; @@ -11713,10 +11714,10 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) the code has been changed. 
*/ && (0 #ifdef WORD_REGISTER_OPERATIONS - || (mode_width > GET_MODE_BITSIZE (tmode) + || (mode_width > GET_MODE_PRECISION (tmode) && mode_width <= BITS_PER_WORD) #endif - || (mode_width <= GET_MODE_BITSIZE (tmode) + || (mode_width <= GET_MODE_PRECISION (tmode) && subreg_lowpart_p (XEXP (op0, 0)))) && CONST_INT_P (XEXP (op0, 1)) && mode_width <= HOST_BITS_PER_WIDE_INT @@ -11983,7 +11984,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) op1 = gen_lowpart (GET_MODE (op0), op1); } } - else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) + else if ((GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) <= HOST_BITS_PER_WIDE_INT) && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0))) @@ -12045,11 +12046,11 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) if (zero_extended || ((num_sign_bit_copies (op0, tmode) - > (unsigned int) (GET_MODE_BITSIZE (tmode) - - GET_MODE_BITSIZE (mode))) + > (unsigned int) (GET_MODE_PRECISION (tmode) + - GET_MODE_PRECISION (mode))) && (num_sign_bit_copies (op1, tmode) - > (unsigned int) (GET_MODE_BITSIZE (tmode) - - GET_MODE_BITSIZE (mode))))) + > (unsigned int) (GET_MODE_PRECISION (tmode) + - GET_MODE_PRECISION (mode))))) { /* If OP0 is an AND and we don't have an AND in MODE either, make a new AND in the proper mode. */ @@ -12348,7 +12349,7 @@ record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data) else if (GET_CODE (setter) == SET && GET_CODE (SET_DEST (setter)) == SUBREG && SUBREG_REG (SET_DEST (setter)) == dest - && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD + && GET_MODE_PRECISION (GET_MODE (dest)) <= BITS_PER_WORD && subreg_lowpart_p (SET_DEST (setter))) record_value_for_reg (dest, record_dead_insn, gen_lowpart (GET_MODE (dest), @@ -12445,7 +12446,7 @@ record_promoted_value (rtx insn, rtx subreg) unsigned int regno = REGNO (SUBREG_REG (subreg)); enum machine_mode mode = GET_MODE (subreg); - if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) + if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT) return; for (links = LOG_LINKS (insn); links;) diff --git a/gcc/cse.c b/gcc/cse.c index da4b1e1ee47..a078329ac55 100644 --- a/gcc/cse.c +++ b/gcc/cse.c @@ -3650,7 +3650,7 @@ fold_rtx (rtx x, rtx insn) enum rtx_code associate_code; if (is_shift - && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode) + && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode) || INTVAL (const_arg1) < 0)) { if (SHIFT_COUNT_TRUNCATED) @@ -3699,7 +3699,7 @@ fold_rtx (rtx x, rtx insn) break; if (is_shift - && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode) + && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode) || INTVAL (inner_const) < 0)) { if (SHIFT_COUNT_TRUNCATED) @@ -3729,7 +3729,7 @@ fold_rtx (rtx x, rtx insn) if (is_shift && CONST_INT_P (new_const) - && INTVAL (new_const) >= GET_MODE_BITSIZE (mode)) + && INTVAL (new_const) >= GET_MODE_PRECISION (mode)) { /* As an exception, we can turn an ASHIFTRT of this form into a shift of the number of bits - 1. 
*/ @@ -4672,13 +4672,13 @@ cse_insn (rtx insn) if (src_const && src_related == 0 && CONST_INT_P (src_const) && GET_MODE_CLASS (mode) == MODE_INT - && GET_MODE_BITSIZE (mode) < BITS_PER_WORD) + && GET_MODE_PRECISION (mode) < BITS_PER_WORD) { enum machine_mode wider_mode; for (wider_mode = GET_MODE_WIDER_MODE (mode); wider_mode != VOIDmode - && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD + && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD && src_related == 0; wider_mode = GET_MODE_WIDER_MODE (wider_mode)) { @@ -5031,7 +5031,7 @@ cse_insn (rtx insn) && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1)) && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2)) && REG_P (XEXP (SET_DEST (sets[i].rtl), 0)) - && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl))) + && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl))) >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))) && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)) + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2)) @@ -5058,7 +5058,7 @@ cse_insn (rtx insn) HOST_WIDE_INT mask; unsigned int shift; if (BITS_BIG_ENDIAN) - shift = GET_MODE_BITSIZE (GET_MODE (dest_reg)) + shift = GET_MODE_PRECISION (GET_MODE (dest_reg)) - INTVAL (pos) - INTVAL (width); else shift = INTVAL (pos); diff --git a/gcc/explow.c b/gcc/explow.c index a042273ec94..3c692f4074e 100644 --- a/gcc/explow.c +++ b/gcc/explow.c @@ -51,7 +51,7 @@ static rtx break_out_memory_refs (rtx); HOST_WIDE_INT trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode) { - int width = GET_MODE_BITSIZE (mode); + int width = GET_MODE_PRECISION (mode); /* You want to truncate to a _what_? */ gcc_assert (SCALAR_INT_MODE_P (mode)); diff --git a/gcc/loop-doloop.c b/gcc/loop-doloop.c index 560d49a32f9..f8429c4fd28 100644 --- a/gcc/loop-doloop.c +++ b/gcc/loop-doloop.c @@ -465,7 +465,7 @@ doloop_modify (struct loop *loop, struct niter_desc *desc, Note that the maximum value loaded is iterations_max - 1. */ if (desc->niter_max <= ((unsigned HOST_WIDEST_INT) 1 - << (GET_MODE_BITSIZE (mode) - 1))) + << (GET_MODE_PRECISION (mode) - 1))) nonneg = 1; break; @@ -677,7 +677,7 @@ doloop_optimize (struct loop *loop) doloop_seq = gen_doloop_end (doloop_reg, iterations, iterations_max, GEN_INT (level), start_label); - word_mode_size = GET_MODE_BITSIZE (word_mode); + word_mode_size = GET_MODE_PRECISION (word_mode); word_mode_max = ((unsigned HOST_WIDE_INT) 1 << (word_mode_size - 1) << 1) - 1; if (! doloop_seq @@ -685,10 +685,10 @@ doloop_optimize (struct loop *loop) /* Before trying mode different from the one in that # of iterations is computed, we must be sure that the number of iterations fits into the new mode. */ - && (word_mode_size >= GET_MODE_BITSIZE (mode) + && (word_mode_size >= GET_MODE_PRECISION (mode) || desc->niter_max <= word_mode_max)) { - if (word_mode_size > GET_MODE_BITSIZE (mode)) + if (word_mode_size > GET_MODE_PRECISION (mode)) { zero_extend_p = true; iterations = simplify_gen_unary (ZERO_EXTEND, word_mode, diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c index 2dfbd8fe114..ac9da152c3c 100644 --- a/gcc/rtlanal.c +++ b/gcc/rtlanal.c @@ -3177,7 +3177,7 @@ subreg_lsb_1 (enum machine_mode outer_mode, unsigned int word; /* A paradoxical subreg begins at bit position 0. */ - if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode)) + if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode)) return 0; if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) @@ -3281,7 +3281,7 @@ subreg_get_info (unsigned int xregno, enum machine_mode xmode, /* Paradoxical subregs are otherwise valid. 
*/ if (!rknown && offset == 0 - && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode)) + && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode)) { info->representable_p = true; /* If this is a big endian paradoxical subreg, which uses more @@ -3850,7 +3850,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, unsigned HOST_WIDE_INT inner_nz; enum rtx_code code; enum machine_mode inner_mode; - unsigned int mode_width = GET_MODE_BITSIZE (mode); + unsigned int mode_width = GET_MODE_PRECISION (mode); /* For floating-point and vector values, assume all bits are needed. */ if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode) @@ -3858,11 +3858,11 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, return nonzero; /* If X is wider than MODE, use its mode instead. */ - if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) + if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width) { mode = GET_MODE (x); nonzero = GET_MODE_MASK (mode); - mode_width = GET_MODE_BITSIZE (mode); + mode_width = GET_MODE_PRECISION (mode); } if (mode_width > HOST_BITS_PER_WIDE_INT) @@ -3879,9 +3879,9 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, not known to be zero. */ if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode - && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD - && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT - && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) + && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD + && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT + && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x))) { nonzero &= cached_nonzero_bits (x, GET_MODE (x), known_x, known_mode, known_ret); @@ -3989,7 +3989,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, /* Disabled to avoid exponential mutual recursion between nonzero_bits and num_sign_bit_copies. */ if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) - == GET_MODE_BITSIZE (GET_MODE (x))) + == GET_MODE_PRECISION (GET_MODE (x))) nonzero = 1; #endif @@ -4002,7 +4002,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, /* Disabled to avoid exponential mutual recursion between nonzero_bits and num_sign_bit_copies. */ if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) - == GET_MODE_BITSIZE (GET_MODE (x))) + == GET_MODE_PRECISION (GET_MODE (x))) nonzero = 1; #endif break; @@ -4075,7 +4075,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, unsigned HOST_WIDE_INT nz1 = cached_nonzero_bits (XEXP (x, 1), mode, known_x, known_mode, known_ret); - int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1; + int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1; int width0 = floor_log2 (nz0) + 1; int width1 = floor_log2 (nz1) + 1; int low0 = floor_log2 (nz0 & -nz0); @@ -4156,8 +4156,8 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, /* If the inner mode is a single word for both the host and target machines, we can compute this from which bits of the inner object might be nonzero. 
*/ - if (GET_MODE_BITSIZE (inner_mode) <= BITS_PER_WORD - && (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT)) + if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD + && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT)) { nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, known_x, known_mode, known_ret); @@ -4174,8 +4174,8 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, /* On many CISC machines, accessing an object in a wider mode causes the high-order bits to become undefined. So they are not known to be zero. */ - if (GET_MODE_SIZE (GET_MODE (x)) - > GET_MODE_SIZE (inner_mode)) + if (GET_MODE_PRECISION (GET_MODE (x)) + > GET_MODE_PRECISION (inner_mode)) nonzero |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode)); } @@ -4195,10 +4195,10 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT - && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))) + && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) { enum machine_mode inner_mode = GET_MODE (x); - unsigned int width = GET_MODE_BITSIZE (inner_mode); + unsigned int width = GET_MODE_PRECISION (inner_mode); int count = INTVAL (XEXP (x, 1)); unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); unsigned HOST_WIDE_INT op_nonzero @@ -4351,7 +4351,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, unsigned int known_ret) { enum rtx_code code = GET_CODE (x); - unsigned int bitwidth = GET_MODE_BITSIZE (mode); + unsigned int bitwidth = GET_MODE_PRECISION (mode); int num0, num1, result; unsigned HOST_WIDE_INT nonzero; @@ -4367,26 +4367,26 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, return 1; /* For a smaller object, just ignore the high bits. */ - if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) + if (bitwidth < GET_MODE_PRECISION (GET_MODE (x))) { num0 = cached_num_sign_bit_copies (x, GET_MODE (x), known_x, known_mode, known_ret); return MAX (1, - num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); + num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth)); } - if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) + if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x))) { #ifndef WORD_REGISTER_OPERATIONS - /* If this machine does not do all register operations on the entire - register and MODE is wider than the mode of X, we can say nothing - at all about the high-order bits. */ + /* If this machine does not do all register operations on the entire + register and MODE is wider than the mode of X, we can say nothing + at all about the high-order bits. */ return 1; #else /* Likewise on machines that do, if the mode of the object is smaller than a word and loads of that size don't sign extend, we can say nothing about the high order bits. */ - if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD + if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD #ifdef LOAD_EXTEND_OP && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND #endif @@ -4408,7 +4408,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, if (target_default_pointer_address_modes_p () && ! 
POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
 	  && REG_POINTER (x))
-	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
+	return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
 #endif
 
     {
@@ -4433,7 +4433,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
       /* Some RISC machines sign-extend all loads of smaller than a word.  */
       if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
 	return MAX (1, ((int) bitwidth
-			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
+			- (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
 #endif
       break;
 
@@ -4457,17 +4457,17 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
 					     known_x, known_mode, known_ret);
 	  return MAX ((int) bitwidth
-		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
+		      - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
 		      num0);
 	}
 
       /* For a smaller object, just ignore the high bits.  */
-      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
+      if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
 	{
 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
 					     known_x, known_mode, known_ret);
 	  return MAX (1, (num0
-			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
+			  - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
 				   - bitwidth)));
 	}
 
@@ -4498,7 +4498,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
       break;
 
     case SIGN_EXTEND:
-      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
+      return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
 	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
 					    known_x, known_mode, known_ret));
 
@@ -4506,7 +4506,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
       /* For a smaller object, just ignore the high bits.  */
       num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
 					 known_x, known_mode, known_ret);
-      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
+      return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
 				    - bitwidth)));
 
     case NOT:
@@ -4683,7 +4683,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
 					 known_x, known_mode, known_ret);
       if (CONST_INT_P (XEXP (x, 1))
 	  && INTVAL (XEXP (x, 1)) > 0
-	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
+	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
 	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
 
       return num0;
@@ -4693,7 +4693,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
       if (!CONST_INT_P (XEXP (x, 1))
 	  || INTVAL (XEXP (x, 1)) < 0
 	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
-	  || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
+	  || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
 	return 1;
 
       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
@@ -4729,7 +4729,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
      count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */
 
-  bitwidth = GET_MODE_BITSIZE (mode);
+  bitwidth = GET_MODE_PRECISION (mode);
   if (bitwidth > HOST_BITS_PER_WIDE_INT)
     return 1;
 
@@ -4998,7 +4998,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
   if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
       && CONST_INT_P (op1)
       && GET_MODE (op0) != VOIDmode
-      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
+      && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
     {
       HOST_WIDE_INT const_val = INTVAL (op1);
       unsigned HOST_WIDE_INT uconst_val = const_val;
@@ -5017,7 +5017,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
 	case GE:
 	  if ((const_val & max_val)
 	      != ((unsigned HOST_WIDE_INT) 1
-		  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))
+		  << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
 	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
 	  break;
 
@@ -5123,7 +5123,7 @@ init_num_sign_bit_copies_in_rep (void)
 		   have to be sign-bit copies too.  */
 		|| num_sign_bit_copies_in_rep [in_mode][mode])
 	      num_sign_bit_copies_in_rep [in_mode][mode]
-		+= GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
+		+= GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
 	  }
       }
 }
@@ -5183,7 +5183,7 @@ low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
 {
   if (mode != VOIDmode)
     {
-      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
+      if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
 	return -1;
       m &= GET_MODE_MASK (mode);
     }
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index d5a9cbcbded..5a98b69abbc 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -649,7 +649,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
       if (STORE_FLAG_VALUE == -1
 	  && GET_CODE (op) == ASHIFTRT
 	  && GET_CODE (XEXP (op, 1))
-	  && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
+	  && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (mode) - 1)
 	return simplify_gen_relational (GE, mode, VOIDmode,
 					XEXP (op, 0), const0_rtx);
 
@@ -765,7 +765,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	 C is equal to the width of MODE minus 1.  */
       if (GET_CODE (op) == ASHIFTRT
 	  && CONST_INT_P (XEXP (op, 1))
-	  && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
+	  && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (mode) - 1)
 	return simplify_gen_binary (LSHIFTRT, mode,
 				    XEXP (op, 0), XEXP (op, 1));
 
@@ -773,7 +773,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	 C is equal to the width of MODE minus 1.  */
       if (GET_CODE (op) == LSHIFTRT
 	  && CONST_INT_P (XEXP (op, 1))
-	  && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
+	  && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (mode) - 1)
 	return simplify_gen_binary (ASHIFTRT, mode,
 				    XEXP (op, 0), XEXP (op, 1));
 
@@ -790,14 +790,14 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	  && SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0))))
 	{
 	  enum machine_mode inner = GET_MODE (XEXP (op, 0));
-	  int isize = GET_MODE_BITSIZE (inner);
+	  int isize = GET_MODE_PRECISION (inner);
 	  if (STORE_FLAG_VALUE == 1)
 	    {
 	      temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0),
 					  GEN_INT (isize - 1));
 	      if (mode == inner)
 		return temp;
-	      if (GET_MODE_BITSIZE (mode) > isize)
+	      if (GET_MODE_PRECISION (mode) > isize)
 		return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner);
 	      return simplify_gen_unary (TRUNCATE, mode, temp, inner);
 	    }
@@ -807,7 +807,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 					  GEN_INT (isize - 1));
 	      if (mode == inner)
 		return temp;
-	      if (GET_MODE_BITSIZE (mode) > isize)
+	      if (GET_MODE_PRECISION (mode) > isize)
 		return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner);
 	      return simplify_gen_unary (TRUNCATE, mode, temp, inner);
 	    }
@@ -854,8 +854,8 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	 patterns.  */
       if ((TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (op))
 	   ? (num_sign_bit_copies (op, GET_MODE (op))
-	      > (unsigned int) (GET_MODE_BITSIZE (GET_MODE (op))
-				- GET_MODE_BITSIZE (mode)))
+	      > (unsigned int) (GET_MODE_PRECISION (GET_MODE (op))
+				- GET_MODE_PRECISION (mode)))
 	   : truncated_to_mode (mode, op))
 	  && ! (GET_CODE (op) == LSHIFTRT
 		&& GET_CODE (XEXP (op, 0)) == MULT))
@@ -904,7 +904,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	  && (flag_unsafe_math_optimizations
 	      || (SCALAR_FLOAT_MODE_P (GET_MODE (op))
 		  && ((unsigned)significand_size (GET_MODE (op))
-		      >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
+		      >= (GET_MODE_PRECISION (GET_MODE (XEXP (op, 0)))
 			  - num_sign_bit_copies (XEXP (op, 0),
 						 GET_MODE (XEXP (op, 0))))))))
 	return simplify_gen_unary (FLOAT, mode,
@@ -941,7 +941,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	  || (GET_CODE (op) == FLOAT
 	      && SCALAR_FLOAT_MODE_P (GET_MODE (op))
 	      && ((unsigned)significand_size (GET_MODE (op))
-		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
+		  >= (GET_MODE_PRECISION (GET_MODE (XEXP (op, 0)))
 		      - num_sign_bit_copies (XEXP (op, 0),
 					     GET_MODE (XEXP (op, 0)))))))
 	return simplify_gen_unary (GET_CODE (op), mode,
@@ -968,7 +968,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
 	return op;
 
       /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
-      if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode))
+      if (num_sign_bit_copies (op, mode) == GET_MODE_PRECISION (mode))
 	return gen_rtx_NEG (mode, op);
 
       break;
@@ -1261,8 +1261,8 @@ rtx
 simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 				rtx op, enum machine_mode op_mode)
 {
-  unsigned int width = GET_MODE_BITSIZE (mode);
-  unsigned int op_width = GET_MODE_BITSIZE (op_mode);
+  unsigned int width = GET_MODE_PRECISION (mode);
+  unsigned int op_width = GET_MODE_PRECISION (op_mode);
 
   if (code == VEC_DUPLICATE)
     {
@@ -1362,7 +1362,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	  if (hv < 0)
 	    return 0;
 	}
-      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
+      else if (GET_MODE_PRECISION (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
 	;
       else
 	hv = 0, lv &= GET_MODE_MASK (op_mode);
@@ -1403,17 +1403,17 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	  if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (op_mode, val))
 	    ;
 	  else
-	    val = GET_MODE_BITSIZE (op_mode) - floor_log2 (arg0) - 1;
+	    val = GET_MODE_PRECISION (op_mode) - floor_log2 (arg0) - 1;
 	  break;
 
 	case CLRSB:
 	  arg0 &= GET_MODE_MASK (op_mode);
 	  if (arg0 == 0)
-	    val = GET_MODE_BITSIZE (op_mode) - 1;
+	    val = GET_MODE_PRECISION (op_mode) - 1;
 	  else if (arg0 >= 0)
-	    val = GET_MODE_BITSIZE (op_mode) - floor_log2 (arg0) - 2;
+	    val = GET_MODE_PRECISION (op_mode) - floor_log2 (arg0) - 2;
 	  else if (arg0 < 0)
-	    val = GET_MODE_BITSIZE (op_mode) - floor_log2 (~arg0) - 2;
+	    val = GET_MODE_PRECISION (op_mode) - floor_log2 (~arg0) - 2;
 	  break;
 
 	case CTZ:
@@ -1423,7 +1423,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	      /* Even if the value at zero is undefined, we have to come
 		 up with some replacement.  Seems good enough.  */
 	      if (! CTZ_DEFINED_VALUE_AT_ZERO (op_mode, val))
-		val = GET_MODE_BITSIZE (op_mode);
+		val = GET_MODE_PRECISION (op_mode);
 	    }
 	  else
 	    val = ctz_hwi (arg0);
@@ -1467,12 +1467,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	  /* When zero-extending a CONST_INT, we need to know its
 	     original mode.  */
 	  gcc_assert (op_mode != VOIDmode);
-	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
+	  if (op_width == HOST_BITS_PER_WIDE_INT)
 	    {
 	      /* If we were really extending the mode, we would have to
 		 distinguish between zero-extension and sign-extension.  */
-	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
+	      gcc_assert (width == op_width);
 	      val = arg0;
 	    }
 	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
@@ -1484,15 +1484,16 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	case SIGN_EXTEND:
 	  if (op_mode == VOIDmode)
 	    op_mode = mode;
-	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
+	  op_width = GET_MODE_PRECISION (op_mode);
+	  if (op_width == HOST_BITS_PER_WIDE_INT)
 	    {
 	      /* If we were really extending the mode, we would have to
 		 distinguish between zero-extension and sign-extension.  */
-	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
+	      gcc_assert (width == op_width);
 	      val = arg0;
 	    }
-	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
+	  else if (op_width < HOST_BITS_PER_WIDE_INT)
 	    {
 	      val = arg0 & GET_MODE_MASK (op_mode);
 	      if (val_signbit_known_set_p (op_mode, val))
 		val |= ~GET_MODE_MASK (op_mode);
 	    }
 	  break;
@@ -1565,12 +1566,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	case CLZ:
 	  hv = 0;
 	  if (h1 != 0)
-	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1
+	    lv = GET_MODE_PRECISION (mode) - floor_log2 (h1) - 1
 	      - HOST_BITS_PER_WIDE_INT;
 	  else if (l1 != 0)
-	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1;
+	    lv = GET_MODE_PRECISION (mode) - floor_log2 (l1) - 1;
 	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv))
-	    lv = GET_MODE_BITSIZE (mode);
+	    lv = GET_MODE_PRECISION (mode);
 	  break;
 
 	case CTZ:
@@ -1580,7 +1581,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	  else if (h1 != 0)
 	    lv = HOST_BITS_PER_WIDE_INT + ctz_hwi (h1);
 	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
-	    lv = GET_MODE_BITSIZE (mode);
+	    lv = GET_MODE_PRECISION (mode);
 	  break;
 
 	case POPCOUNT:
@@ -1634,7 +1635,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 	case ZERO_EXTEND:
 	  gcc_assert (op_mode != VOIDmode);
 
-	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
+	  if (op_width > HOST_BITS_PER_WIDE_INT)
 	    return 0;
 
 	  hv = 0;
@@ -1643,7 +1644,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
 
 	case SIGN_EXTEND:
 	  if (op_mode == VOIDmode
-	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
+	      || op_width > HOST_BITS_PER_WIDE_INT)
 	    return 0;
 	  else
 	    {
@@ -1920,7 +1921,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
 {
   rtx tem, reversed, opleft, opright;
   HOST_WIDE_INT val;
-  unsigned int width = GET_MODE_BITSIZE (mode);
+  unsigned int width = GET_MODE_PRECISION (mode);
 
   /* Even if we can't compute a constant result,
      there are some cases worth simplifying.  */
@@ -2505,7 +2506,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
           && CONST_INT_P (XEXP (opleft, 1))
           && CONST_INT_P (XEXP (opright, 1))
           && (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
-              == GET_MODE_BITSIZE (mode)))
+              == GET_MODE_PRECISION (mode)))
         return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
 
       /* Same, but for ashift that has been "simplified" to a wider mode
@@ -2524,7 +2525,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
           && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
           && CONST_INT_P (XEXP (opright, 1))
           && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
-              == GET_MODE_BITSIZE (mode)))
+              == GET_MODE_PRECISION (mode)))
         return gen_rtx_ROTATE (mode, XEXP (opright, 0),
                                XEXP (SUBREG_REG (opleft), 1));
 
@@ -2702,7 +2703,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
 	  && trueop1 == const1_rtx
 	  && GET_CODE (op0) == LSHIFTRT
 	  && CONST_INT_P (XEXP (op0, 1))
-	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
+	  && INTVAL (XEXP (op0, 1)) == GET_MODE_PRECISION (mode) - 1)
 	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
 
       /* (xor (comparison foo bar) (const_int sign-bit))
@@ -3061,7 +3062,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
 	  unsigned HOST_WIDE_INT zero_val = 0;
 
 	  if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
-	      && zero_val == GET_MODE_BITSIZE (imode)
+	      && zero_val == GET_MODE_PRECISION (imode)
 	      && INTVAL (trueop1) == exact_log2 (zero_val))
 	    return simplify_gen_relational (EQ, mode, imode,
 					    XEXP (op0, 0), const0_rtx);
@@ -3351,7 +3352,7 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
 {
   HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
   HOST_WIDE_INT val;
-  unsigned int width = GET_MODE_BITSIZE (mode);
+  unsigned int width = GET_MODE_PRECISION (mode);
 
   if (VECTOR_MODE_P (mode)
       && code != VEC_CONCAT
@@ -3636,24 +3637,24 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
 	    unsigned HOST_WIDE_INT cnt;
 
 	    if (SHIFT_COUNT_TRUNCATED)
-	      o1 = double_int_zext (o1, GET_MODE_BITSIZE (mode));
+	      o1 = double_int_zext (o1, GET_MODE_PRECISION (mode));
 
 	    if (!double_int_fits_in_uhwi_p (o1)
-		|| double_int_to_uhwi (o1) >= GET_MODE_BITSIZE (mode))
+		|| double_int_to_uhwi (o1) >= GET_MODE_PRECISION (mode))
 	      return 0;
 
 	    cnt = double_int_to_uhwi (o1);
 
 	    if (code == LSHIFTRT || code == ASHIFTRT)
-	      res = double_int_rshift (o0, cnt, GET_MODE_BITSIZE (mode),
+	      res = double_int_rshift (o0, cnt, GET_MODE_PRECISION (mode),
 				       code == ASHIFTRT);
 	    else if (code == ASHIFT)
-	      res = double_int_lshift (o0, cnt, GET_MODE_BITSIZE (mode),
+	      res = double_int_lshift (o0, cnt, GET_MODE_PRECISION (mode),
 				       true);
 	    else if (code == ROTATE)
-	      res = double_int_lrotate (o0, cnt, GET_MODE_BITSIZE (mode));
+	      res = double_int_lrotate (o0, cnt, GET_MODE_PRECISION (mode));
 	    else /* code == ROTATERT */
-	      res = double_int_rrotate (o0, cnt, GET_MODE_BITSIZE (mode));
+	      res = double_int_rrotate (o0, cnt, GET_MODE_PRECISION (mode));
 	  }
 	break;
 
@@ -4626,7 +4627,7 @@ simplify_const_relational_operation (enum rtx_code code,
       && (GET_CODE (trueop1) == CONST_DOUBLE
 	  || CONST_INT_P (trueop1)))
     {
-      int width = GET_MODE_BITSIZE (mode);
+      int width = GET_MODE_PRECISION (mode);
       HOST_WIDE_INT l0s, h0s, l1s, h1s;
       unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
 
@@ -4814,7 +4815,7 @@ simplify_const_relational_operation (enum rtx_code code,
 	  rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
 	  if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
 	    {
-	      int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
+	      int sign_bitnum = GET_MODE_PRECISION (mode) - 1;
 	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
 			      && (UINTVAL (inner_const)
 				  & ((unsigned HOST_WIDE_INT) 1
@@ -4906,7 +4907,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
 			    enum machine_mode op0_mode, rtx op0, rtx op1,
 			    rtx op2)
 {
-  unsigned int width = GET_MODE_BITSIZE (mode);
+  unsigned int width = GET_MODE_PRECISION (mode);
   bool any_change = false;
   rtx tem;
 
@@ -4951,21 +4952,22 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
 	{
 	  /* Extracting a bit-field from a constant */
 	  unsigned HOST_WIDE_INT val = UINTVAL (op0);
-
+	  HOST_WIDE_INT op1val = INTVAL (op1);
+	  HOST_WIDE_INT op2val = INTVAL (op2);
 	  if (BITS_BIG_ENDIAN)
-	    val >>= GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1);
+	    val >>= GET_MODE_PRECISION (op0_mode) - op2val - op1val;
 	  else
-	    val >>= INTVAL (op2);
+	    val >>= op2val;
 
-	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
+	  if (HOST_BITS_PER_WIDE_INT != op1val)
 	    {
 	      /* First zero-extend.  */
-	      val &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
+	      val &= ((unsigned HOST_WIDE_INT) 1 << op1val) - 1;
 	      /* If desired, propagate sign bit.  */
 	      if (code == SIGN_EXTRACT
-		  && (val & ((unsigned HOST_WIDE_INT) 1 << (INTVAL (op1) - 1)))
+		  && (val & ((unsigned HOST_WIDE_INT) 1 << (op1val - 1)))
 		  != 0)
-		val |= ~ (((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
+		val |= ~ (((unsigned HOST_WIDE_INT) 1 << op1val) - 1);
 	    }
 
 	  return gen_int_mode (val, mode);
@@ -5610,7 +5612,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
   /* Optimize SUBREG truncations of zero and sign extended values.  */
   if ((GET_CODE (op) == ZERO_EXTEND
        || GET_CODE (op) == SIGN_EXTEND)
-      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode))
+      && GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode))
     {
       unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte);
 
@@ -5626,7 +5628,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
 	  enum machine_mode origmode = GET_MODE (XEXP (op, 0));
 	  if (outermode == origmode)
 	    return XEXP (op, 0);
-	  if (GET_MODE_BITSIZE (outermode) <= GET_MODE_BITSIZE (origmode))
+	  if (GET_MODE_PRECISION (outermode) <= GET_MODE_PRECISION (origmode))
 	    return simplify_gen_subreg (outermode, XEXP (op, 0), origmode,
 					subreg_lowpart_offset (outermode, origmode));
 
@@ -5638,7 +5640,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
       /* A SUBREG resulting from a zero extension may fold to zero if
	 it extracts higher bits that the ZERO_EXTEND's source bits.  */
       if (GET_CODE (op) == ZERO_EXTEND
-	  && bitpos >= GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))))
+	  && bitpos >= GET_MODE_PRECISION (GET_MODE (XEXP (op, 0))))
 	return CONST0_RTX (outermode);
     }
 
@@ -5652,11 +5654,11 @@ simplify_subreg (enum machine_mode outermode, rtx op,
	 to avoid the possibility that an outer LSHIFTRT shifts by more
	 than the sign extension's sign_bit_copies and introduces zeros
	 into the high bits of the result.  */
-      && (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
+      && (2 * GET_MODE_PRECISION (outermode)) <= GET_MODE_PRECISION (innermode)
       && CONST_INT_P (XEXP (op, 1))
       && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
       && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
-      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
+      && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (outermode)
       && subreg_lsb_1 (outermode, innermode, byte) == 0)
     return simplify_gen_binary (ASHIFTRT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));
@@ -5667,11 +5669,11 @@ simplify_subreg (enum machine_mode outermode, rtx op,
   if ((GET_CODE (op) == LSHIFTRT
        || GET_CODE (op) == ASHIFTRT)
       && SCALAR_INT_MODE_P (outermode)
-      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
+      && GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode)
       && CONST_INT_P (XEXP (op, 1))
       && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
       && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
-      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
+      && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (outermode)
       && subreg_lsb_1 (outermode, innermode, byte) == 0)
     return simplify_gen_binary (LSHIFTRT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));
@@ -5681,12 +5683,12 @@ simplify_subreg (enum machine_mode outermode, rtx op,
      the outer subreg is effectively a truncation to the original mode.  */
   if (GET_CODE (op) == ASHIFT
       && SCALAR_INT_MODE_P (outermode)
-      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
+      && GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode)
       && CONST_INT_P (XEXP (op, 1))
       && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
	   || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
       && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
-      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
+      && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (outermode)
       && subreg_lsb_1 (outermode, innermode, byte) == 0)
     return simplify_gen_binary (ASHIFT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));
@@ -5695,12 +5697,12 @@ simplify_subreg (enum machine_mode outermode, rtx op,
   if ((GET_CODE (op) == LSHIFTRT
        || GET_CODE (op) == ASHIFTRT)
       && SCALAR_INT_MODE_P (outermode)
-      && GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD
-      && GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode))
+      && GET_MODE_PRECISION (outermode) >= BITS_PER_WORD
+      && GET_MODE_PRECISION (innermode) >= (2 * GET_MODE_PRECISION (outermode))
       && CONST_INT_P (XEXP (op, 1))
-      && (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0
+      && (INTVAL (XEXP (op, 1)) & (GET_MODE_PRECISION (outermode) - 1)) == 0
       && INTVAL (XEXP (op, 1)) >= 0
-      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
+      && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (innermode)
       && byte == subreg_lowpart_offset (outermode, innermode))
     {
       int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
-- 
2.30.2
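
[Editor's note: the following illustration is appended after the patch trailer and is not part of the patch itself.]

The change above is mechanical, but it rests on one distinction: GET_MODE_BITSIZE is the storage size of a mode in bits, while GET_MODE_PRECISION is the number of bits that actually carry value. For ordinary scalar integer modes the two agree; for partial-integer modes (for example, a 24-bit PSImode stored in 32 bits) the precision is smaller, and the quantities computed in these hunks (sign-bit positions, shift-count bounds, rotate widths, extension deltas) are properties of the value bits, not of the container. The self-contained C sketch below models that distinction with an invented toy_mode struct and invented example widths; it is not GCC code, only an illustration of why precision is the right quantity.

#include <stdio.h>

/* Toy stand-in for a machine mode.  'bitsize' mimics GET_MODE_BITSIZE
   (storage size in bits); 'precision' mimics GET_MODE_PRECISION (number
   of significant value bits).  Invented for illustration; these are not
   the real GCC structures.  */
struct toy_mode
{
  const char *name;
  unsigned int bitsize;
  unsigned int precision;
};

/* The sign bit lives at precision - 1, a value-bit property.  Computing
   it from the storage bitsize instead points at a padding bit whenever
   the two differ.  */
static unsigned int
sign_bit_pos (const struct toy_mode *m)
{
  return m->precision - 1;
}

int
main (void)
{
  /* A full 32-bit mode and a partial mode: 24 value bits stored in 32.  */
  struct toy_mode si  = { "SImode",  32, 32 };
  struct toy_mode psi = { "PSImode", 32, 24 };

  unsigned int v = 1u << 23;   /* sign bit of a 24-bit value is set */

  printf ("%s: sign bit at %u\n", si.name, sign_bit_pos (&si));
  printf ("%s: sign bit at %u (bit %u would be padding)\n",
	  psi.name, sign_bit_pos (&psi), psi.bitsize - 1);
  printf ("v = 0x%08x: negative as %s? %s\n", v, psi.name,
	  (v >> sign_bit_pos (&psi)) & 1 ? "yes" : "no");
  return 0;
}

Under this toy model, testing bit bitsize - 1 (bit 31) would report v as non-negative even though its 24-bit sign bit is set; the hunks above make the equivalent substitution wherever RTL simplification reasons about sign bits, shift bounds, or mode widths.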