say what its contents were. */
&& ! REGNO_REG_SET_P
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
- && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
{
reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
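
This is the pattern repeated throughout the patch: the open-coded test
GET_MODE_BITSIZE (m) <= HOST_BITS_PER_WIDE_INT becomes
HWI_COMPUTABLE_MODE_P (m), i.e. "can every value of this mode be represented
exactly in one HOST_WIDE_INT, so that nonzero_bits and friends are
trustworthy for it". A minimal sketch of the macro, assuming the rtl.h
definition pairs the width test with a scalar-integer check:

  #define HWI_COMPUTABLE_MODE_P(MODE) \
    (SCALAR_INT_MODE_P (MODE) \
     && GET_MODE_PRECISION (MODE) <= HOST_BITS_PER_WIDE_INT)

Note the macro is slightly stronger than the raw bitsize comparison it
replaces: it also rejects non-scalar-integer modes.
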
/* See if this is a bitfield assignment with everything constant. If
so, this is an IOR of an AND, so split it into that. */
if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
- && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
- <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (XEXP (SET_DEST (x), 0)))
&& CONST_INT_P (XEXP (SET_DEST (x), 1))
&& CONST_INT_P (XEXP (SET_DEST (x), 2))
&& CONST_INT_P (SET_SRC (x))
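
For concreteness, a constant store into a constant bitfield,

  (set (zero_extract:SI (reg:SI r) (const_int LEN) (const_int POS))
       (const_int C))

computes (a sketch; the bit numbering here ignores BITS_BIG_ENDIAN
complications):

  unsigned HOST_WIDE_INT mask
    = (((unsigned HOST_WIDE_INT) 1 << LEN) - 1) << POS;
  r = (r & ~mask)                                      /* the AND clears the field */
      | (((unsigned HOST_WIDE_INT) C << POS) & mask);  /* the IOR inserts C */

so when LEN, POS and C are all constants, the whole SET can be rewritten as
an IOR of an AND with two compile-time constants.
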
if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
break;
- if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ if (HWI_COMPUTABLE_MODE_P (mode))
SUBST (XEXP (x, 0),
force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
GET_MODE_MASK (mode), 0));
/* Similarly to what we do in simplify-rtx.c, a truncate of a register
whose value is a comparison can be replaced with a subreg if
STORE_FLAG_VALUE permits. */
- if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ if (HWI_COMPUTABLE_MODE_P (mode)
&& (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
&& (temp = get_last_value (XEXP (x, 0)))
&& COMPARISON_P (temp))
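
The reasoning: a comparison result is always 0 or STORE_FLAG_VALUE, so when
STORE_FLAG_VALUE has no bits outside the narrow mode's mask, truncation
cannot change the value. With the common STORE_FLAG_VALUE of 1, for example,

  (truncate:QI (reg:SI r))      /* r holds a comparison: 0 or 1 */

has the same value as the lowpart subreg

  (subreg:QI (reg:SI r) 0)      /* offset 0 assumes a little-endian lowpart */

and the SUBREG is free where the TRUNCATE might not be.
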
&& INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
&& ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
|| (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
&& CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
&& (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
for example in cases like ((a & 1) + (a & 2)), which can
become a & 3. */
- if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ if (HWI_COMPUTABLE_MODE_P (mode)
&& (nonzero_bits (XEXP (x, 0), mode)
& nonzero_bits (XEXP (x, 1), mode)) == 0)
{
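
The justification for rewriting PLUS this way: if no bit position can be set
in both addends, no column ever produces a carry, so addition and inclusive
or agree. A self-contained check of the comment's example:

  /* (x & 1) and (x & 2) have disjoint nonzero bits, so their sum
     equals their inclusive or, which folds to x & 3.  */
  static int
  plus_is_ior (unsigned int x)
  {
    return ((x & 1) + (x & 2)) == ((x & 1) | (x & 2))
           && ((x & 1) | (x & 2)) == (x & 3);
  }
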
AND with STORE_FLAG_VALUE when we are done, since we are only
going to test the sign bit. */
if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& val_signbit_p (mode, STORE_FLAG_VALUE)
&& op1 == const0_rtx
&& mode == GET_MODE (op0)
|| GET_CODE (XEXP (t, 0)) == LSHIFTRT
|| GET_CODE (XEXP (t, 0)) == ASHIFTRT)
&& GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
&& rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
&& ((nonzero_bits (f, GET_MODE (f))
|| GET_CODE (XEXP (t, 0)) == IOR
|| GET_CODE (XEXP (t, 0)) == XOR)
&& GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
&& rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
&& ((nonzero_bits (f, GET_MODE (f))
simplify the expression for the object knowing that we only need the
low-order bits. */
- if (GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
{
src = force_to_mode (src, mode, ~(unsigned HOST_WIDE_INT) 0, 0);
SUBST (SET_SRC (x), src);
if (((old_code == NE && new_code == EQ)
|| (old_code == EQ && new_code == NE))
&& ! other_changed_previously && op1 == const0_rtx
- && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
&& exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
{
rtx pat = PATTERN (other_insn), note = 0;
any (sign) bits when converting INTVAL (op1) to
"unsigned HOST_WIDE_INT". */
if (CONST_INT_P (op1)
- && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && (HWI_COMPUTABLE_MODE_P (mode)
|| INTVAL (op1) > 0))
{
x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
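
The guard spells out the hazard named in the comment: CONST_INTs are stored
sign-extended in a signed HOST_WIDE_INT, so in a mode wider than
HOST_WIDE_INT a negative INTVAL stands for more set bits than the unsigned
cast can keep. Assuming a 64-bit HOST_WIDE_INT:

  HOST_WIDE_INT v = -2;   /* as a TImode constant, means 0xffff...fffe (128 bits) */
  unsigned HOST_WIDE_INT u = (unsigned HOST_WIDE_INT) v;
  /* u keeps only the low 64 bits; the 64 implied high one-bits are lost.
     If v > 0, the implied high bits are all zero and nothing is lost,
     which is exactly the INTVAL (op1) > 0 escape hatch above.  */
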
bit is not set, as this is easier to optimize. It will be converted
back to cheaper alternative in make_extraction. */
if (GET_CODE (x) == SIGN_EXTEND
- && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
+ && (HWI_COMPUTABLE_MODE_P (GET_MODE (x))
&& ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
& ~(((unsigned HOST_WIDE_INT)
GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
set. */
if (GET_CODE (XEXP (x, 0)) == TRUNCATE
&& GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
- && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (x))
&& (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
& ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
return XEXP (XEXP (x, 0), 0);
if (GET_CODE (XEXP (x, 0)) == SUBREG
&& GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
&& subreg_lowpart_p (XEXP (x, 0))
- && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (x))
&& (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
& ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
return SUBREG_REG (XEXP (x, 0));
bit is not set, convert the extraction to the cheaper of
sign and zero extension, that are equivalent in these cases. */
if (flag_expensive_optimizations
- && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
+ && (HWI_COMPUTABLE_MODE_P (tmode)
&& ((nonzero_bits (new_rtx, tmode)
- & ~(((unsigned HOST_WIDE_INT)
- GET_MODE_MASK (tmode))
- >> 1))
+ & ~(((unsigned HOST_WIDE_INT) GET_MODE_MASK (tmode)) >> 1))
== 0)))
{
rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
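
Sign and zero extension agree whenever the top bit of the value being
extended is known to be zero, which is what the nonzero_bits test above
establishes; combine is then free to use whichever extension the target
implements more cheaply. In C terms:

  /* If b <= 0x7f, extending b as signed or unsigned gives the same int.  */
  static int
  extensions_agree (unsigned char b)
  {
    return b > 0x7f || (int) (signed char) b == (int) b;
  }
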
SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
cases. */
if (flag_expensive_optimizations
- && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
+ && (HWI_COMPUTABLE_MODE_P (GET_MODE (pos_rtx))
&& ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
& ~(((unsigned HOST_WIDE_INT)
GET_MODE_MASK (GET_MODE (pos_rtx)))
if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
&& GET_MODE_MASK (GET_MODE (x)) != mask
- && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
{
unsigned HOST_WIDE_INT cval
= UINTVAL (XEXP (x, 1))
if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
- && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (op_mode))
mask >>= INTVAL (XEXP (x, 1));
else
mask = fuller_mask;
if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
- && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (op_mode))
{
rtx inner = XEXP (x, 0);
unsigned HOST_WIDE_INT inner_mask;
}
/* Likewise for 0 or a single bit. */
- else if (SCALAR_INT_MODE_P (mode)
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ else if (HWI_COMPUTABLE_MODE_P (mode)
&& exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
{
*ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
return 0;
return (unsignedp
- ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ ? (HWI_COMPUTABLE_MODE_P (mode)
? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
- floor_log2 (nonzero_bits (x, mode)))
: 0)
case LSHIFTRT:
/* Similarly here but with zero bits. */
- if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ if (HWI_COMPUTABLE_MODE_P (mode)
&& (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
return mode;
code = LSHIFTRT;
if (((code == LSHIFTRT
- && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (shift_mode)
&& !(nonzero_bits (varop, shift_mode) >> count))
|| (code == ASHIFT
- && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (shift_mode)
&& !((nonzero_bits (varop, shift_mode) << count)
& GET_MODE_MASK (shift_mode))))
&& !side_effects_p (varop))
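
Both arms detect a shift whose result is necessarily zero: a logical right
shift that pushes every possibly-nonzero bit out the bottom, or a left shift
that pushes them all out the top. For example, in QImode:

  nonzero_bits (varop, QImode) == 0x07:
    (lshiftrt:QI varop (const_int 3))  ==>  (const_int 0)

  nonzero_bits (varop, QImode) == 0xe0:
    (ashift:QI varop (const_int 3))    ==>  (const_int 0)

The side_effects_p check keeps the fold from deleting a varop such as a
volatile memory reference.
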
if (CONST_INT_P (XEXP (varop, 1))
&& INTVAL (XEXP (varop, 1)) >= 0
&& INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
- && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (result_mode)
+ && HWI_COMPUTABLE_MODE_P (mode)
&& !VECTOR_MODE_P (result_mode))
{
enum rtx_code first_code = GET_CODE (varop);
&& XEXP (varop, 1) == const0_rtx
&& GET_MODE (XEXP (varop, 0)) == result_mode
&& count == (GET_MODE_BITSIZE (result_mode) - 1)
- && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (result_mode)
&& STORE_FLAG_VALUE == -1
&& nonzero_bits (XEXP (varop, 0), result_mode) == 1
&& merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode,
}
else if ((code == ASHIFTRT || code == LSHIFTRT)
&& count < HOST_BITS_PER_WIDE_INT
- && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (result_mode)
&& 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
>> count)
&& 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
this shift are known to be zero for both inputs and if the type of
comparison is compatible with the shift. */
if (GET_CODE (op0) == GET_CODE (op1)
- && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
&& ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
|| ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
&& (code != GT && code != LT && code != GE && code != LE))
/* If this is a sign bit comparison and we can do arithmetic in
MODE, say that we will only be needing the sign bit of OP0. */
- if (sign_bit_comparison_p
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ if (sign_bit_comparison_p && HWI_COMPUTABLE_MODE_P (mode))
op0 = force_to_mode (op0, mode,
(unsigned HOST_WIDE_INT) 1
<< (GET_MODE_BITSIZE (mode) - 1),
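
For a sign-bit comparison such as (lt x (const_int 0)), only bit
GET_MODE_BITSIZE (mode) - 1 of op0 influences the result, so force_to_mode
is told that a single bit is live. That lets it strip operations which only
disturb lower bits, e.g.

  (lt (ior:SI x (const_int 5)) (const_int 0))   /* the IOR can't reach bit 31 */
    ==>  (lt x (const_int 0))
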
mode = GET_MODE (XEXP (op0, 0));
if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
&& (unsigned_comparison_p || equality_comparison_p)
- && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (mode)
&& ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
&& have_insn_for (COMPARE, mode))
{
&& subreg_lowpart_p (XEXP (op0, 0))))
&& CONST_INT_P (XEXP (op0, 1))
&& mode_width <= HOST_BITS_PER_WIDE_INT
- && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (tmode)
&& ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
&& (c1 & ~GET_MODE_MASK (tmode)) == 0
&& c1 != mask
|| (GET_CODE (shift_op) == XOR
&& CONST_INT_P (XEXP (shift_op, 1))
&& CONST_INT_P (shift_count)
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& (UINTVAL (XEXP (shift_op, 1))
== (unsigned HOST_WIDE_INT) 1
<< INTVAL (shift_count))))
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD
&& ! have_insn_for (COMPARE, mode))
for (tmode = GET_MODE_WIDER_MODE (mode);
- (tmode != VOIDmode
- && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
+ (tmode != VOIDmode && HWI_COMPUTABLE_MODE_P (tmode));
tmode = GET_MODE_WIDER_MODE (tmode))
if (have_insn_for (COMPARE, tmode))
{
a paradoxical subreg to extend OP0. */
if (op1 == const0_rtx && (code == LT || code == GE)
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (mode))
{
op0 = simplify_gen_binary (AND, tmode,
gen_lowpart (tmode, op0),
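
When the comparison is LT or GE against zero, only the narrow mode's sign
bit matters, so rather than genuinely extending op0, the code masks that bit
in the wider mode and tests the result against zero. A sketch, assuming
QImode widened to SImode:

  (lt (reg:QI x) (const_int 0))
    ==>  (ne (and:SI (subreg:SI (reg:QI x) 0) (const_int 0x80))
             (const_int 0))

The paradoxical subreg produced by gen_lowpart stands in for the extension,
and LT becomes NE (GE would become the corresponding EQ).
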
subst_low_luid = DF_INSN_LUID (insn);
rsp->last_set_mode = mode;
if (GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ && HWI_COMPUTABLE_MODE_P (mode))
mode = nonzero_bits_mode;
rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
rsp->last_set_sign_bit_copies