* cse.c: Use HOST_WIDE_INT_M1 instead of ~(HOST_WIDE_INT) 0.
* combine.c: Use HOST_WIDE_INT_M1U instead of
~(unsigned HOST_WIDE_INT) 0.
* double-int.h: Ditto.
* dse.c: Ditto.
* dwarf2asm.c: Ditto.
* expmed.c: Ditto.
* genmodes.c: Ditto.
* match.pd: Ditto.
* read-rtl.c: Ditto.
* tree-ssa-loop-ivopts.c: Ditto.
* tree-ssa-loop-prefetch.c: Ditto.
* tree-vect-generic.c: Ditto.
* tree-vect-patterns.c: Ditto.
* tree.c: Ditto.
From-SVN: r238529
+2016-07-20 Uros Bizjak <ubizjak@gmail.com>
+
+ * cse.c: Use HOST_WIDE_INT_M1 instead of ~(HOST_WIDE_INT) 0.
+ * combine.c: Use HOST_WIDE_INT_M1U instead of
+ ~(unsigned HOST_WIDE_INT) 0.
+ * double-int.h: Ditto.
+ * dse.c: Ditto.
+	* dwarf2asm.c: Ditto.
+ * expmed.c: Ditto.
+ * genmodes.c: Ditto.
+ * match.pd: Ditto.
+ * read-rtl.c: Ditto.
+ * tree-ssa-loop-ivopts.c: Ditto.
+ * tree-ssa-loop-prefetch.c: Ditto.
+ * tree-vect-generic.c: Ditto.
+ * tree-vect-patterns.c: Ditto.
+ * tree.c: Ditto.
+
2016-07-20 Georg-Johann Lay <avr@gjlay.de>
* gcc/config/avr.c (avr_legitimize_address) [AVR_TINY]: Force
}
/* Don't call nonzero_bits if it cannot change anything. */
- if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
+ if (rsp->nonzero_bits != HOST_WIDE_INT_M1U)
{
bits = nonzero_bits (src, nonzero_bits_mode);
if (reg_equal && bits)
if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
{
- src = force_to_mode (src, mode, ~(unsigned HOST_WIDE_INT) 0, 0);
+ src = force_to_mode (src, mode, HOST_WIDE_INT_M1U, 0);
SUBST (SET_SRC (x), src);
}
else
new_rtx = force_to_mode (inner, tmode,
len >= HOST_BITS_PER_WIDE_INT
- ? ~(unsigned HOST_WIDE_INT) 0
+ ? HOST_WIDE_INT_M1U
: (HOST_WIDE_INT_1U << len) - 1,
0);
inner = force_to_mode (inner, wanted_inner_mode,
pos_rtx
|| len + orig_pos >= HOST_BITS_PER_WIDE_INT
- ? ~(unsigned HOST_WIDE_INT) 0
+ ? HOST_WIDE_INT_M1U
: (((HOST_WIDE_INT_1U << len) - 1)
<< orig_pos),
0);
&& subreg_lowpart_p (x))
{
rtx newer
- = force_to_mode (tem, mode, ~(unsigned HOST_WIDE_INT) 0, 0);
+ = force_to_mode (tem, mode, HOST_WIDE_INT_M1U, 0);
/* If we have something other than a SUBREG, we might have
done an expansion, so rerun ourselves. */
do not know, we need to assume that all bits up to the highest-order
bit in MASK will be needed. This is how we form such a mask. */
if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
- fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
+ fuller_mask = HOST_WIDE_INT_M1U;
else
fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1))
- 1);
if (GET_MODE_PRECISION (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
{
- nonzero = ~(unsigned HOST_WIDE_INT) 0;
+ nonzero = HOST_WIDE_INT_M1U;
/* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
is the number of bits a full-width mask would have set.
dest);
src = force_to_mode (src, mode,
GET_MODE_PRECISION (mode) >= HOST_BITS_PER_WIDE_INT
- ? ~(unsigned HOST_WIDE_INT) 0
+ ? HOST_WIDE_INT_M1U
: (HOST_WIDE_INT_1U << len) - 1,
0);
else
shift = INTVAL (pos);
if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
- mask = ~(HOST_WIDE_INT) 0;
+ mask = HOST_WIDE_INT_M1;
else
mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
val = (val >> shift) & mask;
else
shift = INTVAL (pos);
if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
- mask = ~(HOST_WIDE_INT) 0;
+ mask = HOST_WIDE_INT_M1;
else
mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
val &= ~(mask << shift);
void dump_double_int (FILE *, double_int, bool);
-#define ALL_ONES (~((unsigned HOST_WIDE_INT) 0))
+#define ALL_ONES HOST_WIDE_INT_M1U
/* The operands of the following comparison functions must be processed
with double_int_ext, if their precision is less than
static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
- unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
+ unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
va_start (ap, comment);
if (size * 8 < HOST_BITS_PER_WIDE_INT)
- value &= ~(~(unsigned HOST_WIDE_INT) 0 << (size * 8));
+ value &= ~(HOST_WIDE_INT_M1U << (size * 8));
if (op)
{
int nbit = 3;
mask = (n == HOST_BITS_PER_WIDE_INT
- ? ~(unsigned HOST_WIDE_INT) 0
+ ? HOST_WIDE_INT_M1U
: (HOST_WIDE_INT_1U << n) - 1);
while (nbit < n)
|| size - 1 >= BITS_PER_WORD)
goto fail1;
- ml |= (~(unsigned HOST_WIDE_INT) 0) << (size - 1);
+ ml |= HOST_WIDE_INT_M1U << (size - 1);
mlr = gen_int_mode (ml, compute_mode);
extra_cost = (shift_cost (speed, compute_mode, post_shift)
+ shift_cost (speed, compute_mode, size - 1)
puts ("\
#define MODE_MASK(m) \\\n\
((m) >= HOST_BITS_PER_WIDE_INT) \\\n\
- ? ~(unsigned HOST_WIDE_INT) 0 \\\n\
+ ? HOST_WIDE_INT_M1U \\\n\
: (HOST_WIDE_INT_1U << (m)) - 1\n");
for_all_modes (c, m)
is all ones. */
}
}
- zerobits = ~(unsigned HOST_WIDE_INT) 0;
+ zerobits = HOST_WIDE_INT_M1U;
if (shiftc < prec)
{
zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
break;
}
(if (prec < HOST_BITS_PER_WIDE_INT
- || newmask == ~(unsigned HOST_WIDE_INT) 0)
+ || newmask == HOST_WIDE_INT_M1U)
(with
{ tree newmaskt = build_int_cst_type (TREE_TYPE (@2), newmask); }
(if (!tree_int_cst_equal (newmaskt, @2))
if (new_wide < tmp_wide)
{
/* Return INT_MAX equiv on overflow. */
- tmp_wide = (~(unsigned HOST_WIDE_INT) 0) >> 1;
+ tmp_wide = HOST_WIDE_INT_M1U >> 1;
break;
}
tmp_wide = new_wide;
}
bits = GET_MODE_BITSIZE (address_mode);
- mask = ~(~(unsigned HOST_WIDE_INT) 0 << (bits - 1) << 1);
+ mask = ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
offset &= mask;
if ((offset >> (bits - 1) & 1))
offset |= ~mask;
/* Assigned to PREFETCH_BEFORE when all iterations are to be prefetched. */
-#define PREFETCH_ALL (~(unsigned HOST_WIDE_INT) 0)
+#define PREFETCH_ALL HOST_WIDE_INT_M1U
/* Do not generate a prefetch if the unroll factor is significantly less
than what is required by the prefetch. This is to avoid redundant
if (ml >= HOST_WIDE_INT_1U << (prec - 1))
{
this_mode = 4 + (d < 0);
- ml |= (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
+ ml |= HOST_WIDE_INT_M1U << (prec - 1);
}
else
this_mode = 2 + (d < 0);
if (ml >= HOST_WIDE_INT_1U << (prec - 1))
{
add = true;
- ml |= (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
+ ml |= HOST_WIDE_INT_M1U << (prec - 1);
}
if (post_shift >= prec)
return NULL;
{
bool negative = ((val >> (bits - 1)) & 1) != 0;
if (negative)
- val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
+ val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
else
- val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
+ val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
}
return val;