+2017-08-30 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * machmode.h (smallest_mode_for_size): Fix formatting.
+ (smallest_int_mode_for_size): New function.
+ * cfgexpand.c (expand_debug_expr): Use smallest_int_mode_for_size
+ instead of smallest_mode_for_size.
+ * combine.c (make_extraction): Likewise.
+ * config/arc/arc.c (arc_expand_movmem): Likewise.
+ * config/arm/arm.c (arm_expand_divmod_libfunc): Likewise.
+ * config/i386/i386.c (ix86_get_mask_mode): Likewise.
+ * config/s390/s390.c (s390_expand_insv): Likewise.
+ * config/sparc/sparc.c (assign_int_registers): Likewise.
+ * config/spu/spu.c (spu_function_value): Likewise.
+ (spu_function_arg): Likewise.
+ * coverage.c (get_gcov_type): Likewise.
+ (get_gcov_unsigned_t): Likewise.
+ * dse.c (find_shift_sequence): Likewise.
+ * expmed.c (store_bit_field_1): Likewise.
+ * expr.c (convert_move): Likewise.
+ (store_field): Likewise.
+ * internal-fn.c (expand_arith_overflow): Likewise.
+ * optabs-query.c (get_best_extraction_insn): Likewise.
+ * optabs.c (expand_twoval_binop_libfunc): Likewise.
+ * stor-layout.c (layout_type): Likewise.
+ (initialize_sizetypes): Likewise.
+ * targhooks.c (default_get_mask_mode): Likewise.
+ * tree-ssa-loop-manip.c (canonicalize_loop_ivs): Likewise.
+
2017-08-30 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
{
if (mode1 == VOIDmode)
/* Bitfield. */
- mode1 = smallest_mode_for_size (bitsize, MODE_INT);
+ mode1 = smallest_int_mode_for_size (bitsize);
if (bitpos >= BITS_PER_UNIT)
{
op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
{
/* Be careful not to go beyond the extracted object and maintain the
natural alignment of the memory. */
- wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
+ wanted_inner_mode = smallest_int_mode_for_size (len);
while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
> GET_MODE_BITSIZE (wanted_inner_mode))
wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode).require ();
while (piece > size)
piece >>= 1;
- mode = smallest_mode_for_size (piece * BITS_PER_UNIT, MODE_INT);
+ mode = smallest_int_mode_for_size (piece * BITS_PER_UNIT);
/* If we don't re-use temporaries, the scheduler gets carried away,
and the register pressure gets unnecessarily high. */
if (0 && tmpx[i] && GET_MODE (tmpx[i]) == mode)
if (mode == SImode)
gcc_assert (!TARGET_IDIV);
- machine_mode libval_mode = smallest_mode_for_size (2 * GET_MODE_BITSIZE (mode),
- MODE_INT);
+ scalar_int_mode libval_mode
+ = smallest_int_mode_for_size (2 * GET_MODE_BITSIZE (mode));
rtx libval = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
libval_mode, 2,
|| (TARGET_AVX512VL && (vector_size == 32 || vector_size == 16)))
{
if (elem_size == 4 || elem_size == 8 || TARGET_AVX512BW)
- return smallest_mode_for_size (nunits, MODE_INT);
+ return smallest_int_mode_for_size (nunits);
}
- machine_mode elem_mode
- = smallest_mode_for_size (elem_size * BITS_PER_UNIT, MODE_INT);
+ scalar_int_mode elem_mode
+ = smallest_int_mode_for_size (elem_size * BITS_PER_UNIT);
gcc_assert (elem_size * nunits == vector_size);
return true;
}
- smode = smallest_mode_for_size (bitsize, MODE_INT);
+ smode = smallest_int_mode_for_size (bitsize);
smode_bsize = GET_MODE_BITSIZE (smode);
mode_bsize = GET_MODE_BITSIZE (mode);
the latter case we may pick up unwanted bits. It's not a problem
at the moment but may wish to revisit. */
if (intoffset % BITS_PER_WORD != 0)
- mode = smallest_mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
- MODE_INT);
+ mode = smallest_int_mode_for_size (BITS_PER_WORD
+ - intoffset % BITS_PER_WORD);
else
mode = word_mode;
{
if (byte_size < 4)
byte_size = 4;
- smode =
- smallest_mode_for_size (byte_size * BITS_PER_UNIT, MODE_INT);
+ smode = smallest_int_mode_for_size (byte_size * BITS_PER_UNIT);
RTVEC_ELT (v, n) =
gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_REG (smode, FIRST_RETURN_REGNUM + n),
rtx gr_reg;
if (byte_size < 4)
byte_size = 4;
- smode = smallest_mode_for_size (byte_size * BITS_PER_UNIT, MODE_INT);
+ smode = smallest_int_mode_for_size (byte_size * BITS_PER_UNIT);
gr_reg = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_REG (smode, FIRST_ARG_REGNUM + *cum),
const0_rtx);
tree
get_gcov_type (void)
{
- machine_mode mode
- = smallest_mode_for_size (LONG_LONG_TYPE_SIZE > 32 ? 64 : 32, MODE_INT);
+ scalar_int_mode mode
+ = smallest_int_mode_for_size (LONG_LONG_TYPE_SIZE > 32 ? 64 : 32);
return lang_hooks.types.type_for_mode (mode, false);
}
static tree
get_gcov_unsigned_t (void)
{
- machine_mode mode = smallest_mode_for_size (32, MODE_INT);
+ scalar_int_mode mode = smallest_int_mode_for_size (32);
return lang_hooks.types.type_for_mode (mode, true);
}
\f
int shift, bool speed, bool require_cst)
{
machine_mode store_mode = GET_MODE (store_info->mem);
- machine_mode new_mode;
+ scalar_int_mode new_mode;
rtx read_reg = NULL;
/* Some machines like the x86 have shift insns for each size of
justify the value we want to read but is available in one insn on
the machine. */
- FOR_EACH_MODE_FROM (new_mode,
- smallest_mode_for_size (access_size * BITS_PER_UNIT,
- MODE_INT))
+ opt_scalar_int_mode new_mode_iter;
+ FOR_EACH_MODE_FROM (new_mode_iter,
+ smallest_int_mode_for_size (access_size * BITS_PER_UNIT))
{
rtx target, new_reg, new_lhs;
rtx_insn *shift_seq, *insn;
int cost;
+ new_mode = new_mode_iter.require ();
if (GET_MODE_BITSIZE (new_mode) > BITS_PER_WORD)
break;
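
(Not part of the patch: the dse.c hunk above replaces iteration over a bare machine_mode with an opt_scalar_int_mode iterator whose value must be extracted with require ().  Below is a standalone C++ sketch of that optional/require idiom, using std::optional and a hypothetical table of bit widths in place of the real mode iterators; it only models the pattern, not the GCC API.)

/* Standalone model of the opt/require idiom: an optional wrapper whose
   require () asserts a value is present before handing it out.  */
#include <cassert>
#include <cstdio>
#include <optional>
#include <vector>

struct opt_width
{
  std::optional<unsigned int> bits;	/* hypothetical stand-in for a mode */
  bool exists () const { return bits.has_value (); }
  unsigned int require () const { assert (bits.has_value ()); return *bits; }
};

int
main (void)
{
  /* Walk widths from a starting point, as FOR_EACH_MODE_FROM walks modes,
     and stop once the width exceeds a hypothetical word size.  */
  std::vector<unsigned int> widths = { 16, 32, 64, 128 };
  const unsigned int bits_per_word = 64;
  for (unsigned int w : widths)
    {
      opt_width iter { w };
      unsigned int width = iter.require ();	/* would abort if empty */
      if (width > bits_per_word)
	break;
      printf ("trying %u-bit width\n", width);
    }
  return 0;
}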
is not allowed. */
fieldmode = GET_MODE (value);
if (fieldmode == VOIDmode)
- fieldmode = smallest_mode_for_size (nwords * BITS_PER_WORD, MODE_INT);
+ fieldmode = smallest_int_mode_for_size (nwords * BITS_PER_WORD);
last = get_last_insn ();
for (i = 0; i < nwords; i++)
xImode for all MODE_PARTIAL_INT modes they use, but no others. */
if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
{
- machine_mode full_mode
- = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
+ scalar_int_mode full_mode
+ = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
!= CODE_FOR_nothing);
if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
{
rtx new_from;
- machine_mode full_mode
- = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
+ scalar_int_mode full_mode
+ = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
convert_optab ctab = unsignedp ? zext_optab : sext_optab;
enum insn_code icode;
if (GET_CODE (temp) == PARALLEL)
{
HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
- machine_mode temp_mode
- = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
+ scalar_int_mode temp_mode
+ = smallest_int_mode_for_size (size * BITS_PER_UNIT);
rtx temp_target = gen_reg_rtx (temp_mode);
emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
temp = temp_target;
word size, we need to load the value (see again store_bit_field). */
if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
{
- machine_mode temp_mode = smallest_mode_for_size (bitsize, MODE_INT);
+ scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
temp_mode, false, NULL);
}
if (orig_precres == precres && precop <= BITS_PER_WORD)
{
int p = MAX (min_precision, precop);
- machine_mode m = smallest_mode_for_size (p, MODE_INT);
+ scalar_int_mode m = smallest_int_mode_for_size (p);
tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
uns0_p && uns1_p
&& unsr_p);
if (orig_precres == precres)
{
int p = MAX (prec0, prec1);
- machine_mode m = smallest_mode_for_size (p, MODE_INT);
+ scalar_int_mode m = smallest_int_mode_for_size (p);
tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
uns0_p && uns1_p
&& unsr_p);
/* Similar to mode_for_size, but find the smallest mode for a given width. */
-extern machine_mode smallest_mode_for_size (unsigned int,
- enum mode_class);
+extern machine_mode smallest_mode_for_size (unsigned int, enum mode_class);
+/* Find the narrowest integer mode that contains at least SIZE bits.
+ Such a mode must exist. */
+
+inline scalar_int_mode
+smallest_int_mode_for_size (unsigned int size)
+{
+ return as_a <scalar_int_mode> (smallest_mode_for_size (size, MODE_INT));
+}
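
(Not part of the patch: a standalone C++ sketch of the contract documented above, using a hypothetical table of integer mode widths in place of the real mode machinery.  The function in the hunk itself only wraps smallest_mode_for_size (size, MODE_INT) and narrows the result with as_a <scalar_int_mode>.)

/* Model of "narrowest integer mode that contains at least SIZE bits".  */
#include <cassert>
#include <cstdio>

/* Hypothetical widths standing in for QImode, HImode, SImode, DImode,
   TImode.  */
static const unsigned int int_mode_bits[] = { 8, 16, 32, 64, 128 };

/* Return the narrowest width that holds at least SIZE bits; like the GCC
   helper, assume such a width must exist.  */
static unsigned int
model_smallest_int_mode_for_size (unsigned int size)
{
  for (unsigned int bits : int_mode_bits)
    if (bits >= size)
      return bits;
  assert (false && "no integer mode wide enough");
  return 0;
}

int
main (void)
{
  /* A 17-bit bit-field needs a 32-bit mode in this model.  */
  printf ("%u\n", model_smallest_int_mode_for_size (17));	/* 32 */
  printf ("%u\n", model_smallest_int_mode_for_size (8));	/* 8 */
  return 0;
}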
/* Return an integer mode of exactly the same size as the input mode. */
unsigned HOST_WIDE_INT struct_bits,
machine_mode field_mode)
{
- machine_mode mode = smallest_mode_for_size (struct_bits, MODE_INT);
- FOR_EACH_MODE_FROM (mode, mode)
+ opt_scalar_int_mode mode_iter;
+ FOR_EACH_MODE_FROM (mode_iter, smallest_int_mode_for_size (struct_bits))
{
+ scalar_int_mode mode = mode_iter.require ();
if (get_extraction_insn (insn, pattern, type, mode))
{
- FOR_EACH_MODE_FROM (mode, mode)
+ FOR_EACH_MODE_FROM (mode_iter, mode)
{
+ mode = mode_iter.require ();
if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (field_mode)
|| TRULY_NOOP_TRUNCATION_MODES_P (insn->field_mode,
field_mode))
/* The value returned by the library function will have twice as
many bits as the nominal MODE. */
- libval_mode = smallest_mode_for_size (2 * GET_MODE_BITSIZE (mode),
- MODE_INT);
+ libval_mode = smallest_int_mode_for_size (2 * GET_MODE_BITSIZE (mode));
start_sequence ();
libval = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
libval_mode, 2,
case BOOLEAN_TYPE:
case INTEGER_TYPE:
case ENUMERAL_TYPE:
- SET_TYPE_MODE (type,
- smallest_mode_for_size (TYPE_PRECISION (type), MODE_INT));
- TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
- /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
- TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
- break;
+ {
+ scalar_int_mode mode
+ = smallest_int_mode_for_size (TYPE_PRECISION (type));
+ SET_TYPE_MODE (type, mode);
+ TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
+ /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
+ TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
+ break;
+ }
case REAL_TYPE:
{
bprecision
= MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
- bprecision
- = GET_MODE_PRECISION (smallest_mode_for_size (bprecision, MODE_INT));
+ bprecision = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision));
if (bprecision > HOST_BITS_PER_DOUBLE_INT)
bprecision = HOST_BITS_PER_DOUBLE_INT;
TYPE_UNSIGNED (bitsizetype) = 1;
/* Now layout both types manually. */
- SET_TYPE_MODE (sizetype, smallest_mode_for_size (precision, MODE_INT));
+ scalar_int_mode mode = smallest_int_mode_for_size (precision);
+ SET_TYPE_MODE (sizetype, mode);
SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
TYPE_SIZE (sizetype) = bitsize_int (precision);
- TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (TYPE_MODE (sizetype)));
+ TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
- SET_TYPE_MODE (bitsizetype, smallest_mode_for_size (bprecision, MODE_INT));
+ mode = smallest_int_mode_for_size (bprecision);
+ SET_TYPE_MODE (bitsizetype, mode);
SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
- TYPE_SIZE_UNIT (bitsizetype)
- = size_int (GET_MODE_SIZE (TYPE_MODE (bitsizetype)));
+ TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);
/* Create the signed variants of *sizetype. */
default_get_mask_mode (unsigned nunits, unsigned vector_size)
{
unsigned elem_size = vector_size / nunits;
- machine_mode elem_mode
- = smallest_mode_for_size (elem_size * BITS_PER_UNIT, MODE_INT);
+ scalar_int_mode elem_mode
+ = smallest_int_mode_for_size (elem_size * BITS_PER_UNIT);
machine_mode vector_mode;
gcc_assert (elem_size * nunits == vector_size);
gcond *stmt;
edge exit = single_dom_exit (loop);
gimple_seq stmts;
- machine_mode mode;
bool unsigned_p = false;
for (psi = gsi_start_phis (loop->header);
precision = TYPE_PRECISION (type);
}
- mode = smallest_mode_for_size (precision, MODE_INT);
+ scalar_int_mode mode = smallest_int_mode_for_size (precision);
precision = GET_MODE_PRECISION (mode);
type = build_nonstandard_integer_type (precision, unsigned_p);