+2017-07-05 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * combine.c (simplify_if_then_else): Remove "enum" before
+ "machine_mode".
+ * compare-elim.c (can_eliminate_compare): Likewise.
+ * config/aarch64/aarch64-builtins.c (aarch64_simd_builtin_std_type):
+ Likewise.
+ (aarch64_lookup_simd_builtin_type): Likewise.
+ (aarch64_simd_builtin_type): Likewise.
+ (aarch64_init_simd_builtin_types): Likewise.
+ (aarch64_simd_expand_args): Likewise.
+ * config/aarch64/aarch64-protos.h (aarch64_simd_attr_length_rglist):
+ Likewise.
+ (aarch64_reverse_mask): Likewise.
+ (aarch64_simd_emit_reg_reg_move): Likewise.
+ (aarch64_gen_adjusted_ldpstp): Likewise.
+ (aarch64_ccmp_mode_to_code): Likewise.
+ (aarch64_operands_ok_for_ldpstp): Likewise.
+ (aarch64_operands_adjust_ok_for_ldpstp): Likewise.
+ * config/aarch64/aarch64.c (aarch64_ira_change_pseudo_allocno_class):
+ Likewise.
+ (aarch64_min_divisions_for_recip_mul): Likewise.
+ (aarch64_reassociation_width): Likewise.
+ (aarch64_get_condition_code_1): Likewise.
+ (aarch64_simd_emit_reg_reg_move): Likewise.
+ (aarch64_simd_attr_length_rglist): Likewise.
+ (aarch64_reverse_mask): Likewise.
+ (aarch64_operands_ok_for_ldpstp): Likewise.
+ (aarch64_operands_adjust_ok_for_ldpstp): Likewise.
+ (aarch64_gen_adjusted_ldpstp): Likewise.
+ * config/aarch64/cortex-a57-fma-steering.c (fma_node::rename):
+ Likewise.
+ * config/arc/arc.c (legitimate_offset_address_p): Likewise.
+ * config/arm/arm-builtins.c (arm_simd_builtin_std_type): Likewise.
+ (arm_lookup_simd_builtin_type): Likewise.
+ (arm_simd_builtin_type): Likewise.
+ (arm_init_simd_builtin_types): Likewise.
+ (arm_expand_builtin_args): Likewise.
+ * config/arm/arm-protos.h (arm_expand_builtin): Likewise.
+ * config/ft32/ft32.c (ft32_libcall_value): Likewise.
+ (ft32_setup_incoming_varargs): Likewise.
+ (ft32_function_arg): Likewise.
+ (ft32_function_arg_advance): Likewise.
+ (ft32_pass_by_reference): Likewise.
+ (ft32_arg_partial_bytes): Likewise.
+ (ft32_valid_pointer_mode): Likewise.
+ (ft32_addr_space_pointer_mode): Likewise.
+ (ft32_addr_space_legitimate_address_p): Likewise.
+ * config/i386/i386-protos.h (ix86_operands_ok_for_move_multiple):
+ Likewise.
+ * config/i386/i386.c (ix86_setup_incoming_vararg_bounds): Likewise.
+ (ix86_emit_outlined_ms2sysv_restore): Likewise.
+ (iamcu_alignment): Likewise.
+ (canonicalize_vector_int_perm): Likewise.
+ (ix86_noce_conversion_profitable_p): Likewise.
+ (ix86_mpx_bound_mode): Likewise.
+ (ix86_operands_ok_for_move_multiple): Likewise.
+ * config/microblaze/microblaze-protos.h
+ (microblaze_expand_conditional_branch_reg): Likewise.
+ * config/microblaze/microblaze.c
+ (microblaze_expand_conditional_branch_reg): Likewise.
+ * config/powerpcspe/powerpcspe.c (rs6000_init_hard_regno_mode_ok):
+ Likewise.
+ (rs6000_reassociation_width): Likewise.
+ (rs6000_invalid_binary_op): Likewise.
+ (fusion_p9_p): Likewise.
+ (emit_fusion_p9_load): Likewise.
+ (emit_fusion_p9_store): Likewise.
+ * config/riscv/riscv-protos.h (riscv_regno_mode_ok_for_base_p):
+ Likewise.
+ (riscv_hard_regno_mode_ok_p): Likewise.
+ (riscv_address_insns): Likewise.
+ (riscv_split_symbol): Likewise.
+ (riscv_legitimize_move): Likewise.
+ (riscv_function_value): Likewise.
+ (riscv_hard_regno_nregs): Likewise.
+ (riscv_expand_builtin): Likewise.
+ * config/riscv/riscv.c (riscv_build_integer_1): Likewise.
+ (riscv_build_integer): Likewise.
+ (riscv_split_integer): Likewise.
+ (riscv_legitimate_constant_p): Likewise.
+ (riscv_cannot_force_const_mem): Likewise.
+ (riscv_regno_mode_ok_for_base_p): Likewise.
+ (riscv_valid_base_register_p): Likewise.
+ (riscv_valid_offset_p): Likewise.
+ (riscv_valid_lo_sum_p): Likewise.
+ (riscv_classify_address): Likewise.
+ (riscv_legitimate_address_p): Likewise.
+ (riscv_address_insns): Likewise.
+ (riscv_load_store_insns): Likewise.
+ (riscv_force_binary): Likewise.
+ (riscv_split_symbol): Likewise.
+ (riscv_force_address): Likewise.
+ (riscv_legitimize_address): Likewise.
+ (riscv_move_integer): Likewise.
+ (riscv_legitimize_const_move): Likewise.
+ (riscv_legitimize_move): Likewise.
+ (riscv_address_cost): Likewise.
+ (riscv_subword): Likewise.
+ (riscv_output_move): Likewise.
+ (riscv_canonicalize_int_order_test): Likewise.
+ (riscv_emit_int_order_test): Likewise.
+ (riscv_function_arg_boundary): Likewise.
+ (riscv_pass_mode_in_fpr_p): Likewise.
+ (riscv_pass_fpr_single): Likewise.
+ (riscv_pass_fpr_pair): Likewise.
+ (riscv_get_arg_info): Likewise.
+ (riscv_function_arg): Likewise.
+ (riscv_function_arg_advance): Likewise.
+ (riscv_arg_partial_bytes): Likewise.
+ (riscv_function_value): Likewise.
+ (riscv_pass_by_reference): Likewise.
+ (riscv_setup_incoming_varargs): Likewise.
+ (riscv_print_operand): Likewise.
+ (riscv_elf_select_rtx_section): Likewise.
+ (riscv_save_restore_reg): Likewise.
+ (riscv_for_each_saved_reg): Likewise.
+ (riscv_register_move_cost): Likewise.
+ (riscv_hard_regno_mode_ok_p): Likewise.
+ (riscv_hard_regno_nregs): Likewise.
+ (riscv_class_max_nregs): Likewise.
+ (riscv_memory_move_cost): Likewise.
+ * config/rl78/rl78-protos.h (rl78_split_movsi): Likewise.
+ * config/rl78/rl78.c (rl78_split_movsi): Likewise.
+ (rl78_addr_space_address_mode): Likewise.
+ * config/rs6000/rs6000-c.c (altivec_resolve_overloaded_builtin):
+ Likewise.
+ * config/rs6000/rs6000.c (rs6000_init_hard_regno_mode_ok): Likewise.
+ (rs6000_reassociation_width): Likewise.
+ (rs6000_invalid_binary_op): Likewise.
+ (fusion_p9_p): Likewise.
+ (emit_fusion_p9_load): Likewise.
+ (emit_fusion_p9_store): Likewise.
+ * config/visium/visium-protos.h (prepare_move_operands): Likewise.
+ (ok_for_simple_move_operands): Likewise.
+ (ok_for_simple_move_strict_operands): Likewise.
+ (ok_for_simple_arith_logic_operands): Likewise.
+ (visium_legitimize_reload_address): Likewise.
+ (visium_select_cc_mode): Likewise.
+ (output_cbranch): Likewise.
+ (visium_split_double_move): Likewise.
+ (visium_expand_copysign): Likewise.
+ (visium_expand_int_cstore): Likewise.
+ (visium_expand_fp_cstore): Likewise.
+ * config/visium/visium.c (visium_pass_by_reference): Likewise.
+ (visium_function_arg): Likewise.
+ (visium_function_arg_advance): Likewise.
+ (visium_libcall_value): Likewise.
+ (visium_setup_incoming_varargs): Likewise.
+ (visium_legitimate_constant_p): Likewise.
+ (visium_legitimate_address_p): Likewise.
+ (visium_legitimize_address): Likewise.
+ (visium_secondary_reload): Likewise.
+ (visium_register_move_cost): Likewise.
+ (visium_memory_move_cost): Likewise.
+ (prepare_move_operands): Likewise.
+ (ok_for_simple_move_operands): Likewise.
+ (ok_for_simple_move_strict_operands): Likewise.
+ (ok_for_simple_arith_logic_operands): Likewise.
+ (visium_function_value_1): Likewise.
+ (rtx_ok_for_offset_p): Likewise.
+ (visium_legitimize_reload_address): Likewise.
+ (visium_split_double_move): Likewise.
+ (visium_expand_copysign): Likewise.
+ (visium_expand_int_cstore): Likewise.
+ (visium_expand_fp_cstore): Likewise.
+ (visium_split_cstore): Likewise.
+ (visium_select_cc_mode): Likewise.
+ (visium_split_cbranch): Likewise.
+ (output_cbranch): Likewise.
+ (visium_print_operand_address): Likewise.
+ * expmed.c (flip_storage_order): Likewise.
+ * expmed.h (emit_cstore): Likewise.
+ (flip_storage_order): Likewise.
+ * genrecog.c (validate_pattern): Likewise.
+ * hsa-gen.c (gen_hsa_addr): Likewise.
+ * internal-fn.c (expand_arith_overflow): Likewise.
+ * ira-color.c (allocno_copy_cost_saving): Likewise.
+ * lra-assigns.c (find_hard_regno_for_1): Likewise.
+ * lra-constraints.c (prohibited_class_reg_set_mode_p): Likewise.
+ (process_invariant_for_inheritance): Likewise.
+ * lra-eliminations.c (move_plus_up): Likewise.
+ * omp-low.c (lower_oacc_reductions): Likewise.
+ * simplify-rtx.c (simplify_subreg): Likewise.
+ * target.def (TARGET_SETUP_INCOMING_VARARG_BOUNDS): Likewise.
+ (TARGET_CHKP_BOUND_MODE): Likewise.
+ * targhooks.c (default_chkp_bound_mode): Likewise.
+ (default_setup_incoming_vararg_bounds): Likewise.
+ * targhooks.h (default_chkp_bound_mode): Likewise.
+ (default_setup_incoming_vararg_bounds): Likewise.
+ * tree-ssa-math-opts.c (divmod_candidate_p): Likewise.
+ * tree-vect-loop.c (calc_vec_perm_mask_for_shift): Likewise.
+ (have_whole_vector_shift): Likewise.
+ * tree-vect-stmts.c (vectorizable_load): Likewise.
+ * doc/tm.texi: Regenerate.
+
2017-07-05 Georg-Johann Lay <avr@gjlay.de>
Graceful degrade if Binutils PR21472 is not available.
+2017-07-05 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * brig-c.h (brig_type_for_mode): Remove "enum" before "machine_mode".
+ * brig-lang.c (brig_langhook_type_for_mode): Likewise.
+
2017-07-04 Jakub Jelinek <jakub@redhat.com>
* brigfrontend/brig-function.cc: Include profile-count.h.
extern void brig_write_globals (void);
extern tree brig_type_for_size (unsigned int bits, int unsignedp);
-extern tree brig_type_for_mode (enum machine_mode, int unsignedp);
+extern tree brig_type_for_mode (machine_mode, int unsignedp);
/* Functions defined in the GCC interface called by the Brig frontend
proper. */
}
static tree
-brig_langhook_type_for_mode (enum machine_mode mode, int unsignedp)
+brig_langhook_type_for_mode (machine_mode mode, int unsignedp)
{
if (mode == TYPE_MODE (void_type_node))
return void_type_node;
&& (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
{
rtx val = XEXP (cond, 0);
- enum machine_mode val_mode = GET_MODE (val);
+ machine_mode val_mode = GET_MODE (val);
if (val_mode == mode)
return val;
else if (GET_MODE_PRECISION (val_mode) < GET_MODE_PRECISION (mode))
return false;
/* New mode must be compatible with the previous compare mode. */
- enum machine_mode new_mode
+ machine_mode new_mode
= targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);
if (new_mode == VOIDmode)
}
static tree
-aarch64_simd_builtin_std_type (enum machine_mode mode,
+aarch64_simd_builtin_std_type (machine_mode mode,
enum aarch64_type_qualifiers q)
{
#define QUAL_TYPE(M) \
}
static tree
-aarch64_lookup_simd_builtin_type (enum machine_mode mode,
+aarch64_lookup_simd_builtin_type (machine_mode mode,
enum aarch64_type_qualifiers q)
{
int i;
}
static tree
-aarch64_simd_builtin_type (enum machine_mode mode,
+aarch64_simd_builtin_type (machine_mode mode,
bool unsigned_p, bool poly_p)
{
if (poly_p)
for (i = 0; i < nelts; i++)
{
tree eltype = aarch64_simd_types[i].eltype;
- enum machine_mode mode = aarch64_simd_types[i].mode;
+ machine_mode mode = aarch64_simd_types[i].mode;
if (aarch64_simd_types[i].itype == NULL)
{
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
tree exp, builtin_simd_arg *args,
- enum machine_mode builtin_mode)
+ machine_mode builtin_mode)
{
rtx pat;
rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
else
{
tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
- enum machine_mode mode = insn_data[icode].operand[opc].mode;
+ machine_mode mode = insn_data[icode].operand[opc].mode;
op[opc] = expand_normal (arg);
switch (thisarg)
bool aarch64_zero_extend_const_eq (machine_mode, rtx, machine_mode, rtx);
bool aarch64_move_imm (HOST_WIDE_INT, machine_mode);
bool aarch64_mov_operand_p (rtx, machine_mode);
-int aarch64_simd_attr_length_rglist (enum machine_mode);
-rtx aarch64_reverse_mask (enum machine_mode);
+int aarch64_simd_attr_length_rglist (machine_mode);
+rtx aarch64_reverse_mask (machine_mode);
bool aarch64_offset_7bit_signed_scaled_p (machine_mode, HOST_WIDE_INT);
char *aarch64_output_scalar_simd_mov_immediate (rtx, machine_mode);
char *aarch64_output_simd_mov_immediate (rtx, machine_mode, unsigned);
/* Initialize builtins for SIMD intrinsics. */
void init_aarch64_simd_builtins (void);
-void aarch64_simd_emit_reg_reg_move (rtx *, enum machine_mode, unsigned int);
+void aarch64_simd_emit_reg_reg_move (rtx *, machine_mode, unsigned int);
/* Expand builtins for SIMD intrinsics. */
rtx aarch64_simd_expand_builtin (int, tree, rtx);
void aarch64_gen_atomic_ldop (enum rtx_code, rtx, rtx, rtx, rtx, rtx);
void aarch64_split_atomic_op (enum rtx_code, rtx, rtx, rtx, rtx, rtx, rtx);
-bool aarch64_gen_adjusted_ldpstp (rtx *, bool, enum machine_mode, RTX_CODE);
+bool aarch64_gen_adjusted_ldpstp (rtx *, bool, machine_mode, RTX_CODE);
#endif /* RTX_CODE */
void aarch64_init_builtins (void);
extern bool
aarch64_expand_vec_perm_const (rtx target, rtx op0, rtx op1, rtx sel);
void aarch64_atomic_assign_expand_fenv (tree *, tree *, tree *);
-int aarch64_ccmp_mode_to_code (enum machine_mode mode);
+int aarch64_ccmp_mode_to_code (machine_mode mode);
bool extract_base_offset_in_addr (rtx mem, rtx *base, rtx *offset);
-bool aarch64_operands_ok_for_ldpstp (rtx *, bool, enum machine_mode);
-bool aarch64_operands_adjust_ok_for_ldpstp (rtx *, bool, enum machine_mode);
+bool aarch64_operands_ok_for_ldpstp (rtx *, bool, machine_mode);
+bool aarch64_operands_adjust_ok_for_ldpstp (rtx *, bool, machine_mode);
extern void aarch64_asm_output_pool_epilogue (FILE *, const char *,
tree, HOST_WIDE_INT);
aarch64_ira_change_pseudo_allocno_class (int regno, reg_class_t allocno_class,
reg_class_t best_class)
{
- enum machine_mode mode;
+ machine_mode mode;
if (allocno_class != ALL_REGS)
return allocno_class;
}
static unsigned int
-aarch64_min_divisions_for_recip_mul (enum machine_mode mode)
+aarch64_min_divisions_for_recip_mul (machine_mode mode)
{
if (GET_MODE_UNIT_SIZE (mode) == 4)
return aarch64_tune_params.min_div_recip_mul_sf;
static int
aarch64_reassociation_width (unsigned opc ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (VECTOR_MODE_P (mode))
return aarch64_tune_params.vec_reassoc_width;
}
static int
-aarch64_get_condition_code_1 (enum machine_mode, enum rtx_code);
+aarch64_get_condition_code_1 (machine_mode, enum rtx_code);
int
aarch64_get_condition_code (rtx x)
}
static int
-aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code)
+aarch64_get_condition_code_1 (machine_mode mode, enum rtx_code comp_code)
{
switch (mode)
{
COUNT is the number of components into which the copy needs to be
decomposed. */
void
-aarch64_simd_emit_reg_reg_move (rtx *operands, enum machine_mode mode,
+aarch64_simd_emit_reg_reg_move (rtx *operands, machine_mode mode,
unsigned int count)
{
unsigned int i;
/* Compute and return the length of aarch64_simd_reglist<mode>, where <mode> is
one of VSTRUCT modes: OI, CI, or XI. */
int
-aarch64_simd_attr_length_rglist (enum machine_mode mode)
+aarch64_simd_attr_length_rglist (machine_mode mode)
{
return (GET_MODE_SIZE (mode) / UNITS_PER_VREG) * 4;
}
}
rtx
-aarch64_reverse_mask (enum machine_mode mode)
+aarch64_reverse_mask (machine_mode mode)
{
/* We have to reverse each vector because we dont have
a permuted load that can reverse-load according to ABI rules. */
bool
aarch64_operands_ok_for_ldpstp (rtx *operands, bool load,
- enum machine_mode mode)
+ machine_mode mode)
{
HOST_WIDE_INT offval_1, offval_2, msize;
enum reg_class rclass_1, rclass_2;
bool
aarch64_operands_adjust_ok_for_ldpstp (rtx *operands, bool load,
- enum machine_mode mode)
+ machine_mode mode)
{
enum reg_class rclass_1, rclass_2, rclass_3, rclass_4;
HOST_WIDE_INT offval_1, offval_2, offval_3, offval_4, msize;
bool
aarch64_gen_adjusted_ldpstp (rtx *operands, bool load,
- enum machine_mode mode, RTX_CODE code)
+ machine_mode mode, RTX_CODE code)
{
rtx base, offset, t1, t2;
rtx mem_1, mem_2, mem_3, mem_4;
{
rtx_insn *insn = this->m_insn;
HARD_REG_SET unavailable;
- enum machine_mode mode;
+ machine_mode mode;
int reg;
if (dump_file)
register, an immediate or an long immediate. */
static bool
-legitimate_offset_address_p (enum machine_mode mode, rtx x, bool index,
- bool strict)
+legitimate_offset_address_p (machine_mode mode, rtx x, bool index, bool strict)
{
if (GET_CODE (x) != PLUS)
return false;
}
static tree
-arm_simd_builtin_std_type (enum machine_mode mode,
+arm_simd_builtin_std_type (machine_mode mode,
enum arm_type_qualifiers q)
{
#define QUAL_TYPE(M) \
}
static tree
-arm_lookup_simd_builtin_type (enum machine_mode mode,
+arm_lookup_simd_builtin_type (machine_mode mode,
enum arm_type_qualifiers q)
{
int i;
}
static tree
-arm_simd_builtin_type (enum machine_mode mode,
- bool unsigned_p, bool poly_p)
+arm_simd_builtin_type (machine_mode mode, bool unsigned_p, bool poly_p)
{
if (poly_p)
return arm_lookup_simd_builtin_type (mode, qualifier_poly);
for (i = 0; i < nelts; i++)
{
tree eltype = arm_simd_types[i].eltype;
- enum machine_mode mode = arm_simd_types[i].mode;
+ machine_mode mode = arm_simd_types[i].mode;
if (arm_simd_types[i].itype == NULL)
arm_simd_types[i].itype =
gcc_assert (argc > 0);
if (CONST_INT_P (op[argc]))
{
- enum machine_mode vmode = mode[argc - 1];
+ machine_mode vmode = mode[argc - 1];
neon_lane_bounds (op[argc], 0, GET_MODE_NUNITS (vmode), exp);
}
/* If the lane index isn't a constant then the next
extern void arm_output_fn_unwind (FILE *, bool);
extern rtx arm_expand_builtin (tree exp, rtx target, rtx subtarget
- ATTRIBUTE_UNUSED, enum machine_mode mode
+ ATTRIBUTE_UNUSED, machine_mode mode
ATTRIBUTE_UNUSED, int ignore ATTRIBUTE_UNUSED);
extern tree arm_builtin_decl (unsigned code, bool initialize_p
ATTRIBUTE_UNUSED);
We always return values in register $r0 for ft32. */
static rtx
-ft32_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+ft32_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, FT32_R0);
}
static void
ft32_setup_incoming_varargs (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type ATTRIBUTE_UNUSED,
int *pretend_size, int no_rtl ATTRIBUTE_UNUSED)
{
NULL_RTX if there's no more space. */
static rtx
-ft32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+ft32_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
: (unsigned) int_size_in_bytes (TYPE))
static void
-ft32_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+ft32_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static bool
ft32_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
unsigned HOST_WIDE_INT size;
static int
ft32_arg_partial_bytes (cumulative_args_t cum_v,
- enum machine_mode mode, tree type, bool named)
+ machine_mode mode, tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes_left, size;
#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE ft32_valid_pointer_mode
static bool
-ft32_valid_pointer_mode (enum machine_mode mode)
+ft32_valid_pointer_mode (machine_mode mode)
{
if (mode == SImode)
return 1;
#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE ft32_addr_space_pointer_mode
-static enum machine_mode
+static machine_mode
ft32_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
return Pmode;
#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE ft32_addr_space_address_mode
-static enum machine_mode
+static machine_mode
ft32_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
return Pmode;
}
static bool
-ft32_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
- bool strict,
+ft32_addr_space_legitimate_address_p (machine_mode mode, rtx x, bool strict,
addr_space_t as ATTRIBUTE_UNUSED)
{
if (mode != BLKmode)
extern const char * ix86_output_call_insn (rtx_insn *insn, rtx call_op);
extern bool ix86_operands_ok_for_move_multiple (rtx *operands, bool load,
- enum machine_mode mode);
+ machine_mode mode);
#ifdef RTX_CODE
/* Target data for multipass lookahead scheduling.
static void
ix86_setup_incoming_vararg_bounds (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type,
int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
for (i = 0; i < ncregs; ++i)
{
const xlogue_layout::reginfo &r = xlogue.get_reginfo (i);
- enum machine_mode mode = SSE_REGNO_P (r.regno) ? V4SFmode : word_mode;
+ machine_mode mode = SSE_REGNO_P (r.regno) ? V4SFmode : word_mode;
rtx reg, frame_load;
reg = gen_rtx_REG (mode, r.regno);
static int
iamcu_alignment (tree type, int align)
{
- enum machine_mode mode;
+ machine_mode mode;
if (align < 32 || TYPE_USER_ALIGN (type))
return align;
struct expand_vec_perm_d *nd)
{
int i;
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
switch (d->vmode)
{
if (GET_CODE (SET_SRC (set)) != IF_THEN_ELSE)
continue;
rtx src = SET_SRC (set);
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
if (GET_MODE_CLASS (mode) != MODE_INT
&& GET_MODE_CLASS (mode) != MODE_FLOAT)
continue;
/* Return mode to be used for bounds or VOIDmode
if bounds are not supported. */
-static enum machine_mode
+static machine_mode
ix86_mpx_bound_mode ()
{
/* Do not support pointer checker if MPX
bool
ix86_operands_ok_for_move_multiple (rtx *operands, bool load,
- enum machine_mode mode)
+ machine_mode mode)
{
HOST_WIDE_INT offval_1, offval_2, msize;
rtx mem_1, mem_2, reg_1, reg_2, base_1, base_2, offset_1, offset_2;
extern bool microblaze_expand_block_move (rtx, rtx, rtx, rtx);
extern void microblaze_expand_divide (rtx *);
extern void microblaze_expand_conditional_branch (machine_mode, rtx *);
-extern void microblaze_expand_conditional_branch_reg (enum machine_mode, rtx *);
+extern void microblaze_expand_conditional_branch_reg (machine_mode, rtx *);
extern void microblaze_expand_conditional_branch_sf (rtx *);
extern int microblaze_can_use_return_insn (void);
extern void print_operand (FILE *, rtx, int);
}
void
-microblaze_expand_conditional_branch_reg (enum machine_mode mode,
- rtx operands[])
+microblaze_expand_conditional_branch_reg (machine_mode mode, rtx operands[])
{
enum rtx_code code = GET_CODE (operands[0]);
rtx cmp_op0 = operands[1];
CODE_FOR_fusion_gpr_di_df_store },
};
- enum machine_mode cur_pmode = Pmode;
+ machine_mode cur_pmode = Pmode;
size_t i;
for (i = 0; i < ARRAY_SIZE (addis_insns); i++)
{
- enum machine_mode xmode = addis_insns[i].mode;
+ machine_mode xmode = addis_insns[i].mode;
enum rs6000_reload_reg_type rtype = addis_insns[i].rtype;
if (addis_insns[i].pmode != cur_pmode)
static int
rs6000_reassociation_width (unsigned int opc ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
switch (rs6000_cpu)
{
const_tree type1,
const_tree type2)
{
- enum machine_mode mode1 = TYPE_MODE (type1);
- enum machine_mode mode2 = TYPE_MODE (type2);
+ machine_mode mode1 = TYPE_MODE (type1);
+ machine_mode mode2 = TYPE_MODE (type2);
/* For complex modes, use the inner type. */
if (COMPLEX_MODE_P (mode1))
rtx src) /* source (register or memory). */
{
rtx addr, mem, offset;
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
/* Validate arguments. */
if (!base_reg_operand (addis_reg, GET_MODE (addis_reg)))
const char *
emit_fusion_p9_load (rtx reg, rtx mem, rtx tmp_reg)
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
rtx hi;
rtx lo;
rtx addr;
const char *
emit_fusion_p9_store (rtx mem, rtx reg, rtx tmp_reg)
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
rtx hi;
rtx lo;
rtx addr;
/* Routines implemented in riscv.c. */
extern enum riscv_symbol_type riscv_classify_symbolic_expression (rtx);
extern bool riscv_symbolic_constant_p (rtx, enum riscv_symbol_type *);
-extern int riscv_regno_mode_ok_for_base_p (int, enum machine_mode, bool);
-extern bool riscv_hard_regno_mode_ok_p (unsigned int, enum machine_mode);
-extern int riscv_address_insns (rtx, enum machine_mode, bool);
+extern int riscv_regno_mode_ok_for_base_p (int, machine_mode, bool);
+extern bool riscv_hard_regno_mode_ok_p (unsigned int, machine_mode);
+extern int riscv_address_insns (rtx, machine_mode, bool);
extern int riscv_const_insns (rtx);
extern int riscv_split_const_insns (rtx);
extern int riscv_load_store_insns (rtx, rtx_insn *);
extern rtx riscv_emit_move (rtx, rtx);
-extern bool riscv_split_symbol (rtx, rtx, enum machine_mode, rtx *);
+extern bool riscv_split_symbol (rtx, rtx, machine_mode, rtx *);
extern bool riscv_split_symbol_type (enum riscv_symbol_type);
extern rtx riscv_unspec_address (rtx, enum riscv_symbol_type);
extern void riscv_move_integer (rtx, rtx, HOST_WIDE_INT);
-extern bool riscv_legitimize_move (enum machine_mode, rtx, rtx);
+extern bool riscv_legitimize_move (machine_mode, rtx, rtx);
extern rtx riscv_subword (rtx, bool);
extern bool riscv_split_64bit_move_p (rtx, rtx);
extern void riscv_split_doubleword_move (rtx, rtx);
extern void riscv_expand_prologue (void);
extern void riscv_expand_epilogue (bool);
extern bool riscv_can_use_return_insn (void);
-extern rtx riscv_function_value (const_tree, const_tree, enum machine_mode);
-extern unsigned int riscv_hard_regno_nregs (int, enum machine_mode);
+extern rtx riscv_function_value (const_tree, const_tree, machine_mode);
+extern unsigned int riscv_hard_regno_nregs (int, machine_mode);
/* Routines implemented in riscv-c.c. */
void riscv_cpu_cpp_builtins (cpp_reader *);
/* Routines implemented in riscv-builtins.c. */
extern void riscv_atomic_assign_expand_fenv (tree *, tree *, tree *);
-extern rtx riscv_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+extern rtx riscv_expand_builtin (tree, rtx, rtx, machine_mode, int);
extern tree riscv_builtin_decl (unsigned int, bool);
extern void riscv_init_builtins (void);
static int
riscv_build_integer_1 (struct riscv_integer_op codes[RISCV_MAX_INTEGER_OPS],
- HOST_WIDE_INT value, enum machine_mode mode)
+ HOST_WIDE_INT value, machine_mode mode)
{
HOST_WIDE_INT low_part = CONST_LOW_PART (value);
int cost = RISCV_MAX_INTEGER_OPS + 1, alt_cost;
static int
riscv_build_integer (struct riscv_integer_op *codes, HOST_WIDE_INT value,
- enum machine_mode mode)
+ machine_mode mode)
{
int cost = riscv_build_integer_1 (codes, value, mode);
/* Try to split a 64b integer into 32b parts, then reassemble. */
static rtx
-riscv_split_integer (HOST_WIDE_INT val, enum machine_mode mode)
+riscv_split_integer (HOST_WIDE_INT val, machine_mode mode)
{
unsigned HOST_WIDE_INT loval = sext_hwi (val, 32);
unsigned HOST_WIDE_INT hival = sext_hwi ((val - loval) >> 32, 32);
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-riscv_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+riscv_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return riscv_const_insns (x) > 0;
}
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-riscv_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+riscv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
enum riscv_symbol_type type;
rtx base, offset;
int
riscv_regno_mode_ok_for_base_p (int regno,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
bool strict_p)
{
if (!HARD_REGISTER_NUM_P (regno))
STRICT_P is true if REG_OK_STRICT is in effect. */
static bool
-riscv_valid_base_register_p (rtx x, enum machine_mode mode, bool strict_p)
+riscv_valid_base_register_p (rtx x, machine_mode mode, bool strict_p)
{
if (!strict_p && GET_CODE (x) == SUBREG)
x = SUBREG_REG (x);
can address a value of mode MODE. */
static bool
-riscv_valid_offset_p (rtx x, enum machine_mode mode)
+riscv_valid_offset_p (rtx x, machine_mode mode)
{
/* Check that X is a signed 12-bit number. */
if (!const_arith_operand (x, Pmode))
LO_SUM symbol has type SYM_TYPE. */
static bool
-riscv_valid_lo_sum_p (enum riscv_symbol_type sym_type, enum machine_mode mode)
+riscv_valid_lo_sum_p (enum riscv_symbol_type sym_type, machine_mode mode)
{
/* Check that symbols of type SYMBOL_TYPE can be used to access values
of mode MODE. */
static bool
riscv_classify_address (struct riscv_address_info *info, rtx x,
- enum machine_mode mode, bool strict_p)
+ machine_mode mode, bool strict_p)
{
switch (GET_CODE (x))
{
/* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-riscv_legitimate_address_p (enum machine_mode mode, rtx x, bool strict_p)
+riscv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
{
struct riscv_address_info addr;
enough. */
int
-riscv_address_insns (rtx x, enum machine_mode mode, bool might_split_p)
+riscv_address_insns (rtx x, machine_mode mode, bool might_split_p)
{
struct riscv_address_info addr;
int n = 1;
int
riscv_load_store_insns (rtx mem, rtx_insn *insn)
{
- enum machine_mode mode;
+ machine_mode mode;
bool might_split_p;
rtx set;
of mode MODE. Return that new register. */
static rtx
-riscv_force_binary (enum machine_mode mode, enum rtx_code code, rtx x, rtx y)
+riscv_force_binary (machine_mode mode, enum rtx_code code, rtx x, rtx y)
{
return riscv_emit_binary (code, gen_reg_rtx (mode), x, y);
}
is guaranteed to be a legitimate address for mode MODE. */
bool
-riscv_split_symbol (rtx temp, rtx addr, enum machine_mode mode, rtx *low_out)
+riscv_split_symbol (rtx temp, rtx addr, machine_mode mode, rtx *low_out)
{
enum riscv_symbol_type symbol_type;
/* If X is not a valid address for mode MODE, force it into a register. */
static rtx
-riscv_force_address (rtx x, enum machine_mode mode)
+riscv_force_address (rtx x, machine_mode mode)
{
if (!riscv_legitimate_address_p (mode, x, false))
x = force_reg (Pmode, x);
static rtx
riscv_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx addr;
riscv_move_integer (rtx temp, rtx dest, HOST_WIDE_INT value)
{
struct riscv_integer_op codes[RISCV_MAX_INTEGER_OPS];
- enum machine_mode mode;
+ machine_mode mode;
int i, num_ops;
rtx x;
move_operand. */
static void
-riscv_legitimize_const_move (enum machine_mode mode, rtx dest, rtx src)
+riscv_legitimize_const_move (machine_mode mode, rtx dest, rtx src)
{
rtx base, offset;
sequence that is valid. */
bool
-riscv_legitimize_move (enum machine_mode mode, rtx dest, rtx src)
+riscv_legitimize_move (machine_mode mode, rtx dest, rtx src)
{
if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
{
/* Implement TARGET_ADDRESS_COST. */
static int
-riscv_address_cost (rtx addr, enum machine_mode mode,
+riscv_address_cost (rtx addr, machine_mode mode,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
riscv_subword (rtx op, bool high_p)
{
unsigned int byte = high_p ? UNITS_PER_WORD : 0;
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
if (mode == VOIDmode)
mode = TARGET_64BIT ? TImode : DImode;
riscv_output_move (rtx dest, rtx src)
{
enum rtx_code dest_code, src_code;
- enum machine_mode mode;
+ machine_mode mode;
bool dbl_p;
dest_code = GET_CODE (dest);
static bool
riscv_canonicalize_int_order_test (enum rtx_code *code, rtx *cmp1,
- enum machine_mode mode)
+ machine_mode mode)
{
HOST_WIDE_INT plus_one;
riscv_emit_int_order_test (enum rtx_code code, bool *invert_ptr,
rtx target, rtx cmp0, rtx cmp1)
{
- enum machine_mode mode;
+ machine_mode mode;
/* First see if there is a RISCV instruction that can do this operation.
If not, try doing the same for the inverse operation. If that also
to STACK_BOUNDARY bits if the type requires it. */
static unsigned int
-riscv_function_arg_boundary (enum machine_mode mode, const_tree type)
+riscv_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int alignment;
floating-point registers, return the number of registers, else 0. */
static unsigned
-riscv_pass_mode_in_fpr_p (enum machine_mode mode)
+riscv_pass_mode_in_fpr_p (machine_mode mode)
{
if (GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_FP_ARG)
{
has mode BLKmode. */
static rtx
-riscv_pass_fpr_single (enum machine_mode type_mode, unsigned regno,
- enum machine_mode value_mode)
+riscv_pass_fpr_single (machine_mode type_mode, unsigned regno,
+ machine_mode value_mode)
{
rtx x = gen_rtx_REG (value_mode, regno);
second value. */
static rtx
-riscv_pass_fpr_pair (enum machine_mode mode, unsigned regno1,
- enum machine_mode mode1, HOST_WIDE_INT offset1,
- unsigned regno2, enum machine_mode mode2,
+riscv_pass_fpr_pair (machine_mode mode, unsigned regno1,
+ machine_mode mode1, HOST_WIDE_INT offset1,
+ unsigned regno2, machine_mode mode2,
HOST_WIDE_INT offset2)
{
return gen_rtx_PARALLEL
static rtx
riscv_get_arg_info (struct riscv_arg_info *info, const CUMULATIVE_ARGS *cum,
- enum machine_mode mode, const_tree type, bool named,
+ machine_mode mode, const_tree type, bool named,
bool return_p)
{
unsigned num_bytes, num_words;
/* Implement TARGET_FUNCTION_ARG. */
static rtx
-riscv_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+riscv_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Implement TARGET_FUNCTION_ARG_ADVANCE. */
static void
-riscv_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+riscv_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static int
riscv_arg_partial_bytes (cumulative_args_t cum,
- enum machine_mode mode, tree type, bool named)
+ machine_mode mode, tree type, bool named)
{
struct riscv_arg_info arg;
VALTYPE is null and MODE is the mode of the return value. */
rtx
-riscv_function_value (const_tree type, const_tree func, enum machine_mode mode)
+riscv_function_value (const_tree type, const_tree func, machine_mode mode)
{
struct riscv_arg_info info;
CUMULATIVE_ARGS args;
/* Implement TARGET_PASS_BY_REFERENCE. */
static bool
-riscv_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
+riscv_pass_by_reference (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
HOST_WIDE_INT size = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
/* Implement TARGET_SETUP_INCOMING_VARARGS. */
static void
-riscv_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+riscv_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
static void
riscv_print_operand (FILE *file, rtx op, int letter)
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
enum rtx_code code = GET_CODE (op);
switch (letter)
/* Return a section for X, handling small data. */
static section *
-riscv_elf_select_rtx_section (enum machine_mode mode, rtx x,
+riscv_elf_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
section *s = default_elf_select_rtx_section (mode, x, align);
stack pointer. */
static void
-riscv_save_restore_reg (enum machine_mode mode, int regno,
+riscv_save_restore_reg (machine_mode mode, int regno,
HOST_WIDE_INT offset, riscv_save_restore_fn fn)
{
rtx mem;
for (int regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
if (BITSET_P (cfun->machine->frame.fmask, regno - FP_REG_FIRST))
{
- enum machine_mode mode = TARGET_DOUBLE_FLOAT ? DFmode : SFmode;
+ machine_mode mode = TARGET_DOUBLE_FLOAT ? DFmode : SFmode;
riscv_save_restore_reg (mode, regno, offset, fn);
offset -= GET_MODE_SIZE (mode);
/* Implement TARGET_REGISTER_MOVE_COST. */
static int
-riscv_register_move_cost (enum machine_mode mode,
+riscv_register_move_cost (machine_mode mode,
reg_class_t from, reg_class_t to)
{
return SECONDARY_MEMORY_NEEDED (from, to, mode) ? 8 : 2;
/* Return true if register REGNO can store a value of mode MODE. */
bool
-riscv_hard_regno_mode_ok_p (unsigned int regno, enum machine_mode mode)
+riscv_hard_regno_mode_ok_p (unsigned int regno, machine_mode mode)
{
unsigned int nregs = riscv_hard_regno_nregs (regno, mode);
/* Implement HARD_REGNO_NREGS. */
unsigned int
-riscv_hard_regno_nregs (int regno, enum machine_mode mode)
+riscv_hard_regno_nregs (int regno, machine_mode mode)
{
if (FP_REG_P (regno))
return (GET_MODE_SIZE (mode) + UNITS_PER_FP_REG - 1) / UNITS_PER_FP_REG;
/* Implement CLASS_MAX_NREGS. */
static unsigned char
-riscv_class_max_nregs (reg_class_t rclass, enum machine_mode mode)
+riscv_class_max_nregs (reg_class_t rclass, machine_mode mode)
{
if (reg_class_subset_p (FP_REGS, rclass))
return riscv_hard_regno_nregs (FP_REG_FIRST, mode);
/* Implement TARGET_MEMORY_MOVE_COST. */
static int
-riscv_memory_move_cost (enum machine_mode mode, reg_class_t rclass, bool in)
+riscv_memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
{
return (tune_info->memory_cost
+ memory_move_secondary_cost (mode, rclass, in));
void rl78_emit_eh_epilogue (rtx);
void rl78_expand_compare (rtx *);
void rl78_expand_movsi (rtx *);
-void rl78_split_movsi (rtx *, enum machine_mode);
+void rl78_split_movsi (rtx *, machine_mode);
int rl78_force_nonfar_2 (rtx *, rtx (*gen)(rtx,rtx));
int rl78_force_nonfar_3 (rtx *, rtx (*gen)(rtx,rtx,rtx));
void rl78_expand_eh_epilogue (rtx);
/* Generate code to move an SImode value. */
void
-rl78_split_movsi (rtx *operands, enum machine_mode omode)
+rl78_split_movsi (rtx *operands, machine_mode omode)
{
rtx op00, op02, op10, op12;
#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
-static enum machine_mode
+static machine_mode
rl78_addr_space_address_mode (addr_space_t addrspace)
{
switch (addrspace)
if (fcode == P6_OV_BUILTIN_CMPB)
{
int overloaded_code;
- int arg1_mode = TYPE_MODE (types[0]);
- int arg2_mode = TYPE_MODE (types[1]);
+ machine_mode arg1_mode = TYPE_MODE (types[0]);
+ machine_mode arg2_mode = TYPE_MODE (types[1]);
if (nargs != 2)
{
CODE_FOR_fusion_gpr_di_df_store },
};
- enum machine_mode cur_pmode = Pmode;
+ machine_mode cur_pmode = Pmode;
size_t i;
for (i = 0; i < ARRAY_SIZE (addis_insns); i++)
{
- enum machine_mode xmode = addis_insns[i].mode;
+ machine_mode xmode = addis_insns[i].mode;
enum rs6000_reload_reg_type rtype = addis_insns[i].rtype;
if (addis_insns[i].pmode != cur_pmode)
static int
rs6000_reassociation_width (unsigned int opc ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
switch (rs6000_cpu)
{
const_tree type1,
const_tree type2)
{
- enum machine_mode mode1 = TYPE_MODE (type1);
- enum machine_mode mode2 = TYPE_MODE (type2);
+ machine_mode mode1 = TYPE_MODE (type1);
+ machine_mode mode2 = TYPE_MODE (type2);
/* For complex modes, use the inner type. */
if (COMPLEX_MODE_P (mode1))
rtx src) /* source (register or memory). */
{
rtx addr, mem, offset;
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
/* Validate arguments. */
if (!base_reg_operand (addis_reg, GET_MODE (addis_reg)))
const char *
emit_fusion_p9_load (rtx reg, rtx mem, rtx tmp_reg)
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
rtx hi;
rtx lo;
rtx addr;
const char *
emit_fusion_p9_store (rtx mem, rtx reg, rtx tmp_reg)
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
rtx hi;
rtx lo;
rtx addr;
extern int visium_hard_regno_rename_ok (unsigned int, unsigned int);
extern int visium_initial_elimination_offset (int from, int to);
#ifdef RTX_CODE
-extern void prepare_move_operands (rtx *, enum machine_mode);
-extern bool ok_for_simple_move_operands (rtx *, enum machine_mode);
-extern bool ok_for_simple_move_strict_operands (rtx *, enum machine_mode);
-extern bool ok_for_simple_arith_logic_operands (rtx *, enum machine_mode);
+extern void prepare_move_operands (rtx *, machine_mode);
+extern bool ok_for_simple_move_operands (rtx *, machine_mode);
+extern bool ok_for_simple_move_strict_operands (rtx *, machine_mode);
+extern bool ok_for_simple_arith_logic_operands (rtx *, machine_mode);
extern void visium_initialize_trampoline (rtx, rtx, rtx);
extern int empty_delay_slot (rtx_insn *);
extern int gr5_hazard_bypass_p (rtx_insn *, rtx_insn *);
extern rtx visium_return_addr_rtx (int, rtx);
extern rtx visium_eh_return_handler_rtx (void);
extern rtx visium_dynamic_chain_address (rtx);
-extern rtx visium_legitimize_reload_address (rtx, enum machine_mode, int, int,
+extern rtx visium_legitimize_reload_address (rtx, machine_mode, int, int,
int);
-extern enum machine_mode visium_select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode visium_select_cc_mode (enum rtx_code, rtx, rtx);
extern void visium_split_cbranch (enum rtx_code, rtx, rtx, rtx);
extern const char *output_ubranch (rtx, rtx_insn *);
-extern const char *output_cbranch (rtx, enum rtx_code, enum machine_mode, int,
+extern const char *output_cbranch (rtx, enum rtx_code, machine_mode, int,
rtx_insn *);
-extern void visium_split_double_move (rtx *, enum machine_mode);
+extern void visium_split_double_move (rtx *, machine_mode);
extern void visium_split_double_add (enum rtx_code, rtx, rtx, rtx);
-extern void visium_expand_copysign (rtx *, enum machine_mode);
-extern void visium_expand_int_cstore (rtx *, enum machine_mode);
-extern void visium_expand_fp_cstore (rtx *, enum machine_mode);
+extern void visium_expand_copysign (rtx *, machine_mode);
+extern void visium_expand_int_cstore (rtx *, machine_mode);
+extern void visium_expand_fp_cstore (rtx *, machine_mode);
extern void visium_split_cstore (enum rtx_code, rtx, rtx,
enum rtx_code, rtx, rtx);
extern int visium_expand_block_move (rtx *);
/* Target hooks and TARGET_INITIALIZER */
-static bool visium_pass_by_reference (cumulative_args_t, enum machine_mode,
+static bool visium_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx visium_function_arg (cumulative_args_t, enum machine_mode,
+static rtx visium_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void visium_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void visium_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
static bool visium_return_in_memory (const_tree, const_tree fntype);
static rtx visium_function_value (const_tree, const_tree fn_decl_or_type,
bool);
-static rtx visium_libcall_value (enum machine_mode, const_rtx);
+static rtx visium_libcall_value (machine_mode, const_rtx);
static void visium_setup_incoming_varargs (cumulative_args_t,
- enum machine_mode,
+ machine_mode,
tree, int *, int);
static void visium_va_start (tree valist, rtx nextarg);
vec<const char *> &,
vec<rtx> &, HARD_REG_SET &);
-static bool visium_legitimate_constant_p (enum machine_mode, rtx);
+static bool visium_legitimate_constant_p (machine_mode, rtx);
-static bool visium_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool visium_legitimate_address_p (machine_mode, rtx, bool);
static bool visium_print_operand_punct_valid_p (unsigned char);
static void visium_print_operand (FILE *, rtx, int);
static void visium_conditional_register_usage (void);
-static rtx visium_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx visium_legitimize_address (rtx, rtx, machine_mode);
static reg_class_t visium_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
secondary_reload_info *);
static bool visium_class_likely_spilled_p (reg_class_t);
static int visium_adjust_cost (rtx_insn *, int, rtx_insn *, int, unsigned int);
-static int visium_register_move_cost (enum machine_mode, reg_class_t,
+static int visium_register_move_cost (machine_mode, reg_class_t,
reg_class_t);
-static int visium_memory_move_cost (enum machine_mode, reg_class_t, bool);
+static int visium_memory_move_cost (machine_mode, reg_class_t, bool);
static bool visium_rtx_costs (rtx, machine_mode, int, int, int *, bool);
X is guaranteed to satisfy the CONSTANT_P predicate. */
static bool
-visium_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+visium_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
return true;
/* Prepare operands for a move define_expand in MODE. */
void
-prepare_move_operands (rtx *operands, enum machine_mode mode)
+prepare_move_operands (rtx *operands, machine_mode mode)
{
/* If the output is not a register, the input must be. */
if (GET_CODE (operands[0]) == MEM && !reg_or_0_operand (operands[1], mode))
/* Return true if the operands are valid for a simple move insn. */
bool
-ok_for_simple_move_operands (rtx *operands, enum machine_mode mode)
+ok_for_simple_move_operands (rtx *operands, machine_mode mode)
{
/* One of the operands must be a register. */
if (!register_operand (operands[0], mode)
/* Return true if the operands are valid for a simple move strict insn. */
bool
-ok_for_simple_move_strict_operands (rtx *operands, enum machine_mode mode)
+ok_for_simple_move_strict_operands (rtx *operands, machine_mode mode)
{
/* Once the flags are exposed, no simple moves between integer registers.
Note that, in QImode only, a zero source counts as an integer register
insn. */
bool
-ok_for_simple_arith_logic_operands (rtx *, enum machine_mode)
+ok_for_simple_arith_logic_operands (rtx *, machine_mode)
{
/* Once the flags are exposed, no simple arithmetic or logical operations
between integer registers. */
static bool
visium_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type,
bool named ATTRIBUTE_UNUSED)
{
in general registers. */
static rtx
-visium_function_arg (cumulative_args_t pcum_v, enum machine_mode mode,
+visium_function_arg (cumulative_args_t pcum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
static void
visium_function_arg_advance (cumulative_args_t pcum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named)
{
/* Define how scalar values are returned. */
static rtx
-visium_function_value_1 (enum machine_mode mode)
+visium_function_value_1 (machine_mode mode)
{
/* Scalar or complex single precision floating point values
are returned in floating register f1. */
be returned. */
static rtx
-visium_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+visium_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return visium_function_value_1 (mode);
}
static void
visium_setup_incoming_varargs (cumulative_args_t pcum_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type,
int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
address of a memory access in mode MODE. */
static bool
-rtx_ok_for_offset_p (enum machine_mode mode, rtx op)
+rtx_ok_for_offset_p (machine_mode mode, rtx op)
{
if (!CONST_INT_P (op) || INTVAL (op) < 0)
return false;
kind of register is required. */
static bool
-visium_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+visium_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
rtx base;
unsigned int regno;
static rtx
visium_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (GET_CODE (x) == PLUS
&& GET_CODE (XEXP (x, 1)) == CONST_INT
that need reloading are indicated by calling push_reload. */
rtx
-visium_legitimize_reload_address (rtx x, enum machine_mode mode, int opnum,
+visium_legitimize_reload_address (rtx x, machine_mode mode, int opnum,
int type, int ind ATTRIBUTE_UNUSED)
{
rtx newrtx, tem = NULL_RTX;
relative to that. */
static int
-visium_register_move_cost (enum machine_mode mode, reg_class_t from,
+visium_register_move_cost (machine_mode mode, reg_class_t from,
reg_class_t to)
{
const int numwords = (GET_MODE_SIZE (mode) <= UNITS_PER_WORD) ? 1 : 2;
visium_register_move_cost. */
static int
-visium_memory_move_cost (enum machine_mode mode,
+visium_memory_move_cost (machine_mode mode,
reg_class_t to ATTRIBUTE_UNUSED,
bool in)
{
/* Split a double move of OPERANDS in MODE. */
void
-visium_split_double_move (rtx *operands, enum machine_mode mode)
+visium_split_double_move (rtx *operands, machine_mode mode)
{
bool swap = false;
/* Expand a copysign of OPERANDS in MODE. */
void
-visium_expand_copysign (rtx *operands, enum machine_mode mode)
+visium_expand_copysign (rtx *operands, machine_mode mode)
{
rtx op0 = operands[0];
rtx op1 = operands[1];
generated code. */
void
-visium_expand_int_cstore (rtx *operands, enum machine_mode mode)
+visium_expand_int_cstore (rtx *operands, machine_mode mode)
{
enum rtx_code code = GET_CODE (operands[1]);
rtx op0 = operands[0], op1 = operands[2], op2 = operands[3], sltu;
void
visium_expand_fp_cstore (rtx *operands,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
enum rtx_code code = GET_CODE (operands[1]);
rtx op0 = operands[0], op1 = operands[2], op2 = operands[3], slt;
visium_split_cstore (enum rtx_code op_code, rtx op0, rtx op1,
enum rtx_code code, rtx op2, rtx op3)
{
- enum machine_mode cc_mode = visium_select_cc_mode (code, op2, op3);
+ machine_mode cc_mode = visium_select_cc_mode (code, op2, op3);
/* If a FP cstore was reversed, then it was originally UNGE/UNLE. */
if (cc_mode == CCFPEmode && (op_code == NEG || op_code == MINUS))
/* Given a comparison code (EQ, NE, etc.) and the operands of a COMPARE,
return the mode to be used for the comparison. */
-enum machine_mode
+machine_mode
visium_select_cc_mode (enum rtx_code code, rtx op0, rtx op1)
{
if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
void
visium_split_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx label)
{
- enum machine_mode cc_mode = visium_select_cc_mode (code, op0, op1);
+ machine_mode cc_mode = visium_select_cc_mode (code, op0, op1);
rtx flags = gen_rtx_REG (cc_mode, FLAGS_REGNUM);
rtx x = gen_rtx_COMPARE (cc_mode, op0, op1);
should reverse the sense of the comparison. INSN is the instruction. */
const char *
-output_cbranch (rtx label, enum rtx_code code, enum machine_mode cc_mode,
+output_cbranch (rtx label, enum rtx_code code, machine_mode cc_mode,
int reversed, rtx_insn *insn)
{
const char *cond;
whose address is ADDR. */
static void
-visium_print_operand_address (FILE *file, enum machine_mode mode, rtx addr)
+visium_print_operand_address (FILE *file, machine_mode mode, rtx addr)
{
switch (GET_CODE (addr))
{
static reg_class_t
visium_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x,
reg_class_t rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri ATTRIBUTE_UNUSED)
{
int regno = true_regnum (x);
@code{TARGET_FUNCTION_VALUE}.
@end deftypefn
-@deftypefn {Target Hook} void TARGET_SETUP_INCOMING_VARARG_BOUNDS (cumulative_args_t @var{args_so_far}, enum machine_mode @var{mode}, tree @var{type}, int *@var{pretend_args_size}, int @var{second_time})
+@deftypefn {Target Hook} void TARGET_SETUP_INCOMING_VARARG_BOUNDS (cumulative_args_t @var{args_so_far}, machine_mode @var{mode}, tree @var{type}, int *@var{pretend_args_size}, int @var{second_time})
Use it to store bounds for anonymous register arguments stored
into the stack. Arguments meaning is similar to
@code{TARGET_SETUP_INCOMING_VARARGS}.
@deftypefn {Target Hook} tree TARGET_CHKP_BOUND_TYPE (void)
Return type to be used for bounds
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_CHKP_BOUND_MODE (void)
+@deftypefn {Target Hook} machine_mode TARGET_CHKP_BOUND_MODE (void)
Return mode to be used for bounds.
@end deftypefn
@deftypefn {Target Hook} tree TARGET_CHKP_MAKE_BOUNDS_CONSTANT (HOST_WIDE_INT @var{lb}, HOST_WIDE_INT @var{ub})
useful if X is a CONST_INT. */
rtx
-flip_storage_order (enum machine_mode mode, rtx x)
+flip_storage_order (machine_mode mode, rtx x)
{
- enum machine_mode int_mode;
+ machine_mode int_mode;
rtx result;
if (mode == QImode)
extern int mult_by_coeff_cost (HOST_WIDE_INT, machine_mode, bool);
extern rtx emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
- enum machine_mode mode, enum machine_mode compare_mode,
+ machine_mode mode, machine_mode compare_mode,
int unsignedp, rtx x, rtx y, int normalizep,
- enum machine_mode target_mode);
+ machine_mode target_mode);
/* Arguments MODE, RTX: return an rtx for the negation of that value.
May emit insns. */
/* Arguments MODE, RTX: return an rtx for the flipping of that value.
May emit insns. */
-extern rtx flip_storage_order (enum machine_mode, rtx);
+extern rtx flip_storage_order (machine_mode, rtx);
/* Expand a logical AND operation. */
extern rtx expand_and (machine_mode, rtx, rtx, rtx);
case VEC_SELECT:
if (GET_MODE (pattern) != VOIDmode)
{
- enum machine_mode mode = GET_MODE (pattern);
- enum machine_mode imode = GET_MODE (XEXP (pattern, 0));
- enum machine_mode emode
+ machine_mode mode = GET_MODE (pattern);
+ machine_mode imode = GET_MODE (XEXP (pattern, 0));
+ machine_mode emode
= VECTOR_MODE_P (mode) ? GET_MODE_INNER (mode) : mode;
if (GET_CODE (XEXP (pattern, 1)) == PARALLEL)
{
if (handled_component_p (ref))
{
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep, preversep;
ref = get_inner_reference (ref, &bitsize, &bitpos, &varoffset, &mode,
/* The infinity precision result will always fit into result. */
rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
write_complex_part (target, const0_rtx, true);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
struct separate_ops ops;
ops.code = code;
ops.type = type;
if (orig_precres == precres && precop <= BITS_PER_WORD)
{
int p = MAX (min_precision, precop);
- enum machine_mode m = smallest_mode_for_size (p, MODE_INT);
+ machine_mode m = smallest_mode_for_size (p, MODE_INT);
tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
uns0_p && uns1_p
&& unsr_p);
if (orig_precres == precres)
{
int p = MAX (prec0, prec1);
- enum machine_mode m = smallest_mode_for_size (p, MODE_INT);
+ machine_mode m = smallest_mode_for_size (p, MODE_INT);
tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
uns0_p && uns1_p
&& unsr_p);
allocno_copy_cost_saving (ira_allocno_t allocno, int hard_regno)
{
int cost = 0;
- enum machine_mode allocno_mode = ALLOCNO_MODE (allocno);
+ machine_mode allocno_mode = ALLOCNO_MODE (allocno);
enum reg_class rclass;
ira_copy_t cp, next_cp;
+2017-07-05 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * dummy-frontend.c (jit_langhook_type_for_mode): Remove "enum" before
+ "machine_mode".
+
2017-04-24 David Malcolm <dmalcolm@redhat.com>
* docs/cp/topics/types.rst (gccjit::type::get_const): Remove
}
static tree
-jit_langhook_type_for_mode (enum machine_mode mode, int unsignedp)
+jit_langhook_type_for_mode (machine_mode mode, int unsignedp)
{
if (mode == TYPE_MODE (float_type_node))
return float_type_node;
}
else
{
- enum machine_mode biggest_conflict_mode
+ machine_mode biggest_conflict_mode
= lra_reg_info[conflict_regno].biggest_mode;
int biggest_conflict_nregs
= hard_regno_nregs[conflict_hr][biggest_conflict_mode];
static bool
prohibited_class_reg_set_mode_p (enum reg_class rclass,
HARD_REG_SET &set,
- enum machine_mode mode)
+ machine_mode mode)
{
HARD_REG_SET temp;
int insn_regno;
bool succ_p = false;
int dst_regno = REGNO (dst_reg);
- enum machine_mode dst_mode = GET_MODE (dst_reg);
+ machine_mode dst_mode = GET_MODE (dst_reg);
enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;
invariant_ptr = insert_invariant (invariant_rtx);
move_plus_up (rtx x)
{
rtx subreg_reg;
- enum machine_mode x_mode, subreg_reg_mode;
+ machine_mode x_mode, subreg_reg_mode;
if (GET_CODE (x) != SUBREG || !subreg_lowpart_p (x))
return x;
/* Determine position in reduction buffer, which may be used
by target. */
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (var));
unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
offset = (offset + align - 1) & ~(align - 1);
tree off = build_int_cst (sizetype, offset);
unsigned int part_size, final_offset;
rtx part, res;
- enum machine_mode part_mode = GET_MODE (XEXP (op, 0));
+ machine_mode part_mode = GET_MODE (XEXP (op, 0));
if (part_mode == VOIDmode)
part_mode = GET_MODE_INNER (GET_MODE (op));
part_size = GET_MODE_SIZE (part_mode);
DEFHOOK
(chkp_bound_mode,
"Return mode to be used for bounds.",
- enum machine_mode, (void),
+ machine_mode, (void),
default_chkp_bound_mode)
DEFHOOK
"Use it to store bounds for anonymous register arguments stored\n\
into the stack. Arguments meaning is similar to\n\
@code{TARGET_SETUP_INCOMING_VARARGS}.",
- void, (cumulative_args_t args_so_far, enum machine_mode mode, tree type,
+ void, (cumulative_args_t args_so_far, machine_mode mode, tree type,
int *pretend_args_size, int second_time),
default_setup_incoming_vararg_bounds)
return res;
}
-enum machine_mode
+machine_mode
default_chkp_bound_mode (void)
{
return VOIDmode;
void
default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED)
extern rtx default_load_returned_bounds (rtx);
extern void default_store_returned_bounds (rtx,rtx);
extern tree default_chkp_bound_type (void);
-extern enum machine_mode default_chkp_bound_mode (void);
+extern machine_mode default_chkp_bound_mode (void);
extern tree default_builtin_chkp_function (unsigned int);
extern rtx default_chkp_function_value_bounds (const_tree, const_tree, bool);
extern tree default_chkp_make_bounds_constant (HOST_WIDE_INT lb, HOST_WIDE_INT ub);
extern int default_chkp_initialize_bounds (tree var, tree lb, tree ub,
tree *stmts);
extern void default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED);
divmod_candidate_p (gassign *stmt)
{
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
optab divmod_optab, div_optab;
if (TYPE_UNSIGNED (type))
/* Writes into SEL a mask for a vec_perm, equivalent to a vec_shr by OFFSET
vector elements (not bits) for a vector of mode MODE. */
static void
-calc_vec_perm_mask_for_shift (enum machine_mode mode, unsigned int offset,
+calc_vec_perm_mask_for_shift (machine_mode mode, unsigned int offset,
unsigned char *sel)
{
unsigned int i, nelt = GET_MODE_NUNITS (mode);
MODE. This is the case if _either_ the platform handles vec_shr_optab, _or_
it supports vec_perm_const with masks for all necessary shift amounts. */
static bool
-have_whole_vector_shift (enum machine_mode mode)
+have_whole_vector_shift (machine_mode mode)
{
if (optab_handler (vec_shr_optab, mode) != CODE_FOR_nothing)
return true;
to a larger load. */
unsigned lsize
= group_size * TYPE_PRECISION (TREE_TYPE (vectype));
- enum machine_mode elmode = mode_for_size (lsize, MODE_INT, 0);
- enum machine_mode vmode = mode_for_vector (elmode,
- nunits / group_size);
+ machine_mode elmode = mode_for_size (lsize, MODE_INT, 0);
+ machine_mode vmode = mode_for_vector (elmode,
+ nunits / group_size);
/* If we can't construct such a vector fall back to
element loads of the original vector type. */
if (VECTOR_MODE_P (vmode)