+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * addresses.h, alias.c, asan.c, auto-inc-dec.c, bt-load.c, builtins.c,
+ builtins.h, caller-save.c, calls.c, calls.h, cfgexpand.c, cfgloop.h,
+ cfgrtl.c, combine.c, compare-elim.c, config/aarch64/aarch64-builtins.c,
+ config/aarch64/aarch64-protos.h, config/aarch64/aarch64-simd.md,
+ config/aarch64/aarch64.c, config/aarch64/aarch64.h,
+ config/aarch64/aarch64.md, config/alpha/alpha-protos.h,
+ config/alpha/alpha.c, config/arc/arc-protos.h, config/arc/arc.c,
+ config/arc/arc.h, config/arc/predicates.md,
+ config/arm/aarch-common-protos.h, config/arm/aarch-common.c,
+ config/arm/arm-protos.h, config/arm/arm.c, config/arm/arm.h,
+ config/arm/arm.md, config/arm/neon.md, config/arm/thumb2.md,
+ config/avr/avr-log.c, config/avr/avr-protos.h, config/avr/avr.c,
+ config/avr/avr.md, config/bfin/bfin-protos.h, config/bfin/bfin.c,
+ config/c6x/c6x-protos.h, config/c6x/c6x.c, config/c6x/c6x.md,
+ config/cr16/cr16-protos.h, config/cr16/cr16.c,
+ config/cris/cris-protos.h, config/cris/cris.c, config/cris/cris.md,
+ config/darwin-protos.h, config/darwin.c,
+ config/epiphany/epiphany-protos.h, config/epiphany/epiphany.c,
+ config/epiphany/epiphany.md, config/fr30/fr30.c,
+ config/frv/frv-protos.h, config/frv/frv.c, config/frv/predicates.md,
+ config/h8300/h8300-protos.h, config/h8300/h8300.c,
+ config/i386/i386-builtin-types.awk, config/i386/i386-protos.h,
+ config/i386/i386.c, config/i386/i386.md, config/i386/predicates.md,
+ config/i386/sse.md, config/i386/sync.md, config/ia64/ia64-protos.h,
+ config/ia64/ia64.c, config/iq2000/iq2000-protos.h,
+ config/iq2000/iq2000.c, config/iq2000/iq2000.md,
+ config/lm32/lm32-protos.h, config/lm32/lm32.c,
+ config/m32c/m32c-protos.h, config/m32c/m32c.c,
+ config/m32r/m32r-protos.h, config/m32r/m32r.c,
+ config/m68k/m68k-protos.h, config/m68k/m68k.c,
+ config/mcore/mcore-protos.h, config/mcore/mcore.c,
+ config/mcore/mcore.md, config/mep/mep-protos.h, config/mep/mep.c,
+ config/microblaze/microblaze-protos.h, config/microblaze/microblaze.c,
+ config/mips/mips-protos.h, config/mips/mips.c,
+ config/mmix/mmix-protos.h, config/mmix/mmix.c,
+ config/mn10300/mn10300-protos.h, config/mn10300/mn10300.c,
+ config/moxie/moxie.c, config/msp430/msp430-protos.h,
+ config/msp430/msp430.c, config/nds32/nds32-cost.c,
+ config/nds32/nds32-intrinsic.c, config/nds32/nds32-md-auxiliary.c,
+ config/nds32/nds32-protos.h, config/nds32/nds32.c,
+ config/nios2/nios2-protos.h, config/nios2/nios2.c,
+ config/pa/pa-protos.h, config/pa/pa.c, config/pdp11/pdp11-protos.h,
+ config/pdp11/pdp11.c, config/rl78/rl78-protos.h, config/rl78/rl78.c,
+ config/rs6000/altivec.md, config/rs6000/rs6000-c.c,
+ config/rs6000/rs6000-protos.h, config/rs6000/rs6000.c,
+ config/rs6000/rs6000.h, config/rx/rx-protos.h, config/rx/rx.c,
+ config/s390/predicates.md, config/s390/s390-protos.h,
+ config/s390/s390.c, config/s390/s390.h, config/s390/s390.md,
+ config/sh/predicates.md, config/sh/sh-protos.h, config/sh/sh.c,
+ config/sh/sh.md, config/sparc/predicates.md,
+ config/sparc/sparc-protos.h, config/sparc/sparc.c,
+ config/sparc/sparc.md, config/spu/spu-protos.h, config/spu/spu.c,
+ config/stormy16/stormy16-protos.h, config/stormy16/stormy16.c,
+ config/tilegx/tilegx-protos.h, config/tilegx/tilegx.c,
+ config/tilegx/tilegx.md, config/tilepro/tilepro-protos.h,
+ config/tilepro/tilepro.c, config/v850/v850-protos.h,
+ config/v850/v850.c, config/v850/v850.md, config/vax/vax-protos.h,
+ config/vax/vax.c, config/vms/vms-c.c, config/xtensa/xtensa-protos.h,
+ config/xtensa/xtensa.c, coverage.c, cprop.c, cse.c, cselib.c, cselib.h,
+ dbxout.c, ddg.c, df-problems.c, dfp.c, dfp.h, doc/md.texi,
+ doc/rtl.texi, doc/tm.texi, doc/tm.texi.in, dojump.c, dse.c,
+ dwarf2cfi.c, dwarf2out.c, dwarf2out.h, emit-rtl.c, emit-rtl.h,
+ except.c, explow.c, expmed.c, expmed.h, expr.c, expr.h, final.c,
+ fixed-value.c, fixed-value.h, fold-const.c, function.c, function.h,
+ fwprop.c, gcse.c, gengenrtl.c, genmodes.c, genopinit.c, genoutput.c,
+ genpreds.c, genrecog.c, gensupport.c, gimple-ssa-strength-reduction.c,
+ graphite-clast-to-gimple.c, haifa-sched.c, hooks.c, hooks.h, ifcvt.c,
+ internal-fn.c, ira-build.c, ira-color.c, ira-conflicts.c, ira-costs.c,
+ ira-emit.c, ira-int.h, ira-lives.c, ira.c, ira.h, jump.c, langhooks.h,
+ libfuncs.h, lists.c, loop-doloop.c, loop-invariant.c, loop-iv.c,
+ loop-unroll.c, lower-subreg.c, lower-subreg.h, lra-assigns.c,
+ lra-constraints.c, lra-eliminations.c, lra-int.h, lra-lives.c,
+ lra-spills.c, lra.c, lra.h, machmode.h, omp-low.c, optabs.c, optabs.h,
+ output.h, postreload.c, print-tree.c, read-rtl.c, real.c, real.h,
+ recog.c, recog.h, ree.c, reg-stack.c, regcprop.c, reginfo.c,
+ regrename.c, regs.h, reload.c, reload.h, reload1.c, rtl.c, rtl.h,
+ rtlanal.c, rtlhash.c, rtlhooks-def.h, rtlhooks.c, sched-deps.c,
+ sel-sched-dump.c, sel-sched-ir.c, sel-sched-ir.h, sel-sched.c,
+ simplify-rtx.c, stmt.c, stor-layout.c, stor-layout.h, target.def,
+ targhooks.c, targhooks.h, tree-affine.c, tree-call-cdce.c,
+ tree-complex.c, tree-data-ref.c, tree-dfa.c, tree-if-conv.c,
+ tree-inline.c, tree-outof-ssa.c, tree-scalar-evolution.c,
+ tree-ssa-address.c, tree-ssa-ccp.c, tree-ssa-loop-ivopts.c,
+ tree-ssa-loop-ivopts.h, tree-ssa-loop-manip.c,
+ tree-ssa-loop-prefetch.c, tree-ssa-math-opts.c, tree-ssa-reassoc.c,
+ tree-ssa-sccvn.c, tree-streamer-in.c, tree-switch-conversion.c,
+ tree-vect-data-refs.c, tree-vect-generic.c, tree-vect-loop.c,
+ tree-vect-patterns.c, tree-vect-slp.c, tree-vect-stmts.c,
+ tree-vrp.c, tree.c, tree.h, tsan.c, ubsan.c, valtrack.c,
+ var-tracking.c, varasm.c: Remove redundant enum from
+ machine_mode.
+ * gengtype.c (main): Treat machine_mode as a scalar typedef.
+ * genmodes.c (emit_insn_modes_h): Hide inline functions if
+ USED_FOR_TARGET.
+
2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
PR rtl-optimization/63340 (part 2)
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * gcc-interface/decl.c, gcc-interface/gigi.h, gcc-interface/misc.c,
+ gcc-interface/trans.c, gcc-interface/utils.c, gcc-interface/utils2.c:
+ Remove redundant enum from machine_mode.
+
2014-10-28 Andrew MacLeod <amacleod@redhat.com>
* gcc-interface/trans.c: Adjust include files.
/* True if we make a dummy type here. */
bool made_dummy = false;
/* The mode to be used for the pointer type. */
- enum machine_mode p_mode = mode_for_size (esize, MODE_INT, 0);
+ machine_mode p_mode = mode_for_size (esize, MODE_INT, 0);
/* The GCC type used for the designated type. */
tree gnu_desig_type = NULL_TREE;
unsigned int size
= TREE_INT_CST_LOW (TYPE_SIZE (gnu_return_type));
unsigned int i = BITS_PER_UNIT;
- enum machine_mode mode;
+ machine_mode mode;
while (i < size)
i <<= 1;
{
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
inner = get_inner_reference (inner, &bitsize, &bitpos, &offset,
by the smallest integral mode that's valid for pointers. */
if (TREE_CODE (gnu_type) == POINTER_TYPE || TYPE_IS_FAT_POINTER_P (gnu_type))
{
- enum machine_mode p_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ machine_mode p_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
while (!targetm.valid_pointer_mode (p_mode))
p_mode = GET_MODE_WIDER_MODE (p_mode);
type_size = bitsize_int (GET_MODE_BITSIZE (p_mode));
{
Node_Id gnat_error_point = gnat_entity;
Node_Id gnat_node;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int align;
tree size;
/* Return a data type that has machine mode MODE. UNSIGNEDP selects
an unsigned type; otherwise a signed type is returned. */
-extern tree gnat_type_for_mode (enum machine_mode mode, int unsignedp);
+extern tree gnat_type_for_mode (machine_mode mode, int unsignedp);
/* Emit debug info for all global variable declarations. */
extern void gnat_write_global_declarations (void);
for (iloop = 0; iloop < NUM_MACHINE_MODES; iloop++)
{
- enum machine_mode i = (enum machine_mode) iloop;
- enum machine_mode inner_mode = i;
+ machine_mode i = (machine_mode) iloop;
+ machine_mode inner_mode = i;
bool float_p = false;
bool complex_p = false;
bool vector_p = false;
int
fp_prec_to_size (int prec)
{
- enum machine_mode mode;
+ machine_mode mode;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
int
fp_size_to_prec (int size)
{
- enum machine_mode mode;
+ machine_mode mode;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
Node_Id gnat_expr = Expression (gnat_temp);
tree gnu_expr = gnat_to_gnu (gnat_expr);
int use_address;
- enum machine_mode mode;
+ machine_mode mode;
tree asm_constraint = NULL_TREE;
#ifdef ASM_COMMENT_START
char *comment;
tree gnu_field_bitpos;
tree gnu_field_offset;
tree gnu_inner;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
gnu_result_type = get_unpadded_type (Etype (gnat_node));
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
-static tree float_type_for_precision (int, enum machine_mode);
+static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);
may need to return the thin pointer. */
if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
{
- enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
+ machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
if (!targetm.valid_pointer_mode (p_mode))
p_mode = ptr_mode;
return
/* Likewise for floating-point types. */
static tree
-float_type_for_precision (int precision, enum machine_mode mode)
+float_type_for_precision (int precision, machine_mode mode)
{
tree t;
char type_name[20];
an unsigned type; otherwise a signed type is returned. */
tree
-gnat_type_for_mode (enum machine_mode mode, int unsignedp)
+gnat_type_for_mode (machine_mode mode, int unsignedp)
{
if (mode == BLKmode)
return NULL_TREE;
if (VECTOR_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
if (inner_type)
return build_vector_type_for_mode (inner_type, mode);
static bool
type_for_vector_element_p (tree type)
{
- enum machine_mode mode;
+ machine_mode mode;
if (!INTEGRAL_TYPE_P (type)
&& !SCALAR_FLOAT_TYPE_P (type)
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
tree offset, inner;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
#define GCC_ADDRESSES_H
static inline enum reg_class
-base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
+base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
enum rtx_code outer_code ATTRIBUTE_UNUSED,
enum rtx_code index_code ATTRIBUTE_UNUSED)
static inline bool
ok_for_base_p_1 (unsigned regno ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
enum rtx_code outer_code ATTRIBUTE_UNUSED,
enum rtx_code index_code ATTRIBUTE_UNUSED)
complete. Arguments as for the called function. */
static inline bool
-regno_ok_for_base_p (unsigned regno, enum machine_mode mode, addr_space_t as,
+regno_ok_for_base_p (unsigned regno, machine_mode mode, addr_space_t as,
enum rtx_code outer_code, enum rtx_code index_code)
{
if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
-static int base_alias_check (rtx, rtx, rtx, rtx, enum machine_mode,
- enum machine_mode);
+static int base_alias_check (rtx, rtx, rtx, rtx, machine_mode,
+ machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static int insert_subset_children (splay_tree_node, void*);
static alias_set_entry get_alias_set_entry (alias_set_type);
static tree decl_for_component_ref (tree);
static int write_dependence_p (const_rtx,
- const_rtx, enum machine_mode, rtx,
+ const_rtx, machine_mode, rtx,
bool, bool, bool);
static void memory_modified_1 (rtx, const_rtx, void *);
static int
base_alias_check (rtx x, rtx x_base, rtx y, rtx y_base,
- enum machine_mode x_mode, enum machine_mode y_mode)
+ machine_mode x_mode, machine_mode y_mode)
{
/* If the address itself has no known base see if a known equivalent
value has one. If either address still has no known base, nothing
Returns 1 if there is a true dependence, 0 otherwise. */
static int
-true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
+true_dependence_1 (const_rtx mem, machine_mode mem_mode, rtx mem_addr,
const_rtx x, rtx x_addr, bool mem_canonicalized)
{
rtx true_mem_addr;
/* True dependence: X is read after store in MEM takes place. */
int
-true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x)
+true_dependence (const_rtx mem, machine_mode mem_mode, const_rtx x)
{
return true_dependence_1 (mem, mem_mode, NULL_RTX,
x, NULL_RTX, /*mem_canonicalized=*/false);
this value prior to canonicalizing. */
int
-canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
+canon_true_dependence (const_rtx mem, machine_mode mem_mode, rtx mem_addr,
const_rtx x, rtx x_addr)
{
return true_dependence_1 (mem, mem_mode, mem_addr,
static int
write_dependence_p (const_rtx mem,
- const_rtx x, enum machine_mode x_mode, rtx x_addr,
+ const_rtx x, machine_mode x_mode, rtx x_addr,
bool mem_canonicalized, bool x_canonicalized, bool writep)
{
rtx mem_addr;
int
canon_anti_dependence (const_rtx mem, bool mem_canonicalized,
- const_rtx x, enum machine_mode x_mode, rtx x_addr)
+ const_rtx x, machine_mode x_mode, rtx x_addr)
{
return write_dependence_p (mem, x, x_mode, x_addr,
mem_canonicalized, /*x_canonicalized=*/true,
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep = 0, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
&mode, &unsignedp, &volatilep, false);
rtx_insn *mov_insn = NULL;
int regno;
rtx mem = *mem_insn.mem_loc;
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx new_mem;
int old_cost = 0;
int new_cost = 0;
/* The width of the mem being accessed. */
int size = GET_MODE_SIZE (GET_MODE (mem));
rtx_insn *last_insn = NULL;
- enum machine_mode reg_mode = GET_MODE (inc_reg);
+ machine_mode reg_mode = GET_MODE (inc_reg);
switch (inc_insn.form)
{
rtx src;
rtx btr_rtx;
rtx_insn *new_insn;
- enum machine_mode btr_mode;
+ machine_mode btr_mode;
btr_user user;
rtx set;
/* Non-zero if __builtin_constant_p should be folded right away. */
bool force_folding_builtin_constant_p;
-static rtx c_readstr (const char *, enum machine_mode);
+static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
-static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
+static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
-static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
-static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
+static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
+static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
-static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
+static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
- enum machine_mode, int);
+ machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
-static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
+static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
-static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
-static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
-static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
+static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
+static rtx expand_builtin_memset (tree, rtx, machine_mode);
+static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
-static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
+static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
-static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
+static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_strcspn (location_t, tree, tree);
static rtx expand_builtin_object_size (tree);
-static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
+static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
{
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
unsigned int align = BITS_PER_UNIT;
bool known_alignment = false;
GET_MODE_BITSIZE (MODE) bits from string constant STR. */
static rtx
-c_readstr (const char *str, enum machine_mode mode)
+c_readstr (const char *str, machine_mode mode)
{
HOST_WIDE_INT ch;
unsigned int i, j;
void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
- enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
+ machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
rtx stack_save;
rtx mem;
{
rtx fp, lab, stack;
rtx_insn *insn, *last;
- enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
+ machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* DRAP is needed for stack realign if longjmp is expanded to current
function */
static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
- enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
+ machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
rtx stack_save
= gen_rtx_MEM (sa_mode,
memory_address
static int size = -1;
int align;
unsigned int regno;
- enum machine_mode mode;
+ machine_mode mode;
/* The values computed by this function never change. */
if (size < 0)
{
static int size = -1;
int align, regno;
- enum machine_mode mode;
+ machine_mode mode;
/* The values computed by this function never change. */
if (size < 0)
result_vector (int savep, rtx result)
{
int regno, size, align, nelts;
- enum machine_mode mode;
+ machine_mode mode;
rtx reg, mem;
rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
{
rtx registers, tem;
int size, align, regno;
- enum machine_mode mode;
+ machine_mode mode;
rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
/* Create a block where the arg-pointer, structure value address,
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
int size, align, regno;
- enum machine_mode mode;
+ machine_mode mode;
rtx incoming_args, result, reg, dest, src;
rtx_call_insn *call_insn;
rtx old_stack_level = 0;
expand_builtin_return (rtx result)
{
int size, align, regno;
- enum machine_mode mode;
+ machine_mode mode;
rtx reg;
rtx_insn *call_fusage = 0;
rtx op0;
rtx_insn *insns;
tree fndecl = get_callee_fndecl (exp);
- enum machine_mode mode;
+ machine_mode mode;
bool errno_set = false;
bool try_widening = false;
tree arg;
int op1_type = REAL_TYPE;
tree fndecl = get_callee_fndecl (exp);
tree arg0, arg1;
- enum machine_mode mode;
+ machine_mode mode;
bool errno_set = true;
switch (DECL_FUNCTION_CODE (fndecl))
rtx_insn *insns;
tree fndecl = get_callee_fndecl (exp);
tree arg0, arg1, arg2;
- enum machine_mode mode;
+ machine_mode mode;
if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
rtx op0;
rtx_insn *insns;
tree fndecl = get_callee_fndecl (exp);
- enum machine_mode mode;
+ machine_mode mode;
tree arg;
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
{
bool errno_set = false;
optab builtin_optab = unknown_optab;
- enum machine_mode mode;
+ machine_mode mode;
switch (DECL_FUNCTION_CODE (fndecl))
{
enum insn_code icode = CODE_FOR_nothing;
rtx op0;
tree fndecl = get_callee_fndecl (exp);
- enum machine_mode mode;
+ machine_mode mode;
tree arg;
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
expand_builtin_sincos (tree exp)
{
rtx op0, op1, op2, target1, target2;
- enum machine_mode mode;
+ machine_mode mode;
tree arg, sinp, cosp;
int result;
location_t loc = EXPR_LOCATION (exp);
{
tree fndecl = get_callee_fndecl (exp);
tree arg, type;
- enum machine_mode mode;
+ machine_mode mode;
rtx op0, op1, op2;
location_t loc = EXPR_LOCATION (exp);
tree fndecl = get_callee_fndecl (exp);
enum built_in_function fallback_fn;
tree fallback_fndecl;
- enum machine_mode mode;
+ machine_mode mode;
tree arg;
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
rtx_insn *insns;
tree fndecl = get_callee_fndecl (exp);
tree arg;
- enum machine_mode mode;
+ machine_mode mode;
enum built_in_function fallback_fn = BUILT_IN_NONE;
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
{
tree arg0, arg1;
rtx op0, op1;
- enum machine_mode mode;
- enum machine_mode mode2;
+ machine_mode mode;
+ machine_mode mode2;
if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
static rtx
expand_builtin_strlen (tree exp, rtx target,
- enum machine_mode target_mode)
+ machine_mode target_mode)
{
if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
tree src = CALL_EXPR_ARG (exp, 0);
rtx src_reg;
rtx_insn *before_strlen;
- enum machine_mode insn_mode = target_mode;
+ machine_mode insn_mode = target_mode;
enum insn_code icode = CODE_FOR_nothing;
unsigned int align;
static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
- enum machine_mode mode)
+ machine_mode mode)
{
const char *str = (const char *) data;
stpcpy. */
static rtx
-expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
+expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
- rtx target, enum machine_mode mode, int endp)
+ rtx target, machine_mode mode, int endp)
{
/* If return value is ignored, transform mempcpy into memcpy. */
if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
mode MODE if that's convenient). */
static rtx
-expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
+expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
tree dst, src;
location_t loc = EXPR_LOCATION (exp);
rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
- enum machine_mode mode)
+ machine_mode mode)
{
const char *str = (const char *) data;
rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
const char *c = (const char *) data;
char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx target, coeff;
size_t size;
convenient). */
static rtx
-expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
+expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
if (!validate_arglist (exp,
POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
- rtx target, enum machine_mode mode, tree orig_exp)
+ rtx target, machine_mode mode, tree orig_exp)
{
tree fndecl, fn;
enum built_in_function fcode;
- enum machine_mode val_mode;
+ machine_mode val_mode;
char c;
unsigned int dest_align;
rtx dest_mem, dest_addr, len_rtx;
static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
- ATTRIBUTE_UNUSED enum machine_mode mode)
+ ATTRIBUTE_UNUSED machine_mode mode)
{
location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
- enum machine_mode insn_mode;
+ machine_mode insn_mode;
if (HAVE_cmpmemsi)
insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
/* Try to call cmpstrsi. */
if (HAVE_cmpstrsi)
{
- enum machine_mode insn_mode
+ machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
/* Make a place to write the result of the instruction. */
tree len;
rtx arg3_rtx;
- enum machine_mode insn_mode
+ machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
tree len1 = c_strlen (arg1, 1);
tree len2 = c_strlen (arg2, 1);
if (insn)
{
- enum machine_mode mode;
+ machine_mode mode;
emit_insn (insn);
/* Return the value in the proper mode for this function. */
static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
- ATTRIBUTE_UNUSED enum machine_mode mode)
+ ATTRIBUTE_UNUSED machine_mode mode)
{
location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
- enum machine_mode insn_mode
+ machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
len1 = c_strlen (arg1, 1);
SUBTARGET may be used as the target for computing one of EXP's operands. */
static rtx
-expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
+expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
rtx subtarget)
{
tree arg;
SUBTARGET may be used as the target for computing one of EXP's operands. */
static rtx
-expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
+expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
rtx subtarget, optab op_optab)
{
rtx op0;
static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
- enum machine_mode mode;
+ machine_mode mode;
tree arg;
rtx op0;
expand_builtin_signbit (tree exp, rtx target)
{
const struct real_format *fmt;
- enum machine_mode fmode, imode, rmode;
+ machine_mode fmode, imode, rmode;
tree arg;
int word, bitpos;
enum insn_code icode;
FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
group of builtins. This gives us log2 of the mode size. */
-static inline enum machine_mode
+static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
/* The size is not negotiable, so ask not to get BLKmode in return
for the builtin_sync operations. */
static rtx
-get_builtin_sync_mem (tree loc, enum machine_mode mode)
+get_builtin_sync_mem (tree loc, machine_mode mode)
{
rtx addr, mem;
MODE is the mode it should be in. */
static rtx
-expand_expr_force_mode (tree exp, enum machine_mode mode)
+expand_expr_force_mode (tree exp, machine_mode mode)
{
rtx val;
- enum machine_mode old_mode;
+ machine_mode old_mode;
val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
/* If VAL is promoted to a wider mode, convert it back to MODE. Take care
fetch_and_xxx form. */
static rtx
-expand_builtin_sync_operation (enum machine_mode mode, tree exp,
+expand_builtin_sync_operation (machine_mode mode, tree exp,
enum rtx_code code, bool after,
rtx target)
{
results; this is NOT optional if IS_BOOL is true. */
static rtx
-expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
+expand_builtin_compare_and_swap (machine_mode mode, tree exp,
bool is_bool, rtx target)
{
rtx old_val, new_val, mem;
the results. */
static rtx
-expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
+expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
rtx target)
{
rtx val, mem;
/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
static void
-expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
+expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
rtx mem;
TARGET is an optional place for us to store the results. */
static rtx
-expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
+expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
rtx val, mem;
enum memmodel model;
TARGET is an optional place for us to store the results. */
static rtx
-expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
+expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
rtx target)
{
rtx expect, desired, mem, oldval;
TARGET is an optional place for us to store the results. */
static rtx
-expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
+expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
rtx mem;
enum memmodel model;
TARGET is an optional place for us to store the results. */
static rtx
-expand_builtin_atomic_store (enum machine_mode mode, tree exp)
+expand_builtin_atomic_store (machine_mode mode, tree exp)
{
rtx mem, val;
enum memmodel model;
resolved to an instruction sequence. */
static rtx
-expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
+expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
enum rtx_code code, bool fetch_after,
bool ignore, enum built_in_function ext_call)
{
static rtx
expand_builtin_atomic_clear (tree exp)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx mem, ret;
enum memmodel model;
{
rtx mem;
enum memmodel model;
- enum machine_mode mode;
+ machine_mode mode;
mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
int size;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int mode_align, type_align;
if (TREE_CODE (arg0) != INTEGER_CST)
IGNORE is nonzero if the value is to be ignored. */
rtx
-expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
+expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
int ignore)
{
tree fndecl = get_callee_fndecl (exp);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
- enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
+ machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
int flags;
/* When ASan is enabled, we don't want to expand some memory/string
static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
- enum machine_mode mode;
+ machine_mode mode;
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
{
tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
arg, type, res, tmp;
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE r;
char buf[128];
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
rtx target ATTRIBUTE_UNUSED,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
return NULL_RTX;
mode MODE if that's convenient). */
static rtx
-expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
+expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
enum built_in_function fcode)
{
tree dest, src, len, size;
the register is not used for calling a function. If the machine
has register windows, this gives only the outbound registers.
INCOMING_REGNO gives the corresponding inbound register. */
- enum machine_mode x_apply_args_mode[FIRST_PSEUDO_REGISTER];
+ machine_mode x_apply_args_mode[FIRST_PSEUDO_REGISTER];
/* For each register that may be used for returning values, this gives
a mode used to copy the register's value. VOIDmode indicates the
register is not used for returning values. If the machine has
register windows, this gives only the outbound registers.
INCOMING_REGNO gives the corresponding inbound register. */
- enum machine_mode x_apply_result_mode[FIRST_PSEUDO_REGISTER];
+ machine_mode x_apply_result_mode[FIRST_PSEUDO_REGISTER];
};
extern struct target_builtins default_target_builtins;
extern void expand_builtin_setjmp_setup (rtx, rtx);
extern void expand_builtin_setjmp_receiver (rtx);
extern tree mathfn_built_in (tree, enum built_in_function fn);
-extern rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
-extern rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
+extern rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, machine_mode);
+extern rtx builtin_memset_read_str (void *, HOST_WIDE_INT, machine_mode);
extern rtx expand_builtin_saveregs (void);
extern tree std_build_builtin_va_list (void);
extern tree std_fn_abi_va_list (tree);
extern tree std_canonical_va_list_type (tree);
extern void std_expand_builtin_va_start (tree, rtx);
extern void expand_builtin_trap (void);
-extern rtx expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+extern rtx expand_builtin (tree, rtx, rtx, machine_mode, int);
extern enum built_in_function builtin_mathfn_code (const_tree);
extern tree fold_builtin_expect (location_t, tree, tree, tree);
extern tree fold_fma (location_t, tree, tree, tree, tree);
extern tree fold_builtin_call_array (location_t, tree, tree, int, tree *);
extern tree fold_builtin_n (location_t, tree, tree *, int, bool);
extern bool validate_gimple_arglist (const_gimple, ...);
-extern rtx default_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+extern rtx default_expand_builtin (tree, rtx, rtx, machine_mode, int);
extern bool fold_builtin_next_arg (tree, bool);
extern tree do_mpc_arg2 (tree, tree, tree, int, int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t));
extern tree fold_call_stmt (gimple, bool);
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * c-common.c, c-common.h, c-cppbuiltin.c, c-lex.c: Remove redundant
+ enum from machine_mode.
+
2014-10-28 Andrew MacLeod <amacleod@redhat.com>
* c-family/c-common.c: Adjust include files.
c_common_fixed_point_type_for_size (unsigned int ibit, unsigned int fbit,
int unsignedp, int satp)
{
- enum machine_mode mode;
+ machine_mode mode;
if (ibit == 0)
mode = unsignedp ? UQQmode : QQmode;
else
then UNSIGNEDP selects between saturating and nonsaturating types. */
tree
-c_common_type_for_mode (enum machine_mode mode, int unsignedp)
+c_common_type_for_mode (machine_mode mode, int unsignedp)
{
tree t;
int i;
if (COMPLEX_MODE_P (mode))
{
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
tree inner_type;
if (mode == TYPE_MODE (complex_float_type_node))
}
else if (VECTOR_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
tree inner_type = c_common_type_for_mode (inner_mode, unsignedp);
if (inner_type != NULL_TREE)
return build_vector_type_for_mode (inner_type, mode);
vector mode, but we can emulate with narrower modes. */
static int
-vector_mode_valid_p (enum machine_mode mode)
+vector_mode_valid_p (machine_mode mode)
{
enum mode_class mclass = GET_MODE_CLASS (mode);
- enum machine_mode innermode;
+ machine_mode innermode;
/* Doh! What's going on? */
if (mclass != MODE_VECTOR_INT
int j;
const char *p = IDENTIFIER_POINTER (ident);
int len = strlen (p);
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
tree typefm;
bool valid_mode;
for (j = 0; j < NUM_MACHINE_MODES; j++)
if (!strcmp (p, GET_MODE_NAME (j)))
{
- mode = (enum machine_mode) j;
+ mode = (machine_mode) j;
break;
}
if (POINTER_TYPE_P (type))
{
addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type));
- tree (*fn)(tree, enum machine_mode, bool);
+ tree (*fn)(tree, machine_mode, bool);
if (!targetm.addr_space.valid_pointer_mode (mode, as))
{
bool *no_add_attrs)
{
unsigned HOST_WIDE_INT vecsize, nunits;
- enum machine_mode orig_mode;
+ machine_mode orig_mode;
tree type = *node, new_type, size;
*no_add_attrs = true;
extern bool c_common_handle_option (size_t, const char *, int, int, location_t,
const struct cl_option_handlers *);
extern bool default_handle_c_option (size_t, const char *, int);
-extern tree c_common_type_for_mode (enum machine_mode, int);
+extern tree c_common_type_for_mode (machine_mode, int);
extern tree c_common_type_for_size (unsigned int, int);
extern tree c_common_fixed_point_type_for_size (unsigned int, unsigned int,
int, int);
point types. */
static bool
-mode_has_fma (enum machine_mode mode)
+mode_has_fma (machine_mode mode)
{
switch (mode)
{
if (flag_building_libgcc)
{
/* Properties of floating-point modes for libgcc2.c. */
- for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
{
const char *hex_str;
cpp_macro *macro;
- enum machine_mode mode;
+ machine_mode mode;
int digits;
const char *fp_suffix;
};
if (flags & CPP_N_WIDTH_MD)
{
char suffix;
- enum machine_mode mode;
+ machine_mode mode;
if ((flags & CPP_N_WIDTH_MD) == CPP_N_MD_W)
suffix = 'w';
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * c-decl.c, c-tree.h, c-typeck.c: Remove redundant enum from
+ machine_mode.
+
2014-10-28 Andrew MacLeod <amacleod@redhat.com>
* c-decl.c: Adjust include files.
/* Mode used to build pointers (VOIDmode means ptr_mode). */
-enum machine_mode c_default_pointer_mode = VOIDmode;
+machine_mode c_default_pointer_mode = VOIDmode;
/* If non-zero, implicit "omp declare target" attribute is added into the
attribute lists. */
{
addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
: TYPE_ADDR_SPACE (to_type);
- enum machine_mode pointer_mode;
+ machine_mode pointer_mode;
if (as != ADDR_SPACE_GENERIC || c_default_pointer_mode == VOIDmode)
pointer_mode = targetm.addr_space.pointer_mode (as);
/* Mode used to build pointers (VOIDmode means ptr_mode). */
-extern enum machine_mode c_default_pointer_mode;
+extern machine_mode c_default_pointer_mode;
/* In c-decl.c */
if (code1 == FIXED_POINT_TYPE || code2 == FIXED_POINT_TYPE)
{
unsigned int unsignedp = 0, satp = 0;
- enum machine_mode m1, m2;
+ machine_mode m1, m2;
unsigned int fbit1, ibit1, fbit2, ibit2, max_fbit, max_ibit;
m1 = TYPE_MODE (t1);
static HARD_REG_SET referenced_regs;
-typedef void refmarker_fn (rtx *loc, enum machine_mode mode, int hardregno,
+typedef void refmarker_fn (rtx *loc, machine_mode mode, int hardregno,
void *mark_arg);
-static int reg_save_code (int, enum machine_mode);
-static int reg_restore_code (int, enum machine_mode);
+static int reg_save_code (int, machine_mode);
+static int reg_restore_code (int, machine_mode);
struct saved_hard_reg;
static void initiate_saved_hard_regs (void);
static refmarker_fn mark_reg_as_referenced;
static refmarker_fn replace_reg_with_saved_mem;
static int insert_save (struct insn_chain *, int, int, HARD_REG_SET *,
- enum machine_mode *);
+ machine_mode *);
static int insert_restore (struct insn_chain *, int, int, int,
- enum machine_mode *);
+ machine_mode *);
static struct insn_chain *insert_one_insn (struct insn_chain *, int, int,
rtx);
static void add_stored_regs (rtx, const_rtx, void *);
/* Return the INSN_CODE used to save register REG in mode MODE. */
static int
-reg_save_code (int reg, enum machine_mode mode)
+reg_save_code (int reg, machine_mode mode)
{
bool ok;
if (cached_reg_save_code[reg][mode])
/* Return the INSN_CODE used to restore register REG in mode MODE. */
static int
-reg_restore_code (int reg, enum machine_mode mode)
+reg_restore_code (int reg, machine_mode mode)
{
if (cached_reg_restore_code[reg][mode])
return cached_reg_restore_code[reg][mode];
save_call_clobbered_regs (void)
{
struct insn_chain *chain, *next, *last = NULL;
- enum machine_mode save_mode [FIRST_PSEUDO_REGISTER];
+ machine_mode save_mode [FIRST_PSEUDO_REGISTER];
/* Computed in mark_set_regs, holds all registers set by the current
instruction. */
{
int r = reg_renumber[regno];
int nregs;
- enum machine_mode mode;
+ machine_mode mode;
if (r < 0 || regno_reg_rtx[regno] == cheap)
continue;
add_stored_regs (rtx reg, const_rtx setter, void *data)
{
int regno, endregno, i;
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
int offset = 0;
if (GET_CODE (setter) == CLOBBER)
static void
mark_reg_as_referenced (rtx *loc ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int hardregno,
void *arg ATTRIBUTE_UNUSED)
{
static void
replace_reg_with_saved_mem (rtx *loc,
- enum machine_mode mode,
+ machine_mode mode,
int regno,
void *arg)
{
unsigned int i, nregs = hard_regno_nregs [regno][mode];
rtx mem;
- enum machine_mode *save_mode = (enum machine_mode *)arg;
+ machine_mode *save_mode = (machine_mode *)arg;
for (i = 0; i < nregs; i++)
if (TEST_HARD_REG_BIT (hard_regs_saved, regno + i))
}
else
{
- enum machine_mode smode = save_mode[regno];
+ machine_mode smode = save_mode[regno];
gcc_assert (smode != VOIDmode);
if (hard_regno_nregs [regno][smode] > 1)
smode = mode_for_size (GET_MODE_SIZE (mode) / nregs,
static int
insert_restore (struct insn_chain *chain, int before_p, int regno,
- int maxrestore, enum machine_mode *save_mode)
+ int maxrestore, machine_mode *save_mode)
{
int i, k;
rtx pat = NULL_RTX;
static int
insert_save (struct insn_chain *chain, int before_p, int regno,
- HARD_REG_SET (*to_save), enum machine_mode *save_mode)
+ HARD_REG_SET (*to_save), machine_mode *save_mode)
{
int i;
unsigned int k;
/* Tree node for this argument. */
tree tree_value;
/* Mode for value; TYPE_MODE unless promoted. */
- enum machine_mode mode;
+ machine_mode mode;
/* Current RTL value for argument, or 0 if it isn't precomputed. */
rtx value;
/* Initially-compute RTL value for argument; only for const functions. */
static void load_register_parameters (struct arg_data *, int, rtx *, int,
int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
- enum machine_mode, int, va_list);
+ machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
if (stack_usage_map[low] != 0)
{
int num_to_save;
- enum machine_mode save_mode;
+ machine_mode save_mode;
int delta;
rtx addr;
rtx stack_area;
static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
- enum machine_mode save_mode = GET_MODE (save_area);
+ machine_mode save_mode = GET_MODE (save_area);
int delta;
rtx addr, stack_area;
{
tree type = TREE_TYPE (args[i].tree_value);
int unsignedp;
- enum machine_mode mode;
+ machine_mode mode;
/* Replace erroneous argument with constant zero. */
if (type == error_mark_node || !COMPLETE_TYPE_P (type))
for (i = 0; i < num_actuals; i++)
{
tree type;
- enum machine_mode mode;
+ machine_mode mode;
if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
continue;
rtx addr;
unsigned int align, boundary;
unsigned int units_on_stack = 0;
- enum machine_mode partial_mode = VOIDmode;
+ machine_mode partial_mode = VOIDmode;
/* Skip this parm if it will not be passed on the stack. */
if (! args[i].pass_on_stack
as specified by LEFT_P. Return true if some action was needed. */
bool
-shift_return_value (enum machine_mode mode, bool left_p, rtx value)
+shift_return_value (machine_mode mode, bool left_p, rtx value)
{
HOST_WIDE_INT shift;
return value. */
if (try_tail_call)
{
- enum machine_mode caller_mode, caller_promoted_mode;
- enum machine_mode callee_mode, callee_promoted_mode;
+ machine_mode caller_mode, caller_promoted_mode;
+ machine_mode callee_mode, callee_promoted_mode;
int caller_unsignedp, callee_unsignedp;
tree caller_res = DECL_RESULT (current_function_decl);
tree type = rettype;
int unsignedp = TYPE_UNSIGNED (type);
int offset = 0;
- enum machine_mode pmode;
+ machine_mode pmode;
/* Ensure we promote as expected, and get the new unsignedness. */
pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
for (i = 0; i < num_actuals; i++)
if (args[i].save_area)
{
- enum machine_mode save_mode = GET_MODE (args[i].save_area);
+ machine_mode save_mode = GET_MODE (args[i].save_area);
rtx stack_area
= gen_rtx_MEM (save_mode,
memory_address (save_mode,
static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, va_list p)
+ machine_mode outmode, int nargs, va_list p)
{
/* Total size in bytes of all the stack-parms scanned so far. */
struct args_size args_size;
struct arg
{
rtx value;
- enum machine_mode mode;
+ machine_mode mode;
rtx reg;
int partial;
struct locate_and_pad_arg_data locate;
for (; count < nargs; count++)
{
rtx val = va_arg (p, rtx);
- enum machine_mode mode = (enum machine_mode) va_arg (p, int);
+ machine_mode mode = (machine_mode) va_arg (p, int);
int unsigned_p = 0;
/* We cannot convert the arg value to the mode the library wants here;
are to be pushed. */
for (count = 0; count < nargs; count++, argnum--)
{
- enum machine_mode mode = argvec[argnum].mode;
+ machine_mode mode = argvec[argnum].mode;
rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
/* We need to make a save area. */
unsigned int size
= argvec[argnum].locate.size.constant * BITS_PER_UNIT;
- enum machine_mode save_mode
+ machine_mode save_mode
= mode_for_size (size, MODE_INT, 1);
rtx adr
= plus_constant (Pmode, argblock,
are to be pushed. */
for (count = 0; count < nargs; count++, argnum--)
{
- enum machine_mode mode = argvec[argnum].mode;
+ machine_mode mode = argvec[argnum].mode;
rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
for (count = 0; count < nargs; count++)
if (argvec[count].save_area)
{
- enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
+ machine_mode save_mode = GET_MODE (argvec[count].save_area);
rtx adr = plus_constant (Pmode, argblock,
argvec[count].locate.offset.constant);
rtx stack_area = gen_rtx_MEM (save_mode,
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...)
+ machine_mode outmode, int nargs, ...)
{
va_list p;
rtx
emit_library_call_value (rtx orgfun, rtx value,
enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...)
+ machine_mode outmode, int nargs, ...)
{
rtx result;
va_list p;
{
/* We need to make a save area. */
unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
- enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
+ machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
rtx stack_area = gen_rtx_MEM (save_mode, adr);
/* Nonzero if we do not know how to pass TYPE solely in registers. */
bool
-must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
+must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type)
{
if (!type)
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
bool
-must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
+must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
{
if (!type)
return false;
extern int setjmp_call_p (const_tree);
extern bool gimple_alloca_call_p (const_gimple);
extern bool alloca_call_p (const_tree);
-extern bool must_pass_in_stack_var_size (enum machine_mode, const_tree);
-extern bool must_pass_in_stack_var_size_or_pad (enum machine_mode, const_tree);
+extern bool must_pass_in_stack_var_size (machine_mode, const_tree);
+extern bool must_pass_in_stack_var_size_or_pad (machine_mode, const_tree);
#endif // GCC_CALLS_H
{
tree decl = SSAVAR (var);
tree type = TREE_TYPE (decl);
- enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
+ machine_mode reg_mode = promote_decl_mode (decl, NULL);
rtx x = gen_reg_rtx (reg_mode);
set_rtl (var, x);
static void
expand_one_error_var (tree var)
{
- enum machine_mode mode = DECL_MODE (var);
+ machine_mode mode = DECL_MODE (var);
rtx x;
if (mode == BLKmode)
rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
int *inout_opnum = XALLOCAVEC (int, noutputs);
rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
- enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
+ machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
int old_generating_concat_p = generating_concat_p;
rtx_code_label *fallthru_label = NULL;
tree funtype = TREE_TYPE (current_function_decl);
tree type = TREE_TYPE (decl);
int unsignedp = TYPE_UNSIGNED (type);
- enum machine_mode old_mode = DECL_MODE (decl);
- enum machine_mode mode;
+ machine_mode old_mode = DECL_MODE (decl);
+ machine_mode mode;
if (DECL_BY_REFERENCE (decl))
mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
else
/* Return the difference between the floor and the truncated result of
a signed division by OP1 with remainder MOD. */
static rtx
-floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
/* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
return gen_rtx_IF_THEN_ELSE
/* Return the difference between the ceil and the truncated result of
a signed division by OP1 with remainder MOD. */
static rtx
-ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
/* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
return gen_rtx_IF_THEN_ELSE
/* Return the difference between the ceil and the truncated result of
an unsigned division by OP1 with remainder MOD. */
static rtx
-ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
+ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
/* (mod != 0 ? 1 : 0) */
return gen_rtx_IF_THEN_ELSE
of a signed division by OP1 with remainder MOD. Halfway cases are
rounded away from zero, rather than to the nearest even number. */
static rtx
-round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
/* (abs (mod) >= abs (op1) - abs (mod)
? (op1 / mod > 0 ? 1 : -1)
are rounded away from zero, rather than to the nearest even
number. */
static rtx
-round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
/* (mod >= op1 - mod ? 1 : 0) */
return gen_rtx_IF_THEN_ELSE
any rtl. */
static rtx
-convert_debug_memory_address (enum machine_mode mode, rtx x,
+convert_debug_memory_address (machine_mode mode, rtx x,
addr_space_t as)
{
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
#ifndef POINTERS_EXTEND_UNSIGNED
gcc_assert (mode == Pmode
expand_debug_expr (tree exp)
{
rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- enum machine_mode inner_mode = VOIDmode;
+ machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ machine_mode inner_mode = VOIDmode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
addr_space_t as;
case IMAGPART_EXPR:
case VIEW_CONVERT_EXPR:
{
- enum machine_mode mode1;
+ machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int volatilep = 0;
if (offset)
{
- enum machine_mode addrmode, offmode;
+ machine_mode addrmode, offmode;
if (!MEM_P (op0))
return NULL;
if ((bitpos % BITS_PER_UNIT) == 0
&& bitsize == GET_MODE_BITSIZE (mode1))
{
- enum machine_mode opmode = GET_MODE (op0);
+ machine_mode opmode = GET_MODE (op0);
if (opmode == VOIDmode)
opmode = TYPE_MODE (TREE_TYPE (tem));
GET_MODE_INNER (mode)));
else
{
- enum machine_mode imode = GET_MODE_INNER (mode);
+ machine_mode imode = GET_MODE_INNER (mode);
rtx re, im;
if (MEM_P (op0))
}
else
{
- enum machine_mode ifmode = int_mode_for_mode (mode);
- enum machine_mode ihmode = int_mode_for_mode (imode);
+ machine_mode ifmode = int_mode_for_mode (mode);
+ machine_mode ihmode = int_mode_for_mode (imode);
rtx halfsize;
if (ifmode == BLKmode || ihmode == BLKmode)
return NULL;
expand_debug_source_expr (tree exp)
{
rtx op0 = NULL_RTX;
- enum machine_mode mode = VOIDmode, inner_mode;
+ machine_mode mode = VOIDmode, inner_mode;
switch (TREE_CODE (exp))
{
tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
rtx val;
rtx_insn *prev_insn, *insn2;
- enum machine_mode mode;
+ machine_mode mode;
if (value == NULL_TREE)
val = NULL_RTX;
tree value = gimple_assign_rhs_to_tree (def);
tree vexpr = make_node (DEBUG_EXPR_DECL);
rtx val;
- enum machine_mode mode;
+ machine_mode mode;
set_curr_insn_location (gimple_location (def));
tree var = gimple_debug_bind_get_var (stmt);
tree value;
rtx val;
- enum machine_mode mode;
+ machine_mode mode;
if (TREE_CODE (var) != DEBUG_EXPR_DECL
&& TREE_CODE (var) != LABEL_DECL
tree var = gimple_debug_source_bind_get_var (stmt);
tree value = gimple_debug_source_bind_get_value (stmt);
rtx val;
- enum machine_mode mode;
+ machine_mode mode;
last = get_last_insn ();
rtx delta, mult;
/* The mode it is extended to. */
- enum machine_mode extend_mode;
+ machine_mode extend_mode;
/* The mode the variable iterates in. */
- enum machine_mode mode;
+ machine_mode mode;
/* Whether the first iteration needs to be handled specially. */
unsigned first_special : 1;
bool signed_p;
/* The mode in that niter_expr should be computed. */
- enum machine_mode mode;
+ machine_mode mode;
/* The number of iterations of the loop. */
rtx niter_expr;
extern void iv_analysis_loop_init (struct loop *);
extern bool iv_analyze (rtx_insn *, rtx, struct rtx_iv *);
extern bool iv_analyze_result (rtx_insn *, rtx, struct rtx_iv *);
-extern bool iv_analyze_expr (rtx_insn *, rtx, enum machine_mode,
+extern bool iv_analyze_expr (rtx_insn *, rtx, machine_mode,
struct rtx_iv *);
extern rtx get_iv_value (struct rtx_iv *, rtx);
extern bool biv_p (rtx_insn *, rtx);
rtx op0 = XEXP ((rtx)comp_rtx, 0);
rtx op1 = XEXP ((rtx)comp_rtx, 1);
enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
- enum machine_mode mode;
+ machine_mode mode;
label = block_label (first_head);
/* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
-static enum machine_mode nonzero_bits_mode;
+static machine_mode nonzero_bits_mode;
/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
be safely used. It is zero while computing them and after combine has
{
struct undo *next;
enum undo_kind kind;
- union { rtx r; int i; enum machine_mode m; struct insn_link *l; } old_contents;
+ union { rtx r; int i; machine_mode m; struct insn_link *l; } old_contents;
union { rtx *r; int *i; struct insn_link **l; } where;
};
static int n_occurrences;
-static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
- enum machine_mode,
+static rtx reg_nonzero_bits_for_combine (const_rtx, machine_mode, const_rtx,
+ machine_mode,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT *);
-static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
- enum machine_mode,
+static rtx reg_num_sign_bit_copies_for_combine (const_rtx, machine_mode, const_rtx,
+ machine_mode,
unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx_insn *, bool);
static rtx subst (rtx, rtx, rtx, int, int, int);
-static rtx combine_simplify_rtx (rtx, enum machine_mode, int, int);
+static rtx combine_simplify_rtx (rtx, machine_mode, int, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
-static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
+static rtx make_extraction (machine_mode, rtx, HOST_WIDE_INT,
rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
-static rtx force_to_mode (rtx, enum machine_mode,
+static rtx force_to_mode (rtx, machine_mode,
unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
-static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
+static rtx simplify_and_const_int_1 (machine_mode, rtx,
unsigned HOST_WIDE_INT);
-static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
+static rtx simplify_and_const_int (rtx, machine_mode, rtx,
unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
- HOST_WIDE_INT, enum machine_mode, int *);
-static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
-static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
+ HOST_WIDE_INT, machine_mode, int *);
+static rtx simplify_shift_const_1 (enum rtx_code, machine_mode, rtx, int);
+static rtx simplify_shift_const (rtx, enum rtx_code, machine_mode, rtx,
int);
static int recog_for_combine (rtx *, rtx_insn *, rtx *);
-static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
-static enum rtx_code simplify_compare_const (enum rtx_code, enum machine_mode,
+static rtx gen_lowpart_for_combine (machine_mode, rtx);
+static enum rtx_code simplify_compare_const (enum rtx_code, machine_mode,
rtx, rtx *);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_promoted_value (rtx_insn *, rtx);
static bool unmentioned_reg_p (rtx, rtx);
static void record_truncated_values (rtx *, void *);
-static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
-static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);
+static bool reg_truncated_to_mode (machine_mode, const_rtx);
+static rtx gen_lowpart_or_truncate (machine_mode, rtx);
\f
/* It is not safe to use ordinary gen_lowpart in combine.
well. */
static void
-do_SUBST_MODE (rtx *into, enum machine_mode newval)
+do_SUBST_MODE (rtx *into, machine_mode newval)
{
struct undo *buf;
- enum machine_mode oldval = GET_MODE (*into);
+ machine_mode oldval = GET_MODE (*into);
if (oldval == newval)
return;
{
rtx x, reg = DECL_INCOMING_RTL (arg);
int uns1, uns3;
- enum machine_mode mode1, mode2, mode3, mode4;
+ machine_mode mode1, mode2, mode3, mode4;
/* Only continue if the incoming argument is in a register. */
if (!REG_P (reg))
/* Return TRUE if combine can reuse reg X in mode MODE.
ADDED_SETS is nonzero if the original set is still required. */
static bool
-can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
+can_change_dest_mode (rtx x, int added_sets, machine_mode mode)
{
unsigned int regno;
rtx *cc_use_loc = NULL;
rtx_insn *cc_use_insn = NULL;
rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
- enum machine_mode compare_mode, orig_compare_mode;
+ machine_mode compare_mode, orig_compare_mode;
enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
newpat = PATTERN (i3);
if (cc_use_loc)
{
#ifdef SELECT_CC_MODE
- enum machine_mode new_mode
+ machine_mode new_mode
= SELECT_CC_MODE (compare_code, op0, op1);
if (new_mode != orig_compare_mode
&& can_change_dest_mode (SET_DEST (newpat),
if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
{
- enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
+ machine_mode new_mode = GET_MODE (SET_DEST (newpat));
/* First try to split using the original register as a
scratch register. */
&& new_mode != VOIDmode
&& can_change_dest_mode (i2dest, added_sets_2, new_mode))
{
- enum machine_mode old_mode = GET_MODE (i2dest);
+ machine_mode old_mode = GET_MODE (i2dest);
rtx ni2dest;
if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
{
rtx newdest = i2dest;
enum rtx_code split_code = GET_CODE (*split);
- enum machine_mode split_mode = GET_MODE (*split);
+ machine_mode split_mode = GET_MODE (*split);
bool subst_done = false;
newi2pat = NULL_RTX;
&& ! side_effects_p (SET_SRC (newpat)))
{
rtx setsrc = SET_SRC (newpat);
- enum machine_mode mode = GET_MODE (setsrc);
+ machine_mode mode = GET_MODE (setsrc);
enum rtx_code code = GET_CODE (setsrc);
rtx src_op0 = XEXP (setsrc, 0);
rtx src_op1 = XEXP (setsrc, 1);
if (undo->kind == UNDO_MODE)
{
rtx reg = *undo->where.r;
- enum machine_mode new_mode = GET_MODE (reg);
- enum machine_mode old_mode = undo->old_contents.m;
+ machine_mode new_mode = GET_MODE (reg);
+ machine_mode old_mode = undo->old_contents.m;
/* Temporarily revert mode back. */
adjust_reg_mode (reg, old_mode);
if (GET_CODE (XEXP (x, 0)) == CONST
|| GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
{
- enum machine_mode address_mode = get_address_mode (x);
+ machine_mode address_mode = get_address_mode (x);
SUBST (XEXP (x, 0),
gen_rtx_LO_SUM (address_mode,
unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
rtx dest = XEXP (SET_DEST (x), 0);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
unsigned HOST_WIDE_INT mask
= ((unsigned HOST_WIDE_INT) 1 << len) - 1;
rtx or_mask;
(nonzero_bits (XEXP (SET_SRC (x), 0),
GET_MODE (XEXP (SET_SRC (x), 0))))))
{
- enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
+ machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
SUBST (SET_SRC (x),
gen_rtx_NEG (mode,
if (len && pos >= 0
&& pos + len <= GET_MODE_PRECISION (GET_MODE (inner)))
{
- enum machine_mode mode = GET_MODE (SET_SRC (x));
+ machine_mode mode = GET_MODE (SET_SRC (x));
/* For unsigned, we have a choice of a shift followed by an
AND or two shifts. Use two shifts for field sizes where the
&& GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
&& exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1))) < 0)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
unsigned HOST_WIDE_INT this_int = INTVAL (XEXP (XEXP (x, 1), 1));
HOST_WIDE_INT other_int = trunc_int_for_mode (-this_int, mode);
SUBST (*loc, gen_rtx_PLUS (mode,
subst (rtx x, rtx from, rtx to, int in_dest, int in_cond, int unique_copy)
{
enum rtx_code code = GET_CODE (x);
- enum machine_mode op0_mode = VOIDmode;
+ machine_mode op0_mode = VOIDmode;
const char *fmt;
int len, i;
rtx new_rtx;
if (GET_CODE (x) == SUBREG && CONST_SCALAR_INT_P (new_rtx))
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
x = simplify_subreg (GET_MODE (x), new_rtx,
GET_MODE (SUBREG_REG (x)),
of a condition. */
static rtx
-combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
+combine_simplify_rtx (rtx x, machine_mode op0_mode, int in_dest,
int in_cond)
{
enum rtx_code code = GET_CODE (x);
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx temp;
int i;
case RTX_COMPARE:
case RTX_COMM_COMPARE:
{
- enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
+ machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
if (cmp_mode == VOIDmode)
{
cmp_mode = GET_MODE (XEXP (x, 1));
static rtx
simplify_if_then_else (rtx x)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx cond = XEXP (x, 0);
rtx true_rtx = XEXP (x, 1);
rtx false_rtx = XEXP (x, 2);
rtx cond_op0 = XEXP (cond, 0);
rtx cond_op1 = XEXP (cond, 1);
enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
- enum machine_mode m = mode;
+ machine_mode m = mode;
rtx z = 0, c1 = NULL_RTX;
if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
{
rtx src = SET_SRC (x);
rtx dest = SET_DEST (x);
- enum machine_mode mode
+ machine_mode mode
= GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
rtx_insn *other_insn;
rtx *cc_use;
rtx op0, op1, tmp;
int other_changed = 0;
rtx inner_compare = NULL_RTX;
- enum machine_mode compare_mode = GET_MODE (dest);
+ machine_mode compare_mode = GET_MODE (dest);
if (GET_CODE (src) == COMPARE)
{
< GET_MODE_PRECISION (GET_MODE (SUBREG_REG (src)))))
{
rtx inner = SUBREG_REG (src);
- enum machine_mode inner_mode = GET_MODE (inner);
+ machine_mode inner_mode = GET_MODE (inner);
/* Here we make sure that we don't have a sign bit on. */
if (val_signbit_known_clear_p (GET_MODE (src),
static rtx
simplify_logical (rtx x)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx op0 = XEXP (x, 0);
rtx op1 = XEXP (x, 1);
modewidth = GET_MODE_PRECISION (GET_MODE (x));
if (modewidth >= pos + len)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
tem = gen_lowpart (mode, XEXP (x, 0));
if (!tem || GET_CODE (tem) == CLOBBER)
return x;
rtx pos; /* Always counts from low bit. */
int len;
rtx mask, cleared, masked;
- enum machine_mode compute_mode;
+ machine_mode compute_mode;
/* Loop until we find something we can't simplify. */
while (1)
/* Don't attempt bitwise arithmetic on non scalar integer modes. */
if (! SCALAR_INT_MODE_P (compute_mode))
{
- enum machine_mode imode;
+ machine_mode imode;
/* Don't do anything for vector or complex integral types. */
if (! FLOAT_MODE_P (compute_mode))
can't handle it. */
static rtx
-make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
+make_extraction (machine_mode mode, rtx inner, HOST_WIDE_INT pos,
rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
int in_dest, int in_compare)
{
/* This mode describes the size of the storage area
to fetch the overall value from. Within that, we
ignore the POS lowest bits, etc. */
- enum machine_mode is_mode = GET_MODE (inner);
- enum machine_mode inner_mode;
- enum machine_mode wanted_inner_mode;
- enum machine_mode wanted_inner_reg_mode = word_mode;
- enum machine_mode pos_mode = word_mode;
- enum machine_mode extraction_mode = word_mode;
- enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
+ machine_mode is_mode = GET_MODE (inner);
+ machine_mode inner_mode;
+ machine_mode wanted_inner_mode;
+ machine_mode wanted_inner_reg_mode = word_mode;
+ machine_mode pos_mode = word_mode;
+ machine_mode extraction_mode = word_mode;
+ machine_mode tmode = mode_for_size (len, MODE_INT, 1);
rtx new_rtx = 0;
rtx orig_pos_rtx = pos_rtx;
HOST_WIDE_INT orig_pos;
extract_left_shift (rtx x, int count)
{
enum rtx_code code = GET_CODE (x);
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx tem;
switch (code)
make_compound_operation (rtx x, enum rtx_code in_code)
{
enum rtx_code code = GET_CODE (x);
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int mode_width = GET_MODE_PRECISION (mode);
rtx rhs, lhs;
enum rtx_code next_code;
would need an explicit truncation. */
static rtx
-gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
+gen_lowpart_or_truncate (machine_mode mode, rtx x)
{
if (!CONST_INT_P (x)
&& GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
NOT, NEG, or XOR. */
static rtx
-force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
+force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
int just_select)
{
enum rtx_code code = GET_CODE (x);
int next_select = just_select || code == XOR || code == NOT || code == NEG;
- enum machine_mode op_mode;
+ machine_mode op_mode;
unsigned HOST_WIDE_INT fuller_mask, nonzero;
rtx op0, op1, temp;
static rtx
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
enum rtx_code code = GET_CODE (x);
rtx cond0, cond1, true0, true1, false0, false1;
unsigned HOST_WIDE_INT nz;
}
else if (code == SUBREG)
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
if (SUBREG_REG (x) != r)
story is different. */
else if (code == ZERO_EXTEND)
{
- enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
+ machine_mode inner_mode = GET_MODE (XEXP (x, 0));
rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
if (XEXP (x, 0) != r)
HOST_WIDE_INT pos;
unsigned HOST_WIDE_INT len;
rtx other;
- enum machine_mode mode;
+ machine_mode mode;
/* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
a clear of a one-bit field. We will have changed it to
static rtx
distribute_and_simplify_rtx (rtx x, int n)
{
- enum machine_mode mode;
+ machine_mode mode;
enum rtx_code outer_code, inner_code;
rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
(const_int CONSTOP)). Otherwise, return NULL_RTX. */
static rtx
-simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
+simplify_and_const_int_1 (machine_mode mode, rtx varop,
unsigned HOST_WIDE_INT constop)
{
unsigned HOST_WIDE_INT nonzero;
X is zero, we are to always construct the equivalent form. */
static rtx
-simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
+simplify_and_const_int (rtx x, machine_mode mode, rtx varop,
unsigned HOST_WIDE_INT constop)
{
rtx tem = simplify_and_const_int_1 (mode, varop, constop);
a shift, AND, or zero_extract, we can do better. */
static rtx
-reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
+reg_nonzero_bits_for_combine (const_rtx x, machine_mode mode,
const_rtx known_x ATTRIBUTE_UNUSED,
- enum machine_mode known_mode ATTRIBUTE_UNUSED,
+ machine_mode known_mode ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT *nonzero)
{
be between 1 and the number of bits in MODE. */
static rtx
-reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
+reg_num_sign_bit_copies_for_combine (const_rtx x, machine_mode mode,
const_rtx known_x ATTRIBUTE_UNUSED,
- enum machine_mode known_mode
+ machine_mode known_mode
ATTRIBUTE_UNUSED,
unsigned int known_ret ATTRIBUTE_UNUSED,
unsigned int *result)
implies that it must be called from a define_split. */
unsigned int
-extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
+extended_count (const_rtx x, machine_mode mode, int unsignedp)
{
if (nonzero_sign_valid == 0)
return 0;
return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
static int
-merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
+merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, machine_mode mode, int *pcomp_p)
{
enum rtx_code op0 = *pop0;
HOST_WIDE_INT const0 = *pconst0;
result of the shift is subject to operation OUTER_CODE with operand
OUTER_CONST. */
-static enum machine_mode
+static machine_mode
try_widen_shift_mode (enum rtx_code code, rtx op, int count,
- enum machine_mode orig_mode, enum machine_mode mode,
+ machine_mode orig_mode, machine_mode mode,
enum rtx_code outer_code, HOST_WIDE_INT outer_const)
{
if (orig_mode == mode)
are ASHIFTRT and ROTATE, which are always done in their original mode. */
static rtx
-simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
+simplify_shift_const_1 (enum rtx_code code, machine_mode result_mode,
rtx varop, int orig_count)
{
enum rtx_code orig_code = code;
rtx orig_varop = varop;
int count;
- enum machine_mode mode = result_mode;
- enum machine_mode shift_mode, tmode;
+ machine_mode mode = result_mode;
+ machine_mode shift_mode, tmode;
unsigned int mode_words
= (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
/* We form (outer_op (code varop count) (outer_const)). */
are ASHIFTRT and ROTATE, which are always done in their original mode. */
static rtx
-simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
+simplify_shift_const (rtx x, enum rtx_code code, machine_mode result_mode,
rtx varop, int count)
{
rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
An insn containing that will not be recognized. */
static rtx
-gen_lowpart_for_combine (enum machine_mode omode, rtx x)
+gen_lowpart_for_combine (machine_mode omode, rtx x)
{
- enum machine_mode imode = GET_MODE (x);
+ machine_mode imode = GET_MODE (x);
unsigned int osize = GET_MODE_SIZE (omode);
unsigned int isize = GET_MODE_SIZE (imode);
rtx result;
*POP1 may be updated. */
static enum rtx_code
-simplify_compare_const (enum rtx_code code, enum machine_mode mode,
+simplify_compare_const (enum rtx_code code, machine_mode mode,
rtx op0, rtx *pop1)
{
unsigned int mode_width = GET_MODE_PRECISION (mode);
rtx op1 = *pop1;
rtx tem, tem1;
int i;
- enum machine_mode mode, tmode;
+ machine_mode mode, tmode;
/* Try a few ways of applying the same transformation to both operands. */
while (1)
&& INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
&& XEXP (op0, 1) == XEXP (op1, 1))
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
int shift_count = INTVAL (XEXP (op0, 1));
while (CONST_INT_P (op1))
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
unsigned int mode_width = GET_MODE_PRECISION (mode);
unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
int equality_comparison_p;
&& GET_CODE (SUBREG_REG (op0)) == PLUS
&& CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
rtx a = XEXP (SUBREG_REG (op0), 0);
HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
if (value)
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
subst_low_luid = DF_INSN_LUID (insn);
rsp->last_set_mode = mode;
if (GET_MODE_CLASS (mode) == MODE_INT
struct insn_link *links;
rtx set;
unsigned int regno = REGNO (SUBREG_REG (subreg));
- enum machine_mode mode = GET_MODE (subreg);
+ machine_mode mode = GET_MODE (subreg);
if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
return;
an explicit truncation. */
static bool
-reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
+reg_truncated_to_mode (machine_mode mode, const_rtx x)
{
reg_stat_type *rsp = &reg_stat[REGNO (x)];
- enum machine_mode truncated = rsp->truncated_to_mode;
+ machine_mode truncated = rsp->truncated_to_mode;
if (truncated == 0
|| rsp->truncation_label < label_tick_ebb_start)
static bool
record_truncated_value (rtx x)
{
- enum machine_mode truncated_mode;
+ machine_mode truncated_mode;
reg_stat_type *rsp;
if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
{
- enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
+ machine_mode original_mode = GET_MODE (SUBREG_REG (x));
truncated_mode = GET_MODE (x);
if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
struct comparison_use uses[MAX_CMP_USE];
/* The original CC_MODE for this comparison. */
- enum machine_mode orig_mode;
+ machine_mode orig_mode;
/* The number of uses identified for this comparison. */
unsigned short n_uses;
src = conforming_compare (insn);
if (src)
{
- enum machine_mode src_mode = GET_MODE (src);
+ machine_mode src_mode = GET_MODE (src);
rtx eh_note = NULL;
if (flag_non_call_exceptions)
/* New mode must be compatible with the previous compare mode. */
{
- enum machine_mode new_mode
+ machine_mode new_mode
= targetm.cc_modes_compatible (last_cmp->orig_mode, src_mode);
if (new_mode == VOIDmode)
goto dont_delete;
maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
rtx b ATTRIBUTE_UNUSED)
{
- enum machine_mode sel_mode;
+ machine_mode sel_mode;
const int n = cmp->n_uses;
rtx flags = NULL;
sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
for (i = 1; i < n; ++i)
{
- enum machine_mode new_mode;
+ machine_mode new_mode;
new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
if (new_mode != sel_mode)
{
typedef struct
{
const char *name;
- enum machine_mode mode;
+ machine_mode mode;
const enum insn_code code;
unsigned int fcode;
enum aarch64_type_qualifiers *qualifiers;
typedef struct
{
const char *name;
- enum machine_mode mode;
+ machine_mode mode;
const enum insn_code icode;
unsigned int fcode;
} aarch64_crc_builtin_datum;
/* Return a tree for a signed or unsigned argument of either
the mode specified by MODE, or the inner mode of MODE. */
tree
-aarch64_build_scalar_type (enum machine_mode mode,
+aarch64_build_scalar_type (machine_mode mode,
bool unsigned_p,
bool poly_p)
{
}
tree
-aarch64_build_vector_type (enum machine_mode mode,
+aarch64_build_vector_type (machine_mode mode,
bool unsigned_p,
bool poly_p)
{
}
tree
-aarch64_build_type (enum machine_mode mode, bool unsigned_p, bool poly_p)
+aarch64_build_type (machine_mode mode, bool unsigned_p, bool poly_p)
{
if (VECTOR_MODE_P (mode))
return aarch64_build_vector_type (mode, unsigned_p, poly_p);
}
tree
-aarch64_build_signed_type (enum machine_mode mode)
+aarch64_build_signed_type (machine_mode mode)
{
return aarch64_build_type (mode, false, false);
}
tree
-aarch64_build_unsigned_type (enum machine_mode mode)
+aarch64_build_unsigned_type (machine_mode mode)
{
return aarch64_build_type (mode, true, false);
}
tree
-aarch64_build_poly_type (enum machine_mode mode)
+aarch64_build_poly_type (machine_mode mode)
{
return aarch64_build_type (mode, false, true);
}
removing duplicates for us. */
for (; op_num >= 0; arg_num--, op_num--)
{
- enum machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
+ machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];
if (qualifiers & qualifier_unsigned)
rtx pat;
tree arg[SIMD_MAX_BUILTIN_ARGS];
rtx op[SIMD_MAX_BUILTIN_ARGS];
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
int argc = 0;
if (have_retval
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
if (! target
|| GET_MODE (target) != tmode
aarch64_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
- enum machine_mode in_mode, out_mode;
+ machine_mode in_mode, out_mode;
int in_n, out_n;
if (TREE_CODE (type_out) != VECTOR_TYPE
HOST_WIDE_INT aarch64_initial_elimination_offset (unsigned, unsigned);
int aarch64_get_condition_code (rtx);
-bool aarch64_bitmask_imm (HOST_WIDE_INT val, enum machine_mode);
-bool aarch64_cannot_change_mode_class (enum machine_mode,
- enum machine_mode,
+bool aarch64_bitmask_imm (HOST_WIDE_INT val, machine_mode);
+bool aarch64_cannot_change_mode_class (machine_mode,
+ machine_mode,
enum reg_class);
enum aarch64_symbol_type
aarch64_classify_symbolic_expression (rtx, enum aarch64_symbol_context);
bool aarch64_function_arg_regno_p (unsigned);
bool aarch64_gen_movmemqi (rtx *);
bool aarch64_gimple_fold_builtin (gimple_stmt_iterator *);
-bool aarch64_is_extend_from_extract (enum machine_mode, rtx, rtx);
+bool aarch64_is_extend_from_extract (machine_mode, rtx, rtx);
bool aarch64_is_long_call_p (rtx);
bool aarch64_label_mentioned_p (rtx);
bool aarch64_legitimate_pic_operand_p (rtx);
-bool aarch64_modes_tieable_p (enum machine_mode mode1,
- enum machine_mode mode2);
-bool aarch64_move_imm (HOST_WIDE_INT, enum machine_mode);
+bool aarch64_modes_tieable_p (machine_mode mode1,
+ machine_mode mode2);
+bool aarch64_move_imm (HOST_WIDE_INT, machine_mode);
bool aarch64_mov_operand_p (rtx, enum aarch64_symbol_context,
- enum machine_mode);
-bool aarch64_offset_7bit_signed_scaled_p (enum machine_mode, HOST_WIDE_INT);
-char *aarch64_output_scalar_simd_mov_immediate (rtx, enum machine_mode);
-char *aarch64_output_simd_mov_immediate (rtx, enum machine_mode, unsigned);
-bool aarch64_pad_arg_upward (enum machine_mode, const_tree);
-bool aarch64_pad_reg_upward (enum machine_mode, const_tree, bool);
+ machine_mode);
+bool aarch64_offset_7bit_signed_scaled_p (machine_mode, HOST_WIDE_INT);
+char *aarch64_output_scalar_simd_mov_immediate (rtx, machine_mode);
+char *aarch64_output_simd_mov_immediate (rtx, machine_mode, unsigned);
+bool aarch64_pad_arg_upward (machine_mode, const_tree);
+bool aarch64_pad_reg_upward (machine_mode, const_tree, bool);
bool aarch64_regno_ok_for_base_p (int, bool);
bool aarch64_regno_ok_for_index_p (int, bool);
-bool aarch64_simd_check_vect_par_cnst_half (rtx op, enum machine_mode mode,
+bool aarch64_simd_check_vect_par_cnst_half (rtx op, machine_mode mode,
bool high);
-bool aarch64_simd_imm_scalar_p (rtx x, enum machine_mode mode);
-bool aarch64_simd_imm_zero_p (rtx, enum machine_mode);
-bool aarch64_simd_scalar_immediate_valid_for_move (rtx, enum machine_mode);
-bool aarch64_simd_shift_imm_p (rtx, enum machine_mode, bool);
-bool aarch64_simd_valid_immediate (rtx, enum machine_mode, bool,
+bool aarch64_simd_imm_scalar_p (rtx x, machine_mode mode);
+bool aarch64_simd_imm_zero_p (rtx, machine_mode);
+bool aarch64_simd_scalar_immediate_valid_for_move (rtx, machine_mode);
+bool aarch64_simd_shift_imm_p (rtx, machine_mode, bool);
+bool aarch64_simd_valid_immediate (rtx, machine_mode, bool,
struct simd_immediate_info *);
bool aarch64_symbolic_address_p (rtx);
bool aarch64_uimm12_shift (HOST_WIDE_INT);
enum aarch64_symbol_type aarch64_classify_tls_symbol (rtx);
enum reg_class aarch64_regno_regclass (unsigned);
int aarch64_asm_preferred_eh_data_format (int, int);
-enum machine_mode aarch64_hard_regno_caller_save_mode (unsigned, unsigned,
- enum machine_mode);
-int aarch64_hard_regno_mode_ok (unsigned, enum machine_mode);
-int aarch64_hard_regno_nregs (unsigned, enum machine_mode);
+machine_mode aarch64_hard_regno_caller_save_mode (unsigned, unsigned,
+ machine_mode);
+int aarch64_hard_regno_mode_ok (unsigned, machine_mode);
+int aarch64_hard_regno_nregs (unsigned, machine_mode);
int aarch64_simd_attr_length_move (rtx_insn *);
int aarch64_uxt_size (int, HOST_WIDE_INT);
rtx aarch64_final_eh_return_addr (void);
-rtx aarch64_legitimize_reload_address (rtx *, enum machine_mode, int, int, int);
+rtx aarch64_legitimize_reload_address (rtx *, machine_mode, int, int, int);
const char *aarch64_output_move_struct (rtx *operands);
rtx aarch64_return_addr (int, rtx);
-rtx aarch64_simd_gen_const_vector_dup (enum machine_mode, int);
+rtx aarch64_simd_gen_const_vector_dup (machine_mode, int);
bool aarch64_simd_mem_operand_p (rtx);
-rtx aarch64_simd_vect_par_cnst_half (enum machine_mode, bool);
+rtx aarch64_simd_vect_par_cnst_half (machine_mode, bool);
rtx aarch64_tls_get_addr (void);
tree aarch64_fold_builtin (tree, int, tree *, bool);
unsigned aarch64_dbx_register_number (unsigned);
/* Emit code to place a AdvSIMD pair result in memory locations (with equal
registers). */
-void aarch64_simd_emit_pair_result_insn (enum machine_mode,
+void aarch64_simd_emit_pair_result_insn (machine_mode,
rtx (*intfn) (rtx, rtx, rtx), rtx,
rtx);
#if defined (RTX_CODE)
-bool aarch64_legitimate_address_p (enum machine_mode, rtx, RTX_CODE, bool);
-enum machine_mode aarch64_select_cc_mode (RTX_CODE, rtx, rtx);
+bool aarch64_legitimate_address_p (machine_mode, rtx, RTX_CODE, bool);
+machine_mode aarch64_select_cc_mode (RTX_CODE, rtx, rtx);
rtx aarch64_gen_compare_reg (RTX_CODE, rtx, rtx);
rtx aarch64_load_tp (rtx);
rtx aarch64_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED);
tree aarch64_builtin_decl (unsigned, bool ATTRIBUTE_UNUSED);
if (GP_REGNUM_P (REGNO (operands[0]))
&& GP_REGNUM_P (REGNO (operands[1])))
{
- enum machine_mode mode = SELECT_CC_MODE (<CMP>, operands[1], operands[2]);
+ machine_mode mode = SELECT_CC_MODE (<CMP>, operands[1], operands[2]);
rtx cc_reg = aarch64_gen_compare_reg (<CMP>, operands[1], operands[2]);
rtx comparison = gen_rtx_<CMP> (mode, operands[1], operands[2]);
emit_insn (gen_cstoredi_neg (operands[0], comparison, cc_reg));
if (GP_REGNUM_P (REGNO (operands[0]))
&& GP_REGNUM_P (REGNO (operands[1])))
{
- enum machine_mode mode = CCmode;
+ machine_mode mode = CCmode;
rtx cc_reg = aarch64_gen_compare_reg (<CMP>, operands[1], operands[2]);
rtx comparison = gen_rtx_<CMP> (mode, operands[1], operands[2]);
emit_insn (gen_cstoredi_neg (operands[0], comparison, cc_reg));
&& GP_REGNUM_P (REGNO (operands[1])))
{
rtx and_tree = gen_rtx_AND (DImode, operands[1], operands[2]);
- enum machine_mode mode = SELECT_CC_MODE (NE, and_tree, const0_rtx);
+ machine_mode mode = SELECT_CC_MODE (NE, and_tree, const0_rtx);
rtx cc_reg = aarch64_gen_compare_reg (NE, and_tree, const0_rtx);
rtx comparison = gen_rtx_NE (mode, and_tree, const0_rtx);
emit_insn (gen_cstoredi_neg (operands[0], comparison, cc_reg));
(unspec:VALLDIF [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_TWO_ELEM>mode;
+ machine_mode mode = <V_TWO_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
emit_insn (gen_aarch64_simd_ld2r<mode> (operands[0], mem));
(unspec:VALLDIF [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_THREE_ELEM>mode;
+ machine_mode mode = <V_THREE_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
emit_insn (gen_aarch64_simd_ld3r<mode> (operands[0], mem));
(unspec:VALLDIF [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_FOUR_ELEM>mode;
+ machine_mode mode = <V_FOUR_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
emit_insn (gen_aarch64_simd_ld4r<mode> (operands[0],mem));
(unspec:VDC [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <VSTRUCT:VSTRUCT_DREG>mode;
+ machine_mode mode = <VSTRUCT:VSTRUCT_DREG>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
emit_insn (gen_aarch64_ld<VSTRUCT:nregs><VDC:mode>_dreg (operands[0], mem));
(match_operand:DI 1 "register_operand")]
"TARGET_SIMD"
{
- enum machine_mode mode = <VALL:MODE>mode;
+ machine_mode mode = <VALL:MODE>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
if (BYTES_BIG_ENDIAN)
(unspec:VQ [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <VSTRUCT:MODE>mode;
+ machine_mode mode = <VSTRUCT:MODE>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
emit_insn (gen_vec_load_lanes<VSTRUCT:mode><VQ:mode> (operands[0], mem));
(unspec:VQ [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_TWO_ELEM>mode;
+ machine_mode mode = <V_TWO_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
aarch64_simd_lane_bounds (operands[3], 0, GET_MODE_NUNITS (<VCONQ>mode));
(unspec:VQ [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_THREE_ELEM>mode;
+ machine_mode mode = <V_THREE_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
aarch64_simd_lane_bounds (operands[3], 0, GET_MODE_NUNITS (<VCONQ>mode));
(unspec:VQ [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_FOUR_ELEM>mode;
+ machine_mode mode = <V_FOUR_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[1]);
aarch64_simd_lane_bounds (operands[3], 0, GET_MODE_NUNITS (<VCONQ>mode));
(unspec:VDC [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <VSTRUCT:VSTRUCT_DREG>mode;
+ machine_mode mode = <VSTRUCT:VSTRUCT_DREG>mode;
rtx mem = gen_rtx_MEM (mode, operands[0]);
emit_insn (gen_aarch64_st<VSTRUCT:nregs><VDC:mode>_dreg (mem, operands[1]));
(unspec:VQ [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
"TARGET_SIMD"
{
- enum machine_mode mode = <VSTRUCT:MODE>mode;
+ machine_mode mode = <VSTRUCT:MODE>mode;
rtx mem = gen_rtx_MEM (mode, operands[0]);
emit_insn (gen_vec_store_lanes<VSTRUCT:mode><VQ:mode> (mem, operands[1]));
(match_operand:SI 2 "immediate_operand")]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_TWO_ELEM>mode;
+ machine_mode mode = <V_TWO_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[0]);
operands[2] = GEN_INT (ENDIAN_LANE_N (<MODE>mode, INTVAL (operands[2])));
(match_operand:SI 2 "immediate_operand")]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_THREE_ELEM>mode;
+ machine_mode mode = <V_THREE_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[0]);
operands[2] = GEN_INT (ENDIAN_LANE_N (<MODE>mode, INTVAL (operands[2])));
(match_operand:SI 2 "immediate_operand")]
"TARGET_SIMD"
{
- enum machine_mode mode = <V_FOUR_ELEM>mode;
+ machine_mode mode = <V_FOUR_ELEM>mode;
rtx mem = gen_rtx_MEM (mode, operands[0]);
operands[2] = GEN_INT (ENDIAN_LANE_N (<MODE>mode, INTVAL (operands[2])));
(match_operand:VALL 1 "register_operand")]
"TARGET_SIMD"
{
- enum machine_mode mode = <VALL:MODE>mode;
+ machine_mode mode = <VALL:MODE>mode;
rtx mem = gen_rtx_MEM (mode, operands[0]);
if (BYTES_BIG_ENDIAN)
#endif
static bool aarch64_lra_p (void);
-static bool aarch64_composite_type_p (const_tree, enum machine_mode);
-static bool aarch64_vfp_is_call_or_return_candidate (enum machine_mode,
+static bool aarch64_composite_type_p (const_tree, machine_mode);
+static bool aarch64_vfp_is_call_or_return_candidate (machine_mode,
const_tree,
- enum machine_mode *, int *,
+ machine_mode *, int *,
bool *);
static void aarch64_elf_asm_constructor (rtx, int) ATTRIBUTE_UNUSED;
static void aarch64_elf_asm_destructor (rtx, int) ATTRIBUTE_UNUSED;
static void aarch64_override_options_after_change (void);
-static bool aarch64_vector_mode_supported_p (enum machine_mode);
+static bool aarch64_vector_mode_supported_p (machine_mode);
static unsigned bit_count (unsigned HOST_WIDE_INT);
-static bool aarch64_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+static bool aarch64_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel);
-static int aarch64_address_cost (rtx, enum machine_mode, addr_space_t, bool);
+static int aarch64_address_cost (rtx, machine_mode, addr_space_t, bool);
/* The processor for which instructions should be scheduled. */
enum aarch64_processor aarch64_tune = cortexa53;
/* Used to track the size of an address when generating a pre/post
increment address. */
-static enum machine_mode aarch64_memory_reference_mode;
+static machine_mode aarch64_memory_reference_mode;
/* Used to force GTY into this file. */
static GTY(()) int gty_dummy;
/* Return TRUE if MODE is any of the large INT modes. */
static bool
-aarch64_vect_struct_mode_p (enum machine_mode mode)
+aarch64_vect_struct_mode_p (machine_mode mode)
{
return mode == OImode || mode == CImode || mode == XImode;
}
/* Return TRUE if MODE is any of the vector modes. */
static bool
-aarch64_vector_mode_p (enum machine_mode mode)
+aarch64_vector_mode_p (machine_mode mode)
{
return aarch64_vector_mode_supported_p (mode)
|| aarch64_vect_struct_mode_p (mode);
/* Implement target hook TARGET_ARRAY_MODE_SUPPORTED_P. */
static bool
-aarch64_array_mode_supported_p (enum machine_mode mode,
+aarch64_array_mode_supported_p (machine_mode mode,
unsigned HOST_WIDE_INT nelems)
{
if (TARGET_SIMD
/* Implement HARD_REGNO_NREGS. */
int
-aarch64_hard_regno_nregs (unsigned regno, enum machine_mode mode)
+aarch64_hard_regno_nregs (unsigned regno, machine_mode mode)
{
switch (aarch64_regno_regclass (regno))
{
/* Implement HARD_REGNO_MODE_OK. */
int
-aarch64_hard_regno_mode_ok (unsigned regno, enum machine_mode mode)
+aarch64_hard_regno_mode_ok (unsigned regno, machine_mode mode)
{
if (GET_MODE_CLASS (mode) == MODE_CC)
return regno == CC_REGNUM;
}
/* Implement HARD_REGNO_CALLER_SAVE_MODE. */
-enum machine_mode
+machine_mode
aarch64_hard_regno_caller_save_mode (unsigned regno, unsigned nregs,
- enum machine_mode mode)
+ machine_mode mode)
{
/* Handle modes that fit within single registers. */
if (nregs == 1 && GET_MODE_SIZE (mode) <= 16)
(extract:MODE (mult (reg) (MULT_IMM)) (EXTRACT_IMM) (const_int 0)). */
bool
-aarch64_is_extend_from_extract (enum machine_mode mode, rtx mult_imm,
+aarch64_is_extend_from_extract (machine_mode mode, rtx mult_imm,
rtx extract_imm)
{
HOST_WIDE_INT mult_val, extract_val;
rtx
aarch64_gen_compare_reg (RTX_CODE code, rtx x, rtx y)
{
- enum machine_mode mode = SELECT_CC_MODE (code, x, y);
+ machine_mode mode = SELECT_CC_MODE (code, x, y);
rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
emit_set_insn (cc_reg, gen_rtx_COMPARE (mode, x, y));
{
/* In ILP32, the mode of dest can be either SImode or DImode. */
rtx tmp_reg = dest;
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
gcc_assert (mode == Pmode || mode == ptr_mode);
This is why we have to handle three different ldr_got_small
patterns here (two patterns for ILP32). */
rtx tmp_reg = dest;
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (can_create_pseudo_p ())
tmp_reg = gen_reg_rtx (mode);
case SYMBOL_SMALL_TLSDESC:
{
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
rtx x0 = gen_rtx_REG (mode, R0_REGNUM);
rtx tp;
DImode if dest is dereferenced to access the memeory.
This is why we have to handle three different tlsie_small
patterns here (two patterns for ILP32). */
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
rtx tmp_reg = gen_reg_rtx (mode);
rtx tp = aarch64_load_tp (NULL);
rtx dst_lo, dst_hi;
rtx src_lo, src_hi;
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
gcc_assert (mode == TImode || mode == TFmode);
gcc_assert (!(side_effects_p (src) || side_effects_p (dst)));
void
aarch64_split_simd_combine (rtx dst, rtx src1, rtx src2)
{
- enum machine_mode src_mode = GET_MODE (src1);
- enum machine_mode dst_mode = GET_MODE (dst);
+ machine_mode src_mode = GET_MODE (src1);
+ machine_mode dst_mode = GET_MODE (dst);
gcc_assert (VECTOR_MODE_P (dst_mode));
void
aarch64_split_simd_move (rtx dst, rtx src)
{
- enum machine_mode src_mode = GET_MODE (src);
- enum machine_mode dst_mode = GET_MODE (dst);
+ machine_mode src_mode = GET_MODE (src);
+ machine_mode dst_mode = GET_MODE (dst);
gcc_assert (VECTOR_MODE_P (dst_mode));
}
static rtx
-aarch64_force_temporary (enum machine_mode mode, rtx x, rtx value)
+aarch64_force_temporary (machine_mode mode, rtx x, rtx value)
{
if (can_create_pseudo_p ())
return force_reg (mode, value);
static rtx
-aarch64_add_offset (enum machine_mode mode, rtx temp, rtx reg, HOST_WIDE_INT offset)
+aarch64_add_offset (machine_mode mode, rtx temp, rtx reg, HOST_WIDE_INT offset)
{
if (!aarch64_plus_immediate (GEN_INT (offset), mode))
{
void
aarch64_expand_mov_immediate (rtx dest, rtx imm)
{
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
unsigned HOST_WIDE_INT mask;
int i;
bool first;
static bool
aarch64_pass_by_reference (cumulative_args_t pcum ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED)
{
HOST_WIDE_INT size;
- enum machine_mode dummymode;
+ machine_mode dummymode;
int nregs;
/* GET_MODE_SIZE (BLKmode) is useless since it is 0. */
static bool
aarch64_return_in_msb (const_tree valtype)
{
- enum machine_mode dummy_mode;
+ machine_mode dummy_mode;
int dummy_int;
/* Never happens in little-endian mode. */
aarch64_function_value (const_tree type, const_tree func,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp;
int count;
- enum machine_mode ag_mode;
+ machine_mode ag_mode;
mode = TYPE_MODE (type);
if (INTEGRAL_TYPE_P (type))
aarch64_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
HOST_WIDE_INT size;
- enum machine_mode ag_mode;
+ machine_mode ag_mode;
int count;
if (!AGGREGATE_TYPE_P (type)
}
static bool
-aarch64_vfp_is_call_candidate (cumulative_args_t pcum_v, enum machine_mode mode,
+aarch64_vfp_is_call_candidate (cumulative_args_t pcum_v, machine_mode mode,
const_tree type, int *nregs)
{
CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
This is a helper function for local use only. */
static unsigned int
-aarch64_function_arg_alignment (enum machine_mode mode, const_tree type)
+aarch64_function_arg_alignment (machine_mode mode, const_tree type)
{
unsigned int alignment;
numbers refer to the rule numbers in the AAPCS64. */
static void
-aarch64_layout_arg (cumulative_args_t pcum_v, enum machine_mode mode,
+aarch64_layout_arg (cumulative_args_t pcum_v, machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED)
{
/* Implement TARGET_FUNCTION_ARG. */
static rtx
-aarch64_function_arg (cumulative_args_t pcum_v, enum machine_mode mode,
+aarch64_function_arg (cumulative_args_t pcum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
static void
aarch64_function_arg_advance (cumulative_args_t pcum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named)
{
8 bytes. */
static unsigned int
-aarch64_function_arg_boundary (enum machine_mode mode, const_tree type)
+aarch64_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int alignment = aarch64_function_arg_alignment (mode, type);
The related parameter passing rules are B.4, C.3, C.5 and C.14. */
bool
-aarch64_pad_arg_upward (enum machine_mode mode, const_tree type)
+aarch64_pad_arg_upward (machine_mode mode, const_tree type)
{
/* On little-endian targets, the least significant byte of every stack
argument is passed at the lowest byte address of the stack slot. */
significant byte does. */
bool
-aarch64_pad_reg_upward (enum machine_mode mode, const_tree type,
+aarch64_pad_reg_upward (machine_mode mode, const_tree type,
bool first ATTRIBUTE_UNUSED)
{
return !BYTES_BIG_ENDIAN;
}
-static enum machine_mode
+static machine_mode
aarch64_libgcc_cmp_return_mode (void)
{
return SImode;
}
static void
-aarch64_pushwb_single_reg (enum machine_mode mode, unsigned regno,
+aarch64_pushwb_single_reg (machine_mode mode, unsigned regno,
HOST_WIDE_INT adjustment)
{
rtx base_rtx = stack_pointer_rtx;
}
static rtx
-aarch64_gen_storewb_pair (enum machine_mode mode, rtx base, rtx reg, rtx reg2,
+aarch64_gen_storewb_pair (machine_mode mode, rtx base, rtx reg, rtx reg2,
HOST_WIDE_INT adjustment)
{
switch (mode)
}
static void
-aarch64_pushwb_pair_reg (enum machine_mode mode, unsigned regno1,
+aarch64_pushwb_pair_reg (machine_mode mode, unsigned regno1,
unsigned regno2, HOST_WIDE_INT adjustment)
{
rtx_insn *insn;
}
static rtx
-aarch64_gen_loadwb_pair (enum machine_mode mode, rtx base, rtx reg, rtx reg2,
+aarch64_gen_loadwb_pair (machine_mode mode, rtx base, rtx reg, rtx reg2,
HOST_WIDE_INT adjustment)
{
switch (mode)
}
static rtx
-aarch64_gen_store_pair (enum machine_mode mode, rtx mem1, rtx reg1, rtx mem2,
+aarch64_gen_store_pair (machine_mode mode, rtx mem1, rtx reg1, rtx mem2,
rtx reg2)
{
switch (mode)
}
static rtx
-aarch64_gen_load_pair (enum machine_mode mode, rtx reg1, rtx mem1, rtx reg2,
+aarch64_gen_load_pair (machine_mode mode, rtx reg1, rtx mem1, rtx reg2,
rtx mem2)
{
switch (mode)
static void
-aarch64_save_callee_saves (enum machine_mode mode, HOST_WIDE_INT start_offset,
+aarch64_save_callee_saves (machine_mode mode, HOST_WIDE_INT start_offset,
unsigned start, unsigned limit, bool skip_wb)
{
rtx_insn *insn;
- rtx (*gen_mem_ref) (enum machine_mode, rtx) = (frame_pointer_needed
+ rtx (*gen_mem_ref) (machine_mode, rtx) = (frame_pointer_needed
? gen_frame_mem : gen_rtx_MEM);
unsigned regno;
unsigned regno2;
}
static void
-aarch64_restore_callee_saves (enum machine_mode mode,
+aarch64_restore_callee_saves (machine_mode mode,
HOST_WIDE_INT start_offset, unsigned start,
unsigned limit, bool skip_wb, rtx *cfi_ops)
{
rtx base_rtx = stack_pointer_rtx;
- rtx (*gen_mem_ref) (enum machine_mode, rtx) = (frame_pointer_needed
+ rtx (*gen_mem_ref) (machine_mode, rtx) = (frame_pointer_needed
? gen_frame_mem : gen_rtx_MEM);
unsigned regno;
unsigned regno2;
}
else
{
- enum machine_mode mode1 = (reg1 <= R30_REGNUM) ? DImode : DFmode;
+ machine_mode mode1 = (reg1 <= R30_REGNUM) ? DImode : DFmode;
skip_wb = true;
if (skip_wb)
{
- enum machine_mode mode1 = (reg1 <= R30_REGNUM) ? DImode : DFmode;
+ machine_mode mode1 = (reg1 <= R30_REGNUM) ? DImode : DFmode;
rtx rreg1 = gen_rtx_REG (mode1, reg1);
cfi_ops = alloc_reg_note (REG_CFA_RESTORE, rreg1, cfi_ops);
/* Return true if val is an immediate that can be loaded into a
register by a MOVZ instruction. */
static bool
-aarch64_movw_imm (HOST_WIDE_INT val, enum machine_mode mode)
+aarch64_movw_imm (HOST_WIDE_INT val, machine_mode mode)
{
if (GET_MODE_SIZE (mode) > 4)
{
/* Return true if val is a valid bitmask immediate. */
bool
-aarch64_bitmask_imm (HOST_WIDE_INT val, enum machine_mode mode)
+aarch64_bitmask_imm (HOST_WIDE_INT val, machine_mode mode)
{
if (GET_MODE_SIZE (mode) < 8)
{
/* Return true if val is an immediate that can be loaded into a
register in a single instruction. */
bool
-aarch64_move_imm (HOST_WIDE_INT val, enum machine_mode mode)
+aarch64_move_imm (HOST_WIDE_INT val, machine_mode mode)
{
if (aarch64_movw_imm (val, mode) || aarch64_movw_imm (~val, mode))
return 1;
}
static bool
-aarch64_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+aarch64_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
rtx base, offset;
static bool
aarch64_classify_index (struct aarch64_address_info *info, rtx x,
- enum machine_mode mode, bool strict_p)
+ machine_mode mode, bool strict_p)
{
enum aarch64_address_type type;
rtx index;
}
bool
-aarch64_offset_7bit_signed_scaled_p (enum machine_mode mode, HOST_WIDE_INT offset)
+aarch64_offset_7bit_signed_scaled_p (machine_mode mode, HOST_WIDE_INT offset)
{
return (offset >= -64 * GET_MODE_SIZE (mode)
&& offset < 64 * GET_MODE_SIZE (mode)
}
static inline bool
-offset_9bit_signed_unscaled_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+offset_9bit_signed_unscaled_p (machine_mode mode ATTRIBUTE_UNUSED,
HOST_WIDE_INT offset)
{
return offset >= -256 && offset < 256;
}
static inline bool
-offset_12bit_unsigned_scaled_p (enum machine_mode mode, HOST_WIDE_INT offset)
+offset_12bit_unsigned_scaled_p (machine_mode mode, HOST_WIDE_INT offset)
{
return (offset >= 0
&& offset < 4096 * GET_MODE_SIZE (mode)
static bool
aarch64_classify_address (struct aarch64_address_info *info,
- rtx x, enum machine_mode mode,
+ rtx x, machine_mode mode,
RTX_CODE outer_code, bool strict_p)
{
enum rtx_code code = GET_CODE (x);
/* Return TRUE if X is a legitimate address for accessing memory in
mode MODE. */
static bool
-aarch64_legitimate_address_hook_p (enum machine_mode mode, rtx x, bool strict_p)
+aarch64_legitimate_address_hook_p (machine_mode mode, rtx x, bool strict_p)
{
struct aarch64_address_info addr;
mode MODE. OUTER_CODE will be PARALLEL if this is a load/store
pair operation. */
bool
-aarch64_legitimate_address_p (enum machine_mode mode, rtx x,
+aarch64_legitimate_address_p (machine_mode mode, rtx x,
RTX_CODE outer_code, bool strict_p)
{
struct aarch64_address_info addr;
clobber_reg (fusage, gen_rtx_REG (word_mode, IP1_REGNUM));
}
-enum machine_mode
+machine_mode
aarch64_select_cc_mode (RTX_CODE code, rtx x, rtx y)
{
/* All floating point compares return CCFP if it is an equality
int
aarch64_get_condition_code (rtx x)
{
- enum machine_mode mode = GET_MODE (XEXP (x, 0));
+ machine_mode mode = GET_MODE (XEXP (x, 0));
enum rtx_code comp_code = GET_CODE (x);
if (GET_MODE_CLASS (mode) != MODE_CC)
}
static rtx
-aarch64_legitimize_address (rtx x, rtx /* orig_x */, enum machine_mode mode)
+aarch64_legitimize_address (rtx x, rtx /* orig_x */, machine_mode mode)
{
/* Try to split X+CONST into Y=X+(CONST & ~mask), Y+(CONST&mask),
where mask is selected by alignment and size of the offset.
rtx
aarch64_legitimize_reload_address (rtx *x_p,
- enum machine_mode mode,
+ machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
HOST_WIDE_INT high = val - low;
HOST_WIDE_INT offs;
rtx cst;
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
/* In ILP32, xmode can be either DImode or SImode. */
gcc_assert (xmode == DImode || xmode == SImode);
static reg_class_t
aarch64_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x,
reg_class_t rclass,
- enum machine_mode mode,
+ machine_mode mode,
secondary_reload_info *sri)
{
/* Without the TARGET_SIMD instructions we cannot move a Q register
}
static unsigned char
-aarch64_class_max_nregs (reg_class_t regclass, enum machine_mode mode)
+aarch64_class_max_nregs (reg_class_t regclass, machine_mode mode)
{
switch (regclass)
{
}
static bool
-aarch64_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+aarch64_use_blocks_for_constant_p (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx x ATTRIBUTE_UNUSED)
{
/* We can't use blocks for constants when we're using a per-function
}
static section *
-aarch64_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
+aarch64_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
= aarch64_tune_params->insn_extra_cost;
int cost = 0;
bool maybe_fma = (outer == PLUS || outer == MINUS);
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
gcc_checking_assert (code == MULT);
static int
aarch64_address_cost (rtx x,
- enum machine_mode mode,
+ machine_mode mode,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed)
{
/* Return true if the RTX X in mode MODE is a zero or sign extract
usable in an ADD or SUB (extended register) instruction. */
static bool
-aarch64_rtx_arith_op_extract_p (rtx x, enum machine_mode mode)
+aarch64_rtx_arith_op_extract_p (rtx x, machine_mode mode)
{
/* Catch add with a sign extract.
This is add_<optab><mode>_multp2. */
rtx op0, op1, op2;
const struct cpu_cost_table *extra_cost
= aarch64_tune_params->insn_extra_cost;
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
/* By default, assume that everything has equivalent cost to the
cheapest instruction. Any additional costs are applied as a delta
}
static int
-aarch64_register_move_cost (enum machine_mode mode,
+aarch64_register_move_cost (machine_mode mode,
reg_class_t from_i, reg_class_t to_i)
{
enum reg_class from = (enum reg_class) from_i;
}
static int
-aarch64_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+aarch64_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
/* Return true if X holds either a quarter-precision or
floating-point +0.0 constant. */
static bool
-aarch64_valid_floating_const (enum machine_mode mode, rtx x)
+aarch64_valid_floating_const (machine_mode mode, rtx x)
{
if (!CONST_DOUBLE_P (x))
return false;
}
static bool
-aarch64_legitimate_constant_p (enum machine_mode mode, rtx x)
+aarch64_legitimate_constant_p (machine_mode mode, rtx x)
{
/* Do not allow vector struct mode constants. We could support
0 and -1 easily, but they need support in aarch64-simd.md. */
bool indirect_p;
bool is_ha; /* is HFA or HVA. */
bool dw_align; /* double-word align. */
- enum machine_mode ag_mode = VOIDmode;
+ machine_mode ag_mode = VOIDmode;
int nregs;
- enum machine_mode mode;
+ machine_mode mode;
tree f_stack, f_grtop, f_vrtop, f_groff, f_vroff;
tree stack, f_top, f_off, off, arg, roundup, on_stack;
/* Implement TARGET_SETUP_INCOMING_VARARGS. */
static void
-aarch64_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
+aarch64_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
{
/* We can't use move_block_from_reg, because it will use
the wrong mode, storing D regs only. */
- enum machine_mode mode = TImode;
+ machine_mode mode = TImode;
int off, i;
/* Set OFF to the offset from virtual_incoming_args_rtx of
type that doesn't match a non-VOIDmode *MODEP is found, then return -1,
otherwise return the count in the sub-tree. */
static int
-aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
+aapcs_vfp_sub_candidate (const_tree type, machine_mode *modep)
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT size;
switch (TREE_CODE (type))
static bool
aarch64_composite_type_p (const_tree type,
- enum machine_mode mode)
+ machine_mode mode)
{
if (type && (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE))
return true;
static bool
aarch64_short_vector_p (const_tree type,
- enum machine_mode mode)
+ machine_mode mode)
{
HOST_WIDE_INT size = -1;
floating-point aggregate or a homogeneous short-vector aggregate. */
static bool
-aarch64_vfp_is_call_or_return_candidate (enum machine_mode mode,
+aarch64_vfp_is_call_or_return_candidate (machine_mode mode,
const_tree type,
- enum machine_mode *base_mode,
+ machine_mode *base_mode,
int *count,
bool *is_ha)
{
- enum machine_mode new_mode = VOIDmode;
+ machine_mode new_mode = VOIDmode;
bool composite_p = aarch64_composite_type_p (type, mode);
if (is_ha != NULL) *is_ha = false;
/* Implements target hook vector_mode_supported_p. */
static bool
-aarch64_vector_mode_supported_p (enum machine_mode mode)
+aarch64_vector_mode_supported_p (machine_mode mode)
{
if (TARGET_SIMD
&& (mode == V4SImode || mode == V8HImode
/* Return appropriate SIMD container
for MODE within a vector of WIDTH bits. */
-static enum machine_mode
-aarch64_simd_container_mode (enum machine_mode mode, unsigned width)
+static machine_mode
+aarch64_simd_container_mode (machine_mode mode, unsigned width)
{
gcc_assert (width == 64 || width == 128);
if (TARGET_SIMD)
}
/* Return 128-bit container as the preferred SIMD mode for MODE. */
-static enum machine_mode
-aarch64_preferred_simd_mode (enum machine_mode mode)
+static machine_mode
+aarch64_preferred_simd_mode (machine_mode mode)
{
return aarch64_simd_container_mode (mode, 128);
}
mangled names. */
typedef struct
{
- enum machine_mode mode;
+ machine_mode mode;
const char *element_type_name;
const char *mangled_name;
} aarch64_simd_mangle_map_entry;
/* Return true for valid and false for invalid. */
bool
-aarch64_simd_valid_immediate (rtx op, enum machine_mode mode, bool inverse,
+aarch64_simd_valid_immediate (rtx op, machine_mode mode, bool inverse,
struct simd_immediate_info *info)
{
#define CHECK(STRIDE, ELSIZE, CLASS, TEST, SHIFT, NEG) \
/* Check of immediate shift constants are within range. */
bool
-aarch64_simd_shift_imm_p (rtx x, enum machine_mode mode, bool left)
+aarch64_simd_shift_imm_p (rtx x, machine_mode mode, bool left)
{
int bit_width = GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT;
if (left)
are either the floating-point constant 0.0 or the
integer constant 0. */
bool
-aarch64_simd_imm_zero_p (rtx x, enum machine_mode mode)
+aarch64_simd_imm_zero_p (rtx x, machine_mode mode)
{
return x == CONST0_RTX (mode);
}
bool
-aarch64_simd_imm_scalar_p (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
+aarch64_simd_imm_scalar_p (rtx x, machine_mode mode ATTRIBUTE_UNUSED)
{
HOST_WIDE_INT imm = INTVAL (x);
int i;
bool
aarch64_mov_operand_p (rtx x,
enum aarch64_symbol_context context,
- enum machine_mode mode)
+ machine_mode mode)
{
if (GET_CODE (x) == HIGH
&& aarch64_valid_symref (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
/* Return a const_int vector of VAL. */
rtx
-aarch64_simd_gen_const_vector_dup (enum machine_mode mode, int val)
+aarch64_simd_gen_const_vector_dup (machine_mode mode, int val)
{
int nunits = GET_MODE_NUNITS (mode);
rtvec v = rtvec_alloc (nunits);
/* Check OP is a legal scalar immediate for the MOVI instruction. */
bool
-aarch64_simd_scalar_immediate_valid_for_move (rtx op, enum machine_mode mode)
+aarch64_simd_scalar_immediate_valid_for_move (rtx op, machine_mode mode)
{
- enum machine_mode vmode;
+ machine_mode vmode;
gcc_assert (!VECTOR_MODE_P (mode));
vmode = aarch64_preferred_simd_mode (mode);
*/
rtx
-aarch64_simd_vect_par_cnst_half (enum machine_mode mode, bool high)
+aarch64_simd_vect_par_cnst_half (machine_mode mode, bool high)
{
int nunits = GET_MODE_NUNITS (mode);
rtvec v = rtvec_alloc (nunits / 2);
aarch64_simd_vect_par_cnst_half for more details. */
bool
-aarch64_simd_check_vect_par_cnst_half (rtx op, enum machine_mode mode,
+aarch64_simd_check_vect_par_cnst_half (rtx op, machine_mode mode,
bool high)
{
rtx ideal = aarch64_simd_vect_par_cnst_half (mode, high);
/* Emit code to place a AdvSIMD pair result in memory locations (with equal
registers). */
void
-aarch64_simd_emit_pair_result_insn (enum machine_mode mode,
+aarch64_simd_emit_pair_result_insn (machine_mode mode,
rtx (*intfn) (rtx, rtx, rtx), rtx destaddr,
rtx op1)
{
int
aarch64_simd_attr_length_move (rtx_insn *insn)
{
- enum machine_mode mode;
+ machine_mode mode;
extract_insn_cached (insn);
static rtx
aarch64_simd_dup_constant (rtx vals)
{
- enum machine_mode mode = GET_MODE (vals);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (vals);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
int n_elts = GET_MODE_NUNITS (mode);
bool all_same = true;
rtx x;
static rtx
aarch64_simd_make_constant (rtx vals)
{
- enum machine_mode mode = GET_MODE (vals);
+ machine_mode mode = GET_MODE (vals);
rtx const_dup;
rtx const_vec = NULL_RTX;
int n_elts = GET_MODE_NUNITS (mode);
void
aarch64_expand_vector_init (rtx target, rtx vals)
{
- enum machine_mode mode = GET_MODE (target);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (target);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
int n_elts = GET_MODE_NUNITS (mode);
int n_var = 0, one_var = -1;
bool all_same = true;
}
static unsigned HOST_WIDE_INT
-aarch64_shift_truncation_mask (enum machine_mode mode)
+aarch64_shift_truncation_mask (machine_mode mode)
{
return
(aarch64_vector_mode_supported_p (mode)
/* Emit load exclusive. */
static void
-aarch64_emit_load_exclusive (enum machine_mode mode, rtx rval,
+aarch64_emit_load_exclusive (machine_mode mode, rtx rval,
rtx mem, rtx model_rtx)
{
rtx (*gen) (rtx, rtx, rtx);
/* Emit store exclusive. */
static void
-aarch64_emit_store_exclusive (enum machine_mode mode, rtx bval,
+aarch64_emit_store_exclusive (machine_mode mode, rtx bval,
rtx rval, rtx mem, rtx model_rtx)
{
rtx (*gen) (rtx, rtx, rtx, rtx);
aarch64_expand_compare_and_swap (rtx operands[])
{
rtx bval, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x;
- enum machine_mode mode, cmp_mode;
+ machine_mode mode, cmp_mode;
rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
bval = operands[0];
aarch64_split_compare_and_swap (rtx operands[])
{
rtx rval, mem, oldval, newval, scratch;
- enum machine_mode mode;
+ machine_mode mode;
bool is_weak;
rtx_code_label *label1, *label2;
rtx x, cond;
aarch64_split_atomic_op (enum rtx_code code, rtx old_out, rtx new_out, rtx mem,
rtx value, rtx model_rtx, rtx cond)
{
- enum machine_mode mode = GET_MODE (mem);
- enum machine_mode wmode = (mode == DImode ? DImode : SImode);
+ machine_mode mode = GET_MODE (mem);
+ machine_mode wmode = (mode == DImode ? DImode : SImode);
rtx_code_label *label;
rtx x;
}
/* Target hook for c_mode_for_suffix. */
-static enum machine_mode
+static machine_mode
aarch64_c_mode_for_suffix (char suffix)
{
if (suffix == 'q')
char*
aarch64_output_simd_mov_immediate (rtx const_vector,
- enum machine_mode mode,
+ machine_mode mode,
unsigned width)
{
bool is_valid;
char*
aarch64_output_scalar_simd_mov_immediate (rtx immediate,
- enum machine_mode mode)
+ machine_mode mode)
{
- enum machine_mode vmode;
+ machine_mode vmode;
gcc_assert (!VECTOR_MODE_P (mode));
vmode = aarch64_simd_container_mode (mode, 64);
unsigned int dest = REGNO (operands[0]);
unsigned int src1 = REGNO (operands[1]);
unsigned int src2 = REGNO (operands[2]);
- enum machine_mode halfmode = GET_MODE (operands[1]);
+ machine_mode halfmode = GET_MODE (operands[1]);
unsigned int halfregs = HARD_REGNO_NREGS (src1, halfmode);
rtx destlo, desthi;
{
rtx target, op0, op1;
unsigned char perm[MAX_VECT_LEN];
- enum machine_mode vmode;
+ machine_mode vmode;
unsigned char nelt;
bool one_vector_p;
bool testing_p;
static void
aarch64_expand_vec_perm_1 (rtx target, rtx op0, rtx op1, rtx sel)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
bool one_vector_p = rtx_equal_p (op0, op1);
gcc_checking_assert (vmode == V8QImode || vmode == V16QImode);
void
aarch64_expand_vec_perm (rtx target, rtx op0, rtx op1, rtx sel)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
unsigned int nelt = GET_MODE_NUNITS (vmode);
bool one_vector_p = rtx_equal_p (op0, op1);
rtx mask;
unsigned int i, odd, mask, nelt = d->nelt;
rtx out, in0, in1, x;
rtx (*gen) (rtx, rtx, rtx);
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
if (GET_MODE_UNIT_SIZE (vmode) > 8)
return false;
unsigned int i, odd, mask, nelt = d->nelt;
rtx out, in0, in1, x;
rtx (*gen) (rtx, rtx, rtx);
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
if (GET_MODE_UNIT_SIZE (vmode) > 8)
return false;
unsigned int i, high, mask, nelt = d->nelt;
rtx out, in0, in1, x;
rtx (*gen) (rtx, rtx, rtx);
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
if (GET_MODE_UNIT_SIZE (vmode) > 8)
return false;
rtx (*gen) (rtx, rtx, rtx);
rtx out = d->target;
rtx in0;
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
unsigned int i, elt, nelt = d->nelt;
rtx lane;
aarch64_evpc_tbl (struct expand_vec_perm_d *d)
{
rtx rperm[MAX_VECT_LEN], sel;
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
unsigned int i, nelt = d->nelt;
if (d->testing_p)
}
static bool
-aarch64_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+aarch64_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel)
{
struct expand_vec_perm_d d;
/* Implement target hook CANNOT_CHANGE_MODE_CLASS. */
bool
-aarch64_cannot_change_mode_class (enum machine_mode from,
- enum machine_mode to,
+aarch64_cannot_change_mode_class (machine_mode from,
+ machine_mode to,
enum reg_class rclass)
{
/* Full-reg subregs are allowed on general regs or any class if they are
/* Implement MODES_TIEABLE_P. */
bool
-aarch64_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+aarch64_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
if (GET_MODE_CLASS (mode1) == GET_MODE_CLASS (mode2))
return true;
static void
aarch64_copy_one_block_and_progress_pointers (rtx *src, rtx *dst,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx reg = gen_reg_rtx (mode);
-/* We can't use enum machine_mode inside a generator file because it
+/* We can't use machine_mode inside a generator file because it
hasn't been created yet; we shouldn't be using any code that
needs the real definition though, so this ought to be safe. */
#ifdef GENERATOR_FILE
#define MACHMODE int
#else
#include "insn-modes.h"
-#define MACHMODE enum machine_mode
+#define MACHMODE machine_mode
#endif
(match_operand 2 "aarch64_valid_symref" "S")))]
""
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
emit_insn ((mode == DImode
? gen_add_losym_di
UNSPEC_GOTSMALLTLS))]
""
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
emit_insn ((mode == DImode
? gen_tlsle_small_di
: gen_tlsle_small_si) (operands[0],
(match_operand 1 "memory_operand")]
""
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
emit_insn ((mode == DImode
? gen_stack_protect_set_di
""
{
rtx result;
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
result = gen_reg_rtx(mode);
extern void alpha_expand_epilogue (void);
extern void alpha_output_filename (FILE *, const char *);
-extern bool alpha_legitimate_constant_p (enum machine_mode, rtx);
-extern rtx alpha_legitimize_reload_address (rtx, enum machine_mode,
+extern bool alpha_legitimate_constant_p (machine_mode, rtx);
+extern rtx alpha_legitimize_reload_address (rtx, machine_mode,
int, int, int);
extern rtx split_small_symbolic_operand (rtx);
extern enum reg_class alpha_preferred_reload_class (rtx, enum reg_class);
extern void alpha_set_memflags (rtx, rtx);
-extern bool alpha_split_const_mov (enum machine_mode, rtx *);
-extern bool alpha_expand_mov (enum machine_mode, rtx *);
-extern bool alpha_expand_mov_nobwx (enum machine_mode, rtx *);
-extern void alpha_expand_movmisalign (enum machine_mode, rtx *);
+extern bool alpha_split_const_mov (machine_mode, rtx *);
+extern bool alpha_expand_mov (machine_mode, rtx *);
+extern bool alpha_expand_mov_nobwx (machine_mode, rtx *);
+extern void alpha_expand_movmisalign (machine_mode, rtx *);
extern void alpha_emit_floatuns (rtx[]);
-extern rtx alpha_emit_conditional_move (rtx, enum machine_mode);
-extern void alpha_split_tmode_pair (rtx[], enum machine_mode, bool);
+extern rtx alpha_emit_conditional_move (rtx, machine_mode);
+extern void alpha_split_tmode_pair (rtx[], machine_mode, bool);
extern void alpha_split_tfmode_frobsign (rtx[], rtx (*)(rtx, rtx, rtx));
extern void alpha_expand_unaligned_load (rtx, rtx, HOST_WIDE_INT,
HOST_WIDE_INT, int);
extern int alpha_expand_block_clear (rtx []);
extern rtx alpha_expand_zap_mask (HOST_WIDE_INT);
extern void alpha_expand_builtin_vector_binop (rtx (*)(rtx, rtx, rtx),
- enum machine_mode,
+ machine_mode,
rtx, rtx, rtx);
extern void alpha_expand_builtin_establish_vms_condition_handler (rtx, rtx);
extern void alpha_expand_builtin_revert_vms_condition_handler (rtx);
extern void alpha_initialize_trampoline (rtx, rtx, rtx, int, int, int);
extern rtx alpha_va_arg (tree, tree);
-extern rtx function_value (const_tree, const_tree, enum machine_mode);
+extern rtx function_value (const_tree, const_tree, machine_mode);
extern void alpha_start_function (FILE *, const char *, tree);
extern void alpha_end_function (FILE *, const char *, tree);
extern bool alpha_find_lo_sum_using_gp (rtx);
#ifdef REAL_VALUE_TYPE
-extern int check_float_value (enum machine_mode, REAL_VALUE_TYPE *, int);
+extern int check_float_value (machine_mode, REAL_VALUE_TYPE *, int);
#endif
#ifdef RTX_CODE
-extern void alpha_emit_conditional_branch (rtx[], enum machine_mode);
-extern bool alpha_emit_setcc (rtx[], enum machine_mode);
+extern void alpha_emit_conditional_branch (rtx[], machine_mode);
+extern bool alpha_emit_setcc (rtx[], machine_mode);
extern int alpha_split_conditional_move (enum rtx_code, rtx, rtx, rtx, rtx);
extern void alpha_emit_xfloating_arith (enum rtx_code, rtx[]);
extern void alpha_emit_xfloating_cvt (enum rtx_code, rtx[]);
extern rtx alpha_use_linkage (rtx, bool, bool);
#if TARGET_ABI_OPEN_VMS
-extern enum avms_arg_type alpha_arg_type (enum machine_mode);
+extern enum avms_arg_type alpha_arg_type (machine_mode);
extern rtx alpha_arg_info_reg_val (CUMULATIVE_ARGS);
extern void avms_asm_output_external (FILE *, tree, const char *);
extern void vms_output_aligned_decl_common (FILE *, tree, const char *,
#if TARGET_ABI_OPEN_VMS
static void alpha_write_linkage (FILE *, const char *);
-static bool vms_valid_pointer_mode (enum machine_mode);
+static bool vms_valid_pointer_mode (machine_mode);
#else
#define vms_patch_builtins() gcc_unreachable()
#endif
indicates only DFmode. */
static bool
-alpha_scalar_mode_supported_p (enum machine_mode mode)
+alpha_scalar_mode_supported_p (machine_mode mode)
{
switch (mode)
{
or when expand_vector_operations can do something useful. */
static bool
-alpha_vector_mode_supported_p (enum machine_mode mode)
+alpha_vector_mode_supported_p (machine_mode mode)
{
return mode == V8QImode || mode == V4HImode || mode == V2SImode;
}
#if TARGET_ABI_OPEN_VMS
static bool
-vms_valid_pointer_mode (enum machine_mode mode)
+vms_valid_pointer_mode (machine_mode mode)
{
return (mode == SImode || mode == DImode);
}
low-order three bits; this is an "unaligned" access. */
static bool
-alpha_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+alpha_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
/* If this is an ldq_u type address, discard the outer AND. */
if (mode == DImode
to be legitimate. If we find one, return the new, valid address. */
static rtx
-alpha_legitimize_address_1 (rtx x, rtx scratch, enum machine_mode mode)
+alpha_legitimize_address_1 (rtx x, rtx scratch, machine_mode mode)
{
HOST_WIDE_INT addend;
static rtx
alpha_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx new_x = alpha_legitimize_address_1 (x, NULL_RTX, mode);
return new_x ? new_x : x;
should never be spilling symbolic operands to the constant pool, ever. */
static bool
-alpha_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+alpha_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
enum rtx_code code = GET_CODE (x);
return code == SYMBOL_REF || code == LABEL_REF || code == CONST;
rtx
alpha_legitimize_reload_address (rtx x,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
alpha_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
bool float_mode_p = FLOAT_MODE_P (mode);
const struct alpha_rtx_cost_data *cost_data;
static reg_class_t
alpha_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
enum reg_class rclass = (enum reg_class) rclass_i;
gcc_unreachable ();
}
\f
-static rtx alpha_emit_set_const (rtx, enum machine_mode, HOST_WIDE_INT,
+static rtx alpha_emit_set_const (rtx, machine_mode, HOST_WIDE_INT,
int, bool);
/* Internal routine for alpha_emit_set_const to check for N or below insns.
and return pc_rtx if successful. */
static rtx
-alpha_emit_set_const_1 (rtx target, enum machine_mode mode,
+alpha_emit_set_const_1 (rtx target, machine_mode mode,
HOST_WIDE_INT c, int n, bool no_output)
{
HOST_WIDE_INT new_const;
insns and emitted. */
static rtx
-alpha_emit_set_const (rtx target, enum machine_mode mode,
+alpha_emit_set_const (rtx target, machine_mode mode,
HOST_WIDE_INT c, int n, bool no_output)
{
- enum machine_mode orig_mode = mode;
+ machine_mode orig_mode = mode;
rtx orig_target = target;
rtx result = 0;
int i;
take three or fewer instructions, and floating-point zero. */
bool
-alpha_legitimate_constant_p (enum machine_mode mode, rtx x)
+alpha_legitimate_constant_p (machine_mode mode, rtx x)
{
HOST_WIDE_INT i0, i1;
instruction to load. Emit that multi-part load. */
bool
-alpha_split_const_mov (enum machine_mode mode, rtx *operands)
+alpha_split_const_mov (machine_mode mode, rtx *operands)
{
HOST_WIDE_INT i0, i1;
rtx temp = NULL_RTX;
We don't handle non-bwx subword loads here. */
bool
-alpha_expand_mov (enum machine_mode mode, rtx *operands)
+alpha_expand_mov (machine_mode mode, rtx *operands)
{
rtx tmp;
return true if all work is done. */
bool
-alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
+alpha_expand_mov_nobwx (machine_mode mode, rtx *operands)
{
rtx seq;
that is not naturally aligned. Emit instructions to load it. */
void
-alpha_expand_movmisalign (enum machine_mode mode, rtx *operands)
+alpha_expand_movmisalign (machine_mode mode, rtx *operands)
{
/* Honor misaligned loads, for those we promised to do so. */
if (MEM_P (operands[1]))
alpha_emit_floatuns (rtx operands[2])
{
rtx neglab, donelab, i0, i1, f0, in, out;
- enum machine_mode mode;
+ machine_mode mode;
out = operands[0];
in = force_reg (DImode, operands[1]);
/* Generate the comparison for a conditional branch. */
void
-alpha_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
+alpha_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
{
enum rtx_code cmp_code, branch_code;
- enum machine_mode branch_mode = VOIDmode;
+ machine_mode branch_mode = VOIDmode;
enum rtx_code code = GET_CODE (operands[0]);
rtx op0 = operands[1], op1 = operands[2];
rtx tem;
valid. Return the final comparison, or NULL if we can't work. */
bool
-alpha_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
+alpha_emit_setcc (rtx operands[], machine_mode cmp_mode)
{
enum rtx_code cmp_code;
enum rtx_code code = GET_CODE (operands[1]);
the conditional move). */
rtx
-alpha_emit_conditional_move (rtx cmp, enum machine_mode mode)
+alpha_emit_conditional_move (rtx cmp, machine_mode mode)
{
enum rtx_code code = GET_CODE (cmp);
enum rtx_code cmov_code = NE;
rtx op0 = XEXP (cmp, 0);
rtx op1 = XEXP (cmp, 1);
- enum machine_mode cmp_mode
+ machine_mode cmp_mode
= (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
- enum machine_mode cmov_mode = VOIDmode;
+ machine_mode cmov_mode = VOIDmode;
int local_fast_math = flag_unsafe_math_optimizations;
rtx tem;
rtx t_rtx, rtx f_rtx)
{
HOST_WIDE_INT t, f, diff;
- enum machine_mode mode;
+ machine_mode mode;
rtx target, subtarget, tmp;
mode = GET_MODE (dest);
This is used by *movtf_internal and *movti_internal. */
void
-alpha_split_tmode_pair (rtx operands[4], enum machine_mode mode,
+alpha_split_tmode_pair (rtx operands[4], machine_mode mode,
bool fixup_overlap)
{
switch (GET_CODE (operands[1]))
HOST_WIDE_INT ofs, int sign)
{
rtx meml, memh, addr, extl, exth, tmp, mema;
- enum machine_mode mode;
+ machine_mode mode;
if (TARGET_BWX && size == 2)
{
&& align >= 32
&& !(alignofs == 4 && bytes >= 4))
{
- enum machine_mode mode = (align >= 64 ? DImode : SImode);
+ machine_mode mode = (align >= 64 ? DImode : SImode);
int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
rtx mem, tmp;
HOST_WIDE_INT mask;
void
alpha_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
- enum machine_mode mode,
+ machine_mode mode,
rtx op0, rtx op1, rtx op2)
{
op0 = gen_lowpart (mode, op0);
instruction in MODE. */
static void
-emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
+emit_load_locked (machine_mode mode, rtx reg, rtx mem)
{
rtx (*fn) (rtx, rtx) = NULL;
if (mode == SImode)
instruction in MODE. */
static void
-emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
+emit_store_conditional (machine_mode mode, rtx res, rtx mem, rtx val)
{
rtx (*fn) (rtx, rtx, rtx) = NULL;
if (mode == SImode)
instruction in MODE. */
static rtx
-emit_insxl (enum machine_mode mode, rtx op1, rtx op2)
+emit_insxl (machine_mode mode, rtx op1, rtx op2)
{
rtx ret = gen_reg_rtx (DImode);
rtx (*fn) (rtx, rtx, rtx);
alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val, rtx before,
rtx after, rtx scratch, enum memmodel model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));
alpha_pre_atomic_barrier (model);
rtx cond, retval, mem, oldval, newval;
bool is_weak;
enum memmodel mod_s, mod_f;
- enum machine_mode mode;
+ machine_mode mode;
rtx label1, label2, x;
cond = operands[0];
alpha_expand_compare_and_swap_12 (rtx operands[])
{
rtx cond, dst, mem, oldval, newval, is_weak, mod_s, mod_f;
- enum machine_mode mode;
+ machine_mode mode;
rtx addr, align, wdst;
rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
alpha_split_compare_and_swap_12 (rtx operands[])
{
rtx cond, dest, orig_mem, oldval, newval, align, scratch;
- enum machine_mode mode;
+ machine_mode mode;
bool is_weak;
enum memmodel mod_s, mod_f;
rtx label1, label2, mem, addr, width, mask, x;
{
rtx retval, mem, val, scratch;
enum memmodel model;
- enum machine_mode mode;
+ machine_mode mode;
rtx label, x, cond;
retval = operands[0];
alpha_expand_atomic_exchange_12 (rtx operands[])
{
rtx dst, mem, val, model;
- enum machine_mode mode;
+ machine_mode mode;
rtx addr, align, wdst;
rtx (*gen) (rtx, rtx, rtx, rtx, rtx);
{
rtx dest, orig_mem, addr, val, align, scratch;
rtx label, mem, width, mask, x;
- enum machine_mode mode;
+ machine_mode mode;
enum memmodel model;
dest = operands[0];
and the rest are pushed. */
static rtx
-alpha_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+alpha_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
(TYPE is null for libcalls where that information may not be available.) */
static void
-alpha_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+alpha_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static int
alpha_arg_partial_bytes (cumulative_args_t cum_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
static bool
alpha_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
int size;
if (type)
static bool
alpha_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
rtx
function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
unsigned int regnum, dummy ATTRIBUTE_UNUSED;
enum mode_class mclass;
case MODE_COMPLEX_FLOAT:
{
- enum machine_mode cmode = GET_MODE_INNER (mode);
+ machine_mode cmode = GET_MODE_INNER (mode);
return gen_rtx_PARALLEL
(VOIDmode,
variable number of arguments. */
static void
-alpha_setup_incoming_varargs (cumulative_args_t pcum, enum machine_mode mode,
+alpha_setup_incoming_varargs (cumulative_args_t pcum, machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
CUMULATIVE_ARGS cum = *get_cumulative_args (pcum);
static rtx
alpha_expand_builtin (tree exp, rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2
if (nonvoid)
{
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (!target
|| GET_MODE (target) != tmode
|| !(*insn_data[icode].operand[0].predicate) (target, tmode))
honor small data. */
static section *
-alpha_elf_select_rtx_section (enum machine_mode mode, rtx x,
+alpha_elf_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value)
/* Return the VMS argument type corresponding to MODE. */
enum avms_arg_type
-alpha_arg_type (enum machine_mode mode)
+alpha_arg_type (machine_mode mode)
{
switch (mode)
{
#ifdef RTX_CODE
-extern enum machine_mode arc_select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode arc_select_cc_mode (enum rtx_code, rtx, rtx);
/* Define the function that build the compare insn for scc, bcc and mov*cc. */
-extern struct rtx_def *gen_compare_reg (rtx, enum machine_mode);
+extern struct rtx_def *gen_compare_reg (rtx, machine_mode);
/* Declarations for various fns used in the .md file. */
extern void arc_output_function_epilogue (FILE *, HOST_WIDE_INT, int);
extern const char *output_shift (rtx *);
-extern bool compact_sda_memory_operand (rtx op,enum machine_mode mode);
+extern bool compact_sda_memory_operand (rtx op, machine_mode mode);
extern bool arc_double_limm_p (rtx);
extern void arc_print_operand (FILE *, rtx, int);
extern void arc_print_operand_address (FILE *, rtx);
extern void arc_set_default_type_attributes(tree type);
extern const char *arc_output_libcall (const char *);
extern bool prepare_extend_operands (rtx *operands, enum rtx_code code,
- enum machine_mode omode);
+ machine_mode omode);
extern int arc_output_addsi (rtx *operands, bool, bool);
extern int arc_output_commutative_cond_exec (rtx *operands, bool);
extern bool arc_expand_movmem (rtx *operands);
-extern bool prepare_move_operands (rtx *operands, enum machine_mode mode);
+extern bool prepare_move_operands (rtx *operands, machine_mode mode);
extern void emit_shift (enum rtx_code, rtx, rtx, rtx);
#endif /* RTX_CODE */
extern rtx arc_return_addr_rtx (int , rtx);
extern bool check_if_valid_regno_const (rtx *, int);
extern bool check_if_valid_sleep_operand (rtx *, int);
-extern bool arc_legitimate_constant_p (enum machine_mode, rtx);
+extern bool arc_legitimate_constant_p (machine_mode, rtx);
extern bool arc_legitimate_pc_offset_p (rtx);
extern bool arc_legitimate_pic_addr_p (rtx);
-extern void emit_pic_move (rtx *, enum machine_mode);
+extern void emit_pic_move (rtx *, machine_mode);
extern bool arc_raw_symbolic_reference_mentioned_p (rtx, bool);
extern bool arc_legitimate_pic_operand_p (rtx);
extern bool arc_is_longcall_p (rtx);
extern bool arc_is_shortcall_p (rtx);
extern bool arc_profile_call (rtx callee);
extern bool valid_brcc_with_delay_p (rtx *);
-extern bool small_data_pattern (rtx , enum machine_mode);
+extern bool small_data_pattern (rtx , machine_mode);
extern rtx arc_rewrite_small_data (rtx);
extern bool arc_ccfsm_cond_exec_p (void);
struct secondary_reload_info;
-extern int arc_register_move_cost (enum machine_mode, enum reg_class,
+extern int arc_register_move_cost (machine_mode, enum reg_class,
enum reg_class);
extern rtx disi_highpart (rtx);
extern int arc_adjust_insn_length (rtx_insn *, int, bool);
extern bool arc_short_comparison_p (rtx, int);
extern bool arc_epilogue_uses (int regno);
/* insn-attrtab.c doesn't include reload.h, which declares regno_clobbered_p. */
-extern int regno_clobbered_p (unsigned int, rtx_insn *, enum machine_mode, int);
+extern int regno_clobbered_p (unsigned int, rtx_insn *, machine_mode, int);
extern int arc_return_slot_offset (void);
-extern bool arc_legitimize_reload_address (rtx *, enum machine_mode, int, int);
+extern bool arc_legitimize_reload_address (rtx *, machine_mode, int, int);
static void arc_internal_label (FILE *, const char *, unsigned long);
static void arc_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
tree);
-static int arc_address_cost (rtx, enum machine_mode, addr_space_t, bool);
+static int arc_address_cost (rtx, machine_mode, addr_space_t, bool);
static void arc_encode_section_info (tree decl, rtx rtl, int first);
static void arc_init_builtins (void);
-static rtx arc_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx arc_expand_builtin (tree, rtx, rtx, machine_mode, int);
static int branch_dest (rtx);
static void arc_output_pic_addr_const (FILE *, rtx, int);
-void emit_pic_move (rtx *, enum machine_mode);
+void emit_pic_move (rtx *, machine_mode);
bool arc_legitimate_pic_operand_p (rtx);
static bool arc_function_ok_for_sibcall (tree, tree);
static rtx arc_function_value (const_tree, const_tree, bool);
static void arc_init_reg_tables (void);
static bool arc_return_in_memory (const_tree, const_tree);
static void arc_init_simd_builtins (void);
-static bool arc_vector_mode_supported_p (enum machine_mode);
+static bool arc_vector_mode_supported_p (machine_mode);
static bool arc_can_use_doloop_p (const widest_int &, const widest_int &,
unsigned int, bool);
/* Implements target hook vector_mode_supported_p. */
static bool
-arc_vector_mode_supported_p (enum machine_mode mode)
+arc_vector_mode_supported_p (machine_mode mode)
{
if (!TARGET_SIMD_SET)
return false;
const rtx_insn *followee);
static rtx frame_insn (rtx);
-static void arc_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void arc_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx arc_legitimize_address_0 (rtx, rtx, enum machine_mode mode);
+static rtx arc_legitimize_address_0 (rtx, rtx, machine_mode mode);
static void arc_finalize_pic (void);
}
static reg_class_t
-arc_secondary_reload (bool in_p, rtx x, reg_class_t cl, enum machine_mode,
+arc_secondary_reload (bool in_p, rtx x, reg_class_t cl, machine_mode,
secondary_reload_info *)
{
if (cl == DOUBLE_REGS)
/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
return the mode to be used for the comparison. */
-enum machine_mode
+machine_mode
arc_select_cc_mode (enum rtx_code op, rtx x, rtx y)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx x1;
/* For an operation that sets the condition codes as a side-effect, the
for (i = 0; i < NUM_MACHINE_MODES; i++)
{
- enum machine_mode m = (enum machine_mode) i;
+ machine_mode m = (machine_mode) i;
switch (GET_MODE_CLASS (m))
{
return the rtx for the cc reg in the proper mode. */
rtx
-gen_compare_reg (rtx comparison, enum machine_mode omode)
+gen_compare_reg (rtx comparison, machine_mode omode)
{
enum rtx_code code = GET_CODE (comparison);
rtx x = XEXP (comparison, 0);
rtx y = XEXP (comparison, 1);
rtx tmp, cc_reg;
- enum machine_mode mode, cmode;
+ machine_mode mode, cmode;
cmode = GET_MODE (x);
static void
arc_setup_incoming_varargs (cumulative_args_t args_so_far,
- enum machine_mode mode, tree type,
+ machine_mode mode, tree type,
int *pretend_size, int no_rtl)
{
int first_anon_arg;
If ADDR is not a valid address, its cost is irrelevant. */
int
-arc_address_cost (rtx addr, enum machine_mode, addr_space_t, bool speed)
+arc_address_cost (rtx addr, machine_mode, addr_space_t, bool speed)
{
switch (GET_CODE (addr))
{
{
/* static int loopend_lab;*/
rtx shift = operands[3];
- enum machine_mode mode = GET_MODE (shift);
+ machine_mode mode = GET_MODE (shift);
enum rtx_code code = GET_CODE (shift);
const char *shift_one;
/* Emit insns to move operands[1] into operands[0]. */
void
-emit_pic_move (rtx *operands, enum machine_mode)
+emit_pic_move (rtx *operands, machine_mode)
{
rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* Implement TARGET_ARG_PARTIAL_BYTES. */
static int
-arc_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+arc_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
and the rest are pushed. */
static rtx
-arc_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+arc_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
course function_arg_partial_nregs will come into play. */
static void
-arc_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+arc_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (valtype);
+ machine_mode mode = TYPE_MODE (valtype);
int unsignedp ATTRIBUTE_UNUSED;
unsignedp = TYPE_UNSIGNED (valtype);
satisfies CONSTANT_P. */
bool
-arc_legitimate_constant_p (enum machine_mode, rtx x)
+arc_legitimate_constant_p (machine_mode, rtx x)
{
if (!flag_pic)
return true;
}
static bool
-arc_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+arc_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
if (RTX_OK_FOR_BASE_P (x, strict))
return true;
/* Determine if it's legal to put X into the constant pool. */
static bool
-arc_cannot_force_const_mem (enum machine_mode mode, rtx x)
+arc_cannot_force_const_mem (machine_mode mode, rtx x)
{
return !arc_legitimate_constant_p (mode, x);
}
arc_init_simd_builtins ();
}
-static rtx arc_expand_simd_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx arc_expand_simd_builtin (tree, rtx, rtx, machine_mode, int);
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
arc_expand_builtin (tree exp,
rtx target,
rtx subtarget,
- enum machine_mode mode,
+ machine_mode mode,
int ignore)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
rtx op1;
int fcode = DECL_FUNCTION_CODE (fndecl);
int icode;
- enum machine_mode mode0;
- enum machine_mode mode1;
+ machine_mode mode0;
+ machine_mode mode1;
if (fcode > ARC_SIMD_BUILTIN_BEGIN && fcode < ARC_SIMD_BUILTIN_END)
return arc_expand_simd_builtin (exp, target, subtarget, mode, ignore);
static bool
arc_pass_by_reference (cumulative_args_t ca_v ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type,
bool named ATTRIBUTE_UNUSED)
{
a PLUS. */
bool
-small_data_pattern (rtx op, enum machine_mode)
+small_data_pattern (rtx op, machine_mode)
{
return (GET_CODE (op) != SEQUENCE
&& for_each_rtx (&op, small_data_pattern_1, 0));
/* volatile cache option still to be handled. */
bool
-compact_sda_memory_operand (rtx op, enum machine_mode mode)
+compact_sda_memory_operand (rtx op, machine_mode mode)
{
rtx addr;
int size;
arc_expand_simd_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
unsigned int i;
int fcode = DECL_FUNCTION_CODE (fndecl);
int icode;
- enum machine_mode mode0;
- enum machine_mode mode1;
- enum machine_mode mode2;
- enum machine_mode mode3;
- enum machine_mode mode4;
+ machine_mode mode0;
+ machine_mode mode1;
+ machine_mode mode2;
+ machine_mode mode3;
+ machine_mode mode4;
const struct builtin_description * d;
for (i = 0, d = arc_simd_builtin_desc_list;
}
int
-arc_register_move_cost (enum machine_mode,
+arc_register_move_cost (machine_mode,
enum reg_class from_class, enum reg_class to_class)
{
/* The ARC600 has no bypass for extension registers, hence a nop might be
for (i = 0; size > 0; i ^= 1, size -= piece)
{
rtx tmp;
- enum machine_mode mode;
+ machine_mode mode;
if (piece > size)
piece = size & -size;
been emitted. */
bool
-prepare_move_operands (rtx *operands, enum machine_mode mode)
+prepare_move_operands (rtx *operands, machine_mode mode)
{
/* We used to do this only for MODE_INT Modes, but addresses to floating
point variables may well be in the small data section. */
bool
prepare_extend_operands (rtx *operands, enum rtx_code code,
- enum machine_mode omode)
+ machine_mode omode)
{
if (!TARGET_NO_SDATA_SET && small_data_pattern (operands[1], Pmode))
{
gcc_assert (ARC_INVERSE_CONDITION_CODE (raw_cc) == statep->cc);
- enum machine_mode ccm = GET_MODE (XEXP (cond, 0));
+ machine_mode ccm = GET_MODE (XEXP (cond, 0));
enum rtx_code code = reverse_condition (GET_CODE (cond));
if (code == UNKNOWN || ccm == CC_FP_GTmode || ccm == CC_FP_GEmode)
code = reverse_condition_maybe_unordered (GET_CODE (cond));
gcc_unreachable ();
if (reverse != !INSN_FROM_TARGET_P (dlay))
{
- enum machine_mode ccm = GET_MODE (XEXP (cond, 0));
+ machine_mode ccm = GET_MODE (XEXP (cond, 0));
enum rtx_code code = reverse_condition (GET_CODE (cond));
if (code == UNKNOWN || ccm == CC_FP_GTmode || ccm == CC_FP_GEmode)
code = reverse_condition_maybe_unordered (GET_CODE (cond));
static rtx
arc_legitimize_address_0 (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx addr, inner;
}
else if (GET_CODE (addr) == SYMBOL_REF && !SYMBOL_REF_FUNCTION_P (addr))
x = force_reg (Pmode, x);
- if (memory_address_p ((enum machine_mode) mode, x))
+ if (memory_address_p ((machine_mode) mode, x))
return x;
return NULL_RTX;
}
static rtx
-arc_legitimize_address (rtx orig_x, rtx oldx, enum machine_mode mode)
+arc_legitimize_address (rtx orig_x, rtx oldx, machine_mode mode)
{
rtx new_x = arc_legitimize_address_0 (orig_x, oldx, mode);
rtx
arc_split_move (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
int i;
int swap = 0;
rtx xop[4];
}
static reg_class_t
-arc_spill_class (reg_class_t /* orig_class */, enum machine_mode)
+arc_spill_class (reg_class_t /* orig_class */, machine_mode)
{
return GENERAL_REGS;
}
bool
-arc_legitimize_reload_address (rtx *p, enum machine_mode mode, int opnum,
+arc_legitimize_reload_address (rtx *p, machine_mode mode, int opnum,
int itype)
{
rtx x = *p;
/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
return the mode to be used for the comparison. */
-/*extern enum machine_mode arc_select_cc_mode ();*/
+/*extern machine_mode arc_select_cc_mode ();*/
#define SELECT_CC_MODE(OP, X, Y) \
arc_select_cc_mode (OP, X, Y)
(define_special_predicate "cc_set_register"
(match_code "reg")
{
- enum machine_mode rmode = GET_MODE (op);
+ machine_mode rmode = GET_MODE (op);
if (mode == VOIDmode)
{
extern int aarch_crypto_can_dual_issue (rtx_insn *, rtx_insn *);
extern bool aarch_rev16_p (rtx);
-extern bool aarch_rev16_shleft_mask_imm_p (rtx, enum machine_mode);
-extern bool aarch_rev16_shright_mask_imm_p (rtx, enum machine_mode);
+extern bool aarch_rev16_shleft_mask_imm_p (rtx, machine_mode);
+extern bool aarch_rev16_shright_mask_imm_p (rtx, machine_mode);
extern int arm_early_load_addr_dep (rtx, rtx);
extern int arm_early_store_addr_dep (rtx, rtx);
extern int arm_mac_accumulator_is_mul_result (rtx, rtx);
}
bool
-aarch_rev16_shright_mask_imm_p (rtx val, enum machine_mode mode)
+aarch_rev16_shright_mask_imm_p (rtx val, machine_mode mode)
{
return CONST_INT_P (val)
&& INTVAL (val)
}
bool
-aarch_rev16_shleft_mask_imm_p (rtx val, enum machine_mode mode)
+aarch_rev16_shleft_mask_imm_p (rtx val, machine_mode mode)
{
return CONST_INT_P (val)
&& INTVAL (val)
static bool
-aarch_rev16_p_1 (rtx lhs, rtx rhs, enum machine_mode mode)
+aarch_rev16_p_1 (rtx lhs, rtx rhs, machine_mode mode)
{
if (GET_CODE (lhs) == AND
&& GET_CODE (XEXP (lhs, 0)) == ASHIFT
#ifdef RTX_CODE
-extern bool arm_vector_mode_supported_p (enum machine_mode);
-extern bool arm_small_register_classes_for_mode_p (enum machine_mode);
-extern int arm_hard_regno_mode_ok (unsigned int, enum machine_mode);
-extern bool arm_modes_tieable_p (enum machine_mode, enum machine_mode);
+extern bool arm_vector_mode_supported_p (machine_mode);
+extern bool arm_small_register_classes_for_mode_p (machine_mode);
+extern int arm_hard_regno_mode_ok (unsigned int, machine_mode);
+extern bool arm_modes_tieable_p (machine_mode, machine_mode);
extern int const_ok_for_arm (HOST_WIDE_INT);
extern int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
extern int const_ok_for_dimode_op (HOST_WIDE_INT, enum rtx_code);
-extern int arm_split_constant (RTX_CODE, enum machine_mode, rtx,
+extern int arm_split_constant (RTX_CODE, machine_mode, rtx,
HOST_WIDE_INT, rtx, rtx, int);
extern int legitimate_pic_operand_p (rtx);
-extern rtx legitimize_pic_address (rtx, enum machine_mode, rtx);
+extern rtx legitimize_pic_address (rtx, machine_mode, rtx);
extern rtx legitimize_tls_address (rtx, rtx);
-extern bool arm_legitimate_address_p (enum machine_mode, rtx, bool);
-extern int arm_legitimate_address_outer_p (enum machine_mode, rtx, RTX_CODE, int);
-extern int thumb_legitimate_offset_p (enum machine_mode, HOST_WIDE_INT);
-extern bool arm_legitimize_reload_address (rtx *, enum machine_mode, int, int,
+extern bool arm_legitimate_address_p (machine_mode, rtx, bool);
+extern int arm_legitimate_address_outer_p (machine_mode, rtx, RTX_CODE, int);
+extern int thumb_legitimate_offset_p (machine_mode, HOST_WIDE_INT);
+extern bool arm_legitimize_reload_address (rtx *, machine_mode, int, int,
int);
-extern rtx thumb_legitimize_reload_address (rtx *, enum machine_mode, int, int,
+extern rtx thumb_legitimize_reload_address (rtx *, machine_mode, int, int,
int);
-extern int thumb1_legitimate_address_p (enum machine_mode, rtx, int);
-extern bool ldm_stm_operation_p (rtx, bool, enum machine_mode mode,
+extern int thumb1_legitimate_address_p (machine_mode, rtx, int);
+extern bool ldm_stm_operation_p (rtx, bool, machine_mode mode,
bool, bool);
extern int arm_const_double_rtx (rtx);
extern int vfp3_const_double_rtx (rtx);
-extern int neon_immediate_valid_for_move (rtx, enum machine_mode, rtx *, int *);
-extern int neon_immediate_valid_for_logic (rtx, enum machine_mode, int, rtx *,
+extern int neon_immediate_valid_for_move (rtx, machine_mode, rtx *, int *);
+extern int neon_immediate_valid_for_logic (rtx, machine_mode, int, rtx *,
int *);
-extern int neon_immediate_valid_for_shift (rtx, enum machine_mode, rtx *,
+extern int neon_immediate_valid_for_shift (rtx, machine_mode, rtx *,
int *, bool);
extern char *neon_output_logic_immediate (const char *, rtx *,
- enum machine_mode, int, int);
+ machine_mode, int, int);
extern char *neon_output_shift_immediate (const char *, char, rtx *,
- enum machine_mode, int, bool);
-extern void neon_pairwise_reduce (rtx, rtx, enum machine_mode,
+ machine_mode, int, bool);
+extern void neon_pairwise_reduce (rtx, rtx, machine_mode,
rtx (*) (rtx, rtx, rtx));
extern rtx neon_make_constant (rtx);
extern tree arm_builtin_vectorized_function (tree, tree, tree);
extern void neon_expand_vector_init (rtx, rtx);
extern void neon_lane_bounds (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
extern void neon_const_bounds (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
-extern HOST_WIDE_INT neon_element_bits (enum machine_mode);
+extern HOST_WIDE_INT neon_element_bits (machine_mode);
extern void neon_reinterpret (rtx, rtx);
-extern void neon_emit_pair_result_insn (enum machine_mode,
+extern void neon_emit_pair_result_insn (machine_mode,
rtx (*) (rtx, rtx, rtx, rtx),
rtx, rtx, rtx);
extern void neon_disambiguate_copy (rtx *, rtx *, rtx *, unsigned int);
extern void neon_split_vcombine (rtx op[3]);
-extern enum reg_class coproc_secondary_reload_class (enum machine_mode, rtx,
+extern enum reg_class coproc_secondary_reload_class (machine_mode, rtx,
bool);
extern bool arm_tls_referenced_p (rtx);
extern bool gen_operands_ldrd_strd (rtx *, bool, bool, bool);
extern int arm_gen_movmemqi (rtx *);
extern bool gen_movmem_ldrd_strd (rtx *);
-extern enum machine_mode arm_select_cc_mode (RTX_CODE, rtx, rtx);
-extern enum machine_mode arm_select_dominance_cc_mode (rtx, rtx,
+extern machine_mode arm_select_cc_mode (RTX_CODE, rtx, rtx);
+extern machine_mode arm_select_dominance_cc_mode (rtx, rtx,
HOST_WIDE_INT);
extern rtx arm_gen_compare_reg (RTX_CODE, rtx, rtx, rtx);
extern rtx arm_gen_return_addr_mask (void);
#if defined TREE_CODE
extern void arm_init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree);
-extern bool arm_pad_arg_upward (enum machine_mode, const_tree);
-extern bool arm_pad_reg_upward (enum machine_mode, tree, int);
+extern bool arm_pad_arg_upward (machine_mode, const_tree);
+extern bool arm_pad_reg_upward (machine_mode, tree, int);
#endif
extern int arm_apply_result_size (void);
extern void arm_expand_vec_perm (rtx target, rtx op0, rtx op1, rtx sel);
extern bool arm_expand_vec_perm_const (rtx target, rtx op0, rtx op1, rtx sel);
-extern bool arm_autoinc_modes_ok_p (enum machine_mode, enum arm_auto_incmodes);
+extern bool arm_autoinc_modes_ok_p (machine_mode, enum arm_auto_incmodes);
extern void arm_emit_eabi_attribute (const char *, int, int);
/* Forward function declarations. */
static bool arm_const_not_ok_for_debug_p (rtx);
static bool arm_lra_p (void);
-static bool arm_needs_doubleword_align (enum machine_mode, const_tree);
+static bool arm_needs_doubleword_align (machine_mode, const_tree);
static int arm_compute_static_chain_stack_bytes (void);
static arm_stack_offsets *arm_get_frame_offsets (void);
static void arm_add_gc_roots (void);
-static int arm_gen_constant (enum rtx_code, enum machine_mode, rtx,
+static int arm_gen_constant (enum rtx_code, machine_mode, rtx,
HOST_WIDE_INT, rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
-static int arm_legitimate_index_p (enum machine_mode, rtx, RTX_CODE, int);
-static int thumb2_legitimate_index_p (enum machine_mode, rtx, int);
-static int thumb1_base_register_rtx_p (rtx, enum machine_mode, int);
-static rtx arm_legitimize_address (rtx, rtx, enum machine_mode);
+static int arm_legitimate_index_p (machine_mode, rtx, RTX_CODE, int);
+static int thumb2_legitimate_index_p (machine_mode, rtx, int);
+static int thumb1_base_register_rtx_p (rtx, machine_mode, int);
+static rtx arm_legitimize_address (rtx, rtx, machine_mode);
static reg_class_t arm_preferred_reload_class (rtx, reg_class_t);
-static rtx thumb_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx thumb_legitimize_address (rtx, rtx, machine_mode);
inline static int thumb1_index_register_rtx_p (rtx, int);
static int thumb_far_jump_used_p (void);
static bool thumb_force_lr_save (void);
static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
static void push_minipool_barrier (rtx_insn *, HOST_WIDE_INT);
static void push_minipool_fix (rtx_insn *, HOST_WIDE_INT, rtx *,
- enum machine_mode, rtx);
+ machine_mode, rtx);
static void arm_reorg (void);
static void note_invalid_constants (rtx_insn *, HOST_WIDE_INT, int);
static unsigned long arm_compute_save_reg0_reg12_mask (void);
int i);
static int arm_get_strip_length (int);
static bool arm_function_ok_for_sibcall (tree, tree);
-static enum machine_mode arm_promote_function_mode (const_tree,
- enum machine_mode, int *,
+static machine_mode arm_promote_function_mode (const_tree,
+ machine_mode, int *,
const_tree, int);
static bool arm_return_in_memory (const_tree, const_tree);
static rtx arm_function_value (const_tree, const_tree, bool);
-static rtx arm_libcall_value_1 (enum machine_mode);
-static rtx arm_libcall_value (enum machine_mode, const_rtx);
+static rtx arm_libcall_value_1 (machine_mode);
+static rtx arm_libcall_value (machine_mode, const_rtx);
static bool arm_function_value_regno_p (const unsigned int);
static void arm_internal_label (FILE *, const char *, unsigned long);
static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
tree);
static bool arm_have_conditional_execution (void);
-static bool arm_cannot_force_const_mem (enum machine_mode, rtx);
-static bool arm_legitimate_constant_p (enum machine_mode, rtx);
+static bool arm_cannot_force_const_mem (machine_mode, rtx);
+static bool arm_legitimate_constant_p (machine_mode, rtx);
static bool arm_rtx_costs_1 (rtx, enum rtx_code, int*, bool);
static bool arm_size_rtx_costs (rtx, enum rtx_code, enum rtx_code, int *);
static bool arm_slowmul_rtx_costs (rtx, enum rtx_code, enum rtx_code, int *, bool);
static bool arm_xscale_rtx_costs (rtx, enum rtx_code, enum rtx_code, int *, bool);
static bool arm_9e_rtx_costs (rtx, enum rtx_code, enum rtx_code, int *, bool);
static bool arm_rtx_costs (rtx, int, int, int, int *, bool);
-static int arm_address_cost (rtx, enum machine_mode, addr_space_t, bool);
-static int arm_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
-static int arm_memory_move_cost (enum machine_mode, reg_class_t, bool);
+static int arm_address_cost (rtx, machine_mode, addr_space_t, bool);
+static int arm_register_move_cost (machine_mode, reg_class_t, reg_class_t);
+static int arm_memory_move_cost (machine_mode, reg_class_t, bool);
static void arm_init_builtins (void);
static void arm_init_iwmmxt_builtins (void);
-static rtx safe_vector_operand (rtx, enum machine_mode);
+static rtx safe_vector_operand (rtx, machine_mode);
static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
-static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx arm_expand_builtin (tree, rtx, rtx, machine_mode, int);
static tree arm_builtin_decl (unsigned, bool);
static void emit_constant_insn (rtx cond, rtx pattern);
static rtx_insn *emit_set_insn (rtx, rtx);
static rtx emit_multi_reg_push (unsigned long, unsigned long);
-static int arm_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int arm_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static rtx arm_function_arg (cumulative_args_t, enum machine_mode,
+static rtx arm_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void arm_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void arm_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static unsigned int arm_function_arg_boundary (enum machine_mode, const_tree);
-static rtx aapcs_allocate_return_reg (enum machine_mode, const_tree,
+static unsigned int arm_function_arg_boundary (machine_mode, const_tree);
+static rtx aapcs_allocate_return_reg (machine_mode, const_tree,
const_tree);
-static rtx aapcs_libcall_value (enum machine_mode);
+static rtx aapcs_libcall_value (machine_mode);
static int aapcs_select_return_coproc (const_tree, const_tree);
#ifdef OBJECT_FORMAT_ELF
static void arm_file_end (void);
static void arm_file_start (void);
-static void arm_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static void arm_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree, int *, int);
static bool arm_pass_by_reference (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static bool arm_promote_prototypes (const_tree);
static bool arm_default_short_enums (void);
static bool arm_align_anon_bitfield (void);
static bool arm_return_in_msb (const_tree);
-static bool arm_must_pass_in_stack (enum machine_mode, const_tree);
+static bool arm_must_pass_in_stack (machine_mode, const_tree);
static bool arm_return_in_memory (const_tree, const_tree);
#if ARM_UNWIND_INFO
static void arm_unwind_emit (FILE *, rtx_insn *);
static void arm_expand_builtin_va_start (tree, rtx);
static tree arm_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
static void arm_option_override (void);
-static unsigned HOST_WIDE_INT arm_shift_truncation_mask (enum machine_mode);
+static unsigned HOST_WIDE_INT arm_shift_truncation_mask (machine_mode);
static bool arm_cannot_copy_insn_p (rtx_insn *);
static int arm_issue_rate (void);
static void arm_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static const char *arm_invalid_return_type (const_tree t);
static tree arm_promoted_type (const_tree t);
static tree arm_convert_to_type (tree type, tree expr);
-static bool arm_scalar_mode_supported_p (enum machine_mode);
+static bool arm_scalar_mode_supported_p (machine_mode);
static bool arm_frame_pointer_required (void);
static bool arm_can_eliminate (const int, const int);
static void arm_asm_trampoline_template (FILE *);
static bool cortex_a9_sched_adjust_cost (rtx_insn *, rtx, rtx_insn *, int *);
static bool xscale_sched_adjust_cost (rtx_insn *, rtx, rtx_insn *, int *);
static bool fa726te_sched_adjust_cost (rtx_insn *, rtx, rtx_insn *, int *);
-static bool arm_array_mode_supported_p (enum machine_mode,
+static bool arm_array_mode_supported_p (machine_mode,
unsigned HOST_WIDE_INT);
-static enum machine_mode arm_preferred_simd_mode (enum machine_mode);
+static machine_mode arm_preferred_simd_mode (machine_mode);
static bool arm_class_likely_spilled_p (reg_class_t);
static HOST_WIDE_INT arm_vector_alignment (const_tree type);
static bool arm_vector_alignment_reachable (const_tree type, bool is_packed);
-static bool arm_builtin_support_vector_misalignment (enum machine_mode mode,
+static bool arm_builtin_support_vector_misalignment (machine_mode mode,
const_tree type,
int misalignment,
bool is_packed);
static int arm_cortex_a5_branch_cost (bool, bool);
static int arm_cortex_m_branch_cost (bool, bool);
-static bool arm_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+static bool arm_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel);
static int arm_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference,
we must report the mode of the memory reference from
TARGET_PRINT_OPERAND to TARGET_PRINT_OPERAND_ADDRESS. */
-enum machine_mode output_memory_reference_mode;
+machine_mode output_memory_reference_mode;
/* The register number to be used for the PIC offset register. */
unsigned arm_pic_register = INVALID_REGNUM;
typedef struct
{
- enum machine_mode mode;
+ machine_mode mode;
const char *name;
} arm_fixed_mode_set;
/* A small helper for setting fixed-point library libfuncs. */
static void
-arm_set_fixed_optab_libfunc (optab optable, enum machine_mode mode,
+arm_set_fixed_optab_libfunc (optab optable, machine_mode mode,
const char *funcname, const char *modename,
int num_suffix)
{
}
static void
-arm_set_fixed_conv_libfunc (convert_optab optable, enum machine_mode to,
- enum machine_mode from, const char *funcname,
+arm_set_fixed_conv_libfunc (convert_optab optable, machine_mode to,
+ machine_mode from, const char *funcname,
const char *toname, const char *fromname)
{
char buffer[50];
/* ??? Tweak this for thumb2. */
int
-arm_split_constant (enum rtx_code code, enum machine_mode mode, rtx insn,
+arm_split_constant (enum rtx_code code, machine_mode mode, rtx insn,
HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
{
rtx cond;
RTL generation. */
static int
-arm_gen_constant (enum rtx_code code, enum machine_mode mode, rtx cond,
+arm_gen_constant (enum rtx_code code, machine_mode mode, rtx cond,
HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
int generate)
{
arm_canonicalize_comparison (int *code, rtx *op0, rtx *op1,
bool op0_preserve_value)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned HOST_WIDE_INT i, maxval;
mode = GET_MODE (*op0);
arm_function_value(const_tree type, const_tree func,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp ATTRIBUTE_UNUSED;
rtx r ATTRIBUTE_UNUSED;
}
static rtx
-arm_libcall_value_1 (enum machine_mode mode)
+arm_libcall_value_1 (machine_mode mode)
{
if (TARGET_AAPCS_BASED)
return aapcs_libcall_value (mode);
assuming the value has mode MODE. */
static rtx
-arm_libcall_value (enum machine_mode mode, const_rtx libcall)
+arm_libcall_value (machine_mode mode, const_rtx libcall)
{
if (TARGET_AAPCS_BASED && arm_pcs_default != ARM_PCS_AAPCS
&& GET_MODE_CLASS (mode) == MODE_FLOAT)
type that doesn't match a non-VOIDmode *MODEP is found, then return -1,
otherwise return the count in the sub-tree. */
static int
-aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
+aapcs_vfp_sub_candidate (const_tree type, machine_mode *modep)
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT size;
switch (TREE_CODE (type))
*COUNT to hold the number of such elements. */
static bool
aapcs_vfp_is_call_or_return_candidate (enum arm_pcs pcs_variant,
- enum machine_mode mode, const_tree type,
- enum machine_mode *base_mode, int *count)
+ machine_mode mode, const_tree type,
+ machine_mode *base_mode, int *count)
{
- enum machine_mode new_mode = VOIDmode;
+ machine_mode new_mode = VOIDmode;
/* If we have the type information, prefer that to working things
out from the mode. */
static bool
aapcs_vfp_is_return_candidate (enum arm_pcs pcs_variant,
- enum machine_mode mode, const_tree type)
+ machine_mode mode, const_tree type)
{
int count ATTRIBUTE_UNUSED;
- enum machine_mode ag_mode ATTRIBUTE_UNUSED;
+ machine_mode ag_mode ATTRIBUTE_UNUSED;
if (!use_vfp_abi (pcs_variant, false))
return false;
}
static bool
-aapcs_vfp_is_call_candidate (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+aapcs_vfp_is_call_candidate (CUMULATIVE_ARGS *pcum, machine_mode mode,
const_tree type)
{
if (!use_vfp_abi (pcum->pcs_variant, false))
}
static bool
-aapcs_vfp_allocate (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+aapcs_vfp_allocate (CUMULATIVE_ARGS *pcum, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED)
{
int shift = GET_MODE_SIZE (pcum->aapcs_vfp_rmode) / GET_MODE_SIZE (SFmode);
int i;
int rcount = pcum->aapcs_vfp_rcount;
int rshift = shift;
- enum machine_mode rmode = pcum->aapcs_vfp_rmode;
+ machine_mode rmode = pcum->aapcs_vfp_rmode;
rtx par;
if (!TARGET_NEON)
{
static rtx
aapcs_vfp_allocate_return_reg (enum arm_pcs pcs_variant ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type ATTRIBUTE_UNUSED)
{
if (!use_vfp_abi (pcs_variant, false))
if (mode == BLKmode || (mode == TImode && !TARGET_NEON))
{
int count;
- enum machine_mode ag_mode;
+ machine_mode ag_mode;
int i;
rtx par;
int shift;
static void
aapcs_vfp_advance (CUMULATIVE_ARGS *pcum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
pcum->aapcs_vfp_regs_free &= ~pcum->aapcs_vfp_reg_alloc;
BLKmode) is a candidate for this co-processor's registers; this
function should ignore any position-dependent state in
CUMULATIVE_ARGS and only use call-type dependent information. */
- bool (*is_call_candidate) (CUMULATIVE_ARGS *, enum machine_mode, const_tree);
+ bool (*is_call_candidate) (CUMULATIVE_ARGS *, machine_mode, const_tree);
/* Return true if the argument does get a co-processor register; it
should set aapcs_reg to an RTX of the register allocated as is
required for a return from FUNCTION_ARG. */
- bool (*allocate) (CUMULATIVE_ARGS *, enum machine_mode, const_tree);
+ bool (*allocate) (CUMULATIVE_ARGS *, machine_mode, const_tree);
/* Return true if a result of mode MODE (or type TYPE if MODE is
BLKmode) is can be returned in this co-processor's registers. */
- bool (*is_return_candidate) (enum arm_pcs, enum machine_mode, const_tree);
+ bool (*is_return_candidate) (enum arm_pcs, machine_mode, const_tree);
/* Allocate and return an RTX element to hold the return type of a
call, this routine must not fail and will only be called if
is_return_candidate returned true with the same parameters. */
- rtx (*allocate_return_reg) (enum arm_pcs, enum machine_mode, const_tree);
+ rtx (*allocate_return_reg) (enum arm_pcs, machine_mode, const_tree);
/* Finish processing this argument and prepare to start processing
the next one. */
- void (*advance) (CUMULATIVE_ARGS *, enum machine_mode, const_tree);
+ void (*advance) (CUMULATIVE_ARGS *, machine_mode, const_tree);
} aapcs_cp_arg_layout[ARM_NUM_COPROC_SLOTS] =
{
AAPCS_CP(vfp)
#undef AAPCS_CP
static int
-aapcs_select_call_coproc (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+aapcs_select_call_coproc (CUMULATIVE_ARGS *pcum, machine_mode mode,
const_tree type)
{
int i;
}
static rtx
-aapcs_allocate_return_reg (enum machine_mode mode, const_tree type,
+aapcs_allocate_return_reg (machine_mode mode, const_tree type,
const_tree fntype)
{
/* We aren't passed a decl, so we can't check that a call is local.
}
static rtx
-aapcs_libcall_value (enum machine_mode mode)
+aapcs_libcall_value (machine_mode mode)
{
if (BYTES_BIG_ENDIAN && ALL_FIXED_POINT_MODE_P (mode)
&& GET_MODE_SIZE (mode) <= 4)
/* Lay out a function argument using the AAPCS rules. The rule
numbers referred to here are those in the AAPCS. */
static void
-aapcs_layout_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+aapcs_layout_arg (CUMULATIVE_ARGS *pcum, machine_mode mode,
const_tree type, bool named)
{
int nregs, nregs2;
/* Return true if mode/type need doubleword alignment. */
static bool
-arm_needs_doubleword_align (enum machine_mode mode, const_tree type)
+arm_needs_doubleword_align (machine_mode mode, const_tree type)
{
return (GET_MODE_ALIGNMENT (mode) > PARM_BOUNDARY
|| (type && TYPE_ALIGN (type) > PARM_BOUNDARY));
indeed make it pass in the stack if necessary). */
static rtx
-arm_function_arg (cumulative_args_t pcum_v, enum machine_mode mode,
+arm_function_arg (cumulative_args_t pcum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
}
static unsigned int
-arm_function_arg_boundary (enum machine_mode mode, const_tree type)
+arm_function_arg_boundary (machine_mode mode, const_tree type)
{
return (ARM_DOUBLEWORD_ALIGN && arm_needs_doubleword_align (mode, type)
? DOUBLEWORD_ALIGNMENT
}
static int
-arm_arg_partial_bytes (cumulative_args_t pcum_v, enum machine_mode mode,
+arm_arg_partial_bytes (cumulative_args_t pcum_v, machine_mode mode,
tree type, bool named)
{
CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
(TYPE is null for libcalls where that information may not be available.) */
static void
-arm_function_arg_advance (cumulative_args_t pcum_v, enum machine_mode mode,
+arm_function_arg_advance (cumulative_args_t pcum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
static bool
arm_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
rtx
-legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
+legitimize_pic_address (rtx orig, machine_mode mode, rtx reg)
{
if (GET_CODE (orig) == SYMBOL_REF
|| GET_CODE (orig) == LABEL_REF)
/* Return nonzero if X is a valid ARM state address operand. */
int
-arm_legitimate_address_outer_p (enum machine_mode mode, rtx x, RTX_CODE outer,
+arm_legitimate_address_outer_p (machine_mode mode, rtx x, RTX_CODE outer,
int strict_p)
{
bool use_ldrd;
/* Return nonzero if X is a valid Thumb-2 address operand. */
static int
-thumb2_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
+thumb2_legitimate_address_p (machine_mode mode, rtx x, int strict_p)
{
bool use_ldrd;
enum rtx_code code = GET_CODE (x);
/* Return nonzero if INDEX is valid for an address index operand in
ARM state. */
static int
-arm_legitimate_index_p (enum machine_mode mode, rtx index, RTX_CODE outer,
+arm_legitimate_index_p (machine_mode mode, rtx index, RTX_CODE outer,
int strict_p)
{
HOST_WIDE_INT range;
/* Return nonzero if INDEX is a valid Thumb-2 address index operand. */
static int
-thumb2_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
+thumb2_legitimate_index_p (machine_mode mode, rtx index, int strict_p)
{
enum rtx_code code = GET_CODE (index);
/* Return nonzero if X is valid as a 16-bit Thumb state base register. */
static int
-thumb1_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
+thumb1_base_register_rtx_p (rtx x, machine_mode mode, int strict_p)
{
int regno;
reload pass starts. This is so that eliminating such addresses
into stack based ones won't produce impossible code. */
int
-thumb1_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
+thumb1_legitimate_address_p (machine_mode mode, rtx x, int strict_p)
{
/* ??? Not clear if this is right. Experiment. */
if (GET_MODE_SIZE (mode) < 4
/* Return nonzero if VAL can be used as an offset in a Thumb-state address
instruction of mode MODE. */
int
-thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
+thumb_legitimate_offset_p (machine_mode mode, HOST_WIDE_INT val)
{
switch (GET_MODE_SIZE (mode))
{
}
bool
-arm_legitimate_address_p (enum machine_mode mode, rtx x, bool strict_p)
+arm_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
{
if (TARGET_ARM)
return arm_legitimate_address_outer_p (mode, x, SET, strict_p);
/* Try machine-dependent ways of modifying an illegitimate address
to be legitimate. If we find one, return the new, valid address. */
rtx
-arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
+arm_legitimize_address (rtx x, rtx orig_x, machine_mode mode)
{
if (arm_tls_referenced_p (x))
{
/* Try machine-dependent ways of modifying an illegitimate Thumb address
to be legitimate. If we find one, return the new, valid address. */
rtx
-thumb_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
+thumb_legitimize_address (rtx x, rtx orig_x, machine_mode mode)
{
if (GET_CODE (x) == PLUS
&& CONST_INT_P (XEXP (x, 1))
bool
arm_legitimize_reload_address (rtx *p,
- enum machine_mode mode,
+ machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
rtx
thumb_legitimize_reload_address (rtx *x_p,
- enum machine_mode mode,
+ machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
When generating pic allow anything. */
static bool
-arm_legitimate_constant_p_1 (enum machine_mode mode, rtx x)
+arm_legitimate_constant_p_1 (machine_mode mode, rtx x)
{
/* At present, we have no support for Neon structure constants, so forbid
them here. It might be possible to handle simple cases like 0 and -1
}
static bool
-thumb_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+thumb_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return (CONST_INT_P (x)
|| CONST_DOUBLE_P (x)
}
static bool
-arm_legitimate_constant_p (enum machine_mode mode, rtx x)
+arm_legitimate_constant_p (machine_mode mode, rtx x)
{
return (!arm_cannot_force_const_mem (mode, x)
&& (TARGET_32BIT
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-arm_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+arm_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
rtx base, offset;
static inline int
thumb1_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int total, words;
switch (code)
static inline bool
arm_rtx_costs_1 (rtx x, enum rtx_code outer, int* total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
enum rtx_code subcode;
rtx operand;
enum rtx_code code = GET_CODE (x);
if (GET_MODE_CLASS (mode) == MODE_INT)
{
rtx op = XEXP (x, 0);
- enum machine_mode opmode = GET_MODE (op);
+ machine_mode opmode = GET_MODE (op);
if (mode == DImode)
*total += COSTS_N_INSNS (1);
static inline int
thumb1_size_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int words;
switch (code)
arm_size_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code,
int *total)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_THUMB1)
{
*total = thumb1_size_rtx_costs (x, code, outer_code);
const struct cpu_cost_table *extra_cost,
int *cost, bool speed_p)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_THUMB1)
{
*cost = 0;
else
{
- enum machine_mode op0mode;
+ machine_mode op0mode;
/* We'll mostly assume that the cost of a compare is the cost of the
LHS. However, there are some notable exceptions. */
arm_slowmul_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_THUMB)
{
arm_fastmul_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_THUMB1)
{
arm_xscale_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_THUMB)
{
arm_9e_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer_code,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_THUMB1)
{
}
static int
-arm_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
+arm_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
{
return TARGET_32BIT ? arm_arm_address_cost (x) : arm_thumb_address_cost (x);
point to integer conversion does not go through memory. */
int
-arm_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+arm_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
if (TARGET_32BIT)
/* Implement TARGET_MEMORY_MOVE_COST. */
int
-arm_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
+arm_memory_move_cost (machine_mode mode, reg_class_t rclass,
bool in ATTRIBUTE_UNUSED)
{
if (TARGET_32BIT)
-1 if the given value doesn't match any of the listed patterns.
*/
static int
-neon_valid_immediate (rtx op, enum machine_mode mode, int inverse,
+neon_valid_immediate (rtx op, machine_mode mode, int inverse,
rtx *modconst, int *elementwidth)
{
#define CHECK(STRIDE, ELSIZE, CLASS, TEST) \
VMOV) in *MODCONST. */
int
-neon_immediate_valid_for_move (rtx op, enum machine_mode mode,
+neon_immediate_valid_for_move (rtx op, machine_mode mode,
rtx *modconst, int *elementwidth)
{
rtx tmpconst;
*ELEMENTWIDTH. See neon_valid_immediate for description of INVERSE. */
int
-neon_immediate_valid_for_logic (rtx op, enum machine_mode mode, int inverse,
+neon_immediate_valid_for_logic (rtx op, machine_mode mode, int inverse,
rtx *modconst, int *elementwidth)
{
rtx tmpconst;
because they have different limitations. */
int
-neon_immediate_valid_for_shift (rtx op, enum machine_mode mode,
+neon_immediate_valid_for_shift (rtx op, machine_mode mode,
rtx *modconst, int *elementwidth,
bool isleftshift)
{
MNEM. */
char *
-neon_output_logic_immediate (const char *mnem, rtx *op2, enum machine_mode mode,
+neon_output_logic_immediate (const char *mnem, rtx *op2, machine_mode mode,
int inverse, int quad)
{
int width, is_valid;
char *
neon_output_shift_immediate (const char *mnem, char sign, rtx *op2,
- enum machine_mode mode, int quad,
+ machine_mode mode, int quad,
bool isleftshift)
{
int width, is_valid;
for no particular gain. */
void
-neon_pairwise_reduce (rtx op0, rtx op1, enum machine_mode mode,
+neon_pairwise_reduce (rtx op0, rtx op1, machine_mode mode,
rtx (*reduc) (rtx, rtx, rtx))
{
- enum machine_mode inner = GET_MODE_INNER (mode);
+ machine_mode inner = GET_MODE_INNER (mode);
unsigned int i, parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (inner);
rtx tmpsum = op1;
static rtx
neon_vdup_constant (rtx vals)
{
- enum machine_mode mode = GET_MODE (vals);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (vals);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
int n_elts = GET_MODE_NUNITS (mode);
bool all_same = true;
rtx x;
rtx
neon_make_constant (rtx vals)
{
- enum machine_mode mode = GET_MODE (vals);
+ machine_mode mode = GET_MODE (vals);
rtx target;
rtx const_vec = NULL_RTX;
int n_elts = GET_MODE_NUNITS (mode);
void
neon_expand_vector_init (rtx target, rtx vals)
{
- enum machine_mode mode = GET_MODE (target);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (target);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
int n_elts = GET_MODE_NUNITS (mode);
int n_var = 0, one_var = -1;
bool all_same = true;
}
HOST_WIDE_INT
-neon_element_bits (enum machine_mode mode)
+neon_element_bits (machine_mode mode)
{
if (mode == DImode)
return GET_MODE_BITSIZE (mode);
coprocessor registers. Otherwise return NO_REGS. */
enum reg_class
-coproc_secondary_reload_class (enum machine_mode mode, rtx x, bool wb)
+coproc_secondary_reload_class (machine_mode mode, rtx x, bool wb)
{
if (mode == HFmode)
{
REGNO (R_dk) = REGNO (R_d0) + k.
The pattern for store is similar. */
bool
-ldm_stm_operation_p (rtx op, bool load, enum machine_mode mode,
+ldm_stm_operation_p (rtx op, bool load, machine_mode mode,
bool consecutive, bool return_pc)
{
HOST_WIDE_INT count = XVECLEN (op, 0);
inline static rtx
next_consecutive_mem (rtx mem)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
HOST_WIDE_INT offset = GET_MODE_SIZE (mode);
rtx addr = plus_constant (Pmode, XEXP (mem, 0), offset);
here. If we are unable to support a dominance comparison we return
CC mode. This will then fail to match for the RTL expressions that
generate this call. */
-enum machine_mode
+machine_mode
arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
{
enum rtx_code cond1, cond2;
}
}
-enum machine_mode
+machine_mode
arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
{
/* All floating point compares return CCFP if it is an equality
rtx
arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y, rtx scratch)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx cc_reg;
int dimode_comparison = GET_MODE (x) == DImode || GET_MODE (y) == DImode;
(padded to the size of a word) should be passed in a register. */
static bool
-arm_must_pass_in_stack (enum machine_mode mode, const_tree type)
+arm_must_pass_in_stack (machine_mode mode, const_tree type)
{
if (TARGET_AAPCS_BASED)
return must_pass_in_stack_var_size (mode, type);
aggregate types are placed in the lowest memory address. */
bool
-arm_pad_arg_upward (enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type)
+arm_pad_arg_upward (machine_mode mode ATTRIBUTE_UNUSED, const_tree type)
{
if (!TARGET_AAPCS_BASED)
return DEFAULT_FUNCTION_ARG_PADDING(mode, type) == upward;
significant byte does. */
bool
-arm_pad_reg_upward (enum machine_mode mode,
+arm_pad_reg_upward (machine_mode mode,
tree type, int first ATTRIBUTE_UNUSED)
{
if (TARGET_AAPCS_BASED && BYTES_BIG_ENDIAN)
/* The value in table. */
rtx value;
/* The mode of value. */
- enum machine_mode mode;
+ machine_mode mode;
/* The size of the value. With iWMMXt enabled
sizes > 4 also imply an alignment of 8-bytes. */
int fix_size;
rtx_insn * insn;
HOST_WIDE_INT address;
rtx * loc;
- enum machine_mode mode;
+ machine_mode mode;
int fix_size;
rtx value;
Mnode * minipool;
MODE. */
static void
push_minipool_fix (rtx_insn *insn, HOST_WIDE_INT address, rtx *loc,
- enum machine_mode mode, rtx value)
+ machine_mode mode, rtx value)
{
Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
arm_const_double_inline_cost (rtx val)
{
rtx lowpart, highpart;
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (val);
bool
arm_const_double_by_parts (rtx val)
{
- enum machine_mode mode = GET_MODE (val);
+ machine_mode mode = GET_MODE (val);
rtx part;
if (optimize_size || arm_ld_sched)
bool
arm_const_double_by_immediates (rtx val)
{
- enum machine_mode mode = GET_MODE (val);
+ machine_mode mode = GET_MODE (val);
rtx part;
if (mode == VOIDmode)
int integer_p = GET_MODE_CLASS (GET_MODE (operands[0])) == MODE_INT;
const char *templ;
char buff[50];
- enum machine_mode mode;
+ machine_mode mode;
reg = operands[!load];
mem = operands[load];
int regno, nregs, load = REG_P (operands[0]);
const char *templ;
char buff[50];
- enum machine_mode mode;
+ machine_mode mode;
reg = operands[!load];
mem = operands[load];
{
rtx reg, mem, addr;
int load;
- enum machine_mode mode;
+ machine_mode mode;
extract_insn_cached (insn);
static unsigned
arm_size_return_regs (void)
{
- enum machine_mode mode;
+ machine_mode mode;
if (crtl->return_rtx != 0)
mode = GET_MODE (crtl->return_rtx);
case 'R':
if (CONST_INT_P (x) || CONST_DOUBLE_P (x))
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx part;
if (mode == VOIDmode)
register. */
case 'p':
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int regno;
if (GET_MODE_SIZE (mode) != 8 || !REG_P (x))
case 'P':
case 'q':
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int is_quad = (code == 'q');
int regno;
case 'e':
case 'f':
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int regno;
if ((GET_MODE_SIZE (mode) != 16
/* Translate an S register number into a D register number and element index. */
case 'y':
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int regno;
if (GET_MODE_SIZE (mode) != 4 || !REG_P (x))
number into a D register number and element index. */
case 'z':
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int regno;
if (GET_MODE_SIZE (mode) != 2 || !REG_P (x))
else if (GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_INC
|| GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_DEC)
{
- extern enum machine_mode output_memory_reference_mode;
+ extern machine_mode output_memory_reference_mode;
gcc_assert (REG_P (XEXP (x, 0)));
static bool
arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
- enum machine_mode mode;
+ machine_mode mode;
if (size == UNITS_PER_WORD && aligned_p)
{
enum arm_cond_code
maybe_get_arm_condition_code (rtx comparison)
{
- enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
+ machine_mode mode = GET_MODE (XEXP (comparison, 0));
enum arm_cond_code code;
enum rtx_code comp_code = GET_CODE (comparison);
/* Returns true if REGNO is a valid register
for holding a quantity of type MODE. */
int
-arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
+arm_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
if (GET_MODE_CLASS (mode) == MODE_CC)
return (regno == CC_REGNUM
/* Implement MODES_TIEABLE_P. */
bool
-arm_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+arm_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
if (GET_MODE_CLASS (mode1) == GET_MODE_CLASS (mode2))
return true;
{
/* Use one of the operands; the target can have a different mode for
mask-generating compares. */
- enum machine_mode mode;
+ machine_mode mode;
tree type;
if (d->name == 0 || !(d->mask == FL_IWMMXT || d->mask == FL_IWMMXT2))
special-cased in the default hook. */
static bool
-arm_scalar_mode_supported_p (enum machine_mode mode)
+arm_scalar_mode_supported_p (machine_mode mode)
{
if (mode == HFmode)
return (arm_fp16_format != ARM_FP16_FORMAT_NONE);
clear instructions. */
static rtx
-safe_vector_operand (rtx x, enum machine_mode mode)
+safe_vector_operand (rtx x, machine_mode mode)
{
if (x != const0_rtx)
return x;
|| icode == CODE_FOR_crypto_sha1m);
builtin_sha1cpm_p = true;
}
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
- enum machine_mode mode2 = insn_data[icode].operand[3].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode mode2 = insn_data[icode].operand[3].mode;
if (VECTOR_MODE_P (mode0))
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
if (VECTOR_MODE_P (mode0))
op0 = safe_vector_operand (op0, mode0);
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op0 = expand_normal (arg0);
rtx op1 = NULL_RTX;
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
bool builtin_sha1h_p = false;
if (insn_data[icode].n_operands == 3)
available. */
static tree
-neon_dereference_pointer (tree exp, tree type, enum machine_mode mem_mode,
- enum machine_mode reg_mode,
+neon_dereference_pointer (tree exp, tree type, machine_mode mem_mode,
+ machine_mode reg_mode,
neon_builtin_type_mode type_mode)
{
HOST_WIDE_INT reg_size, vector_size, nvectors, nelems;
rtx op[NEON_MAX_BUILTIN_ARGS];
tree arg_type;
tree formals;
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode[NEON_MAX_BUILTIN_ARGS];
- enum machine_mode other_mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode[NEON_MAX_BUILTIN_ARGS];
+ machine_mode other_mode;
int argc = 0;
int opno;
unsigned int dest = REGNO (operands[0]);
unsigned int src1 = REGNO (operands[1]);
unsigned int src2 = REGNO (operands[2]);
- enum machine_mode halfmode = GET_MODE (operands[1]);
+ machine_mode halfmode = GET_MODE (operands[1]);
unsigned int halfregs = HARD_REGNO_NREGS (src1, halfmode);
rtx destlo, desthi;
arm_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
const struct builtin_description * d;
rtx pat;
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
size_t i;
- enum machine_mode tmode;
- enum machine_mode mode0;
- enum machine_mode mode1;
- enum machine_mode mode2;
+ machine_mode tmode;
+ machine_mode mode0;
+ machine_mode mode1;
+ machine_mode mode2;
int opint;
int selector;
int mask;
int pops_needed;
unsigned available;
unsigned required;
- enum machine_mode mode;
+ machine_mode mode;
int size;
int restore_a4 = FALSE;
static void
arm_setup_incoming_varargs (cumulative_args_t pcum_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type,
int *pretend_size,
int second_time ATTRIBUTE_UNUSED)
return !TARGET_AAPCS_BASED;
}
-static enum machine_mode
+static machine_mode
arm_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree fntype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
/* Implements target hook vector_mode_supported_p. */
bool
-arm_vector_mode_supported_p (enum machine_mode mode)
+arm_vector_mode_supported_p (machine_mode mode)
{
/* Neon also supports V2SImode, etc. listed in the clause below. */
if (TARGET_NEON && (mode == V2SFmode || mode == V4SImode || mode == V8HImode
/* Implements target hook array_mode_supported_p. */
static bool
-arm_array_mode_supported_p (enum machine_mode mode,
+arm_array_mode_supported_p (machine_mode mode,
unsigned HOST_WIDE_INT nelems)
{
if (TARGET_NEON
registers when autovectorizing for Neon, at least until multiple vector
widths are supported properly by the middle-end. */
-static enum machine_mode
-arm_preferred_simd_mode (enum machine_mode mode)
+static machine_mode
+arm_preferred_simd_mode (machine_mode mode)
{
if (TARGET_NEON)
switch (mode)
/* Implements target hook small_register_classes_for_mode_p. */
bool
-arm_small_register_classes_for_mode_p (enum machine_mode mode ATTRIBUTE_UNUSED)
+arm_small_register_classes_for_mode_p (machine_mode mode ATTRIBUTE_UNUSED)
{
return TARGET_THUMB1;
}
guarantee no particular behavior for out-of-range counts. */
static unsigned HOST_WIDE_INT
-arm_shift_truncation_mask (enum machine_mode mode)
+arm_shift_truncation_mask (machine_mode mode)
{
return mode == SImode ? 255 : 0;
}
static rtx
arm_dwarf_register_span (rtx rtl)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned regno;
rtx parts[16];
int nregs;
{
int shift = INTVAL (operands[2]);
char templ[50];
- enum machine_mode opmode = GET_MODE (operands[0]);
+ machine_mode opmode = GET_MODE (operands[0]);
gcc_assert (shift >= 0);
composed of NEON vector element types (e.g. __builtin_neon_qi). */
typedef struct
{
- enum machine_mode mode;
+ machine_mode mode;
const char *element_type_name;
const char *aapcs_name;
} arm_mangle_map_entry;
tree
arm_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
- enum machine_mode in_mode, out_mode;
+ machine_mode in_mode, out_mode;
int in_n, out_n;
bool out_unsigned_p = TYPE_UNSIGNED (type_out);
}
static bool
-arm_builtin_support_vector_misalignment (enum machine_mode mode,
+arm_builtin_support_vector_misalignment (machine_mode mode,
const_tree type, int misalignment,
bool is_packed)
{
Use acquire and release versions if necessary. */
static void
-arm_emit_load_exclusive (enum machine_mode mode, rtx rval, rtx mem, bool acq)
+arm_emit_load_exclusive (machine_mode mode, rtx rval, rtx mem, bool acq)
{
rtx (*gen) (rtx, rtx);
}
static void
-arm_emit_store_exclusive (enum machine_mode mode, rtx bval, rtx rval,
+arm_emit_store_exclusive (machine_mode mode, rtx bval, rtx rval,
rtx mem, bool rel)
{
rtx (*gen) (rtx, rtx, rtx);
arm_expand_compare_and_swap (rtx operands[])
{
rtx bval, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x;
- enum machine_mode mode;
+ machine_mode mode;
rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
bval = operands[0];
arm_split_compare_and_swap (rtx operands[])
{
rtx rval, mem, oldval, newval, scratch;
- enum machine_mode mode;
+ machine_mode mode;
enum memmodel mod_s, mod_f;
bool is_weak;
rtx_code_label *label1, *label2;
rtx value, rtx model_rtx, rtx cond)
{
enum memmodel model = (enum memmodel) INTVAL (model_rtx);
- enum machine_mode mode = GET_MODE (mem);
- enum machine_mode wmode = (mode == DImode ? DImode : SImode);
+ machine_mode mode = GET_MODE (mem);
+ machine_mode wmode = (mode == DImode ? DImode : SImode);
rtx_code_label *label;
rtx x;
{
rtx target, op0, op1;
unsigned char perm[MAX_VECT_LEN];
- enum machine_mode vmode;
+ machine_mode vmode;
unsigned char nelt;
bool one_vector_p;
bool testing_p;
static void
arm_expand_vec_perm_1 (rtx target, rtx op0, rtx op1, rtx sel)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
bool one_vector_p = rtx_equal_p (op0, op1);
gcc_checking_assert (vmode == V8QImode || vmode == V16QImode);
void
arm_expand_vec_perm (rtx target, rtx op0, rtx op1, rtx sel)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
unsigned int i, nelt = GET_MODE_NUNITS (vmode);
bool one_vector_p = rtx_equal_p (op0, op1);
rtx rmask[MAX_VECT_LEN], mask;
arm_evpc_neon_vtbl (struct expand_vec_perm_d *d)
{
rtx rperm[MAX_VECT_LEN], sel;
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
unsigned int i, nelt = d->nelt;
/* TODO: ARM's VTBL indexing is little-endian. In order to handle GCC's
/* Implement TARGET_VECTORIZE_VEC_PERM_CONST_OK. */
static bool
-arm_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+arm_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel)
{
struct expand_vec_perm_d d;
}
bool
-arm_autoinc_modes_ok_p (enum machine_mode mode, enum arm_auto_incmodes code)
+arm_autoinc_modes_ok_p (machine_mode mode, enum arm_auto_incmodes code)
{
/* If we are soft float and we do not have ldrd
then all auto increment forms are ok. */
{
enum rtx_code code = GET_CODE (*comparison);
int code_int;
- enum machine_mode mode = (GET_MODE (*op1) == VOIDmode)
+ machine_mode mode = (GET_MODE (*op1) == VOIDmode)
? GET_MODE (*op2) : GET_MODE (*op1);
gcc_assert (GET_MODE (*op1) != VOIDmode || GET_MODE (*op2) != VOIDmode);
static bool
arm_block_set_vect_profit_p (unsigned HOST_WIDE_INT length,
unsigned HOST_WIDE_INT align,
- enum machine_mode mode)
+ machine_mode mode)
{
int num;
bool unaligned_p = ((align & 3) != 0);
rtx val_elt, val_vec, reg;
rtx rval[MAX_VECT_LEN];
rtx (*gen_func) (rtx, rtx);
- enum machine_mode mode;
+ machine_mode mode;
unsigned HOST_WIDE_INT v = value;
gcc_assert ((align & 0x3) != 0);
rtx dst, addr, mem;
rtx val_elt, val_vec, reg;
rtx rval[MAX_VECT_LEN];
- enum machine_mode mode;
+ machine_mode mode;
unsigned HOST_WIDE_INT v = value;
gcc_assert ((align & 0x3) == 0);
unsigned int i;
rtx dst, addr, mem;
rtx val_exp, val_reg, reg;
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT v = value;
gcc_assert (align == 1 || align == 2);
#ifndef GCC_ARM_H
#define GCC_ARM_H
-/* We can't use enum machine_mode inside a generator file because it
+/* We can't use machine_mode inside a generator file because it
hasn't been created yet; we shouldn't be using any code that
needs the real definition though, so this ought to be safe. */
#ifdef GENERATOR_FILE
#define MACHMODE int
#else
#include "insn-modes.h"
-#define MACHMODE enum machine_mode
+#define MACHMODE machine_mode
#endif
#include "config/vxworks-dummy.h"
rtx thumb1_cc_op0;
rtx thumb1_cc_op1;
/* Also record the CC mode that is supported. */
- enum machine_mode thumb1_cc_mode;
+ machine_mode thumb1_cc_mode;
/* Set to 1 after arm_reorg has started. */
int after_arm_reorg;
}
(set (match_dup 0)
(match_dup 6)))]
{
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]);
enum rtx_code rc = minmax_code (operands[4]);
operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
[(set (match_dup 0) (match_dup 1))]
{
rtx lo_part = gen_lowpart (SImode, operands[0]);
- enum machine_mode src_mode = GET_MODE (operands[1]);
+ machine_mode src_mode = GET_MODE (operands[1]);
if (REG_P (operands[0])
&& !reg_overlap_mentioned_p (operands[0], operands[1]))
[(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
{
rtx lo_part = gen_lowpart (SImode, operands[0]);
- enum machine_mode src_mode = GET_MODE (operands[1]);
+ machine_mode src_mode = GET_MODE (operands[1]);
if (REG_P (operands[0])
&& !reg_overlap_mentioned_p (operands[0], operands[1]))
[(const_int 0)]
{
enum rtx_code rev_code;
- enum machine_mode mode;
+ machine_mode mode;
rtx rev_cond;
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
(cond_exec (match_dup 4) (set (match_dup 0)
(and:SI (match_dup 3) (const_int 1))))]
{
- enum machine_mode mode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[2]);
enum rtx_code rc = GET_CODE (operands[1]);
/* Note that operands[4] is the same as operands[1],
(cond_exec (match_dup 4) (set (match_dup 0)
(ior:SI (match_dup 3) (const_int 1))))]
{
- enum machine_mode mode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[2]);
enum rtx_code rc = GET_CODE (operands[1]);
/* Note that operands[4] is the same as operands[1],
(cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
{
rtx tmp1;
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]);
enum rtx_code rc = GET_CODE (operands[1]);
(set (match_dup 0) (match_dup 2)))]
"
{
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
operands[3], operands[4]);
enum rtx_code rc = GET_CODE (operands[5]);
operands[6] = gen_rtx_REG (mode, CC_REGNUM);
(set (match_dup 0) (match_dup 4)))]
"
{
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]);
enum rtx_code rc = GET_CODE (operands[1]);
(set (match_dup 0) (match_dup 4)))]
"
{
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]);
operands[5] = gen_rtx_REG (mode, CC_REGNUM);
(set (match_dup 0) (match_dup 5)))]
"
{
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]);
enum rtx_code rc = GET_CODE (operands[1]);
(set (match_dup 0) (not:SI (match_dup 5))))]
"
{
- enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
+ machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]);
enum rtx_code rc = GET_CODE (operands[1]);
rtx zero_reg;
HOST_WIDE_INT num_bits = INTVAL (operands[2]);
const int width = GET_MODE_BITSIZE (<MODE>mode);
- const enum machine_mode bvecmode = (width == 128) ? V16QImode : V8QImode;
+ const machine_mode bvecmode = (width == 128) ? V16QImode : V8QImode;
rtx (*gen_ext) (rtx, rtx, rtx, rtx) =
(width == 128) ? gen_neon_vextv16qi : gen_neon_vextv8qi;
rtx zero_reg;
HOST_WIDE_INT num_bits = INTVAL (operands[2]);
const int width = GET_MODE_BITSIZE (<MODE>mode);
- const enum machine_mode bvecmode = (width == 128) ? V16QImode : V8QImode;
+ const machine_mode bvecmode = (width == 128) ? V16QImode : V8QImode;
rtx (*gen_ext) (rtx, rtx, rtx, rtx) =
(width == 128) ? gen_neon_vextv16qi : gen_neon_vextv8qi;
(const_int 0)))]
{
operands[3] = GEN_INT (~0);
- enum machine_mode mode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[2]);
enum rtx_code rc = GET_CODE (operands[1]);
if (mode == CCFPmode || mode == CCFPEmode)
[(const_int 0)]
{
enum rtx_code rev_code;
- enum machine_mode mode;
+ machine_mode mode;
rtx rev_cond;
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
(and:SI (match_dup 3) (const_int 1)))
(cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))]
{
- enum machine_mode mode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[2]);
enum rtx_code rc = GET_CODE (operands[1]);
if (mode == CCFPmode || mode == CCFPEmode)
(cond_exec (match_dup 4) (set (match_dup 0)
(ior:SI (match_dup 3) (const_int 1))))]
{
- enum machine_mode mode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[2]);
enum rtx_code rc = GET_CODE (operands[1]);
operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
{
/* Emit: cmp\\t%1, %2\;mvn\\t%0, #0\;it\\t%D3\;mov%D3\\t%0, #0\;*/
enum rtx_code rc = reverse_condition (GET_CODE (operands[3]));
- enum machine_mode mode = SELECT_CC_MODE (rc, operands[1], operands[2]);
+ machine_mode mode = SELECT_CC_MODE (rc, operands[1], operands[2]);
rtx tmp1 = gen_rtx_REG (mode, CC_REGNUM);
emit_insn (gen_rtx_SET (VOIDmode,
t: tree
T: tree (brief)
C: enum rtx_code
- m: enum machine_mode
+ m: machine_mode
R: enum reg_class
L: insn list
H: location_t
break;
case 'm':
- fputs (GET_MODE_NAME ((enum machine_mode) va_arg (ap, int)),
+ fputs (GET_MODE_NAME ((machine_mode) va_arg (ap, int)),
file);
break;
#endif /* TREE_CODE */
#ifdef RTX_CODE
-extern int avr_hard_regno_call_part_clobbered (unsigned, enum machine_mode);
-extern bool tiny_valid_direct_memory_access_range(rtx, enum machine_mode);
+extern int avr_hard_regno_call_part_clobbered (unsigned, machine_mode);
+extern bool tiny_valid_direct_memory_access_range(rtx, machine_mode);
extern const char *output_movqi (rtx_insn *insn, rtx operands[], int *l);
extern const char *output_movhi (rtx_insn *insn, rtx operands[], int *l);
extern const char *output_movsisf (rtx_insn *insn, rtx operands[], int *l);
extern int test_hard_reg_class (enum reg_class rclass, rtx x);
extern int jump_over_one_insn_p (rtx_insn *insn, rtx dest);
-extern int avr_hard_regno_mode_ok (int regno, enum machine_mode mode);
+extern int avr_hard_regno_mode_ok (int regno, machine_mode mode);
extern void avr_final_prescan_insn (rtx_insn *insn, rtx *operand,
int num_operands);
-extern int avr_simplify_comparison_p (enum machine_mode mode,
+extern int avr_simplify_comparison_p (machine_mode mode,
RTX_CODE op, rtx x);
extern RTX_CODE avr_normalize_condition (RTX_CODE condition);
extern void out_shift_with_cnt (const char *templ, rtx_insn *insn,
rtx operands[], int *len, int t_len);
-extern enum reg_class avr_mode_code_base_reg_class (enum machine_mode, addr_space_t, RTX_CODE, RTX_CODE);
-extern bool avr_regno_mode_code_ok_for_base_p (int, enum machine_mode, addr_space_t, RTX_CODE, RTX_CODE);
+extern enum reg_class avr_mode_code_base_reg_class (machine_mode, addr_space_t, RTX_CODE, RTX_CODE);
+extern bool avr_regno_mode_code_ok_for_base_p (int, machine_mode, addr_space_t, RTX_CODE, RTX_CODE);
extern rtx avr_incoming_return_addr_rtx (void);
-extern rtx avr_legitimize_reload_address (rtx*, enum machine_mode, int, int, int, int, rtx (*)(rtx,int));
+extern rtx avr_legitimize_reload_address (rtx*, machine_mode, int, int, int, int, rtx (*)(rtx,int));
extern bool avr_mem_flash_p (rtx);
extern bool avr_mem_memx_p (rtx);
extern bool avr_load_libgcc_p (rtx);
-extern bool avr_xload_libgcc_p (enum machine_mode);
+extern bool avr_xload_libgcc_p (machine_mode);
extern rtx avr_eval_addr_attrib (rtx x);
extern rtx lpm_reg_rtx;
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
-static int avr_num_arg_regs (enum machine_mode, const_tree);
-static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
+static int avr_num_arg_regs (machine_mode, const_tree);
+static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);
{
int i;
- enum machine_mode mode = GET_MODE (xval);
+ machine_mode mode = GET_MODE (xval);
if (VOIDmode == mode)
mode = SImode;
rtx
avr_to_int_mode (rtx x)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
return VOIDmode == mode
? x
/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
static bool
-avr_scalar_mode_supported_p (enum machine_mode mode)
+avr_scalar_mode_supported_p (machine_mode mode)
{
if (ALL_FIXED_POINT_MODE_P (mode))
return true;
/* Helper for the function below. */
static void
-avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
+avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
{
*node = make_node (FIXED_POINT_TYPE);
TYPE_SATURATING (*node) = sat_p;
machine for a memory operand of mode MODE. */
static bool
-avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
bool ok = CONSTANT_ADDRESS_P (x);
memory address for an operand of mode MODE */
static rtx
-avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
+avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
bool big_offset_p = false;
than 63 bytes or for R++ or --R addressing. */
rtx
-avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
+avr_legitimize_reload_address (rtx *px, machine_mode mode,
int opnum, int type, int addr_type,
int ind_levels ATTRIBUTE_UNUSED,
rtx (*mk_memloc)(rtx,int))
static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
reg_class_t reload_class ATTRIBUTE_UNUSED,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
if (in_p
&& MEM_P (x)
/* Return 0 if undefined, 1 if always true or always false. */
int
-avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
+avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
{
unsigned int max = (mode == QImode ? 0xff :
mode == HImode ? 0xffff :
/* Returns the number of registers to allocate for a function argument. */
static int
-avr_num_arg_regs (enum machine_mode mode, const_tree type)
+avr_num_arg_regs (machine_mode mode, const_tree type)
{
int size;
in a register, and which register. */
static rtx
-avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
in the argument list. */
static void
-avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
bool
avr_load_libgcc_p (rtx op)
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
int n_bytes = GET_MODE_SIZE (mode);
return (n_bytes > 2
/* Return true if a value of mode MODE is read by __xload_* function. */
bool
-avr_xload_libgcc_p (enum machine_mode mode)
+avr_xload_libgcc_p (machine_mode mode)
{
int n_bytes = GET_MODE_SIZE (mode);
IN / OUT instruction will be generated. */
bool
-tiny_valid_direct_memory_access_range (rtx op, enum machine_mode mode)
+tiny_valid_direct_memory_access_range (rtx op, machine_mode mode)
{
rtx x;
rtx xval = xop[1];
/* MODE of the comparison. */
- enum machine_mode mode;
+ machine_mode mode;
/* Number of bytes to operate on. */
int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
enum rtx_code code_sat, int sign, bool out_label)
{
/* MODE of the operation. */
- enum machine_mode mode = GET_MODE (xop[0]);
+ machine_mode mode = GET_MODE (xop[0]);
/* INT_MODE of the same size. */
- enum machine_mode imode = int_mode_for_mode (mode);
+ machine_mode imode = int_mode_for_mode (mode);
/* Number of bytes to operate on. */
int i, n_bytes = GET_MODE_SIZE (mode);
static const char*
avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
{
- enum machine_mode mode = GET_MODE (xop[0]);
+ machine_mode mode = GET_MODE (xop[0]);
/* Only pointer modes want to add symbols. */
rtx op[4];
rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
rtx xdest = SET_DEST (xpattern);
- enum machine_mode mode = GET_MODE (xdest);
- enum machine_mode imode = int_mode_for_mode (mode);
+ machine_mode mode = GET_MODE (xdest);
+ machine_mode imode = int_mode_for_mode (mode);
int n_bytes = GET_MODE_SIZE (mode);
enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
enum rtx_code code
/* CODE and MODE of the operation. */
rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
enum rtx_code code = GET_CODE (SET_SRC (xpattern));
- enum machine_mode mode = GET_MODE (xop[0]);
+ machine_mode mode = GET_MODE (xop[0]);
/* Number of bytes to operate on. */
int i, n_bytes = GET_MODE_SIZE (mode);
for (i = 0; i < sizeof (val) / sizeof (*val); i++)
{
- enum machine_mode mode;
+ machine_mode mode;
xop[i] = operands[i];
const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
- enum machine_mode mode = GET_MODE (xop[0]);
- enum machine_mode imode = int_mode_for_mode (mode);
+ machine_mode mode = GET_MODE (xop[0]);
+ machine_mode imode = int_mode_for_mode (mode);
// The smallest fractional bit not cleared by the rounding is 2^(-RP).
int fbit = (int) GET_MODE_FBIT (mode);
double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
avr_rotate_bytes (rtx operands[])
{
int i, j;
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
bool same_reg = rtx_equal_p (operands[0], operands[1]);
int num = INTVAL (operands[2]);
rtx scratch = operands[3];
/* Work out if byte or word move is needed. Odd byte rotates need QImode.
Word move if no scratch is needed, otherwise use size of scratch. */
- enum machine_mode move_mode = QImode;
+ machine_mode move_mode = QImode;
int move_size, offset, size;
if (num & 0xf)
/* Implement `TARGET_REGISTER_MOVE_COST' */
static int
-avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
return (from == STACK_REG ? 6
/* Implement `TARGET_MEMORY_MOVE_COST' */
static int
-avr_memory_move_cost (enum machine_mode mode,
+avr_memory_move_cost (machine_mode mode,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
operand's parent operator. */
static int
-avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
+avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
int opno, bool speed)
{
enum rtx_code code = GET_CODE (x);
int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
enum rtx_code code = (enum rtx_code) codearg;
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
HOST_WIDE_INT val;
switch (code)
/* Implement `TARGET_ADDRESS_COST'. */
static int
-avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
+avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
&& SET_DEST (pattern) == cc0_rtx
&& GET_CODE (SET_SRC (pattern)) == COMPARE)
{
- enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
- enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
+ machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
+ machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
/* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
They must not be swapped, thus skip them. */
rtx x = XEXP (pattern, 1);
rtx src = SET_SRC (pat);
rtx t = XEXP (src,0);
- enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
+ machine_mode mode = GET_MODE (XEXP (pattern, 0));
if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
{
library function returns a value of mode MODE. */
static rtx
-avr_libcall_value (enum machine_mode mode,
+avr_libcall_value (machine_mode mode,
const_rtx func ATTRIBUTE_UNUSED)
{
int offs = GET_MODE_SIZE (mode);
(this way we don't have to check for odd registers everywhere). */
int
-avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
+avr_hard_regno_mode_ok (int regno, machine_mode mode)
{
/* NOTE: 8-bit values must not be disallowed for R28 or R29.
Disallowing QI et al. in these regs might lead to code like
/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
int
-avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
+avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
{
/* FIXME: This hook gets called with MODE:REGNO combinations that don't
represent valid hard registers like, e.g. HI:29. Returning TRUE
/* Implement `MODE_CODE_BASE_REG_CLASS'. */
enum reg_class
-avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
+avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as, RTX_CODE outer_code,
RTX_CODE index_code ATTRIBUTE_UNUSED)
{
bool
avr_regno_mode_code_ok_for_base_p (int regno,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
RTX_CODE outer_code,
RTX_CODE index_code ATTRIBUTE_UNUSED)
int clobber_val = 1234;
bool cooked_clobber_p = false;
bool set_p = false;
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
int n, n_bytes = GET_MODE_SIZE (mode);
gcc_assert (REG_P (dest)
/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
-static enum machine_mode
+static machine_mode
avr_addr_space_address_mode (addr_space_t as)
{
return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
-static enum machine_mode
+static machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
return avr_addr_space_address_mode (as);
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
static bool
-avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
+avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
bool strict, addr_space_t as)
{
bool ok = false;
static rtx
avr_addr_space_legitimize_address (rtx x, rtx old_x,
- enum machine_mode mode, addr_space_t as)
+ machine_mode mode, addr_space_t as)
{
if (ADDR_SPACE_GENERIC_P (as))
return avr_legitimize_address (x, old_x, mode);
avr_emit_movmemhi (rtx *xop)
{
HOST_WIDE_INT count;
- enum machine_mode loop_mode;
+ machine_mode loop_mode;
addr_space_t as = MEM_ADDR_SPACE (xop[1]);
rtx loop_reg, addr1, a_src, a_dest, insn, xas;
rtx a_hi8 = NULL_RTX;
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
addr_space_t as = (addr_space_t) INTVAL (op[0]);
- enum machine_mode loop_mode = GET_MODE (op[1]);
+ machine_mode loop_mode = GET_MODE (op[1]);
bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
rtx xop[3];
{
rtx pat, xop[3];
int n, n_args = call_expr_nargs (exp);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
gcc_assert (n_args >= 1 && n_args <= 3);
{
tree arg = CALL_EXPR_ARG (exp, n);
rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- enum machine_mode opmode = GET_MODE (op);
- enum machine_mode mode = insn_data[icode].operand[n+1].mode;
+ machine_mode opmode = GET_MODE (op);
+ machine_mode mode = insn_data[icode].operand[n+1].mode;
if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
{
static rtx
avr_expand_builtin (tree exp, rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
""
{
rtx addr0;
- enum machine_mode mode;
+ machine_mode mode;
/* If value to set is not zero, use the library routine. */
if (operands[2] != const0_rtx)
extern char *bfin_asm_short (void);
extern int log2constp (unsigned HOST_WIDE_INT);
-extern int hard_regno_mode_ok (int, enum machine_mode);
+extern int hard_regno_mode_ok (int, machine_mode);
extern void init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx);
extern HOST_WIDE_INT bfin_initial_elimination_offset (int, int);
-extern int effective_address_32bit_p (rtx, enum machine_mode);
+extern int effective_address_32bit_p (rtx, machine_mode);
extern int symbolic_reference_mentioned_p (rtx);
-extern rtx bfin_gen_compare (rtx, enum machine_mode);
-extern bool expand_move (rtx *, enum machine_mode);
+extern rtx bfin_gen_compare (rtx, machine_mode);
+extern bool expand_move (rtx *, machine_mode);
extern void bfin_expand_call (rtx, rtx, rtx, rtx, int);
extern bool bfin_longcall_p (rtx, int);
extern bool bfin_dsp_memref_p (rtx);
extern bool bfin_expand_movmem (rtx, rtx, rtx, rtx);
extern enum reg_class secondary_input_reload_class (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx);
extern enum reg_class secondary_output_reload_class (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx);
extern char *section_asm_op_1 (SECT_ENUM_T);
extern char *section_asm_op (SECT_ENUM_T);
extern void print_address_operand (FILE *, rtx);
extern void split_di (rtx [], int, rtx [], rtx []);
extern int split_load_immediate (rtx []);
-extern void emit_pic_move (rtx *, enum machine_mode);
+extern void emit_pic_move (rtx *, machine_mode);
extern void asm_conditional_branch (rtx_insn *, rtx *, int, int);
-extern rtx bfin_gen_compare (rtx, enum machine_mode);
+extern rtx bfin_gen_compare (rtx, machine_mode);
extern unsigned bfin_local_alignment (tree, unsigned);
extern rtx bfin_va_arg (tree, tree);
static void
setup_incoming_varargs (cumulative_args_t cum,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, int *pretend_size,
int no_rtl)
{
32-bit instruction. */
int
-effective_address_32bit_p (rtx op, enum machine_mode mode)
+effective_address_32bit_p (rtx op, machine_mode mode)
{
HOST_WIDE_INT offset;
static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
void
print_operand (FILE *file, rtx x, char code)
{
- enum machine_mode mode;
+ machine_mode mode;
if (code == '!')
{
(TYPE is null for libcalls where that information may not be available.) */
static void
-bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+bfin_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+bfin_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
stack. */
static int
-bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
+bfin_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
static bool
bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
/* Emit insns to move operands[1] into operands[0]. */
void
-emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
+emit_pic_move (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
{
rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
bool
-expand_move (rtx *operands, enum machine_mode mode)
+expand_move (rtx *operands, machine_mode mode)
{
rtx op = operands[1];
if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
int
-hard_regno_mode_ok (int regno, enum machine_mode mode)
+hard_regno_mode_ok (int regno, machine_mode mode)
{
/* Allow only dregs to store value of mode HI or QI */
enum reg_class rclass = REGNO_REG_CLASS (regno);
/* Implements target hook vector_mode_supported_p. */
static bool
-bfin_vector_mode_supported_p (enum machine_mode mode)
+bfin_vector_mode_supported_p (machine_mode mode)
{
return mode == V2HImode;
}
/* Worker function for TARGET_REGISTER_MOVE_COST. */
static int
-bfin_register_move_cost (enum machine_mode mode,
+bfin_register_move_cost (machine_mode mode,
reg_class_t class1, reg_class_t class2)
{
/* These need secondary reloads, so they're more expensive. */
program; it'll make the costs more accurate. */
static int
-bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+bfin_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass,
bool in ATTRIBUTE_UNUSED)
{
static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
/* If we have HImode or QImode, we can only use DREGS as secondary registers;
in most other cases we can also use PREGS. */
stored in bfin_compare_op0 and bfin_compare_op1 already. */
rtx
-bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
+bfin_gen_compare (rtx cmp, machine_mode mode ATTRIBUTE_UNUSED)
{
enum rtx_code code1, code2;
rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
MODE. Return false if not. */
static bool
-bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
+bfin_valid_add (machine_mode mode, HOST_WIDE_INT value)
{
unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
int sz = GET_MODE_SIZE (mode);
}
static bool
-bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
+bfin_valid_reg_p (unsigned int regno, int strict, machine_mode mode,
enum rtx_code outer_code)
{
if (strict)
*/
static bool
-bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+bfin_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
switch (GET_CODE (x)) {
case REG:
another way. */
static bool
-bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
+bfin_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
/* We have only one class of non-legitimate constants, and our movsi
crossing section boundaries. */
static bool
-bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+bfin_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
rtx sym;
HOST_WIDE_INT offset;
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
static void
-single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
+single_move_for_movmem (rtx dst, rtx src, machine_mode mode, HOST_WIDE_INT offset)
{
rtx scratch = gen_reg_rtx (mode);
rtx srcmem, dstmem;
where we expect a vector. To avoid crashing, use one of the vector
clear instructions. */
static rtx
-safe_vector_operand (rtx x, enum machine_mode mode)
+safe_vector_operand (rtx x, machine_mode mode)
{
if (x != const0_rtx)
return x;
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode op0mode = GET_MODE (op0);
- enum machine_mode op1mode = GET_MODE (op1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode op0mode = GET_MODE (op0);
+ machine_mode op1mode = GET_MODE (op1);
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
if (VECTOR_MODE_P (mode0))
op0 = safe_vector_operand (op0, mode0);
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op0 = expand_normal (arg0);
- enum machine_mode op0mode = GET_MODE (op0);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode op0mode = GET_MODE (op0);
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
if (! target
|| GET_MODE (target) != tmode
static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
size_t i;
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
tree arg0, arg1, arg2;
rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
- enum machine_mode tmode, mode0;
+ machine_mode tmode, mode0;
switch (fcode)
{
#ifdef RTX_CODE
extern void c6x_init_cumulative_args (CUMULATIVE_ARGS *, const_tree, rtx, int);
-extern bool c6x_block_reg_pad_upward (enum machine_mode, const_tree, bool);
+extern bool c6x_block_reg_pad_upward (machine_mode, const_tree, bool);
-extern bool c6x_legitimate_address_p_1 (enum machine_mode, rtx, bool, bool);
+extern bool c6x_legitimate_address_p_1 (machine_mode, rtx, bool, bool);
extern bool c6x_mem_operand (rtx, enum reg_class, bool);
-extern bool expand_move (rtx *, enum machine_mode);
+extern bool expand_move (rtx *, machine_mode);
extern bool c6x_long_call_p (rtx);
extern void c6x_expand_call (rtx, rtx, bool);
-extern rtx c6x_expand_compare (rtx, enum machine_mode);
+extern rtx c6x_expand_compare (rtx, machine_mode);
extern bool c6x_force_op_for_comparison_p (enum rtx_code, rtx);
extern bool c6x_expand_movmem (rtx, rtx, rtx, rtx, rtx, rtx);
/* Implements the macro FUNCTION_ARG defined in c6x.h. */
static rtx
-c6x_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+c6x_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static void
c6x_function_arg_advance (cumulative_args_t cum_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
upward rather than downward. */
bool
-c6x_block_reg_pad_upward (enum machine_mode mode ATTRIBUTE_UNUSED,
+c6x_block_reg_pad_upward (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool first)
{
HOST_WIDE_INT size;
/* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
static unsigned int
-c6x_function_arg_boundary (enum machine_mode mode, const_tree type)
+c6x_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int boundary = type ? TYPE_ALIGN (type) : GET_MODE_BITSIZE (mode);
/* Implement TARGET_FUNCTION_ARG_ROUND_BOUNDARY. */
static unsigned int
-c6x_function_arg_round_boundary (enum machine_mode mode, const_tree type)
+c6x_function_arg_round_boundary (machine_mode mode, const_tree type)
{
return c6x_function_arg_boundary (mode, type);
}
/* Implement TARGET_LIBCALL_VALUE. */
static rtx
-c6x_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+c6x_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, REG_A4);
}
static bool
c6x_pass_by_reference (cumulative_args_t cum_v ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
int size = -1;
static bool
c6x_callee_copies (cumulative_args_t cum_v ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
everything sized 8 bytes or smaller into small data. */
static section *
-c6x_select_rtx_section (enum machine_mode mode, rtx x,
+c6x_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
if (c6x_sdata_mode == C6X_SDATA_ALL
cumulative_args_t cum;
HARD_REG_SET call_saved_regset;
tree parameter;
- enum machine_mode mode;
+ machine_mode mode;
tree type;
rtx parm_rtx;
int i;
should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
bool
-expand_move (rtx *operands, enum machine_mode mode)
+expand_move (rtx *operands, machine_mode mode)
{
rtx dest = operands[0];
rtx op = operands[1];
that should be used in the jump insn. */
rtx
-c6x_expand_compare (rtx comparison, enum machine_mode mode)
+c6x_expand_compare (rtx comparison, machine_mode mode)
{
enum rtx_code code = GET_CODE (comparison);
rtx op0 = XEXP (comparison, 0);
rtx op1 = XEXP (comparison, 1);
rtx cmp;
enum rtx_code jump_code = code;
- enum machine_mode op_mode = GET_MODE (op0);
+ machine_mode op_mode = GET_MODE (op0);
if (op_mode == DImode && (code == NE || code == EQ) && op1 == const0_rtx)
{
c6x_subword (rtx op, bool high_p)
{
unsigned int byte;
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (op);
if (mode == VOIDmode)
while (count > 0)
{
rtx reg, reg_lowpart;
- enum machine_mode srcmode, dstmode;
+ machine_mode srcmode, dstmode;
unsigned HOST_WIDE_INT src_size, dst_size, src_left;
int shift;
rtx srcmem, dstmem;
use the scaled form. */
static void
-print_address_offset (FILE *file, rtx off, enum machine_mode mem_mode)
+print_address_offset (FILE *file, rtx off, machine_mode mem_mode)
{
rtx pat;
/* Subroutine of c6x_print_operand; used to print a memory reference X to FILE. */
static void
-c6x_print_address_operand (FILE *file, rtx x, enum machine_mode mem_mode)
+c6x_print_address_operand (FILE *file, rtx x, machine_mode mem_mode)
{
rtx off;
switch (GET_CODE (x))
int i;
HOST_WIDE_INT v;
tree t;
- enum machine_mode mode;
+ machine_mode mode;
if (code == '|')
{
bool
c6x_mem_operand (rtx op, enum reg_class c, bool small_offset)
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
rtx base = XEXP (op, 0);
switch (GET_CODE (base))
{
recursively examining an operand inside a PRE/POST_MODIFY. */
bool
-c6x_legitimate_address_p_1 (enum machine_mode mode, rtx x, bool strict,
+c6x_legitimate_address_p_1 (machine_mode mode, rtx x, bool strict,
bool no_large_offset)
{
int size, size1;
}
static bool
-c6x_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+c6x_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
return c6x_legitimate_address_p_1 (mode, x, strict, false);
}
static bool
-c6x_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+c6x_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
return true;
int idx = N_SAVE_ORDER - i - 1;
unsigned regno = reg_save_order[idx];
rtx reg;
- enum machine_mode save_mode = SImode;
+ machine_mode save_mode = SImode;
if (regno == REG_A15 && frame_pointer_needed)
/* Already saved. */
{
unsigned regno = reg_save_order[i];
rtx reg;
- enum machine_mode save_mode = SImode;
+ machine_mode save_mode = SImode;
if (!c6x_save_reg (regno))
continue;
/* Implements target hook vector_mode_supported_p. */
static bool
-c6x_vector_mode_supported_p (enum machine_mode mode)
+c6x_vector_mode_supported_p (machine_mode mode)
{
switch (mode)
{
}
/* Implements TARGET_VECTORIZE_PREFERRED_SIMD_MODE. */
-static enum machine_mode
-c6x_preferred_simd_mode (enum machine_mode mode)
+static machine_mode
+c6x_preferred_simd_mode (machine_mode mode)
{
switch (mode)
{
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
static bool
-c6x_scalar_mode_supported_p (enum machine_mode mode)
+c6x_scalar_mode_supported_p (machine_mode mode)
{
if (ALL_FIXED_POINT_MODE_P (mode)
&& GET_MODE_PRECISION (mode) <= 2 * BITS_PER_WORD)
where we expect a vector. To avoid crashing, use one of the vector
clear instructions. */
static rtx
-safe_vector_operand (rtx x, enum machine_mode mode)
+safe_vector_operand (rtx x, machine_mode mode)
{
if (x != const0_rtx)
return x;
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- enum machine_mode op0mode = GET_MODE (op0);
- enum machine_mode op1mode = GET_MODE (op1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1 + offs].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2 + offs].mode;
+ machine_mode op0mode = GET_MODE (op0);
+ machine_mode op1mode = GET_MODE (op1);
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1 + offs].mode;
+ machine_mode mode1 = insn_data[icode].operand[2 + offs].mode;
rtx ret = target;
if (VECTOR_MODE_P (mode0))
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- enum machine_mode op0mode = GET_MODE (op0);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode op0mode = GET_MODE (op0);
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
if (! target
|| GET_MODE (target) != tmode
static rtx
c6x_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
size_t i;
[(set (match_dup 2) (match_dup 3))]
{
unsigned HOST_WIDE_INT mask, val;
- enum machine_mode inner_mode = GET_MODE_INNER (<MODE>mode);
+ machine_mode inner_mode = GET_MODE_INNER (<MODE>mode);
int i;
val = 0;
unsigned HOST_WIDE_INT mask;
unsigned HOST_WIDE_INT val[2];
rtx lo_half, hi_half;
- enum machine_mode inner_mode = GET_MODE_INNER (<MODE>mode);
+ machine_mode inner_mode = GET_MODE_INNER (<MODE>mode);
int i, j;
split_di (operands, 1, &lo_half, &hi_half);
/* Register usage. */
extern enum reg_class cr16_regno_reg_class (int);
-extern int cr16_hard_regno_mode_ok (int regno, enum machine_mode);
+extern int cr16_hard_regno_mode_ok (int regno, machine_mode);
/* Passing function arguments. */
extern int cr16_function_arg_regno_p (int);
bool treat_as_const);
extern int cr16_const_double_ok (rtx op);
extern int legitimate_pic_operand_p (rtx);
-extern rtx legitimize_pic_address (rtx, enum machine_mode, rtx);
+extern rtx legitimize_pic_address (rtx, machine_mode, rtx);
/* Prologue/Epilogue functions. */
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
int
-cr16_hard_regno_mode_ok (int regno, enum machine_mode mode)
+cr16_hard_regno_mode_ok (int regno, machine_mode mode)
{
if ((GET_MODE_SIZE (mode) >= 4) && (regno == 11))
return 0;
/* Create an RTX representing the place where a
library function returns a value of mode MODE. */
static rtx
-cr16_libcall_value (enum machine_mode mode,
+cr16_libcall_value (machine_mode mode,
const_rtx func ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, cr16_ret_register ());
the number of registers needed else 0. */
static int
enough_regs_for_param (CUMULATIVE_ARGS * cum, const_tree type,
- enum machine_mode mode)
+ machine_mode mode)
{
int type_size;
int remaining_size;
/* Implements the macro FUNCTION_ARG defined in cr16.h. */
static rtx
-cr16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+cr16_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Implements the macro FUNCTION_ARG_ADVANCE defined in cr16.h. */
static void
-cr16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+cr16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS * cum = get_cumulative_args (cum_v);
NOTE: @BRO is added using unspec:BRO
NOTE: @GOT is added using unspec:GOT. */
rtx
-legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
+legitimize_pic_address (rtx orig, machine_mode mode ATTRIBUTE_UNUSED,
rtx reg)
{
/* First handle a simple SYMBOL_REF or LABEL_REF. */
/* Implementation of TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-cr16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+cr16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx addr, bool strict)
{
enum cr16_addrtype addrtype;
/* Return cost of the memory address x. */
static int
-cr16_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
+cr16_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
/* Implement `TARGET_REGISTER_MOVE_COST'. */
static int
-cr16_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+cr16_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from ATTRIBUTE_UNUSED, reg_class_t to)
{
return (to != GENERAL_REGS ? 8 : 2);
nonzero if it is to be read in. This cost is relative to those in
REGISTER_MOVE_COST. */
static int
-cr16_memory_move_cost (enum machine_mode mode,
+cr16_memory_move_cost (machine_mode mode,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
{
rtx dwarf, reg, tmp;
int i, j, from, to, word_cnt, dwarf_par_index, inc;
- enum machine_mode mode;
+ machine_mode mode;
int num_regs = 0, offset = 0, split_here = 0, total_push_bytes = 0;
for (i = 0; i <= current_frame_info.last_reg_to_save; ++i)
it should assign X (which will always be a C variable) a new value. */
static rtx
cr16_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (flag_pic)
return legitimize_pic_address (orig_x, mode, NULL_RTX);
satisfies CONSTANT_P. In cr16c treat legitimize float
constant as an immediate operand. */
static bool
-cr16_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+cr16_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
return 1;
return;
}
-static enum machine_mode
+static machine_mode
cr16_unwind_word_mode (void)
{
return SImode;
#ifdef RTX_CODE
extern const char *cris_op_str (rtx);
extern void cris_notice_update_cc (rtx, rtx_insn *);
-extern bool cris_reload_address_legitimized (rtx, enum machine_mode, int, int, int);
+extern bool cris_reload_address_legitimized (rtx, machine_mode, int, int, int);
extern int cris_side_effect_mode_ok (enum rtx_code, rtx *, int, int,
int, int, int);
extern bool cris_cc0_user_requires_cmp (rtx);
extern int cris_legitimate_pic_operand (rtx);
extern enum cris_symbol_type cris_symbol_type_of (const_rtx);
extern bool cris_valid_pic_const (const_rtx, bool);
-extern bool cris_legitimate_constant_p (enum machine_mode, rtx);
+extern bool cris_legitimate_constant_p (machine_mode, rtx);
extern bool cris_constant_index_p (const_rtx);
extern bool cris_base_p (const_rtx, bool);
extern bool cris_base_or_autoincr_p (const_rtx, bool);
extern bool cris_bdap_index_p (const_rtx, bool);
extern bool cris_biap_index_p (const_rtx, bool);
-extern bool cris_legitimate_address_p (enum machine_mode, rtx, bool);
+extern bool cris_legitimate_address_p (machine_mode, rtx, bool);
extern bool cris_store_multiple_op_p (rtx);
extern bool cris_movem_load_rest_p (rtx, int);
extern void cris_asm_output_symbol_ref (FILE *, rtx);
/* Fix for reg_overlap_mentioned_p. */
static int cris_reg_overlap_mentioned_p (rtx, rtx);
-static enum machine_mode cris_promote_function_mode (const_tree, enum machine_mode,
+static machine_mode cris_promote_function_mode (const_tree, machine_mode,
int *, const_tree, int);
-static unsigned int cris_atomic_align_for_mode (enum machine_mode);
+static unsigned int cris_atomic_align_for_mode (machine_mode);
static void cris_print_base (rtx, FILE *);
static rtx cris_struct_value_rtx (tree, int);
-static void cris_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static void cris_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree type, int *, int);
static int cris_initial_frame_pointer_offset (void);
static reg_class_t cris_preferred_reload_class (rtx, reg_class_t);
-static int cris_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
-static int cris_memory_move_cost (enum machine_mode, reg_class_t, bool);
+static int cris_register_move_cost (machine_mode, reg_class_t, reg_class_t);
+static int cris_memory_move_cost (machine_mode, reg_class_t, bool);
static bool cris_rtx_costs (rtx, int, int, int, int *, bool);
-static int cris_address_cost (rtx, enum machine_mode, addr_space_t, bool);
-static bool cris_pass_by_reference (cumulative_args_t, enum machine_mode,
+static int cris_address_cost (rtx, machine_mode, addr_space_t, bool);
+static bool cris_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static int cris_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int cris_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static rtx cris_function_arg (cumulative_args_t, enum machine_mode,
+static rtx cris_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
static rtx cris_function_incoming_arg (cumulative_args_t,
- enum machine_mode, const_tree, bool);
-static void cris_function_arg_advance (cumulative_args_t, enum machine_mode,
+ machine_mode, const_tree, bool);
+static void cris_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
static tree cris_md_asm_clobbers (tree, tree, tree);
-static bool cris_cannot_force_const_mem (enum machine_mode, rtx);
+static bool cris_cannot_force_const_mem (machine_mode, rtx);
static void cris_option_override (void);
static void cris_trampoline_init (rtx, tree, rtx);
static rtx cris_function_value(const_tree, const_tree, bool);
-static rtx cris_libcall_value (enum machine_mode, const_rtx);
+static rtx cris_libcall_value (machine_mode, const_rtx);
static bool cris_function_value_regno_p (const unsigned int);
static void cris_file_end (void);
can be reached as pc-relative as we can't tell when or how to do that. */
static bool
-cris_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+cris_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
enum cris_symbol_type t = cris_symbol_type_of (x);
symbol is valid for the plain "symbol + offset" case. */
bool
-cris_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+cris_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
const_rtx x1, x2;
so don't bother; fix the documentation instead. */
bool
-cris_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+cris_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
enum cris_symbol_type t;
bool
cris_reload_address_legitimized (rtx x,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int opnum ATTRIBUTE_UNUSED,
int itype,
int ind_levels ATTRIBUTE_UNUSED)
/* Worker function for TARGET_REGISTER_MOVE_COST. */
static int
-cris_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+cris_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
/* Can't move to and from a SPECIAL_REGS register, so we have to say
suffice. */
static int
-cris_memory_move_cost (enum machine_mode mode,
+cris_memory_move_cost (machine_mode mode,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
/* The ADDRESS_COST worker. */
static int
-cris_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
+cris_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
rtx
cris_split_movdx (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtx dest = operands[0];
rtx src = operands[1];
rtx val;
static void
cris_setup_incoming_varargs (cumulative_args_t ca_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size,
int second_time)
static bool
cris_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
return (targetm.calls.must_pass_in_stack (mode, type)
and *not* defining TARGET_PROMOTE_PROTOTYPES or PROMOTE_MODE gives the
best code size and speed for gcc, ipps and products in gcc-2.7.2. */
-enum machine_mode
+machine_mode
cris_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree fntype ATTRIBUTE_UNUSED,
int for_return)
/* Atomic types require alignment to be at least their "natural" size. */
static unsigned int
-cris_atomic_align_for_mode (enum machine_mode mode)
+cris_atomic_align_for_mode (machine_mode mode)
{
return GET_MODE_BITSIZE (mode);
}
time being. */
static rtx
-cris_libcall_value (enum machine_mode mode,
+cris_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, CRIS_FIRST_ARG_REG);
}
static int
-cris_arg_partial_bytes (cumulative_args_t ca, enum machine_mode mode,
+cris_arg_partial_bytes (cumulative_args_t ca, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
if (get_cumulative_args (ca)->regs == CRIS_MAX_ARGS_IN_REGS - 1
static rtx
cris_function_arg_1 (cumulative_args_t ca_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named, bool incoming)
{
The void_type_node is sent as a "closing" call. */
static rtx
-cris_function_arg (cumulative_args_t ca, enum machine_mode mode,
+cris_function_arg (cumulative_args_t ca, machine_mode mode,
const_tree type, bool named)
{
return cris_function_arg_1 (ca, mode, type, named, false);
void_type_node TYPE parameter. */
static rtx
-cris_function_incoming_arg (cumulative_args_t ca, enum machine_mode mode,
+cris_function_incoming_arg (cumulative_args_t ca, machine_mode mode,
const_tree type, bool named)
{
return cris_function_arg_1 (ca, mode, type, named, true);
/* Worker function for TARGET_FUNCTION_ARG_ADVANCE. */
static void
-cris_function_arg_advance (cumulative_args_t ca_v, enum machine_mode mode,
+cris_function_arg_advance (cumulative_args_t ca_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
[(set (match_dup 0) (match_dup 4))
(set (match_dup 5) (match_dup 6))]
{
- enum machine_mode zmode = INTVAL (operands[3]) <= 255 ? QImode : HImode;
- enum machine_mode amode
+ machine_mode zmode = INTVAL (operands[3]) <= 255 ? QImode : HImode;
+ machine_mode amode
= satisfies_constraint_O (operands[3]) ? SImode : zmode;
rtx op1
= (REG_S_P (operands[1])
[(set (match_dup 0) (match_dup 3))
(set (match_dup 0) (and:SI (match_dup 0) (match_dup 4)))]
{
- enum machine_mode zmode = INTVAL (operands[2]) <= 255 ? QImode : HImode;
+ machine_mode zmode = INTVAL (operands[2]) <= 255 ? QImode : HImode;
rtx op1
= (REG_S_P (operands[2])
? gen_rtx_REG (zmode, REGNO (operands[2]))
extern rtx machopic_indirect_data_reference (rtx, rtx);
extern rtx machopic_indirect_call_target (rtx);
-extern rtx machopic_legitimize_pic_address (rtx, enum machine_mode, rtx);
+extern rtx machopic_legitimize_pic_address (rtx, machine_mode, rtx);
extern void machopic_asm_out_constructor (rtx, int);
extern void machopic_asm_out_destructor (rtx, int);
-extern section *machopic_select_rtx_section (enum machine_mode, rtx,
+extern section *machopic_select_rtx_section (machine_mode, rtx,
unsigned HOST_WIDE_INT);
#endif /* RTX_CODE */
rtx sym_ref = XEXP (target, 0);
const char *stub_name = machopic_indirection_name (sym_ref,
/*stub_p=*/true);
- enum machine_mode mode = GET_MODE (sym_ref);
+ machine_mode mode = GET_MODE (sym_ref);
XEXP (target, 0) = gen_rtx_SYMBOL_REF (mode, stub_name);
SYMBOL_REF_DATA (XEXP (target, 0)) = SYMBOL_REF_DATA (sym_ref);
}
rtx
-machopic_legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
+machopic_legitimize_pic_address (rtx orig, machine_mode mode, rtx reg)
{
rtx pic_ref = orig;
unsigned HOST_WIDE_INT align,
bool zsize)
{
- enum machine_mode mode = DECL_MODE (exp);
+ machine_mode mode = DECL_MODE (exp);
unsigned int modesize = GET_MODE_BITSIZE (mode);
if (DARWIN_SECTION_ANCHORS
They must go in "const". */
section *
-machopic_select_rtx_section (enum machine_mode mode, rtx x,
+machopic_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
if (GET_MODE_SIZE (mode) == 8
<http://www.gnu.org/licenses/>. */
#ifdef RTX_CODE
-extern enum machine_mode epiphany_select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode epiphany_select_cc_mode (enum rtx_code, rtx, rtx);
/* Define the function that build the compare insn for scc and bcc. */
-extern struct rtx_def *gen_compare_reg (enum machine_mode, enum rtx_code,
- enum machine_mode, rtx, rtx);
+extern struct rtx_def *gen_compare_reg (machine_mode, enum rtx_code,
+ machine_mode, rtx, rtx);
#endif
/* Declarations for various fns used in the .md file. */
extern void epiphany_expand_epilogue (int);
extern int epiphany_initial_elimination_offset (int, int);
extern void epiphany_init_expanders (void);
-extern int hard_regno_mode_ok (int regno, enum machine_mode mode);
+extern int hard_regno_mode_ok (int regno, machine_mode mode);
#ifdef HARD_CONST
extern void emit_set_fp_mode (int entity, int mode, int prev_mode,
HARD_REG_SET regs_live);
static tree epiphany_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static tree epiphany_handle_forwarder_attribute (tree *, tree, tree, int,
bool *);
-static bool epiphany_pass_by_reference (cumulative_args_t, enum machine_mode,
+static bool epiphany_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
static rtx_insn *frame_insn (rtx);
\f
/* Return 1 if hard register REGNO can hold a value of machine_mode MODE. */
int
-hard_regno_mode_ok (int regno, enum machine_mode mode)
+hard_regno_mode_ok (int regno, machine_mode mode)
{
if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
return (regno & 1) == 0 && GPR_P (regno);
/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
return the mode to be used for the comparison. */
-enum machine_mode
+machine_mode
epiphany_select_cc_mode (enum rtx_code op,
rtx x ATTRIBUTE_UNUSED,
rtx y ATTRIBUTE_UNUSED)
mode, and return the rtx for the cc reg comparison in CMODE. */
rtx
-gen_compare_reg (enum machine_mode cmode, enum rtx_code code,
- enum machine_mode in_mode, rtx x, rtx y)
+gen_compare_reg (machine_mode cmode, enum rtx_code code,
+ machine_mode in_mode, rtx x, rtx y)
{
- enum machine_mode mode = SELECT_CC_MODE (code, x, y);
+ machine_mode mode = SELECT_CC_MODE (code, x, y);
rtx cc_reg, pat, clob0, clob1, clob2;
if (in_mode == VOIDmode)
: (CUM))
static unsigned int
-epiphany_function_arg_boundary (enum machine_mode mode, const_tree type)
+epiphany_function_arg_boundary (machine_mode mode, const_tree type)
{
if ((type ? TYPE_ALIGN (type) : GET_MODE_BITSIZE (mode)) <= PARM_BOUNDARY)
return PARM_BOUNDARY;
static void
-epiphany_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+epiphany_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
int first_anon_arg;
}
static int
-epiphany_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
+epiphany_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
int words = 0, rounded_cum;
If ADDR is not a valid address, its cost is irrelevant. */
static int
-epiphany_address_cost (rtx addr, enum machine_mode mode,
+epiphany_address_cost (rtx addr, machine_mode mode,
addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
rtx reg;
but issue pich is the same. For floating point, load latency is three
times as much as a reg-reg move. */
static int
-epiphany_memory_move_cost (enum machine_mode mode,
+epiphany_memory_move_cost (machine_mode mode,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
static bool
epiphany_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
if (type)
const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
mode = TYPE_MODE (ret_type);
/* We must change the mode like PROMOTE_MODE does.
}
static rtx
-epiphany_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+epiphany_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, 0);
}
/* Generate a MEM referring to a varargs argument slot. */
static rtx
-gen_varargs_mem (enum machine_mode mode, rtx addr)
+gen_varargs_mem (machine_mode mode, rtx addr)
{
rtx mem = gen_rtx_MEM (mode, addr);
MEM_NOTRAP_P (mem) = 1;
last_saved--;
for (i = 0; i < limit; i++)
{
- enum machine_mode mode = word_mode;
+ machine_mode mode = word_mode;
rtx mem, reg;
int n = i;
- rtx (*gen_mem) (enum machine_mode, rtx) = gen_frame_mem;
+ rtx (*gen_mem) (machine_mode, rtx) = gen_frame_mem;
/* Make sure we push the arguments in the right order. */
if (n < MAX_EPIPHANY_PARM_REGS && crtl->args.pretend_args_size)
allocate the entire frame; this is joint with one register save. */
if (current_frame_info.first_slot >= 0)
{
- enum machine_mode mode
+ machine_mode mode
= (current_frame_info.first_slot_size == UNITS_PER_WORD
? word_mode : DImode);
|| RTX_OK_FOR_OFFSET_P (MODE, XEXP (X, 1))))
static bool
-epiphany_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+epiphany_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
#define REG_OK_FOR_BASE_P(X) \
(strict ? GPR_P (REGNO (X)) : GPR_AP_OR_PSEUDO_P (REGNO (X)))
static reg_class_t
epiphany_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri)
{
/* This could give more reload inheritance, but we are missing some
return epiphany_uninterruptible_p (t);
}
-static enum machine_mode
-epiphany_promote_function_mode (const_tree type, enum machine_mode mode,
+static machine_mode
+epiphany_promote_function_mode (const_tree type, machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree funtype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
/* On the EPIPHANY the first MAX_EPIPHANY_PARM_REGS args are normally in
registers and the rest are pushed. */
static rtx
-epiphany_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+epiphany_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
of mode MODE and data type TYPE.
(TYPE is null for libcalls where that information may not be available.) */
static void
-epiphany_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+epiphany_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static unsigned int
-epiphany_min_divisions_for_recip_mul (enum machine_mode mode)
+epiphany_min_divisions_for_recip_mul (machine_mode mode)
{
if (flag_reciprocal_math && mode == SFmode)
/* We'll expand into a multiply-by-reciprocal anyway, so we might as well do
return default_min_divisions_for_recip_mul (mode);
}
-static enum machine_mode
-epiphany_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
+static machine_mode
+epiphany_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
return TARGET_VECT_DOUBLE ? DImode : SImode;
}
static bool
-epiphany_vector_mode_supported_p (enum machine_mode mode)
+epiphany_vector_mode_supported_p (machine_mode mode)
{
if (mode == V2SFmode)
return true;
}
static bool
-epiphany_support_vector_misalignment (enum machine_mode mode, const_tree type,
+epiphany_support_vector_misalignment (machine_mode mode, const_tree type,
int misalignment, bool is_packed)
{
if (GET_MODE_SIZE (mode) == 8 && misalignment % 4 == 0)
{
rtx cmp_op0 = XEXP (operands[1], 0);
rtx cmp_op1 = XEXP (operands[1], 1);
- enum machine_mode cmp_in_mode;
+ machine_mode cmp_in_mode;
enum rtx_code code = GET_CODE (operands[1]);
cmp_in_mode = GET_MODE (cmp_op0);
/* Zero structure to initialize current_frame_info. */
static struct fr30_frame_info zero_frame_info;
-static void fr30_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static void fr30_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree, int *, int);
-static bool fr30_must_pass_in_stack (enum machine_mode, const_tree);
-static int fr30_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static bool fr30_must_pass_in_stack (machine_mode, const_tree);
+static int fr30_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static rtx fr30_function_arg (cumulative_args_t, enum machine_mode,
+static rtx fr30_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void fr30_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void fr30_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
static bool fr30_frame_pointer_required (void);
static rtx fr30_function_value (const_tree, const_tree, bool);
-static rtx fr30_libcall_value (enum machine_mode, const_rtx);
+static rtx fr30_libcall_value (machine_mode, const_rtx);
static bool fr30_function_value_regno_p (const unsigned int);
static bool fr30_can_eliminate (const int, const int);
static void fr30_asm_trampoline_template (FILE *);
static void fr30_trampoline_init (rtx, tree, rtx);
-static int fr30_num_arg_regs (enum machine_mode, const_tree);
+static int fr30_num_arg_regs (machine_mode, const_tree);
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_POINTER_MASK (1 << (RETURN_POINTER_REGNUM))
which has type TYPE and mode MODE, and we rely on this fact. */
void
fr30_setup_incoming_varargs (cumulative_args_t arg_regs_used_so_far_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type ATTRIBUTE_UNUSED,
int *pretend_size,
int second_time ATTRIBUTE_UNUSED)
/* Implements TARGET_LIBCALL_VALUE. */
static rtx
-fr30_libcall_value (enum machine_mode mode,
+fr30_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
in registers. */
static bool
-fr30_must_pass_in_stack (enum machine_mode mode, const_tree type)
+fr30_must_pass_in_stack (machine_mode mode, const_tree type)
{
if (mode == BLKmode)
return true;
/* Compute the number of word sized registers needed to hold a
function argument of mode INT_MODE and tree type TYPE. */
static int
-fr30_num_arg_regs (enum machine_mode mode, const_tree type)
+fr30_num_arg_regs (machine_mode mode, const_tree type)
{
int size;
parameters to the function. */
static int
-fr30_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+fr30_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static rtx
-fr30_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+fr30_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
the stack. The compiler knows how to track the amount of stack space used
for arguments without any special help. */
static void
-fr30_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
+fr30_function_arg_advance (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
*get_cumulative_args (cum) += named * fr30_num_arg_regs (mode, type);
/*{{{ Operand predicates */
#ifndef Mmode
-#define Mmode enum machine_mode
+#define Mmode machine_mode
#endif
/* Returns true iff all the registers in the operands array
rtx dest = operands[0];
enum rtx_code src_code = GET_CODE (src);
enum rtx_code dest_code = GET_CODE (dest);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
rtx val;
start_sequence ();
extern void frv_ifcvt_machdep_init (void *);
#ifdef RTX_CODE
-extern int frv_legitimate_address_p_1 (enum machine_mode, rtx,
+extern int frv_legitimate_address_p_1 (machine_mode, rtx,
int, int, int);
extern rtx frv_find_base_term (rtx);
extern int frv_expand_block_clear (rtx *);
extern rtx frv_dynamic_chain_address (rtx);
extern rtx frv_return_addr_rtx (int, rtx);
-extern rtx frv_index_memory (rtx, enum machine_mode, int);
+extern rtx frv_index_memory (rtx, machine_mode, int);
extern const char *frv_asm_output_opcode
(FILE *, const char *);
extern void frv_final_prescan_insn (rtx_insn *, rtx *, int);
-extern void frv_emit_move (enum machine_mode, rtx, rtx);
+extern void frv_emit_move (machine_mode, rtx, rtx);
extern int frv_emit_movsi (rtx, rtx);
extern const char *output_move_single (rtx *, rtx);
extern const char *output_move_double (rtx *, rtx);
#endif
extern enum reg_class frv_secondary_reload_class
(enum reg_class,
- enum machine_mode, rtx);
-extern int frv_hard_regno_mode_ok (int, enum machine_mode);
-extern int frv_hard_regno_nregs (int, enum machine_mode);
+ machine_mode, rtx);
+extern int frv_hard_regno_mode_ok (int, machine_mode);
+extern int frv_hard_regno_nregs (int, machine_mode);
extern int frv_class_max_nregs (enum reg_class rclass,
- enum machine_mode mode);
-extern enum machine_mode frv_select_cc_mode (enum rtx_code, rtx, rtx);
+ machine_mode mode);
+extern machine_mode frv_select_cc_mode (enum rtx_code, rtx, rtx);
#endif /* RTX_CODE */
extern int frv_trampoline_size (void);
#endif
#ifdef RTX_CODE
-extern int integer_register_operand (rtx, enum machine_mode);
-extern int frv_load_operand (rtx, enum machine_mode);
-extern int gpr_or_fpr_operand (rtx, enum machine_mode);
-extern int gpr_no_subreg_operand (rtx, enum machine_mode);
-extern int gpr_or_int6_operand (rtx, enum machine_mode);
-extern int fpr_or_int6_operand (rtx, enum machine_mode);
-extern int gpr_or_int_operand (rtx, enum machine_mode);
-extern int gpr_or_int12_operand (rtx, enum machine_mode);
-extern int gpr_fpr_or_int12_operand (rtx, enum machine_mode);
-extern int gpr_or_int10_operand (rtx, enum machine_mode);
-extern int move_source_operand (rtx, enum machine_mode);
-extern int move_destination_operand (rtx, enum machine_mode);
-extern int condexec_source_operand (rtx, enum machine_mode);
-extern int condexec_dest_operand (rtx, enum machine_mode);
-extern int lr_operand (rtx, enum machine_mode);
-extern int gpr_or_memory_operand (rtx, enum machine_mode);
-extern int fpr_or_memory_operand (rtx, enum machine_mode);
-extern int reg_or_0_operand (rtx, enum machine_mode);
-extern int fcc_operand (rtx, enum machine_mode);
-extern int icc_operand (rtx, enum machine_mode);
-extern int cc_operand (rtx, enum machine_mode);
-extern int fcr_operand (rtx, enum machine_mode);
-extern int icr_operand (rtx, enum machine_mode);
-extern int cr_operand (rtx, enum machine_mode);
-extern int call_operand (rtx, enum machine_mode);
-extern int fpr_operand (rtx, enum machine_mode);
-extern int even_reg_operand (rtx, enum machine_mode);
-extern int odd_reg_operand (rtx, enum machine_mode);
-extern int even_gpr_operand (rtx, enum machine_mode);
-extern int odd_gpr_operand (rtx, enum machine_mode);
-extern int quad_fpr_operand (rtx, enum machine_mode);
-extern int even_fpr_operand (rtx, enum machine_mode);
-extern int odd_fpr_operand (rtx, enum machine_mode);
-extern int dbl_memory_one_insn_operand (rtx, enum machine_mode);
-extern int dbl_memory_two_insn_operand (rtx, enum machine_mode);
-extern int int12_operand (rtx, enum machine_mode);
-extern int int6_operand (rtx, enum machine_mode);
-extern int int5_operand (rtx, enum machine_mode);
-extern int uint5_operand (rtx, enum machine_mode);
-extern int uint4_operand (rtx, enum machine_mode);
-extern int uint1_operand (rtx, enum machine_mode);
-extern int int_2word_operand (rtx, enum machine_mode);
-extern int pic_register_operand (rtx, enum machine_mode);
-extern int pic_symbolic_operand (rtx, enum machine_mode);
-extern int small_data_register_operand (rtx, enum machine_mode);
-extern int small_data_symbolic_operand (rtx, enum machine_mode);
-extern int upper_int16_operand (rtx, enum machine_mode);
-extern int uint16_operand (rtx, enum machine_mode);
-extern int symbolic_operand (rtx, enum machine_mode);
-extern int relational_operator (rtx, enum machine_mode);
-extern int signed_relational_operator (rtx, enum machine_mode);
-extern int unsigned_relational_operator (rtx, enum machine_mode);
-extern int float_relational_operator (rtx, enum machine_mode);
-extern int ccr_eqne_operator (rtx, enum machine_mode);
-extern int minmax_operator (rtx, enum machine_mode);
-extern int condexec_si_binary_operator (rtx, enum machine_mode);
-extern int condexec_si_media_operator (rtx, enum machine_mode);
-extern int condexec_si_divide_operator (rtx, enum machine_mode);
-extern int condexec_si_unary_operator (rtx, enum machine_mode);
-extern int condexec_sf_conv_operator (rtx, enum machine_mode);
-extern int condexec_sf_add_operator (rtx, enum machine_mode);
-extern int condexec_memory_operand (rtx, enum machine_mode);
-extern int intop_compare_operator (rtx, enum machine_mode);
-extern int acc_operand (rtx, enum machine_mode);
-extern int even_acc_operand (rtx, enum machine_mode);
-extern int quad_acc_operand (rtx, enum machine_mode);
-extern int accg_operand (rtx, enum machine_mode);
+extern int integer_register_operand (rtx, machine_mode);
+extern int frv_load_operand (rtx, machine_mode);
+extern int gpr_or_fpr_operand (rtx, machine_mode);
+extern int gpr_no_subreg_operand (rtx, machine_mode);
+extern int gpr_or_int6_operand (rtx, machine_mode);
+extern int fpr_or_int6_operand (rtx, machine_mode);
+extern int gpr_or_int_operand (rtx, machine_mode);
+extern int gpr_or_int12_operand (rtx, machine_mode);
+extern int gpr_fpr_or_int12_operand (rtx, machine_mode);
+extern int gpr_or_int10_operand (rtx, machine_mode);
+extern int move_source_operand (rtx, machine_mode);
+extern int move_destination_operand (rtx, machine_mode);
+extern int condexec_source_operand (rtx, machine_mode);
+extern int condexec_dest_operand (rtx, machine_mode);
+extern int lr_operand (rtx, machine_mode);
+extern int gpr_or_memory_operand (rtx, machine_mode);
+extern int fpr_or_memory_operand (rtx, machine_mode);
+extern int reg_or_0_operand (rtx, machine_mode);
+extern int fcc_operand (rtx, machine_mode);
+extern int icc_operand (rtx, machine_mode);
+extern int cc_operand (rtx, machine_mode);
+extern int fcr_operand (rtx, machine_mode);
+extern int icr_operand (rtx, machine_mode);
+extern int cr_operand (rtx, machine_mode);
+extern int call_operand (rtx, machine_mode);
+extern int fpr_operand (rtx, machine_mode);
+extern int even_reg_operand (rtx, machine_mode);
+extern int odd_reg_operand (rtx, machine_mode);
+extern int even_gpr_operand (rtx, machine_mode);
+extern int odd_gpr_operand (rtx, machine_mode);
+extern int quad_fpr_operand (rtx, machine_mode);
+extern int even_fpr_operand (rtx, machine_mode);
+extern int odd_fpr_operand (rtx, machine_mode);
+extern int dbl_memory_one_insn_operand (rtx, machine_mode);
+extern int dbl_memory_two_insn_operand (rtx, machine_mode);
+extern int int12_operand (rtx, machine_mode);
+extern int int6_operand (rtx, machine_mode);
+extern int int5_operand (rtx, machine_mode);
+extern int uint5_operand (rtx, machine_mode);
+extern int uint4_operand (rtx, machine_mode);
+extern int uint1_operand (rtx, machine_mode);
+extern int int_2word_operand (rtx, machine_mode);
+extern int pic_register_operand (rtx, machine_mode);
+extern int pic_symbolic_operand (rtx, machine_mode);
+extern int small_data_register_operand (rtx, machine_mode);
+extern int small_data_symbolic_operand (rtx, machine_mode);
+extern int upper_int16_operand (rtx, machine_mode);
+extern int uint16_operand (rtx, machine_mode);
+extern int symbolic_operand (rtx, machine_mode);
+extern int relational_operator (rtx, machine_mode);
+extern int signed_relational_operator (rtx, machine_mode);
+extern int unsigned_relational_operator (rtx, machine_mode);
+extern int float_relational_operator (rtx, machine_mode);
+extern int ccr_eqne_operator (rtx, machine_mode);
+extern int minmax_operator (rtx, machine_mode);
+extern int condexec_si_binary_operator (rtx, machine_mode);
+extern int condexec_si_media_operator (rtx, machine_mode);
+extern int condexec_si_divide_operator (rtx, machine_mode);
+extern int condexec_si_unary_operator (rtx, machine_mode);
+extern int condexec_sf_conv_operator (rtx, machine_mode);
+extern int condexec_sf_add_operator (rtx, machine_mode);
+extern int condexec_memory_operand (rtx, machine_mode);
+extern int intop_compare_operator (rtx, machine_mode);
+extern int acc_operand (rtx, machine_mode);
+extern int even_acc_operand (rtx, machine_mode);
+extern int quad_acc_operand (rtx, machine_mode);
+extern int accg_operand (rtx, machine_mode);
extern rtx frv_matching_accg_for_acc (rtx);
extern void frv_expand_fdpic_call (rtx *, bool, bool);
extern rtx frv_gen_GPsym2reg (rtx, rtx);
-extern int frv_legitimate_memory_operand (rtx, enum machine_mode, int);
+extern int frv_legitimate_memory_operand (rtx, machine_mode, int);
/* Information about a relocation unspec. SYMBOL is the relocation symbol
(a SYMBOL_REF or LABEL_REF), RELOC is the type of relocation and OFFSET
/* Forward references */
static void frv_option_override (void);
-static bool frv_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool frv_legitimate_address_p (machine_mode, rtx, bool);
static int frv_default_flags_for_cpu (void);
static int frv_string_begins_with (const char *, const char *);
static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
static const char *comparison_string (enum rtx_code, rtx);
static rtx frv_function_value (const_tree, const_tree,
bool);
-static rtx frv_libcall_value (enum machine_mode,
+static rtx frv_libcall_value (machine_mode,
const_rtx);
static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
static rtx single_set_pattern (rtx);
static int frv_function_contains_far_jump (void);
static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
enum reg_class,
- enum machine_mode,
+ machine_mode,
int, int);
static rtx frv_frame_offset_rtx (int);
-static rtx frv_frame_mem (enum machine_mode, rtx, int);
+static rtx frv_frame_mem (machine_mode, rtx, int);
static rtx frv_dwarf_store (rtx, int);
static void frv_frame_insn (rtx, rtx);
static void frv_frame_access (frv_frame_accessor_t*,
frv_stack_t *);
static struct machine_function *frv_init_machine_status (void);
static rtx frv_int_to_acc (enum insn_code, int, rtx);
-static enum machine_mode frv_matching_accg_mode (enum machine_mode);
+static machine_mode frv_matching_accg_mode (machine_mode);
static rtx frv_read_argument (tree, unsigned int);
-static rtx frv_read_iacc_argument (enum machine_mode, tree, unsigned int);
+static rtx frv_read_iacc_argument (machine_mode, tree, unsigned int);
static int frv_check_constant_argument (enum insn_code, int, rtx);
static rtx frv_legitimize_target (enum insn_code, rtx);
static rtx frv_legitimize_argument (enum insn_code, int, rtx);
static rtx frv_legitimize_tls_address (rtx, enum tls_model);
-static rtx frv_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx frv_legitimize_address (rtx, rtx, machine_mode);
static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
static int frv_clear_registers_used (rtx *, void *);
static void frv_ifcvt_add_insn (rtx, rtx, int);
-static rtx frv_ifcvt_rewrite_mem (rtx, enum machine_mode, rtx);
+static rtx frv_ifcvt_rewrite_mem (rtx, machine_mode, rtx);
static rtx frv_ifcvt_load_value (rtx, rtx);
static int frv_acc_group_1 (rtx *, void *);
static unsigned int frv_insn_unit (rtx_insn *);
static void frv_function_epilogue (FILE *, HOST_WIDE_INT);
static bool frv_assemble_integer (rtx, unsigned, int);
static void frv_init_builtins (void);
-static rtx frv_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx frv_expand_builtin (tree, rtx, rtx, machine_mode, int);
static void frv_init_libfuncs (void);
static bool frv_in_small_data_p (const_tree);
static void frv_asm_output_mi_thunk
(FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
static void frv_setup_incoming_varargs (cumulative_args_t,
- enum machine_mode,
+ machine_mode,
tree, int *, int);
static rtx frv_expand_builtin_saveregs (void);
static void frv_expand_builtin_va_start (tree, rtx);
static bool frv_rtx_costs (rtx, int, int, int, int*,
bool);
-static int frv_register_move_cost (enum machine_mode,
+static int frv_register_move_cost (machine_mode,
reg_class_t, reg_class_t);
-static int frv_memory_move_cost (enum machine_mode,
+static int frv_memory_move_cost (machine_mode,
reg_class_t, bool);
static void frv_asm_out_constructor (rtx, int);
static void frv_asm_out_destructor (rtx, int);
static bool frv_function_symbol_referenced_p (rtx);
-static bool frv_legitimate_constant_p (enum machine_mode, rtx);
-static bool frv_cannot_force_const_mem (enum machine_mode, rtx);
+static bool frv_legitimate_constant_p (machine_mode, rtx);
+static bool frv_cannot_force_const_mem (machine_mode, rtx);
static const char *unspec_got_name (int);
static void frv_output_const_unspec (FILE *,
const struct frv_unspec *);
static bool frv_function_ok_for_sibcall (tree, tree);
static rtx frv_struct_value_rtx (tree, int);
-static bool frv_must_pass_in_stack (enum machine_mode mode, const_tree type);
-static int frv_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static bool frv_must_pass_in_stack (machine_mode mode, const_tree type);
+static int frv_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static rtx frv_function_arg (cumulative_args_t, enum machine_mode,
+static rtx frv_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx frv_function_incoming_arg (cumulative_args_t, enum machine_mode,
+static rtx frv_function_incoming_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void frv_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void frv_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static unsigned int frv_function_arg_boundary (enum machine_mode,
+static unsigned int frv_function_arg_boundary (machine_mode,
const_tree);
static void frv_output_dwarf_dtprel (FILE *, int, rtx)
ATTRIBUTE_UNUSED;
static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
secondary_reload_info *);
static bool frv_frame_pointer_required (void);
static bool frv_can_eliminate (const int, const int);
4. In many cases, it's more efficient to calculate the constant in-line. */
static bool
-frv_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
+frv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
return TARGET_FDPIC;
frv_alloc_temp_reg (
frv_tmp_reg_t *info, /* which registers are available */
enum reg_class rclass, /* register class desired */
- enum machine_mode mode, /* mode to allocate register with */
+ machine_mode mode, /* mode to allocate register with */
int mark_as_used, /* register not available after allocation */
int no_abort) /* return NULL instead of aborting */
{
/* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET)))). The
prologue and epilogue uses such expressions to access the stack. */
static rtx
-frv_frame_mem (enum machine_mode mode, rtx base, int offset)
+frv_frame_mem (machine_mode mode, rtx base, int offset)
{
return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
base,
static void
frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
rtx mem = frv_frame_mem (mode,
accessor->base,
stack_offset - accessor->base_offset);
static void
frv_setup_incoming_varargs (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type ATTRIBUTE_UNUSED,
int *pretend_size,
int second_time)
rtx tmp_reg;
rtx stores[MAX_MOVE_REG];
int move_bytes;
- enum machine_mode mode;
+ machine_mode mode;
/* If this is not a fixed size move, just call memcpy. */
if (! constp)
rtx dest_addr;
rtx dest_mem;
int clear_bytes;
- enum machine_mode mode;
+ machine_mode mode;
/* If this is not a fixed size move, just call memcpy. */
if (! constp)
frv_legitimate_address_p forbids register+register addresses, which
this function cannot handle. */
rtx
-frv_index_memory (rtx memref, enum machine_mode mode, int index)
+frv_index_memory (rtx memref, machine_mode mode, int index)
{
rtx base = XEXP (memref, 0);
if (GET_CODE (base) == PRE_MODIFY)
in registers. */
static bool
-frv_must_pass_in_stack (enum machine_mode mode, const_tree type)
+frv_must_pass_in_stack (machine_mode mode, const_tree type)
{
if (mode == BLKmode)
return true;
`PARM_BOUNDARY' is used for all arguments. */
static unsigned int
-frv_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+frv_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
return BITS_PER_WORD;
}
static rtx
-frv_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
+frv_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED, bool named,
bool incoming ATTRIBUTE_UNUSED)
{
const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
- enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
+ machine_mode xmode = (mode == BLKmode) ? SImode : mode;
int arg_num = *cum;
rtx ret;
const char *debstr;
}
static rtx
-frv_function_arg (cumulative_args_t cum, enum machine_mode mode,
+frv_function_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
return frv_function_arg_1 (cum, mode, type, named, false);
}
static rtx
-frv_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
+frv_function_incoming_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
return frv_function_arg_1 (cum, mode, type, named, true);
static void
frv_function_arg_advance (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
- enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
+ machine_mode xmode = (mode == BLKmode) ? SImode : mode;
int bytes = GET_MODE_SIZE (xmode);
int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
int arg_num = *cum;
the called function. */
static int
-frv_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
+frv_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
- enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
+ machine_mode xmode = (mode == BLKmode) ? SImode : mode;
int bytes = GET_MODE_SIZE (xmode);
int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
int arg_num = *get_cumulative_args (cum);
/* Implements TARGET_LIBCALL_VALUE. */
static rtx
-frv_libcall_value (enum machine_mode mode,
+frv_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */
int
-frv_legitimate_address_p_1 (enum machine_mode mode,
+frv_legitimate_address_p_1 (machine_mode mode,
rtx x,
int strict_p,
int condexec_p,
}
bool
-frv_legitimate_address_p (enum machine_mode mode, rtx x, bool strict_p)
+frv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
{
return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE);
}
rtx
frv_legitimize_address (rtx x,
rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (GET_CODE (x) == SYMBOL_REF)
{
the operand is used by a predicated instruction. */
int
-frv_legitimate_memory_operand (rtx op, enum machine_mode mode, int condexec_p)
+frv_legitimate_memory_operand (rtx op, machine_mode mode, int condexec_p)
{
return ((GET_MODE (op) == mode || mode == VOIDmode)
&& GET_CODE (op) == MEM
executed. */
int
-condexec_memory_operand (rtx op, enum machine_mode mode)
+condexec_memory_operand (rtx op, machine_mode mode)
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
rtx addr;
if (mode != VOIDmode && op_mode != mode)
\f
void
-frv_emit_move (enum machine_mode mode, rtx dest, rtx src)
+frv_emit_move (machine_mode mode, rtx dest, rtx src)
{
if (GET_CODE (src) == SYMBOL_REF)
{
if (GET_CODE (dest) == REG)
{
int dest_regno = REGNO (dest);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (GPR_P (dest_regno))
{
if (GET_CODE (src) == REG)
{
int src_regno = REGNO (src);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (GPR_P (src_regno))
{
{
rtx dest = operands[0];
rtx src = operands[1];
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (GET_CODE (dest) == REG)
{
if (GET_CODE (dest) == REG)
{
int dest_regno = REGNO (dest);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (GPR_P (dest_regno))
{
if (GET_CODE (src) == REG)
{
int src_regno = REGNO (src);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (GPR_P (src_regno))
{
else if (ZERO_P (src))
{
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
switch (mode)
{
default:
static rtx
frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
{
- enum machine_mode cc_mode;
+ machine_mode cc_mode;
rtx cc_reg;
/* Floating point doesn't have comparison against a constant. */
rtx if_else;
enum rtx_code test = GET_CODE (operands[0]);
rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]);
- enum machine_mode cc_mode = GET_MODE (cc_reg);
+ machine_mode cc_mode = GET_MODE (cc_reg);
/* Branches generate:
(set (pc)
enum rtx_code test = GET_CODE (test_rtx);
rtx cc_reg = frv_emit_comparison (test,
XEXP (test_rtx, 0), XEXP (test_rtx, 1));
- enum machine_mode cc_mode = GET_MODE (cc_reg);
+ machine_mode cc_mode = GET_MODE (cc_reg);
/* Conditional move instructions generate:
(parallel [(set <target>
rtx src2 = operands[4];
rtx cr_reg = operands[5];
rtx ret;
- enum machine_mode cr_mode = GET_MODE (cr_reg);
+ machine_mode cr_mode = GET_MODE (cr_reg);
start_sequence ();
rtx cr_reg = operands[5];
rtx ret;
enum rtx_code test_code;
- enum machine_mode cr_mode = GET_MODE (cr_reg);
+ machine_mode cr_mode = GET_MODE (cr_reg);
start_sequence ();
rtx cr;
rtx cc;
rtx nested_cc;
- enum machine_mode mode = GET_MODE (true_expr);
+ machine_mode mode = GET_MODE (true_expr);
int j;
basic_block *bb;
int num_bb;
rtx compare;
rtx cc;
enum reg_class cr_class;
- enum machine_mode mode = GET_MODE (true_expr);
+ machine_mode mode = GET_MODE (true_expr);
rtx (*logical_func)(rtx, rtx, rtx);
if (TARGET_DEBUG_COND_EXEC)
into a temporary register, or the new MEM if we were successful. */
static rtx
-frv_ifcvt_rewrite_mem (rtx mem, enum machine_mode mode, rtx insn)
+frv_ifcvt_rewrite_mem (rtx mem, machine_mode mode, rtx insn)
{
rtx addr = XEXP (mem, 0);
{
rtx dest = SET_DEST (set);
rtx src = SET_SRC (set);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
/* Check for normal binary operators. */
if (mode == SImode && ARITHMETIC_P (src))
enum reg_class
frv_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx x)
{
enum reg_class ret;
static reg_class_t
frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i,
- enum machine_mode reload_mode,
+ machine_mode reload_mode,
secondary_reload_info * sri)
{
enum reg_class rclass = NO_REGS;
pattern's constraint asks for one. */
int
-frv_hard_regno_mode_ok (int regno, enum machine_mode mode)
+frv_hard_regno_mode_ok (int regno, machine_mode mode)
{
int base;
int mask;
for each byte. */
int
-frv_hard_regno_nregs (int regno, enum machine_mode mode)
+frv_hard_regno_nregs (int regno, machine_mode mode)
{
if (ACCG_P (regno))
return GET_MODE_SIZE (mode);
This declaration is required. */
int
-frv_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
+frv_class_max_nregs (enum reg_class rclass, machine_mode mode)
{
if (rclass == ACCG_REGS)
/* An N-byte value requires N accumulator guards. */
definition for this macro on machines where anything `CONSTANT_P' is valid. */
static bool
-frv_legitimate_constant_p (enum machine_mode mode, rtx x)
+frv_legitimate_constant_p (machine_mode mode, rtx x)
{
/* frv_cannot_force_const_mem always returns true for FDPIC. This
means that the move expanders will be expected to deal with most
is enough, CC_UNS for other unsigned comparisons, and CC for other
signed comparisons. */
-enum machine_mode
+machine_mode
frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
{
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
#define LOW_COST 1
static int
-frv_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+frv_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
switch (from)
/* Worker function for TARGET_MEMORY_MOVE_COST. */
static int
-frv_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+frv_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
/* If an ACC rtx has mode MODE, return the mode that the matching ACCG
should have. */
-static enum machine_mode
-frv_matching_accg_mode (enum machine_mode mode)
+static machine_mode
+frv_matching_accg_mode (machine_mode mode)
{
switch (mode)
{
of an IACC register and return a (reg:MODE ...) rtx for it. */
static rtx
-frv_read_iacc_argument (enum machine_mode mode, tree call,
+frv_read_iacc_argument (machine_mode mode, tree call,
unsigned int index)
{
int i, regno;
static rtx
frv_legitimize_target (enum insn_code icode, rtx target)
{
- enum machine_mode mode = insn_data[icode].operand[0].mode;
+ machine_mode mode = insn_data[icode].operand[0].mode;
if (! target
|| GET_MODE (target) != mode
static rtx
frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
{
- enum machine_mode mode = insn_data[icode].operand[opnum].mode;
+ machine_mode mode = insn_data[icode].operand[opnum].mode;
if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
return arg;
/* Return a volatile memory reference of mode MODE whose address is ARG. */
static rtx
-frv_volatile_memref (enum machine_mode mode, rtx arg)
+frv_volatile_memref (machine_mode mode, rtx arg)
{
rtx mem;
rtx pat;
rtx op0 = frv_read_argument (call, 0);
rtx op1 = frv_read_argument (call, 1);
- enum machine_mode mode0 = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[0].mode;
rtx addr;
if (GET_CODE (op0) != MEM)
membar and TARGET_MODE is the mode that the loaded value should have. */
static rtx
-frv_expand_load_builtin (enum insn_code icode, enum machine_mode target_mode,
+frv_expand_load_builtin (enum insn_code icode, machine_mode target_mode,
tree call, rtx target)
{
rtx op0 = frv_read_argument (call, 0);
static void
frv_split_iacc_move (rtx dest, rtx src)
{
- enum machine_mode inner;
+ machine_mode inner;
int i;
inner = GET_MODE (dest);
frv_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
(define_predicate "ccr_eqne_operator"
(match_code "eq,ne")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
rtx op0;
rtx op1;
int regno;
(define_predicate "condexec_si_binary_operator"
(match_code "plus,minus,and,ior,xor,ashift,ashiftrt,lshiftrt")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (mode != VOIDmode && op_mode != mode)
return FALSE;
(define_predicate "condexec_si_media_operator"
(match_code "and,ior,xor")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (mode != VOIDmode && op_mode != mode)
return FALSE;
(define_predicate "condexec_si_divide_operator"
(match_code "div,udiv")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (mode != VOIDmode && op_mode != mode)
return FALSE;
(define_predicate "condexec_si_unary_operator"
(match_code "not,neg")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (mode != VOIDmode && op_mode != mode)
return FALSE;
(define_predicate "condexec_sf_add_operator"
(match_code "plus,minus")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (mode != VOIDmode && op_mode != mode)
return FALSE;
(define_predicate "condexec_sf_conv_operator"
(match_code "abs,neg")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (mode != VOIDmode && op_mode != mode)
return FALSE;
extern void final_prescan_insn (rtx_insn *, rtx *, int);
extern int h8300_expand_movsi (rtx[]);
extern void notice_update_cc (rtx, rtx_insn *);
-extern const char *output_logical_op (enum machine_mode, rtx *);
-extern unsigned int compute_logical_op_length (enum machine_mode,
+extern const char *output_logical_op (machine_mode, rtx *);
+extern unsigned int compute_logical_op_length (machine_mode,
rtx *);
#ifdef HAVE_ATTR_cc
extern enum attr_cc compute_plussi_cc (rtx *);
extern enum attr_cc compute_a_shift_cc (rtx, rtx *);
-extern enum attr_cc compute_logical_op_cc (enum machine_mode, rtx *);
+extern enum attr_cc compute_logical_op_cc (machine_mode, rtx *);
#endif
extern void h8300_expand_branch (rtx[]);
extern void h8300_expand_store (rtx[]);
-extern bool expand_a_shift (enum machine_mode, enum rtx_code, rtx[]);
-extern int h8300_shift_needs_scratch_p (int, enum machine_mode);
+extern bool expand_a_shift (machine_mode, enum rtx_code, rtx[]);
+extern int h8300_shift_needs_scratch_p (int, machine_mode);
extern int expand_a_rotate (rtx[]);
extern int fix_bit_operand (rtx *, enum rtx_code);
extern int h8300_adjust_insn_length (rtx, int);
-extern void split_adds_subs (enum machine_mode, rtx[]);
+extern void split_adds_subs (machine_mode, rtx[]);
extern int h8300_eightbit_constant_address_p (rtx);
extern int h8300_tiny_constant_address_p (rtx);
H8SX_SHIFT_BINARY
};
-extern enum h8sx_shift_type h8sx_classify_shift (enum machine_mode, enum rtx_code, rtx);
+extern enum h8sx_shift_type h8sx_classify_shift (machine_mode, enum rtx_code, rtx);
extern int h8300_ldm_stm_parallel (rtvec, int, int);
#endif /* RTX_CODE */
extern int h8300_initial_elimination_offset (int, int);
extern int h8300_regs_ok_for_stm (int, rtx[]);
extern int h8300_hard_regno_rename_ok (unsigned int, unsigned int);
-extern int h8300_hard_regno_nregs (int, enum machine_mode);
-extern int h8300_hard_regno_mode_ok (int, enum machine_mode);
+extern int h8300_hard_regno_nregs (int, machine_mode);
+extern int h8300_hard_regno_mode_ok (int, machine_mode);
extern bool h8300_move_ok (rtx, rtx);
struct cpp_reader;
#ifndef OBJECT_FORMAT_ELF
static void h8300_asm_named_section (const char *, unsigned int, tree);
#endif
-static int h8300_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
+static int h8300_register_move_cost (machine_mode, reg_class_t, reg_class_t);
static int h8300_and_costs (rtx);
static int h8300_shift_costs (rtx);
static void h8300_push_pop (int, int, bool, bool);
static bool h8300_short_move_mem_p (rtx, enum rtx_code);
static unsigned int h8300_move_length (rtx *, const h8300_length_table *);
static bool h8300_hard_regno_scratch_ok (unsigned int);
-static rtx h8300_get_index (rtx, enum machine_mode mode, int *);
+static rtx h8300_get_index (rtx, machine_mode mode, int *);
/* CPU_TYPE, says what cpu we're compiling for. */
int cpu_type;
instead of adds/subs. */
void
-split_adds_subs (enum machine_mode mode, rtx *operands)
+split_adds_subs (machine_mode mode, rtx *operands)
{
HOST_WIDE_INT val = INTVAL (operands[1]);
rtx reg = operands[0];
case the first 3 arguments are passed in registers. */
static rtx
-h8300_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+h8300_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
(TYPE is null for libcalls where that information may not be available.) */
static void
-h8300_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+h8300_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
shortcuts. */
static int
-h8300_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+h8300_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
if (from == MAC_REGS || to == MAC_REG)
if the address is known to be valid, but its mode is unknown. */
static rtx
-h8300_get_index (rtx x, enum machine_mode mode, int *size)
+h8300_get_index (rtx x, machine_mode mode, int *size)
{
int dummy, factor;
length, assuming the largest addressing mode is used, and then
adjust later in the function. Otherwise, we compute and return
the exact length in one step. */
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtx dest = operands[0];
rtx src = operands[1];
rtx addr;
const char *
output_plussi (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
gcc_assert (mode == SImode);
unsigned int
compute_plussi_length (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
gcc_assert (mode == SImode);
enum attr_cc
compute_plussi_cc (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
gcc_assert (mode == SImode);
/* Output a logical insn. */
const char *
-output_logical_op (enum machine_mode mode, rtx *operands)
+output_logical_op (machine_mode mode, rtx *operands)
{
/* Figure out the logical op that we need to perform. */
enum rtx_code code = GET_CODE (operands[3]);
/* Compute the length of a logical insn. */
unsigned int
-compute_logical_op_length (enum machine_mode mode, rtx *operands)
+compute_logical_op_length (machine_mode mode, rtx *operands)
{
/* Figure out the logical op that we need to perform. */
enum rtx_code code = GET_CODE (operands[3]);
/* Compute which flag bits are valid after a logical insn. */
enum attr_cc
-compute_logical_op_cc (enum machine_mode mode, rtx *operands)
+compute_logical_op_cc (machine_mode mode, rtx *operands)
{
/* Figure out the logical op that we need to perform. */
enum rtx_code code = GET_CODE (operands[3]);
/* Classify a shift with the given mode and code. OP is the shift amount. */
enum h8sx_shift_type
-h8sx_classify_shift (enum machine_mode mode, enum rtx_code code, rtx op)
+h8sx_classify_shift (machine_mode mode, enum rtx_code code, rtx op)
{
if (!TARGET_H8300SX)
return H8SX_SHIFT_NONE;
/* Emit code to do shifts. */
bool
-expand_a_shift (enum machine_mode mode, enum rtx_code code, rtx operands[])
+expand_a_shift (machine_mode mode, enum rtx_code code, rtx operands[])
{
switch (h8sx_classify_shift (mode, code, operands[2]))
{
needed for some shift with COUNT and MODE. Return 0 otherwise. */
int
-h8300_shift_needs_scratch_p (int count, enum machine_mode mode)
+h8300_shift_needs_scratch_p (int count, machine_mode mode)
{
enum h8_cpu cpu;
int a, lr, ar;
{
static int loopend_lab;
rtx shift = operands[3];
- enum machine_mode mode = GET_MODE (shift);
+ machine_mode mode = GET_MODE (shift);
enum rtx_code code = GET_CODE (shift);
enum shift_type shift_type;
enum shift_mode shift_mode;
compute_a_shift_length (rtx insn ATTRIBUTE_UNUSED, rtx *operands)
{
rtx shift = operands[3];
- enum machine_mode mode = GET_MODE (shift);
+ machine_mode mode = GET_MODE (shift);
enum rtx_code code = GET_CODE (shift);
enum shift_type shift_type;
enum shift_mode shift_mode;
compute_a_shift_cc (rtx insn ATTRIBUTE_UNUSED, rtx *operands)
{
rtx shift = operands[3];
- enum machine_mode mode = GET_MODE (shift);
+ machine_mode mode = GET_MODE (shift);
enum rtx_code code = GET_CODE (shift);
enum shift_type shift_type;
enum shift_mode shift_mode;
rtx dst = operands[0];
rtx src = operands[1];
rtx rotate_amount = operands[2];
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
if (h8sx_classify_shift (mode, ROTATE, rotate_amount) == H8SX_SHIFT_UNARY)
return false;
const char *insn_buf;
int bits;
int amount;
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
gcc_assert (GET_CODE (rotate_amount) == CONST_INT);
{
rtx src = operands[1];
rtx amount_rtx = operands[2];
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
int amount;
unsigned int length = 0;
CONSTANT_ADDRESS. */
static bool
-h8300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+h8300_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
/* The register indirect addresses like @er0 is always valid. */
if (h8300_rtx_ok_for_base_p (x, strict))
types on the H8 series to handle more than 32bits. */
int
-h8300_hard_regno_nregs (int regno ATTRIBUTE_UNUSED, enum machine_mode mode)
+h8300_hard_regno_nregs (int regno ATTRIBUTE_UNUSED, machine_mode mode)
{
return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
/* Worker function for HARD_REGNO_MODE_OK. */
int
-h8300_hard_regno_mode_ok (int regno, enum machine_mode mode)
+h8300_hard_regno_mode_ok (int regno, machine_mode mode)
{
if (TARGET_H8300)
/* If an even reg, then anything goes. Otherwise the mode must be
On the H8 the return value is in R0/R1. */
static rtx
-h8300_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+h8300_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, R0_REG);
}
# The vector types are defined via two tables defining the real
# machine mode and the builtin primitive type. We use two tables
# rather than a structure to avoid structure padding and save space.
- print "static const enum machine_mode ix86_builtin_type_vect_mode[] = {"
+ print "static const machine_mode ix86_builtin_type_vect_mode[] = {"
for (i = 0; i < vect_defs; ++i) {
if (i == 0)
printf " "
extern bool extended_reg_mentioned_p (rtx);
extern bool x86_extended_QIreg_mentioned_p (rtx_insn *);
extern bool x86_extended_reg_mentioned_p (rtx);
-extern bool x86_maybe_negate_const_int (rtx *, enum machine_mode);
-extern enum machine_mode ix86_cc_mode (enum rtx_code, rtx, rtx);
+extern bool x86_maybe_negate_const_int (rtx *, machine_mode);
+extern machine_mode ix86_cc_mode (enum rtx_code, rtx, rtx);
-extern int avx_vpermilp_parallel (rtx par, enum machine_mode mode);
-extern int avx_vperm2f128_parallel (rtx par, enum machine_mode mode);
+extern int avx_vpermilp_parallel (rtx par, machine_mode mode);
+extern int avx_vperm2f128_parallel (rtx par, machine_mode mode);
extern bool ix86_expand_strlen (rtx, rtx, rtx, rtx);
extern bool ix86_expand_set_or_movmem (rtx, rtx, rtx, rtx, rtx, rtx,
extern bool constant_address_p (rtx);
extern bool legitimate_pic_operand_p (rtx);
extern bool legitimate_pic_address_disp_p (rtx);
-extern bool ix86_legitimize_reload_address (rtx, enum machine_mode,
+extern bool ix86_legitimize_reload_address (rtx, machine_mode,
int, int, int);
extern void print_reg (rtx, int, FILE*);
extern void ix86_print_operand (FILE *, rtx, int);
-extern void split_double_mode (enum machine_mode, rtx[], int, rtx[], rtx[]);
+extern void split_double_mode (machine_mode, rtx[], int, rtx[], rtx[]);
extern const char *output_set_got (rtx, rtx);
extern const char *output_387_binary_op (rtx, rtx*);
extern const char *output_probe_stack_range (rtx, rtx);
extern void ix86_expand_clear (rtx);
-extern void ix86_expand_move (enum machine_mode, rtx[]);
-extern void ix86_expand_vector_move (enum machine_mode, rtx[]);
-extern void ix86_expand_vector_move_misalign (enum machine_mode, rtx[]);
+extern void ix86_expand_move (machine_mode, rtx[]);
+extern void ix86_expand_vector_move (machine_mode, rtx[]);
+extern void ix86_expand_vector_move_misalign (machine_mode, rtx[]);
extern rtx ix86_fixup_binary_operands (enum rtx_code,
- enum machine_mode, rtx[]);
+ machine_mode, rtx[]);
extern void ix86_fixup_binary_operands_no_copy (enum rtx_code,
- enum machine_mode, rtx[]);
+ machine_mode, rtx[]);
extern void ix86_expand_binary_operator (enum rtx_code,
- enum machine_mode, rtx[]);
+ machine_mode, rtx[]);
extern void ix86_expand_vector_logical_operator (enum rtx_code,
- enum machine_mode, rtx[]);
-extern bool ix86_binary_operator_ok (enum rtx_code, enum machine_mode, rtx[]);
+ machine_mode, rtx[]);
+extern bool ix86_binary_operator_ok (enum rtx_code, machine_mode, rtx[]);
extern bool ix86_avoid_lea_for_add (rtx_insn *, rtx[]);
extern bool ix86_use_lea_for_mov (rtx_insn *, rtx[]);
extern bool ix86_avoid_lea_for_addr (rtx_insn *, rtx[]);
-extern void ix86_split_lea_for_addr (rtx_insn *, rtx[], enum machine_mode);
+extern void ix86_split_lea_for_addr (rtx_insn *, rtx[], machine_mode);
extern bool ix86_lea_for_add_ok (rtx_insn *, rtx[]);
extern bool ix86_vec_interleave_v2df_operator_ok (rtx operands[3], bool high);
extern bool ix86_dep_by_shift_count (const_rtx set_insn, const_rtx use_insn);
extern bool ix86_agi_dependent (rtx_insn *set_insn, rtx_insn *use_insn);
-extern void ix86_expand_unary_operator (enum rtx_code, enum machine_mode,
+extern void ix86_expand_unary_operator (enum rtx_code, machine_mode,
rtx[]);
-extern rtx ix86_build_const_vector (enum machine_mode, bool, rtx);
-extern rtx ix86_build_signbit_mask (enum machine_mode, bool, bool);
+extern rtx ix86_build_const_vector (machine_mode, bool, rtx);
+extern rtx ix86_build_signbit_mask (machine_mode, bool, bool);
extern void ix86_split_convert_uns_si_sse (rtx[]);
extern void ix86_expand_convert_uns_didf_sse (rtx, rtx);
extern void ix86_expand_convert_uns_sixf_sse (rtx, rtx);
extern void ix86_expand_vector_convert_uns_vsivsf (rtx, rtx);
extern rtx ix86_expand_adjust_ufix_to_sfix_si (rtx, rtx *);
extern enum ix86_fpcmp_strategy ix86_fp_comparison_strategy (enum rtx_code);
-extern void ix86_expand_fp_absneg_operator (enum rtx_code, enum machine_mode,
+extern void ix86_expand_fp_absneg_operator (enum rtx_code, machine_mode,
rtx[]);
extern void ix86_expand_copysign (rtx []);
extern void ix86_split_copysign_const (rtx []);
extern void ix86_split_copysign_var (rtx []);
-extern bool ix86_unary_operator_ok (enum rtx_code, enum machine_mode, rtx[]);
-extern bool ix86_match_ccmode (rtx, enum machine_mode);
+extern bool ix86_unary_operator_ok (enum rtx_code, machine_mode, rtx[]);
+extern bool ix86_match_ccmode (rtx, machine_mode);
extern void ix86_expand_branch (enum rtx_code, rtx, rtx, rtx);
extern void ix86_expand_setcc (rtx, enum rtx_code, rtx, rtx);
extern bool ix86_expand_int_movcc (rtx[]);
extern void x86_initialize_trampoline (rtx, rtx, rtx);
extern rtx ix86_zero_extend_to_Pmode (rtx);
extern void ix86_split_long_move (rtx[]);
-extern void ix86_split_ashl (rtx *, rtx, enum machine_mode);
-extern void ix86_split_ashr (rtx *, rtx, enum machine_mode);
-extern void ix86_split_lshr (rtx *, rtx, enum machine_mode);
+extern void ix86_split_ashl (rtx *, rtx, machine_mode);
+extern void ix86_split_ashr (rtx *, rtx, machine_mode);
+extern void ix86_split_lshr (rtx *, rtx, machine_mode);
extern rtx ix86_find_base_term (rtx);
extern bool ix86_check_movabs (rtx, int);
-extern void ix86_split_idivmod (enum machine_mode, rtx[], bool);
+extern void ix86_split_idivmod (machine_mode, rtx[], bool);
-extern rtx assign_386_stack_local (enum machine_mode, enum ix86_stack_slot);
+extern rtx assign_386_stack_local (machine_mode, enum ix86_stack_slot);
extern int ix86_attr_length_immediate_default (rtx_insn *, bool);
extern int ix86_attr_length_address_default (rtx_insn *);
extern int ix86_attr_length_vex_default (rtx_insn *, bool, bool);
-extern enum machine_mode ix86_fp_compare_mode (enum rtx_code);
+extern machine_mode ix86_fp_compare_mode (enum rtx_code);
-extern rtx ix86_libcall_value (enum machine_mode);
+extern rtx ix86_libcall_value (machine_mode);
extern bool ix86_function_arg_regno_p (int);
extern void ix86_asm_output_function_label (FILE *, const char *, tree);
extern void ix86_call_abi_override (const_tree);
extern void ix86_split_fp_branch (enum rtx_code code, rtx, rtx,
rtx, rtx, rtx);
-extern bool ix86_hard_regno_mode_ok (int, enum machine_mode);
-extern bool ix86_modes_tieable_p (enum machine_mode, enum machine_mode);
+extern bool ix86_hard_regno_mode_ok (int, machine_mode);
+extern bool ix86_modes_tieable_p (machine_mode, machine_mode);
extern bool ix86_secondary_memory_needed (enum reg_class, enum reg_class,
- enum machine_mode, int);
-extern bool ix86_cannot_change_mode_class (enum machine_mode,
- enum machine_mode, enum reg_class);
+ machine_mode, int);
+extern bool ix86_cannot_change_mode_class (machine_mode,
+ machine_mode, enum reg_class);
extern bool ix86_libc_has_function (enum function_class fn_class);
extern void ix86_emit_i387_log1p (rtx, rtx);
extern void ix86_emit_i387_round (rtx, rtx);
-extern void ix86_emit_swdivsf (rtx, rtx, rtx, enum machine_mode);
-extern void ix86_emit_swsqrtsf (rtx, rtx, enum machine_mode, bool);
+extern void ix86_emit_swdivsf (rtx, rtx, rtx, machine_mode);
+extern void ix86_emit_swsqrtsf (rtx, rtx, machine_mode, bool);
-extern enum rtx_code ix86_reverse_condition (enum rtx_code, enum machine_mode);
+extern enum rtx_code ix86_reverse_condition (enum rtx_code, machine_mode);
extern void ix86_expand_lround (rtx, rtx);
extern void ix86_expand_lfloorceil (rtx, rtx, bool);
#ifdef TREE_CODE
extern int ix86_data_alignment (tree, int, bool);
-extern unsigned int ix86_local_alignment (tree, enum machine_mode,
+extern unsigned int ix86_local_alignment (tree, machine_mode,
unsigned int);
-extern unsigned int ix86_minimum_alignment (tree, enum machine_mode,
+extern unsigned int ix86_minimum_alignment (tree, machine_mode,
unsigned int);
extern int ix86_constant_alignment (tree, int);
extern tree ix86_handle_shared_attribute (tree *, tree, tree, int, bool *);
static struct machine_function * ix86_init_machine_status (void);
static rtx ix86_function_value (const_tree, const_tree, bool);
static bool ix86_function_value_regno_p (const unsigned int);
-static unsigned int ix86_function_arg_boundary (enum machine_mode,
+static unsigned int ix86_function_arg_boundary (machine_mode,
const_tree);
static rtx ix86_static_chain (const_tree, bool);
static int ix86_function_regparm (const_tree, const_tree);
static void ix86_compute_frame_layout (struct ix86_frame *);
-static bool ix86_expand_vector_init_one_nonzero (bool, enum machine_mode,
+static bool ix86_expand_vector_init_one_nonzero (bool, machine_mode,
rtx, rtx, int);
static void ix86_add_new_builtins (HOST_WIDE_INT);
static tree ix86_canonical_va_list_type (tree);
for (i = 0; i < n_operands; i++)
{
rtx op = recog_data.operand[i];
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
const operand_alternative *op_alt;
int offset = 0;
bool win;
/* Return if we do not know how to pass TYPE solely in registers. */
static bool
-ix86_must_pass_in_stack (enum machine_mode mode, const_tree type)
+ix86_must_pass_in_stack (machine_mode mode, const_tree type)
{
if (must_pass_in_stack_var_size_or_pad (mode, type))
return true;
If INT_RETURN is true, warn ABI change if the vector mode isn't
available for function return value. */
-static enum machine_mode
+static machine_mode
type_natural_mode (const_tree type, const CUMULATIVE_ARGS *cum,
bool in_return)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (TREE_CODE (type) == VECTOR_TYPE && !VECTOR_MODE_P (mode))
{
/* ??? Generic code allows us to create width 1 vectors. Ignore. */
&& TYPE_VECTOR_SUBPARTS (type) > 1)
{
- enum machine_mode innermode = TYPE_MODE (TREE_TYPE (type));
+ machine_mode innermode = TYPE_MODE (TREE_TYPE (type));
if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
mode = MIN_MODE_VECTOR_FLOAT;
go ahead and use it. Otherwise we have to build a PARALLEL instead. */
static rtx
-gen_reg_or_parallel (enum machine_mode mode, enum machine_mode orig_mode,
+gen_reg_or_parallel (machine_mode mode, machine_mode orig_mode,
unsigned int regno)
{
rtx tmp;
*/
static int
-classify_argument (enum machine_mode mode, const_tree type,
+classify_argument (machine_mode mode, const_tree type,
enum x86_64_reg_class classes[MAX_CLASSES], int bit_offset)
{
HOST_WIDE_INT bytes =
class. Return true iff parameter should be passed in memory. */
static bool
-examine_argument (enum machine_mode mode, const_tree type, int in_return,
+examine_argument (machine_mode mode, const_tree type, int in_return,
int *int_nregs, int *sse_nregs)
{
enum x86_64_reg_class regclass[MAX_CLASSES];
FUNCTION_ARG for the detailed description. */
static rtx
-construct_container (enum machine_mode mode, enum machine_mode orig_mode,
+construct_container (machine_mode mode, machine_mode orig_mode,
const_tree type, int in_return, int nintregs, int nsseregs,
const int *intreg, int sse_regno)
{
static bool issued_sse_ret_error;
static bool issued_x87_ret_error;
- enum machine_mode tmpmode;
+ machine_mode tmpmode;
int bytes =
(mode == BLKmode) ? int_size_in_bytes (type) : (int) GET_MODE_SIZE (mode);
enum x86_64_reg_class regclass[MAX_CLASSES];
may not be available.) */
static void
-function_arg_advance_32 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+function_arg_advance_32 (CUMULATIVE_ARGS *cum, machine_mode mode,
const_tree type, HOST_WIDE_INT bytes,
HOST_WIDE_INT words)
{
}
static void
-function_arg_advance_64 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+function_arg_advance_64 (CUMULATIVE_ARGS *cum, machine_mode mode,
const_tree type, HOST_WIDE_INT words, bool named)
{
int int_nregs, sse_nregs;
may not be available.) */
static void
-ix86_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+ix86_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-function_arg_32 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
- enum machine_mode orig_mode, const_tree type,
+function_arg_32 (const CUMULATIVE_ARGS *cum, machine_mode mode,
+ machine_mode orig_mode, const_tree type,
HOST_WIDE_INT bytes, HOST_WIDE_INT words)
{
/* Avoid the AL settings for the Unix64 ABI. */
}
static rtx
-function_arg_64 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
- enum machine_mode orig_mode, const_tree type, bool named)
+function_arg_64 (const CUMULATIVE_ARGS *cum, machine_mode mode,
+ machine_mode orig_mode, const_tree type, bool named)
{
/* Handle a hidden AL argument containing number of registers
for varargs x86-64 functions. */
}
static rtx
-function_arg_ms_64 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
- enum machine_mode orig_mode, bool named,
+function_arg_ms_64 (const CUMULATIVE_ARGS *cum, machine_mode mode,
+ machine_mode orig_mode, bool named,
HOST_WIDE_INT bytes)
{
unsigned int regno;
ellipsis). */
static rtx
-ix86_function_arg (cumulative_args_t cum_v, enum machine_mode omode,
+ix86_function_arg (cumulative_args_t cum_v, machine_mode omode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
- enum machine_mode mode = omode;
+ machine_mode mode = omode;
HOST_WIDE_INT bytes, words;
rtx arg;
appropriate for passing a pointer to that type. */
static bool
-ix86_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
+ix86_pass_by_reference (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static bool
ix86_compat_aligned_value_p (const_tree type)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (((TARGET_SSE && SSE_REG_MODE_P (mode))
|| mode == TDmode
|| mode == TFmode
compatibility with previous versions of GCC. */
static unsigned int
-ix86_compat_function_arg_boundary (enum machine_mode mode,
+ix86_compat_function_arg_boundary (machine_mode mode,
const_tree type, unsigned int align)
{
/* In 32bit, only _Decimal128 and __float128 are aligned to their
static bool
ix86_contains_aligned_value_p (const_tree type)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (mode == XFmode || mode == XCmode)
return false;
specified mode and type. */
static unsigned int
-ix86_function_arg_boundary (enum machine_mode mode, const_tree type)
+ix86_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int align;
if (type)
otherwise, FUNC is 0. */
static rtx
-function_value_32 (enum machine_mode orig_mode, enum machine_mode mode,
+function_value_32 (machine_mode orig_mode, machine_mode mode,
const_tree fntype, const_tree fn)
{
unsigned int regno;
}
static rtx
-function_value_64 (enum machine_mode orig_mode, enum machine_mode mode,
+function_value_64 (machine_mode orig_mode, machine_mode mode,
const_tree valtype)
{
rtx ret;
}
static rtx
-function_value_ms_64 (enum machine_mode orig_mode, enum machine_mode mode,
+function_value_ms_64 (machine_mode orig_mode, machine_mode mode,
const_tree valtype)
{
unsigned int regno = AX_REG;
static rtx
ix86_function_value_1 (const_tree valtype, const_tree fntype_or_decl,
- enum machine_mode orig_mode, enum machine_mode mode)
+ machine_mode orig_mode, machine_mode mode)
{
const_tree fn, fntype;
static rtx
ix86_function_value (const_tree valtype, const_tree fntype_or_decl, bool)
{
- enum machine_mode mode, orig_mode;
+ machine_mode mode, orig_mode;
orig_mode = TYPE_MODE (valtype);
mode = type_natural_mode (valtype, NULL, true);
/* Pointer function arguments and return values are promoted to
word_mode. */
-static enum machine_mode
-ix86_promote_function_mode (const_tree type, enum machine_mode mode,
+static machine_mode
+ix86_promote_function_mode (const_tree type, machine_mode mode,
int *punsignedp, const_tree fntype,
int for_return)
{
should be accessed using BLKmode. */
static bool
-ix86_member_type_forces_blk (const_tree field, enum machine_mode mode)
+ix86_member_type_forces_blk (const_tree field, machine_mode mode)
{
/* Union with XFmode must be in BLKmode. */
return (mode == XFmode
}
rtx
-ix86_libcall_value (enum machine_mode mode)
+ix86_libcall_value (machine_mode mode)
{
return ix86_function_value_1 (NULL, NULL, mode, mode);
}
#ifdef SUBTARGET_RETURN_IN_MEMORY
return SUBTARGET_RETURN_IN_MEMORY (type, fntype);
#else
- const enum machine_mode mode = type_natural_mode (type, NULL, true);
+ const machine_mode mode = type_natural_mode (type, NULL, true);
HOST_WIDE_INT size;
if (TARGET_64BIT)
if (ix86_varargs_fpr_size)
{
- enum machine_mode smode;
+ machine_mode smode;
rtx_code_label *label;
rtx test;
}
static void
-ix86_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
+ix86_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
tree type, int *, int no_rtl)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
rtx container;
int indirect_p = 0;
tree ptrtype;
- enum machine_mode nat_mode;
+ machine_mode nat_mode;
unsigned int arg_boundary;
/* Only 64bit target needs something special. */
{
rtx slot = XVECEXP (container, 0, i);
rtx reg = XEXP (slot, 0);
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
tree piece_type;
tree addr_type;
tree daddr_type;
int
standard_80387_constant_p (rtx x)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
REAL_VALUE_TYPE r;
int
standard_sse_constant_p (rtx x)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (x == const0_rtx || x == CONST0_RTX (GET_MODE (x)))
return 1;
/* Emit a single register save at CFA - CFA_OFFSET. */
static void
-ix86_emit_save_reg_using_mov (enum machine_mode mode, unsigned int regno,
+ix86_emit_save_reg_using_mov (machine_mode mode, unsigned int regno,
HOST_WIDE_INT cfa_offset)
{
struct machine_function *m = cfun->machine;
requires to two regs - that would mean more pseudos with longer
lifetimes. */
static int
-ix86_address_cost (rtx x, enum machine_mode, addr_space_t, bool)
+ix86_address_cost (rtx x, machine_mode, addr_space_t, bool)
{
struct ix86_address parts;
int cost = 1;
satisfies CONSTANT_P. */
static bool
-ix86_legitimate_constant_p (enum machine_mode, rtx x)
+ix86_legitimate_constant_p (machine_mode, rtx x)
{
switch (GET_CODE (x))
{
is checked above. */
static bool
-ix86_cannot_force_const_mem (enum machine_mode mode, rtx x)
+ix86_cannot_force_const_mem (machine_mode mode, rtx x)
{
/* We can always put integral constants and vectors in memory. */
switch (GET_CODE (x))
0 if it should not. */
bool
-ix86_legitimize_reload_address (rtx x, enum machine_mode, int opnum, int type,
+ix86_legitimize_reload_address (rtx x, machine_mode, int opnum, int type,
int)
{
/* Reload can generate:
static rtx
ix86_validate_address_register (rtx op)
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
/* Only SImode or DImode registers can form the address. */
if (mode != SImode && mode != DImode)
be recognized. */
static bool
-ix86_legitimate_address_p (enum machine_mode, rtx addr, bool strict)
+ix86_legitimate_address_p (machine_mode, rtx addr, bool strict)
{
struct ix86_address parts;
rtx base, index, disp;
else
{
rtx base = legitimize_pic_address (op0, reg);
- enum machine_mode mode = GET_MODE (base);
+ machine_mode mode = GET_MODE (base);
new_rtx
= legitimize_pic_address (op1, base == reg ? NULL_RTX : reg);
/* Load the thread pointer. If TO_REG is true, force it into a register. */
static rtx
-get_thread_pointer (enum machine_mode tp_mode, bool to_reg)
+get_thread_pointer (machine_mode tp_mode, bool to_reg)
{
rtx tp = gen_rtx_UNSPEC (ptr_mode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
{
rtx dest, base, off;
rtx pic = NULL_RTX, tp = NULL_RTX;
- enum machine_mode tp_mode = Pmode;
+ machine_mode tp_mode = Pmode;
int type;
/* Fall back to global dynamic model if tool chain cannot support local
See comments by legitimize_pic_address in i386.c for details. */
static rtx
-ix86_legitimize_address (rtx x, rtx, enum machine_mode mode)
+ix86_legitimize_address (rtx x, rtx, machine_mode mode)
{
int changed = 0;
unsigned log;
}
\f
static void
-put_condition_code (enum rtx_code code, enum machine_mode mode, bool reverse,
+put_condition_code (enum rtx_code code, machine_mode mode, bool reverse,
bool fp, FILE *file)
{
const char *suffix;
that parallel "operands". */
void
-split_double_mode (enum machine_mode mode, rtx operands[],
+split_double_mode (machine_mode mode, rtx operands[],
int num, rtx lo_half[], rtx hi_half[])
{
- enum machine_mode half_mode;
+ machine_mode half_mode;
unsigned int byte;
switch (mode)
}
void
-ix86_expand_move (enum machine_mode mode, rtx operands[])
+ix86_expand_move (machine_mode mode, rtx operands[])
{
rtx op0, op1;
enum tls_model model;
}
void
-ix86_expand_vector_move (enum machine_mode mode, rtx operands[])
+ix86_expand_vector_move (machine_mode mode, rtx operands[])
{
rtx op0 = operands[0], op1 = operands[1];
unsigned int align = GET_MODE_ALIGNMENT (mode);
rtx (*extract) (rtx, rtx, rtx);
rtx (*load_unaligned) (rtx, rtx);
rtx (*store_unaligned) (rtx, rtx);
- enum machine_mode mode;
+ machine_mode mode;
switch (GET_MODE (op0))
{
*/
void
-ix86_expand_vector_move_misalign (enum machine_mode mode, rtx operands[])
+ix86_expand_vector_move_misalign (machine_mode mode, rtx operands[])
{
rtx op0, op1, orig_op0 = NULL_RTX, m;
rtx (*load_unaligned) (rtx, rtx);
operand order. Returns true if the operands should be swapped. */
static bool
-ix86_swap_binary_operands_p (enum rtx_code code, enum machine_mode mode,
+ix86_swap_binary_operands_p (enum rtx_code code, machine_mode mode,
rtx operands[])
{
rtx dst = operands[0];
destination in operands[0], a copy operation will be required. */
rtx
-ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
+ix86_fixup_binary_operands (enum rtx_code code, machine_mode mode,
rtx operands[])
{
rtx dst = operands[0];
void
ix86_fixup_binary_operands_no_copy (enum rtx_code code,
- enum machine_mode mode, rtx operands[])
+ machine_mode mode, rtx operands[])
{
rtx dst = ix86_fixup_binary_operands (code, mode, operands);
gcc_assert (dst == operands[0]);
memory references (one output, two input) in a single insn. */
void
-ix86_expand_binary_operator (enum rtx_code code, enum machine_mode mode,
+ix86_expand_binary_operator (enum rtx_code code, machine_mode mode,
rtx operands[])
{
rtx src1, src2, dst, op, clob;
the given OPERANDS. */
void
-ix86_expand_vector_logical_operator (enum rtx_code code, enum machine_mode mode,
+ix86_expand_vector_logical_operator (enum rtx_code code, machine_mode mode,
rtx operands[])
{
rtx op1 = NULL_RTX, op2 = NULL_RTX;
appropriate constraints. */
bool
-ix86_binary_operator_ok (enum rtx_code code, enum machine_mode mode,
+ix86_binary_operator_ok (enum rtx_code code, machine_mode mode,
rtx operands[3])
{
rtx dst = operands[0];
memory references (one output, one input) in a single insn. */
void
-ix86_expand_unary_operator (enum rtx_code code, enum machine_mode mode,
+ix86_expand_unary_operator (enum rtx_code code, machine_mode mode,
rtx operands[])
{
int matching_memory;
divisor are within the range [0-255]. */
void
-ix86_split_idivmod (enum machine_mode mode, rtx operands[],
+ix86_split_idivmod (machine_mode mode, rtx operands[],
bool signed_p)
{
rtx_code_label *end_label, *qimode_label;
matches destination. RTX includes clobber of FLAGS_REG. */
static void
-ix86_emit_binop (enum rtx_code code, enum machine_mode mode,
+ix86_emit_binop (enum rtx_code code, machine_mode mode,
rtx dst, rtx src)
{
rtx op, clob;
at lea position. */
void
-ix86_split_lea_for_addr (rtx_insn *insn, rtx operands[], enum machine_mode mode)
+ix86_split_lea_for_addr (rtx_insn *insn, rtx operands[], machine_mode mode)
{
unsigned int regno0, regno1, regno2;
struct ix86_address parts;
bool
ix86_unary_operator_ok (enum rtx_code,
- enum machine_mode,
+ machine_mode,
rtx operands[2])
{
/* If one of operands is memory, source and destination must match. */
void
ix86_split_convert_uns_si_sse (rtx operands[])
{
- enum machine_mode vecmode;
+ machine_mode vecmode;
rtx value, large, zero_or_two31, input, two31, x;
large = operands[1];
{
rtx tmp[8];
REAL_VALUE_TYPE TWO16r;
- enum machine_mode intmode = GET_MODE (val);
- enum machine_mode fltmode = GET_MODE (target);
+ machine_mode intmode = GET_MODE (val);
+ machine_mode fltmode = GET_MODE (target);
rtx (*cvt) (rtx, rtx);
if (intmode == V4SImode)
{
REAL_VALUE_TYPE TWO31r;
rtx two31r, tmp[4];
- enum machine_mode mode = GET_MODE (val);
- enum machine_mode scalarmode = GET_MODE_INNER (mode);
- enum machine_mode intmode = GET_MODE_SIZE (mode) == 32 ? V8SImode : V4SImode;
+ machine_mode mode = GET_MODE (val);
+ machine_mode scalarmode = GET_MODE_INNER (mode);
+ machine_mode intmode = GET_MODE_SIZE (mode) == 32 ? V8SImode : V4SImode;
rtx (*cmp) (rtx, rtx, rtx, rtx);
int i;
register. */
rtx
-ix86_build_const_vector (enum machine_mode mode, bool vect, rtx value)
+ix86_build_const_vector (machine_mode mode, bool vect, rtx value)
{
int i, n_elt;
rtvec v;
- enum machine_mode scalar_mode;
+ machine_mode scalar_mode;
switch (mode)
{
a mask excluding the sign bit. */
rtx
-ix86_build_signbit_mask (enum machine_mode mode, bool vect, bool invert)
+ix86_build_signbit_mask (machine_mode mode, bool vect, bool invert)
{
- enum machine_mode vec_mode, imode;
+ machine_mode vec_mode, imode;
HOST_WIDE_INT hi, lo;
int shift = 63;
rtx v;
/* Generate code for floating point ABS or NEG. */
void
-ix86_expand_fp_absneg_operator (enum rtx_code code, enum machine_mode mode,
+ix86_expand_fp_absneg_operator (enum rtx_code code, machine_mode mode,
rtx operands[])
{
rtx mask, set, dst, src;
bool use_sse = false;
bool vector_mode = VECTOR_MODE_P (mode);
- enum machine_mode vmode = mode;
+ machine_mode vmode = mode;
if (vector_mode)
use_sse = true;
void
ix86_expand_copysign (rtx operands[])
{
- enum machine_mode mode, vmode;
+ machine_mode mode, vmode;
rtx dest, op0, op1, mask, nmask;
dest = operands[0];
void
ix86_split_copysign_const (rtx operands[])
{
- enum machine_mode mode, vmode;
+ machine_mode mode, vmode;
rtx dest, op0, mask, x;
dest = operands[0];
void
ix86_split_copysign_var (rtx operands[])
{
- enum machine_mode mode, vmode;
+ machine_mode mode, vmode;
rtx dest, scratch, op0, op1, mask, nmask, x;
dest = operands[0];
CC mode is at least as constrained as REQ_MODE. */
bool
-ix86_match_ccmode (rtx insn, enum machine_mode req_mode)
+ix86_match_ccmode (rtx insn, machine_mode req_mode)
{
rtx set;
- enum machine_mode set_mode;
+ machine_mode set_mode;
set = PATTERN (insn);
if (GET_CODE (set) == PARALLEL)
static rtx
ix86_expand_int_compare (enum rtx_code code, rtx op0, rtx op1)
{
- enum machine_mode cmpmode;
+ machine_mode cmpmode;
rtx tmp, flags;
cmpmode = SELECT_CC_MODE (code, op0, op1);
/* Figure out whether to use ordered or unordered fp comparisons.
Return the appropriate mode to use. */
-enum machine_mode
+machine_mode
ix86_fp_compare_mode (enum rtx_code)
{
/* ??? In order to make all comparisons reversible, we do all comparisons
return TARGET_IEEE_FP ? CCFPUmode : CCFPmode;
}
-enum machine_mode
+machine_mode
ix86_cc_mode (enum rtx_code code, rtx op0, rtx op1)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
if (SCALAR_FLOAT_MODE_P (mode))
{
mode which is compatible with both. Otherwise, return
VOIDmode. */
-static enum machine_mode
-ix86_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
+static machine_mode
+ix86_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
if (m1 == m2)
return m1;
static enum rtx_code
ix86_prepare_fp_compare_args (enum rtx_code code, rtx *pop0, rtx *pop1)
{
- enum machine_mode fpcmp_mode = ix86_fp_compare_mode (code);
+ machine_mode fpcmp_mode = ix86_fp_compare_mode (code);
rtx op0 = *pop0, op1 = *pop1;
- enum machine_mode op_mode = GET_MODE (op0);
+ machine_mode op_mode = GET_MODE (op0);
int is_sse = TARGET_SSE_MATH && SSE_FLOAT_MODE_P (op_mode);
/* All of the unordered compare instructions only work on registers.
static rtx
ix86_expand_fp_compare (enum rtx_code code, rtx op0, rtx op1, rtx scratch)
{
- enum machine_mode fpcmp_mode, intcmp_mode;
+ machine_mode fpcmp_mode, intcmp_mode;
rtx tmp, tmp2;
fpcmp_mode = ix86_fp_compare_mode (code);
void
ix86_expand_branch (enum rtx_code code, rtx op0, rtx op1, rtx label)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
rtx tmp;
switch (mode)
rtx lo[2], hi[2];
rtx_code_label *label2;
enum rtx_code code1, code2, code3;
- enum machine_mode submode;
+ machine_mode submode;
if (CONSTANT_P (op0) && !CONSTANT_P (op1))
{
static bool
ix86_expand_carry_flag_compare (enum rtx_code code, rtx op0, rtx op1, rtx *pop)
{
- enum machine_mode mode =
+ machine_mode mode =
GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
/* Do not handle double-mode compares that go through special path. */
enum rtx_code code = GET_CODE (operands[1]), compare_code;
rtx_insn *compare_seq;
rtx compare_op;
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
bool sign_bit_compare_p = false;
rtx op0 = XEXP (operands[1], 0);
rtx op1 = XEXP (operands[1], 1);
if (diff < 0)
{
- enum machine_mode cmp_mode = GET_MODE (op0);
+ machine_mode cmp_mode = GET_MODE (op0);
HOST_WIDE_INT tmp;
tmp = ct, ct = cf, cf = tmp;
{
if (cf == 0)
{
- enum machine_mode cmp_mode = GET_MODE (op0);
+ machine_mode cmp_mode = GET_MODE (op0);
cf = ct;
ct = 0;
ix86_expand_sse_fp_minmax (rtx dest, enum rtx_code code, rtx cmp_op0,
rtx cmp_op1, rtx if_true, rtx if_false)
{
- enum machine_mode mode;
+ machine_mode mode;
bool is_min;
rtx tmp;
ix86_expand_sse_cmp (rtx dest, enum rtx_code code, rtx cmp_op0, rtx cmp_op1,
rtx op_true, rtx op_false)
{
- enum machine_mode mode = GET_MODE (dest);
- enum machine_mode cmp_ops_mode = GET_MODE (cmp_op0);
+ machine_mode mode = GET_MODE (dest);
+ machine_mode cmp_ops_mode = GET_MODE (cmp_op0);
/* In general case result of comparison can differ from operands' type. */
- enum machine_mode cmp_mode;
+ machine_mode cmp_mode;
/* In AVX512F the result of comparison is an integer mask. */
bool maskcmp = false;
static void
ix86_expand_sse_movcc (rtx dest, rtx cmp, rtx op_true, rtx op_false)
{
- enum machine_mode mode = GET_MODE (dest);
- enum machine_mode cmpmode = GET_MODE (cmp);
+ machine_mode mode = GET_MODE (dest);
+ machine_mode cmpmode = GET_MODE (cmp);
/* In AVX512F the result of comparison is an integer mask. */
bool maskcmp = (mode != cmpmode && TARGET_AVX512F);
bool
ix86_expand_fp_movcc (rtx operands[])
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
enum rtx_code code = GET_CODE (operands[1]);
rtx tmp, compare_op;
rtx op0 = XEXP (operands[1], 0);
if (TARGET_SSE_MATH && SSE_FLOAT_MODE_P (mode))
{
- enum machine_mode cmode;
+ machine_mode cmode;
/* Since we've no cmove for sse registers, don't force bad register
allocation just to gain access to it. Deny movcc when the
bool
ix86_expand_int_vcond (rtx operands[])
{
- enum machine_mode data_mode = GET_MODE (operands[0]);
- enum machine_mode mode = GET_MODE (operands[4]);
+ machine_mode data_mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[4]);
enum rtx_code code = GET_CODE (operands[3]);
bool negate = false;
rtx x, cop0, cop1;
{
rtx target, op0, op1;
unsigned char perm[MAX_VECT_LEN];
- enum machine_mode vmode;
+ machine_mode vmode;
unsigned char nelt;
bool one_operand_p;
bool testing_p;
{
/* ix86_expand_vec_perm_vpermi2 is called from both const and non-const
expander, so args are either in d, or in op0, op1 etc. */
- enum machine_mode mode = GET_MODE (d ? d->op0 : op0);
- enum machine_mode maskmode = mode;
+ machine_mode mode = GET_MODE (d ? d->op0 : op0);
+ machine_mode maskmode = mode;
rtx (*gen) (rtx, rtx, rtx, rtx) = NULL;
switch (mode)
rtx op1 = operands[2];
rtx mask = operands[3];
rtx t1, t2, t3, t4, t5, t6, t7, t8, vt, vt2, vec[32];
- enum machine_mode mode = GET_MODE (op0);
- enum machine_mode maskmode = GET_MODE (mask);
+ machine_mode mode = GET_MODE (op0);
+ machine_mode maskmode = GET_MODE (mask);
int w, e, i;
bool one_operand_shuffle = rtx_equal_p (op0, op1);
void
ix86_expand_sse_unpack (rtx dest, rtx src, bool unsigned_p, bool high_p)
{
- enum machine_mode imode = GET_MODE (src);
+ machine_mode imode = GET_MODE (src);
rtx tmp;
if (TARGET_SSE4_1)
{
rtx (*unpack)(rtx, rtx);
rtx (*extract)(rtx, rtx) = NULL;
- enum machine_mode halfmode = BLKmode;
+ machine_mode halfmode = BLKmode;
switch (imode)
{
rtx compare_op;
rtx val = const0_rtx;
bool fpcmp = false;
- enum machine_mode mode;
+ machine_mode mode;
rtx op0 = XEXP (operands[1], 0);
rtx op1 = XEXP (operands[1], 1);
in the right order. Maximally three parts are generated. */
static int
-ix86_split_to_parts (rtx operand, rtx *parts, enum machine_mode mode)
+ix86_split_to_parts (rtx operand, rtx *parts, machine_mode mode)
{
int size;
if (GET_CODE (operand) == CONST_VECTOR)
{
- enum machine_mode imode = int_mode_for_mode (mode);
+ machine_mode imode = int_mode_for_mode (mode);
/* Caution: if we looked through a constant pool memory above,
the operand may actually have a different mode now. That's
ok, since we want to pun this all the way back to an integer. */
split_double_mode (mode, &operand, 1, &parts[0], &parts[1]);
if (mode == XFmode || mode == TFmode)
{
- enum machine_mode upper_mode = mode==XFmode ? SImode : DImode;
+ machine_mode upper_mode = mode==XFmode ? SImode : DImode;
if (REG_P (operand))
{
gcc_assert (reload_completed);
int nparts, i, j;
int push = 0;
int collisions = 0;
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
bool collisionparts[4];
/* The DFmode expanders may ask us to move double.
a sequence of add instructions. */
static void
-ix86_expand_ashl_const (rtx operand, int count, enum machine_mode mode)
+ix86_expand_ashl_const (rtx operand, int count, machine_mode mode)
{
rtx (*insn)(rtx, rtx, rtx);
}
void
-ix86_split_ashl (rtx *operands, rtx scratch, enum machine_mode mode)
+ix86_split_ashl (rtx *operands, rtx scratch, machine_mode mode)
{
rtx (*gen_ashl3)(rtx, rtx, rtx);
rtx (*gen_shld)(rtx, rtx, rtx);
pentium4 a bit; no one else seems to care much either way. */
else
{
- enum machine_mode half_mode;
+ machine_mode half_mode;
rtx (*gen_lshr3)(rtx, rtx, rtx);
rtx (*gen_and3)(rtx, rtx, rtx);
rtx (*gen_xor3)(rtx, rtx, rtx);
}
void
-ix86_split_ashr (rtx *operands, rtx scratch, enum machine_mode mode)
+ix86_split_ashr (rtx *operands, rtx scratch, machine_mode mode)
{
rtx (*gen_ashr3)(rtx, rtx, rtx)
= mode == DImode ? gen_ashrsi3 : gen_ashrdi3;
}
void
-ix86_split_lshr (rtx *operands, rtx scratch, enum machine_mode mode)
+ix86_split_lshr (rtx *operands, rtx scratch, machine_mode mode)
{
rtx (*gen_lshr3)(rtx, rtx, rtx)
= mode == DImode ? gen_lshrsi3 : gen_lshrdi3;
/* Return mode for the memcpy/memset loop counter. Prefer SImode over
DImode for constant loop counts. */
-static enum machine_mode
+static machine_mode
counter_mode (rtx count_exp)
{
if (GET_MODE (count_exp) != VOIDmode)
static void
expand_set_or_movmem_via_loop (rtx destmem, rtx srcmem,
rtx destptr, rtx srcptr, rtx value,
- rtx count, enum machine_mode mode, int unroll,
+ rtx count, machine_mode mode, int unroll,
int expected_size, bool issetmem)
{
rtx_code_label *out_label, *top_label;
rtx iter, tmp;
- enum machine_mode iter_mode = counter_mode (count);
+ machine_mode iter_mode = counter_mode (count);
int piece_size_n = GET_MODE_SIZE (mode) * unroll;
rtx piece_size = GEN_INT (piece_size_n);
rtx piece_size_mask = GEN_INT (~((GET_MODE_SIZE (mode) * unroll) - 1));
expand_set_or_movmem_via_rep (rtx destmem, rtx srcmem,
rtx destptr, rtx srcptr, rtx value, rtx orig_value,
rtx count,
- enum machine_mode mode, bool issetmem)
+ machine_mode mode, bool issetmem)
{
rtx destexp;
rtx srcexp;
{
rtx dst = destmem, src = *srcmem, adjust, tempreg;
enum insn_code code;
- enum machine_mode move_mode;
+ machine_mode move_mode;
int piece_size, i;
/* Find the widest mode in which we could perform moves.
{
rtx dst = destmem, adjust;
enum insn_code code;
- enum machine_mode move_mode;
+ machine_mode move_mode;
int piece_size, i;
/* Find the widest mode in which we could perform moves.
rtx done_label, bool issetmem)
{
rtx_code_label *label = ix86_expand_aligntest (count, size, false);
- enum machine_mode mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 1);
+ machine_mode mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 1);
rtx modesize;
int n;
static void
expand_set_or_movmem_prologue_epilogue_by_misaligned_moves (rtx destmem, rtx srcmem,
rtx *destptr, rtx *srcptr,
- enum machine_mode mode,
+ machine_mode mode,
rtx value, rtx vec_value,
rtx *count,
rtx_code_label **done_label,
decide_alignment (int align,
enum stringop_alg alg,
int expected_size,
- enum machine_mode move_mode)
+ machine_mode move_mode)
{
int desired_align = 0;
synth_mult by unwinding the sequence by hand on CPUs with
slow multiply. */
static rtx
-promote_duplicated_reg (enum machine_mode mode, rtx val)
+promote_duplicated_reg (machine_mode mode, rtx val)
{
- enum machine_mode valmode = GET_MODE (val);
+ machine_mode valmode = GET_MODE (val);
rtx tmp;
int nops = mode == DImode ? 3 : 2;
int dynamic_check;
bool need_zero_guard = false;
bool noalign;
- enum machine_mode move_mode = VOIDmode;
+ machine_mode move_mode = VOIDmode;
int unroll_factor = 1;
/* TODO: Once value ranges are available, fill in proper data. */
unsigned HOST_WIDE_INT min_size = 0;
for (i = 0; i < cregs_size; i++)
{
int regno = x86_64_ms_sysv_extra_clobbered_registers[i];
- enum machine_mode mode = SSE_REGNO_P (regno) ? TImode : DImode;
+ machine_mode mode = SSE_REGNO_P (regno) ? TImode : DImode;
clobber_reg (&use, gen_rtx_REG (mode, regno));
}
which slot to use. */
rtx
-assign_386_stack_local (enum machine_mode mode, enum ix86_stack_slot n)
+assign_386_stack_local (machine_mode mode, enum ix86_stack_slot n)
{
struct stack_local_entry *s;
object. */
unsigned int
-ix86_local_alignment (tree exp, enum machine_mode mode,
+ix86_local_alignment (tree exp, machine_mode mode,
unsigned int align)
{
tree type, decl;
alignment that the object would ordinarily have. */
unsigned int
-ix86_minimum_alignment (tree exp, enum machine_mode mode,
+ix86_minimum_alignment (tree exp, machine_mode mode,
unsigned int align)
{
tree type, decl;
gcc_assert (tcode > IX86_BT_LAST_PRIM);
if (tcode <= IX86_BT_LAST_VECT)
{
- enum machine_mode mode;
+ machine_mode mode;
index = tcode - IX86_BT_LAST_PRIM - 1;
itype = ix86_get_builtin_type (ix86_builtin_type_vect_base[index]);
where we expect a vector. To avoid crashing, use one of the vector
clear instructions. */
static rtx
-safe_vector_operand (rtx x, enum machine_mode mode)
+safe_vector_operand (rtx x, machine_mode mode)
{
if (x == const0_rtx)
x = CONST0_RTX (mode);
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
if (VECTOR_MODE_P (mode0))
op0 = safe_vector_operand (op0, mode0);
int num_memory = 0;
struct {
rtx op;
- enum machine_mode mode;
+ machine_mode mode;
} args[4];
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
switch (m_type)
{
tree arg = CALL_EXPR_ARG (exp, i);
rtx op = expand_normal (arg);
int adjust = (comparison_p) ? 1 : 0;
- enum machine_mode mode = insn_data[icode].operand[i+adjust+1].mode;
+ machine_mode mode = insn_data[icode].operand[i+adjust+1].mode;
if (last_arg_constant && i == nargs - 1)
{
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op1, op0 = expand_normal (arg0);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
if (optimize || !target
|| GET_MODE (target) != tmode
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2;
- enum machine_mode tmode = insn_data[d->icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[d->icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[d->icode].operand[2].mode;
+ machine_mode tmode = insn_data[d->icode].operand[0].mode;
+ machine_mode mode0 = insn_data[d->icode].operand[1].mode;
+ machine_mode mode1 = insn_data[d->icode].operand[2].mode;
enum rtx_code comparison = d->comparison;
if (VECTOR_MODE_P (mode0))
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode mode0 = insn_data[d->icode].operand[0].mode;
- enum machine_mode mode1 = insn_data[d->icode].operand[1].mode;
+ machine_mode mode0 = insn_data[d->icode].operand[0].mode;
+ machine_mode mode1 = insn_data[d->icode].operand[1].mode;
enum rtx_code comparison = d->comparison;
if (VECTOR_MODE_P (mode0))
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op1, op0 = expand_normal (arg0);
- enum machine_mode tmode = insn_data[d->icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[d->icode].operand[1].mode;
+ machine_mode tmode = insn_data[d->icode].operand[0].mode;
+ machine_mode mode0 = insn_data[d->icode].operand[1].mode;
if (optimize || target == 0
|| GET_MODE (target) != tmode
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2;
- enum machine_mode tmode = insn_data[d->icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[d->icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[d->icode].operand[2].mode;
+ machine_mode tmode = insn_data[d->icode].operand[0].mode;
+ machine_mode mode0 = insn_data[d->icode].operand[1].mode;
+ machine_mode mode1 = insn_data[d->icode].operand[2].mode;
if (optimize || target == 0
|| GET_MODE (target) != tmode
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode mode0 = insn_data[d->icode].operand[0].mode;
- enum machine_mode mode1 = insn_data[d->icode].operand[1].mode;
+ machine_mode mode0 = insn_data[d->icode].operand[0].mode;
+ machine_mode mode1 = insn_data[d->icode].operand[1].mode;
enum rtx_code comparison = d->comparison;
if (VECTOR_MODE_P (mode0))
rtx op2 = expand_normal (arg2);
rtx op3 = expand_normal (arg3);
rtx op4 = expand_normal (arg4);
- enum machine_mode tmode0, tmode1, modev2, modei3, modev4, modei5, modeimm;
+ machine_mode tmode0, tmode1, modev2, modei3, modev4, modei5, modeimm;
tmode0 = insn_data[d->icode].operand[0].mode;
tmode1 = insn_data[d->icode].operand[1].mode;
emit_insn
(gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (EQ, QImode,
- gen_rtx_REG ((enum machine_mode) d->flag,
+ gen_rtx_REG ((machine_mode) d->flag,
FLAGS_REG),
const0_rtx)));
return SUBREG_REG (target);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
- enum machine_mode tmode0, tmode1, modev2, modev3, modeimm;
+ machine_mode tmode0, tmode1, modev2, modev3, modeimm;
tmode0 = insn_data[d->icode].operand[0].mode;
tmode1 = insn_data[d->icode].operand[1].mode;
emit_insn
(gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (EQ, QImode,
- gen_rtx_REG ((enum machine_mode) d->flag,
+ gen_rtx_REG ((machine_mode) d->flag,
FLAGS_REG),
const0_rtx)));
return SUBREG_REG (target);
struct
{
rtx op;
- enum machine_mode mode;
+ machine_mode mode;
} args[6];
bool last_arg_count = false;
enum insn_code icode = d->icode;
const struct insn_data_d *insn_p = &insn_data[icode];
- enum machine_mode tmode = insn_p->operand[0].mode;
- enum machine_mode rmode = VOIDmode;
+ machine_mode tmode = insn_p->operand[0].mode;
+ machine_mode rmode = VOIDmode;
bool swap = false;
enum rtx_code comparison = d->comparison;
{
tree arg = CALL_EXPR_ARG (exp, i);
rtx op = expand_normal (arg);
- enum machine_mode mode = insn_p->operand[i + 1].mode;
+ machine_mode mode = insn_p->operand[i + 1].mode;
bool match = insn_p->operand[i + 1].predicate (op, mode);
if (last_arg_count && (i + 1) == nargs)
rtx op3 = expand_normal (arg3);
enum insn_code icode = d->icode;
const struct insn_data_d *insn_p = &insn_data[icode];
- enum machine_mode mode0 = insn_p->operand[0].mode;
- enum machine_mode mode1 = insn_p->operand[1].mode;
+ machine_mode mode0 = insn_p->operand[0].mode;
+ machine_mode mode1 = insn_p->operand[1].mode;
enum rtx_code comparison = UNEQ;
bool need_ucomi = false;
struct
{
rtx op;
- enum machine_mode mode;
+ machine_mode mode;
} args[6];
enum insn_code icode = d->icode;
const struct insn_data_d *insn_p = &insn_data[icode];
- enum machine_mode tmode = insn_p->operand[0].mode;
+ machine_mode tmode = insn_p->operand[0].mode;
unsigned int nargs_constant = 0;
unsigned int redundant_embed_rnd = 0;
{
tree arg = CALL_EXPR_ARG (exp, i);
rtx op = expand_normal (arg);
- enum machine_mode mode = insn_p->operand[i + 1].mode;
+ machine_mode mode = insn_p->operand[i + 1].mode;
bool match = insn_p->operand[i + 1].predicate (op, mode);
if (i == nargs - nargs_constant)
struct
{
rtx op;
- enum machine_mode mode;
+ machine_mode mode;
} args[3];
enum insn_code icode = d->icode;
bool last_arg_constant = false;
const struct insn_data_d *insn_p = &insn_data[icode];
- enum machine_mode tmode = insn_p->operand[0].mode;
+ machine_mode tmode = insn_p->operand[0].mode;
enum { load, store } klass;
switch ((enum ix86_builtin_func_type) d->flag)
for (i = 0; i < nargs; i++)
{
- enum machine_mode mode = insn_p->operand[i + 1].mode;
+ machine_mode mode = insn_p->operand[i + 1].mode;
bool match;
arg = CALL_EXPR_ARG (exp, i + arg_adjust);
static rtx
ix86_expand_vec_init_builtin (tree type, tree exp, rtx target)
{
- enum machine_mode tmode = TYPE_MODE (type);
- enum machine_mode inner_mode = GET_MODE_INNER (tmode);
+ machine_mode tmode = TYPE_MODE (type);
+ machine_mode inner_mode = GET_MODE_INNER (tmode);
int i, n_elt = GET_MODE_NUNITS (tmode);
rtvec v = rtvec_alloc (n_elt);
static rtx
ix86_expand_vec_ext_builtin (tree exp, rtx target)
{
- enum machine_mode tmode, mode0;
+ machine_mode tmode, mode0;
tree arg0, arg1;
int elt;
rtx op0;
static rtx
ix86_expand_vec_set_builtin (tree exp)
{
- enum machine_mode tmode, mode1;
+ machine_mode tmode, mode1;
tree arg0, arg1, arg2;
int elt;
rtx op0, op1, target;
static rtx
ix86_expand_builtin (tree exp, rtx target, rtx subtarget,
- enum machine_mode mode, int ignore)
+ machine_mode mode, int ignore)
{
const struct builtin_description *d;
size_t i;
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
tree arg0, arg1, arg2, arg3, arg4;
rtx op0, op1, op2, op3, op4, pat, insn;
- enum machine_mode mode0, mode1, mode2, mode3, mode4;
+ machine_mode mode0, mode1, mode2, mode3, mode4;
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
/* For CPU builtins that can be folded, fold first and expand the fold. */
ix86_builtin_vectorized_function (tree fndecl, tree type_out,
tree type_in)
{
- enum machine_mode in_mode, out_mode;
+ machine_mode in_mode, out_mode;
int in_n, out_n;
enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
tree fntype, new_fndecl, args;
unsigned arity;
const char *bname;
- enum machine_mode el_mode, in_mode;
+ machine_mode el_mode, in_mode;
int n, in_n;
/* The SVML is suitable for unsafe math only. */
tree fntype, new_fndecl, args;
unsigned arity;
const char *bname;
- enum machine_mode el_mode, in_mode;
+ machine_mode el_mode, in_mode;
int n, in_n;
/* The ACML is 64bits only and suitable for unsafe math only as
The return value is 0 for no match and the imm8+1 for a match. */
int
-avx_vpermilp_parallel (rtx par, enum machine_mode mode)
+avx_vpermilp_parallel (rtx par, machine_mode mode)
{
unsigned i, nelt = GET_MODE_NUNITS (mode);
unsigned mask = 0;
The return value is 0 for no match and the imm8+1 for a match. */
int
-avx_vperm2f128_parallel (rtx par, enum machine_mode mode)
+avx_vperm2f128_parallel (rtx par, machine_mode mode)
{
unsigned i, nelt = GET_MODE_NUNITS (mode), nelt2 = nelt / 2;
unsigned mask = 0;
static reg_class_t
ix86_preferred_reload_class (rtx x, reg_class_t regclass)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
/* We're only allowed to return a subclass of CLASS. Many of the
following checks fail for NO_REGS, so eliminate that early. */
static reg_class_t
ix86_preferred_output_reload_class (rtx x, reg_class_t regclass)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
/* Restrict the output reload class to the register bank that we are doing
math on. If we would like not to return a subset of CLASS, reject this
static reg_class_t
ix86_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
/* Double-word spills from general registers to non-offsettable memory
references (zero-extended addresses) require special handling. */
static inline bool
inline_secondary_memory_needed (enum reg_class class1, enum reg_class class2,
- enum machine_mode mode, int strict)
+ machine_mode mode, int strict)
{
if (lra_in_progress && (class1 == NO_REGS || class2 == NO_REGS))
return false;
bool
ix86_secondary_memory_needed (enum reg_class class1, enum reg_class class2,
- enum machine_mode mode, int strict)
+ machine_mode mode, int strict)
{
return inline_secondary_memory_needed (class1, class2, mode, strict);
}
except in the FP regs, where a single reg is always enough. */
static unsigned char
-ix86_class_max_nregs (reg_class_t rclass, enum machine_mode mode)
+ix86_class_max_nregs (reg_class_t rclass, machine_mode mode)
{
if (MAYBE_INTEGER_CLASS_P (rclass))
{
modes FROM to TO. */
bool
-ix86_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
+ix86_cannot_change_mode_class (machine_mode from, machine_mode to,
enum reg_class regclass)
{
if (from == to)
Q_REGS classes.
*/
static inline int
-inline_memory_move_cost (enum machine_mode mode, enum reg_class regclass,
+inline_memory_move_cost (machine_mode mode, enum reg_class regclass,
int in)
{
int cost;
}
static int
-ix86_memory_move_cost (enum machine_mode mode, reg_class_t regclass,
+ix86_memory_move_cost (machine_mode mode, reg_class_t regclass,
bool in)
{
return inline_memory_move_cost (mode, (enum reg_class) regclass, in ? 1 : 0);
general registers. */
static int
-ix86_register_move_cost (enum machine_mode mode, reg_class_t class1_i,
+ix86_register_move_cost (machine_mode mode, reg_class_t class1_i,
reg_class_t class2_i)
{
enum reg_class class1 = (enum reg_class) class1_i;
MODE. */
bool
-ix86_hard_regno_mode_ok (int regno, enum machine_mode mode)
+ix86_hard_regno_mode_ok (int regno, machine_mode mode)
{
/* Flags and only flags can only hold CCmode values. */
if (CC_REGNO_P (regno))
tieable integer mode. */
static bool
-ix86_tieable_integer_mode_p (enum machine_mode mode)
+ix86_tieable_integer_mode_p (machine_mode mode)
{
switch (mode)
{
can also hold MODE1. */
bool
-ix86_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+ix86_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
if (mode1 == mode2)
return true;
/* Return the cost of moving between two registers of mode MODE. */
static int
-ix86_set_reg_reg_cost (enum machine_mode mode)
+ix86_set_reg_reg_cost (machine_mode mode)
{
unsigned int units = UNITS_PER_WORD;
rtx mask;
enum rtx_code code = (enum rtx_code) code_i;
enum rtx_code outer_code = (enum rtx_code) outer_code_i;
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
const struct processor_costs *cost = speed ? ix86_cost : &ix86_size_cost;
switch (code)
== GET_MODE_SIZE (mode))
{
int is_mulwiden = 0;
- enum machine_mode inner_mode = GET_MODE (op0);
+ machine_mode inner_mode = GET_MODE (op0);
if (GET_CODE (op0) == GET_CODE (op1))
is_mulwiden = 1, op1 = XEXP (op1, 0);
int
x86_field_alignment (tree field, int computed)
{
- enum machine_mode mode;
+ machine_mode mode;
tree type = TREE_TYPE (field);
if (TARGET_64BIT || TARGET_ALIGN_DOUBLE)
/* If profitable, negate (without causing overflow) integer constant
of mode MODE at location LOC. Return true in this case. */
bool
-x86_maybe_negate_const_int (rtx *loc, enum machine_mode mode)
+x86_maybe_negate_const_int (rtx *loc, machine_mode mode)
{
HOST_WIDE_INT val;
{
rtx_code_label *neglab, *donelab;
rtx i0, i1, f0, in, out;
- enum machine_mode mode, inmode;
+ machine_mode mode, inmode;
inmode = GET_MODE (operands[1]);
gcc_assert (inmode == SImode || inmode == DImode);
/* Get a vector mode of the same size as the original but with elements
twice as wide. This is only guaranteed to apply to integral vectors. */
-static inline enum machine_mode
-get_mode_wider_vector (enum machine_mode o)
+static inline machine_mode
+get_mode_wider_vector (machine_mode o)
{
/* ??? Rely on the ordering that genmodes.c gives to vectors. */
- enum machine_mode n = GET_MODE_WIDER_MODE (o);
+ machine_mode n = GET_MODE_WIDER_MODE (o);
gcc_assert (GET_MODE_NUNITS (o) == GET_MODE_NUNITS (n) * 2);
gcc_assert (GET_MODE_SIZE (o) == GET_MODE_SIZE (n));
return n;
fill target with val via vec_duplicate. */
static bool
-ix86_vector_duplicate_value (enum machine_mode mode, rtx target, rtx val)
+ix86_vector_duplicate_value (machine_mode mode, rtx target, rtx val)
{
bool ok;
rtx_insn *insn;
with all elements equal to VAR. Return true if successful. */
static bool
-ix86_expand_vector_init_duplicate (bool mmx_ok, enum machine_mode mode,
+ix86_expand_vector_init_duplicate (bool mmx_ok, machine_mode mode,
rtx target, rtx val)
{
bool ok;
widen:
/* Replicate the value once into the next wider mode and recurse. */
{
- enum machine_mode smode, wsmode, wvmode;
+ machine_mode smode, wsmode, wvmode;
rtx x;
smode = GET_MODE_INNER (mode);
return ix86_vector_duplicate_value (mode, target, val);
else
{
- enum machine_mode hvmode = (mode == V16HImode ? V8HImode : V16QImode);
+ machine_mode hvmode = (mode == V16HImode ? V8HImode : V16QImode);
rtx x = gen_reg_rtx (hvmode);
ok = ix86_expand_vector_init_duplicate (false, hvmode, x, val);
return ix86_vector_duplicate_value (mode, target, val);
else
{
- enum machine_mode hvmode = (mode == V32HImode ? V16HImode : V32QImode);
+ machine_mode hvmode = (mode == V32HImode ? V16HImode : V32QImode);
rtx x = gen_reg_rtx (hvmode);
ok = ix86_expand_vector_init_duplicate (false, hvmode, x, val);
if successful. */
static bool
-ix86_expand_vector_init_one_nonzero (bool mmx_ok, enum machine_mode mode,
+ix86_expand_vector_init_one_nonzero (bool mmx_ok, machine_mode mode,
rtx target, rtx var, int one_var)
{
- enum machine_mode vsimode;
+ machine_mode vsimode;
rtx new_target;
rtx x, tmp;
bool use_vector_set = false;
except ONE_VAR are constants. Return true if successful. */
static bool
-ix86_expand_vector_init_one_var (bool mmx_ok, enum machine_mode mode,
+ix86_expand_vector_init_one_var (bool mmx_ok, machine_mode mode,
rtx target, rtx vals, int one_var)
{
rtx var = XVECEXP (vals, 0, one_var);
- enum machine_mode wmode;
+ machine_mode wmode;
rtx const_vec, x;
const_vec = copy_rtx (vals);
and none identical. */
static void
-ix86_expand_vector_init_concat (enum machine_mode mode,
+ix86_expand_vector_init_concat (machine_mode mode,
rtx target, rtx *ops, int n)
{
- enum machine_mode cmode, hmode = VOIDmode, gmode = VOIDmode;
+ machine_mode cmode, hmode = VOIDmode, gmode = VOIDmode;
rtx first[16], second[8], third[4];
rtvec v;
int i, j;
and none identical. */
static void
-ix86_expand_vector_init_interleave (enum machine_mode mode,
+ix86_expand_vector_init_interleave (machine_mode mode,
rtx target, rtx *ops, int n)
{
- enum machine_mode first_imode, second_imode, third_imode, inner_mode;
+ machine_mode first_imode, second_imode, third_imode, inner_mode;
int i, j;
rtx op0, op1;
rtx (*gen_load_even) (rtx, rtx, rtx);
all values variable, and none identical. */
static void
-ix86_expand_vector_init_general (bool mmx_ok, enum machine_mode mode,
+ix86_expand_vector_init_general (bool mmx_ok, machine_mode mode,
rtx target, rtx vals)
{
rtx ops[64], op0, op1, op2, op3, op4, op5;
- enum machine_mode half_mode = VOIDmode;
- enum machine_mode quarter_mode = VOIDmode;
+ machine_mode half_mode = VOIDmode;
+ machine_mode quarter_mode = VOIDmode;
int n, i;
switch (mode)
{
int i, j, n_elts, n_words, n_elt_per_word;
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
rtx words[4], shift;
inner_mode = GET_MODE_INNER (mode);
void
ix86_expand_vector_init (bool mmx_ok, rtx target, rtx vals)
{
- enum machine_mode mode = GET_MODE (target);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (target);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
int n_elts = GET_MODE_NUNITS (mode);
int n_var = 0, one_var = -1;
bool all_same = true, all_const_zero = true;
void
ix86_expand_vector_set (bool mmx_ok, rtx target, rtx val, int elt)
{
- enum machine_mode mode = GET_MODE (target);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
- enum machine_mode half_mode;
+ machine_mode mode = GET_MODE (target);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode half_mode;
bool use_vec_merge = false;
rtx tmp;
static rtx (*gen_extract[6][2]) (rtx, rtx)
void
ix86_expand_vector_extract (bool mmx_ok, rtx target, rtx vec, int elt)
{
- enum machine_mode mode = GET_MODE (vec);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (vec);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
bool use_vec_extr = false;
rtx tmp;
ix86_expand_reduc (rtx (*fn) (rtx, rtx, rtx), rtx dest, rtx in)
{
rtx half, dst, vec = in;
- enum machine_mode mode = GET_MODE (in);
+ machine_mode mode = GET_MODE (in);
int i;
/* SSE4 has a special instruction for V8HImode UMIN reduction. */
\f
/* Target hook for scalar_mode_supported_p. */
static bool
-ix86_scalar_mode_supported_p (enum machine_mode mode)
+ix86_scalar_mode_supported_p (machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode))
return default_decimal_float_supported_p ();
/* Implements target hook vector_mode_supported_p. */
static bool
-ix86_vector_mode_supported_p (enum machine_mode mode)
+ix86_vector_mode_supported_p (machine_mode mode)
{
if (TARGET_SSE && VALID_SSE_REG_MODE (mode))
return true;
/* Implement target hook libgcc_floating_mode_supported_p. */
static bool
-ix86_libgcc_floating_mode_supported_p (enum machine_mode mode)
+ix86_libgcc_floating_mode_supported_p (machine_mode mode)
{
switch (mode)
{
}
/* Target hook for c_mode_for_suffix. */
-static enum machine_mode
+static machine_mode
ix86_c_mode_for_suffix (char suffix)
{
if (suffix == 'q')
/* Worker function for REVERSE_CONDITION. */
enum rtx_code
-ix86_reverse_condition (enum rtx_code code, enum machine_mode mode)
+ix86_reverse_condition (enum rtx_code code, machine_mode mode)
{
return (mode != CCFPmode && mode != CCFPUmode
? reverse_condition (code)
/* Emit code for round calculation. */
void ix86_emit_i387_round (rtx op0, rtx op1)
{
- enum machine_mode inmode = GET_MODE (op1);
- enum machine_mode outmode = GET_MODE (op0);
+ machine_mode inmode = GET_MODE (op1);
+ machine_mode outmode = GET_MODE (op0);
rtx e1, e2, res, tmp, tmp1, half;
rtx scratch = gen_reg_rtx (HImode);
rtx flags = gen_rtx_REG (CCNOmode, FLAGS_REG);
/* Output code to perform a Newton-Rhapson approximation of a single precision
floating point divide [http://en.wikipedia.org/wiki/N-th_root_algorithm]. */
-void ix86_emit_swdivsf (rtx res, rtx a, rtx b, enum machine_mode mode)
+void ix86_emit_swdivsf (rtx res, rtx a, rtx b, machine_mode mode)
{
rtx x0, x1, e0, e1;
/* Output code to perform a Newton-Rhapson approximation of a
single precision floating point [reciprocal] square root. */
-void ix86_emit_swsqrtsf (rtx res, rtx a, enum machine_mode mode,
+void ix86_emit_swsqrtsf (rtx res, rtx a, machine_mode mode,
bool recip)
{
rtx x0, e0, e1, e2, e3, mthree, mhalf;
static void
ix86_sse_copysign_to_positive (rtx result, rtx abs_value, rtx sign, rtx mask)
{
- enum machine_mode mode = GET_MODE (sign);
+ machine_mode mode = GET_MODE (sign);
rtx sgn = gen_reg_rtx (mode);
if (mask == NULL_RTX)
{
- enum machine_mode vmode;
+ machine_mode vmode;
if (mode == SFmode)
vmode = V4SFmode;
static rtx
ix86_expand_sse_fabs (rtx op0, rtx *smask)
{
- enum machine_mode vmode, mode = GET_MODE (op0);
+ machine_mode vmode, mode = GET_MODE (op0);
rtx xa, mask;
xa = gen_reg_rtx (mode);
ix86_expand_sse_compare_and_jump (enum rtx_code code, rtx op0, rtx op1,
bool swap_operands)
{
- enum machine_mode fpcmp_mode = ix86_fp_compare_mode (code);
+ machine_mode fpcmp_mode = ix86_fp_compare_mode (code);
rtx_code_label *label;
rtx tmp;
bool swap_operands)
{
rtx (*insn)(rtx, rtx, rtx, rtx);
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
rtx mask = gen_reg_rtx (mode);
if (swap_operands)
/* Generate and return a rtx of mode MODE for 2**n where n is the number
of bits of the mantissa of MODE, which must be one of DFmode or SFmode. */
static rtx
-ix86_gen_TWO52 (enum machine_mode mode)
+ix86_gen_TWO52 (machine_mode mode)
{
REAL_VALUE_TYPE TWO52r;
rtx TWO52;
tmp = op1 + copysign (nextafter (0.5, 0.0), op1)
return (long)tmp;
*/
- enum machine_mode mode = GET_MODE (op1);
+ machine_mode mode = GET_MODE (op1);
const struct real_format *fmt;
REAL_VALUE_TYPE pred_half, half_minus_pred_half;
rtx adj;
xi -= (double)xi > op1 ? 1 : 0;
return xi;
*/
- enum machine_mode fmode = GET_MODE (op1);
- enum machine_mode imode = GET_MODE (op0);
+ machine_mode fmode = GET_MODE (op1);
+ machine_mode imode = GET_MODE (op0);
rtx ireg, freg, tmp;
rtx_code_label *label;
xa = xa + 2**52 - 2**52;
return copysign (xa, operand1);
*/
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx res, xa, TWO52, mask;
rtx_code_label *label;
x2 -= -1;
return x2;
*/
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx xa, TWO52, tmp, one, res, mask;
rtx_code_label *label;
return copysign (x2, x);
return x2;
*/
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx xa, xi, TWO52, tmp, one, res, mask;
rtx_code_label *label;
x2 = copysign (xa2, x);
return x2;
*/
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx xa, xa2, dxa, TWO52, tmp, half, mhalf, one, res, mask;
rtx_code_label *label;
return copysign (x2, x);
return x2;
*/
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx xa, xi, TWO52, res, mask;
rtx_code_label *label;
void
ix86_expand_truncdf_32 (rtx operand0, rtx operand1)
{
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx xa, mask, TWO52, one, res, smask, tmp;
rtx_code_label *label;
xa = (double)(long)(xa + nextafter (0.5, 0.0));
return copysign (xa, x);
*/
- enum machine_mode mode = GET_MODE (operand0);
+ machine_mode mode = GET_MODE (operand0);
rtx res, TWO52, xa, xi, half, mask;
rtx_code_label *label;
const struct real_format *fmt;
void
ix86_expand_round_sse4 (rtx op0, rtx op1)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
rtx e1, e2, res, half;
const struct real_format *fmt;
REAL_VALUE_TYPE pred_half, half_minus_pred_half;
const unsigned char *perm, unsigned nelt,
bool testing_p)
{
- enum machine_mode v2mode;
+ machine_mode v2mode;
rtx x;
bool ok;
static bool
expand_vec_perm_blend (struct expand_vec_perm_d *d)
{
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
unsigned i, mask, nelt = d->nelt;
rtx target, op0, op1, x;
rtx rperm[32], vperm;
instead. */
static bool
-valid_perm_using_mode_p (enum machine_mode vmode, struct expand_vec_perm_d *d)
+valid_perm_using_mode_p (machine_mode vmode, struct expand_vec_perm_d *d)
{
unsigned int i, j, chunk;
{
unsigned i, nelt, eltsz, mask;
unsigned char perm[64];
- enum machine_mode vmode = V16QImode;
+ machine_mode vmode = V16QImode;
rtx rperm[64], vperm, target, op0, op1;
nelt = d->nelt;
{
unsigned i, which, nelt = d->nelt;
struct expand_vec_perm_d dcopy, dcopy1;
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
bool ok;
/* Use the same checks as in expand_vec_perm_blend. */
expand_vec_perm_broadcast_1 (struct expand_vec_perm_d *d)
{
unsigned elt = d->perm[0], nelt2 = d->nelt / 2;
- enum machine_mode vmode = d->vmode;
+ machine_mode vmode = d->vmode;
unsigned char perm2[4];
rtx op0 = d->op0, dest;
bool ok;
/* Implement targetm.vectorize.vec_perm_const_ok. */
static bool
-ix86_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+ix86_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel)
{
struct expand_vec_perm_d d;
void
ix86_expand_vecop_qihi (enum rtx_code code, rtx dest, rtx op1, rtx op2)
{
- enum machine_mode qimode = GET_MODE (dest);
- enum machine_mode himode;
+ machine_mode qimode = GET_MODE (dest);
+ machine_mode himode;
rtx (*gen_il) (rtx, rtx, rtx);
rtx (*gen_ih) (rtx, rtx, rtx);
rtx op1_l, op1_h, op2_l, op2_h, res_l, res_h;
static bool
const_vector_equal_evenodd_p (rtx op)
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
int i, nunits = GET_MODE_NUNITS (mode);
if (GET_CODE (op) != CONST_VECTOR
|| nunits != CONST_VECTOR_NUNITS (op))
ix86_expand_mul_widen_evenodd (rtx dest, rtx op1, rtx op2,
bool uns_p, bool odd_p)
{
- enum machine_mode mode = GET_MODE (op1);
- enum machine_mode wmode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (op1);
+ machine_mode wmode = GET_MODE (dest);
rtx x;
rtx orig_op1 = op1, orig_op2 = op2;
ix86_expand_mul_widen_hilo (rtx dest, rtx op1, rtx op2,
bool uns_p, bool high_p)
{
- enum machine_mode wmode = GET_MODE (dest);
- enum machine_mode mode = GET_MODE (op1);
+ machine_mode wmode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (op1);
rtx t1, t2, t3, t4, mask;
switch (mode)
void
ix86_expand_sse2_mulvxdi3 (rtx op0, rtx op1, rtx op2)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
rtx t1, t2, t3, t4, t5, t6;
if (TARGET_AVX512DQ && mode == V8DImode)
}
else
{
- enum machine_mode nmode;
+ machine_mode nmode;
rtx (*umul) (rtx, rtx, rtx);
if (mode == V2DImode)
void
ix86_expand_sse2_abs (rtx target, rtx input)
{
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
rtx tmp0, tmp1, x;
switch (mode)
case V4SImode:
case V2DImode:
{
- enum machine_mode srcmode, dstmode;
+ machine_mode srcmode, dstmode;
rtx (*pinsr)(rtx, rtx, rtx, rtx);
srcmode = mode_for_size (size, MODE_INT, 0);
enabled for other processors. */
static int
-ix86_reassociation_width (unsigned int, enum machine_mode mode)
+ix86_reassociation_width (unsigned int, machine_mode mode)
{
int res = 1;
/* ??? No autovectorization into MMX or 3DNOW until we can reliably
place emms and femms instructions. */
-static enum machine_mode
-ix86_preferred_simd_mode (enum machine_mode mode)
+static machine_mode
+ix86_preferred_simd_mode (machine_mode mode)
{
if (!TARGET_SSE)
return word_mode;
and of class RCLASS for spilling instead of memory. Return NO_REGS
if it is not possible or non-profitable. */
static reg_class_t
-ix86_spill_class (reg_class_t rclass, enum machine_mode mode)
+ix86_spill_class (reg_class_t rclass, machine_mode mode)
{
if (TARGET_SSE && TARGET_GENERAL_REGS_SSE_SPILL && ! TARGET_MMX
&& (mode == SImode || (TARGET_64BIT && mode == DImode))
if (const_rtx x = *iter)
if (MEM_P (x))
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
unsigned int n_words = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
if (n_words > 4)
mem_count += 2;
(clobber (match_scratch:<ssevecmode> 4))])]
"!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH"
{
- enum machine_mode mode = <MODE>mode;
- enum machine_mode vecmode = <ssevecmode>mode;
+ machine_mode mode = <MODE>mode;
+ machine_mode vecmode = <ssevecmode>mode;
REAL_VALUE_TYPE TWO31r;
rtx two31;
&& reload_completed && SSE_REG_P (operands[0])"
[(const_int 0)]
{
- const enum machine_mode vmode = <MODEF:ssevecmode>mode;
- const enum machine_mode mode = <MODEF:MODE>mode;
+ const machine_mode vmode = <MODEF:ssevecmode>mode;
+ const machine_mode mode = <MODEF:MODE>mode;
rtx t, op0 = simplify_gen_subreg (vmode, operands[0], mode, 0);
emit_move_insn (op0, CONST0_RTX (vmode));
"reload_completed && ix86_avoid_lea_for_addr (insn, operands)"
[(const_int 0)]
{
- enum machine_mode mode = <MODE>mode;
+ machine_mode mode = <MODE>mode;
rtx pat;
/* ix86_avoid_lea_for_addr re-recognizes insn and may
"reload_completed && ix86_lea_for_add_ok (insn, operands)"
[(const_int 0)]
{
- enum machine_mode mode = <MODE>mode;
+ machine_mode mode = <MODE>mode;
rtx pat;
if (<MODE_SIZE> < GET_MODE_SIZE (SImode))
"&& reload_completed"
[(const_int 0)]
{
- enum machine_mode mode = SImode;
+ machine_mode mode = SImode;
rtx pat;
operands[0] = gen_lowpart (mode, operands[0]);
"&& reload_completed"
[(const_int 0)]
{
- enum machine_mode mode = SImode;
+ machine_mode mode = SImode;
rtx pat;
operands[0] = gen_lowpart (mode, operands[0]);
"&& reload_completed"
[(const_int 0)]
{
- enum machine_mode mode = SImode;
+ machine_mode mode = SImode;
rtx pat;
operands[0] = gen_lowpart (mode, operands[0]);
"&& reload_completed"
[(const_int 0)]
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtx pat;
if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (SImode))
HOST_WIDE_INT len = INTVAL (operands[3]);
HOST_WIDE_INT pos = INTVAL (operands[4]);
HOST_WIDE_INT mask;
- enum machine_mode mode, submode;
+ machine_mode mode, submode;
mode = GET_MODE (val);
if (MEM_P (val))
(match_operand:SWIM 2 "<general_szext_operand>")))]
""
{
- enum machine_mode mode = <MODE>mode;
+ machine_mode mode = <MODE>mode;
rtx (*insn) (rtx, rtx);
if (CONST_INT_P (operands[2]) && REG_P (operands[0]))
[(const_int 0)]
{
HOST_WIDE_INT ival = INTVAL (operands[2]);
- enum machine_mode mode;
+ machine_mode mode;
rtx (*insn) (rtx, rtx);
if (ival == (HOST_WIDE_INT) 0xffffffff)
"reload_completed && SSE_REG_P (operands[0])"
[(set (match_dup 0) (match_dup 3))]
{
- enum machine_mode mode = GET_MODE (operands[0]);
- enum machine_mode vmode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[0]);
+ machine_mode vmode = GET_MODE (operands[2]);
rtx tmp;
operands[0] = simplify_gen_subreg (vmode, operands[0], mode, 0);
&& true_regnum (operands[0]) != true_regnum (operands[1])"
[(const_int 0)]
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtx pat;
if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (SImode))
(clobber (reg:CC FLAGS_REG))])]
""
{
- enum machine_mode flags_mode;
+ machine_mode flags_mode;
if (<MODE>mode == SImode && !TARGET_CMOVE)
{
(parallel [(set (match_dup 0) (plus:SI (match_dup 0) (const_int 1)))
(clobber (reg:CC FLAGS_REG))])]
{
- enum machine_mode flags_mode
+ machine_mode flags_mode
= (TARGET_BMI && !TARGET_AVOID_FALSE_DEP_FOR_BMI) ? CCCmode : CCZmode;
operands[3] = gen_lowpart (QImode, operands[2]);
[(set (match_dup 5) (match_dup 4))
(set (match_dup 0) (match_dup 1))]
{
- enum machine_mode op1mode = GET_MODE (operands[1]);
- enum machine_mode mode = op1mode == DImode ? DImode : SImode;
+ machine_mode op1mode = GET_MODE (operands[1]);
+ machine_mode mode = op1mode == DImode ? DImode : SImode;
int scale = 1 << INTVAL (operands[2]);
rtx index = gen_lowpart (word_mode, operands[1]);
rtx base = gen_lowpart (word_mode, operands[5]);
(define_predicate "fcmov_comparison_operator"
(match_operand 0 "comparison_operator")
{
- enum machine_mode inmode = GET_MODE (XEXP (op, 0));
+ machine_mode inmode = GET_MODE (XEXP (op, 0));
enum rtx_code code = GET_CODE (op);
if (inmode == CCFPmode || inmode == CCFPUmode)
(define_predicate "ix86_comparison_operator"
(match_operand 0 "comparison_operator")
{
- enum machine_mode inmode = GET_MODE (XEXP (op, 0));
+ machine_mode inmode = GET_MODE (XEXP (op, 0));
enum rtx_code code = GET_CODE (op);
if (inmode == CCFPmode || inmode == CCFPUmode)
(define_predicate "ix86_carry_flag_operator"
(match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
- enum machine_mode inmode = GET_MODE (XEXP (op, 0));
+ machine_mode inmode = GET_MODE (XEXP (op, 0));
enum rtx_code code = GET_CODE (op);
if (inmode == CCFPmode || inmode == CCFPUmode)
UNSPEC_ROUND))]
"TARGET_ROUND && !flag_trapping_math"
{
- enum machine_mode scalar_mode;
+ machine_mode scalar_mode;
const struct real_format *fmt;
REAL_VALUE_TYPE pred_half, half_minus_pred_half;
rtx half, vec_half;
[(match_operand 3 "const_int_operand" "n, n")])))]
"TARGET_SSSE3"
{
- enum machine_mode imode = GET_MODE_INNER (GET_MODE (operands[0]));
+ machine_mode imode = GET_MODE_INNER (GET_MODE (operands[0]));
operands[2] = GEN_INT (INTVAL (operands[3]) * GET_MODE_SIZE (imode));
switch (which_alternative)
}
else
{
- enum machine_mode hmode = <CASHMODE>mode;
+ machine_mode hmode = <CASHMODE>mode;
rtx lo_o, lo_e, lo_n, hi_o, hi_e, hi_n;
lo_o = operands[1];
extern void ia64_emit_cond_move (rtx, rtx, rtx);
extern int ia64_depz_field_mask (rtx, rtx);
extern void ia64_split_tmode_move (rtx[]);
-extern bool ia64_expand_movxf_movrf (enum machine_mode, rtx[]);
+extern bool ia64_expand_movxf_movrf (machine_mode, rtx[]);
extern void ia64_expand_compare (rtx *, rtx *, rtx *);
extern void ia64_expand_vecint_cmov (rtx[]);
-extern bool ia64_expand_vecint_minmax (enum rtx_code, enum machine_mode, rtx[]);
+extern bool ia64_expand_vecint_minmax (enum rtx_code, machine_mode, rtx[]);
extern void ia64_unpack_assemble (rtx, rtx, rtx, bool);
extern void ia64_expand_unpack (rtx [], bool, bool);
extern void ia64_expand_widen_sum (rtx[], bool);
extern int ia64_hard_regno_rename_ok (int, int);
extern enum reg_class ia64_secondary_reload_class (enum reg_class,
- enum machine_mode, rtx);
+ machine_mode, rtx);
extern const char *get_bundle_name (int);
extern const char *output_probe_stack_range (rtx, rtx);
#ifdef TREE_CODE
#ifdef RTX_CODE
-extern rtx ia64_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+extern rtx ia64_expand_builtin (tree, rtx, rtx, machine_mode, int);
extern rtx ia64_va_arg (tree, tree);
#endif /* RTX_CODE */
#ifdef ARGS_SIZE_RTX
/* expr.h defines ARGS_SIZE_RTX and `enum direction'. */
-extern enum direction ia64_hpux_function_arg_padding (enum machine_mode, const_tree);
+extern enum direction ia64_hpux_function_arg_padding (machine_mode, const_tree);
#endif /* ARGS_SIZE_RTX */
extern void ia64_hpux_handle_builtin_pragma (struct cpp_reader *);
static void ia64_set_sched_context (void *);
static void ia64_clear_sched_context (void *);
static void ia64_free_sched_context (void *);
-static int ia64_mode_to_int (enum machine_mode);
+static int ia64_mode_to_int (machine_mode);
static void ia64_set_sched_flags (spec_info_t);
static ds_t ia64_get_insn_spec_ds (rtx_insn *);
static ds_t ia64_get_insn_checked_ds (rtx_insn *);
static void ia64_option_override (void);
static bool ia64_can_eliminate (const int, const int);
-static enum machine_mode hfa_element_mode (const_tree, bool);
-static void ia64_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static machine_mode hfa_element_mode (const_tree, bool);
+static void ia64_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree, int *, int);
-static int ia64_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int ia64_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static rtx ia64_function_arg_1 (cumulative_args_t, enum machine_mode,
+static rtx ia64_function_arg_1 (cumulative_args_t, machine_mode,
const_tree, bool, bool);
-static rtx ia64_function_arg (cumulative_args_t, enum machine_mode,
+static rtx ia64_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
static rtx ia64_function_incoming_arg (cumulative_args_t,
- enum machine_mode, const_tree, bool);
-static void ia64_function_arg_advance (cumulative_args_t, enum machine_mode,
+ machine_mode, const_tree, bool);
+static void ia64_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static unsigned int ia64_function_arg_boundary (enum machine_mode,
+static unsigned int ia64_function_arg_boundary (machine_mode,
const_tree);
static bool ia64_function_ok_for_sibcall (tree, tree);
static bool ia64_return_in_memory (const_tree, const_tree);
static rtx ia64_function_value (const_tree, const_tree, bool);
-static rtx ia64_libcall_value (enum machine_mode, const_rtx);
+static rtx ia64_libcall_value (machine_mode, const_rtx);
static bool ia64_function_value_regno_p (const unsigned int);
-static int ia64_register_move_cost (enum machine_mode, reg_class_t,
+static int ia64_register_move_cost (machine_mode, reg_class_t,
reg_class_t);
-static int ia64_memory_move_cost (enum machine_mode mode, reg_class_t,
+static int ia64_memory_move_cost (machine_mode mode, reg_class_t,
bool);
static bool ia64_rtx_costs (rtx, int, int, int, int *, bool);
static int ia64_unspec_may_trap_p (const_rtx, unsigned);
static int ia64_hpux_reloc_rw_mask (void) ATTRIBUTE_UNUSED;
static int ia64_reloc_rw_mask (void) ATTRIBUTE_UNUSED;
-static section *ia64_select_rtx_section (enum machine_mode, rtx,
+static section *ia64_select_rtx_section (machine_mode, rtx,
unsigned HOST_WIDE_INT);
static void ia64_output_dwarf_dtprel (FILE *, int, rtx)
ATTRIBUTE_UNUSED;
ATTRIBUTE_UNUSED;
static void ia64_soft_fp_init_libfuncs (void)
ATTRIBUTE_UNUSED;
-static bool ia64_vms_valid_pointer_mode (enum machine_mode mode)
+static bool ia64_vms_valid_pointer_mode (machine_mode mode)
ATTRIBUTE_UNUSED;
static tree ia64_vms_common_object_attribute (tree *, tree, tree, int, bool *)
ATTRIBUTE_UNUSED;
static void ia64_encode_section_info (tree, rtx, int);
static rtx ia64_struct_value_rtx (tree, int);
static tree ia64_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
-static bool ia64_scalar_mode_supported_p (enum machine_mode mode);
-static bool ia64_vector_mode_supported_p (enum machine_mode mode);
-static bool ia64_libgcc_floating_mode_supported_p (enum machine_mode mode);
-static bool ia64_legitimate_constant_p (enum machine_mode, rtx);
-static bool ia64_legitimate_address_p (enum machine_mode, rtx, bool);
-static bool ia64_cannot_force_const_mem (enum machine_mode, rtx);
+static bool ia64_scalar_mode_supported_p (machine_mode mode);
+static bool ia64_vector_mode_supported_p (machine_mode mode);
+static bool ia64_libgcc_floating_mode_supported_p (machine_mode mode);
+static bool ia64_legitimate_constant_p (machine_mode, rtx);
+static bool ia64_legitimate_address_p (machine_mode, rtx, bool);
+static bool ia64_cannot_force_const_mem (machine_mode, rtx);
static const char *ia64_mangle_type (const_tree);
static const char *ia64_invalid_conversion (const_tree, const_tree);
static const char *ia64_invalid_unary_op (int, const_tree);
static const char *ia64_invalid_binary_op (int, const_tree, const_tree);
-static enum machine_mode ia64_c_mode_for_suffix (char);
+static machine_mode ia64_c_mode_for_suffix (char);
static void ia64_trampoline_init (rtx, tree, rtx);
static void ia64_override_options_after_change (void);
-static bool ia64_member_type_forces_blk (const_tree, enum machine_mode);
+static bool ia64_member_type_forces_blk (const_tree, machine_mode);
static tree ia64_builtin_decl (unsigned, bool);
static reg_class_t ia64_preferred_reload_class (rtx, reg_class_t);
-static enum machine_mode ia64_get_reg_raw_mode (int regno);
+static machine_mode ia64_get_reg_raw_mode (int regno);
static section * ia64_hpux_function_section (tree, enum node_frequency,
bool, bool);
-static bool ia64_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+static bool ia64_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel);
#define MAX_VECT_LEN 8
{
rtx target, op0, op1;
unsigned char perm[MAX_VECT_LEN];
- enum machine_mode vmode;
+ machine_mode vmode;
unsigned char nelt;
bool one_operand_p;
bool testing_p;
/* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-ia64_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+ia64_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x, bool strict)
{
if (ia64_legitimate_address_reg (x, strict))
field in an instruction. */
static bool
-ia64_legitimate_constant_p (enum machine_mode mode, rtx x)
+ia64_legitimate_constant_p (machine_mode mode, rtx x)
{
switch (GET_CODE (x))
{
/* Don't allow TLS addresses to get spilled to memory. */
static bool
-ia64_cannot_force_const_mem (enum machine_mode mode, rtx x)
+ia64_cannot_force_const_mem (machine_mode mode, rtx x)
{
if (mode == RFmode)
return true;
rtx
ia64_expand_move (rtx op0, rtx op1)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
if (!reload_in_progress && !reload_completed && !ia64_move_ok (op0, op1))
op1 = force_reg (mode, op1);
we see something like the above, we spill the inner register to memory. */
static rtx
-spill_xfmode_rfmode_operand (rtx in, int force, enum machine_mode mode)
+spill_xfmode_rfmode_operand (rtx in, int force, machine_mode mode)
{
if (GET_CODE (in) == SUBREG
&& GET_MODE (SUBREG_REG (in)) == TImode
DONE. */
bool
-ia64_expand_movxf_movrf (enum machine_mode mode, rtx operands[])
+ia64_expand_movxf_movrf (machine_mode mode, rtx operands[])
{
rtx op0 = operands[0];
been reversed, and so the sense of the comparison should be inverted. */
static bool
-ia64_expand_vecint_compare (enum rtx_code code, enum machine_mode mode,
+ia64_expand_vecint_compare (enum rtx_code code, machine_mode mode,
rtx dest, rtx op0, rtx op1)
{
bool negate = false;
void
ia64_expand_vecint_cmov (rtx operands[])
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
enum rtx_code code = GET_CODE (operands[3]);
bool negate;
rtx cmp, x, ot, of;
/* Emit an integral vector min or max operation. Return true if all done. */
bool
-ia64_expand_vecint_minmax (enum rtx_code code, enum machine_mode mode,
+ia64_expand_vecint_minmax (enum rtx_code code, machine_mode mode,
rtx operands[])
{
rtx xops[6];
void
ia64_unpack_assemble (rtx out, rtx lo, rtx hi, bool highp)
{
- enum machine_mode vmode = GET_MODE (lo);
+ machine_mode vmode = GET_MODE (lo);
unsigned int i, high, nelt = GET_MODE_NUNITS (vmode);
struct expand_vec_perm_d d;
bool ok;
static rtx
ia64_unpack_sign (rtx vec, bool unsignedp)
{
- enum machine_mode mode = GET_MODE (vec);
+ machine_mode mode = GET_MODE (vec);
rtx zero = CONST0_RTX (mode);
if (unsignedp)
void
ia64_expand_widen_sum (rtx operands[3], bool unsignedp)
{
- enum machine_mode wmode;
+ machine_mode wmode;
rtx l, h, t, sign;
sign = ia64_unpack_sign (operands[1], unsignedp);
ia64_expand_atomic_op (enum rtx_code code, rtx mem, rtx val,
rtx old_dst, rtx new_dst, enum memmodel model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx old_reg, new_reg, cmp_reg, ar_ccv, label;
enum insn_code icode;
We generate the actual spill instructions during prologue generation. */
static void
-ia64_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+ia64_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int * pretend_size,
int second_time ATTRIBUTE_UNUSED)
{
have already decided to pass them by reference. Top-level zero-sized
aggregates are excluded because our parallels crash the middle-end. */
-static enum machine_mode
+static machine_mode
hfa_element_mode (const_tree type, bool nested)
{
- enum machine_mode element_mode = VOIDmode;
- enum machine_mode mode;
+ machine_mode element_mode = VOIDmode;
+ machine_mode mode;
enum tree_code code = TREE_CODE (type);
int know_element_mode = 0;
tree t;
/* Return the number of words required to hold a quantity of TYPE and MODE
when passed as an argument. */
static int
-ia64_function_arg_words (const_tree type, enum machine_mode mode)
+ia64_function_arg_words (const_tree type, machine_mode mode)
{
int words;
registers. */
static rtx
-ia64_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
+ia64_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named, bool incoming)
{
const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int basereg = (incoming ? GR_ARG_FIRST : AR_ARG_FIRST);
int words = ia64_function_arg_words (type, mode);
int offset = ia64_function_arg_offset (cum, type, words);
- enum machine_mode hfa_mode = VOIDmode;
+ machine_mode hfa_mode = VOIDmode;
/* For OPEN VMS, emit the instruction setting up the argument register here,
when we know this will be together with the other arguments setup related
for (; offset < byte_size && int_regs < MAX_ARGUMENT_SLOTS; i++)
{
- enum machine_mode gr_mode = DImode;
+ machine_mode gr_mode = DImode;
unsigned int gr_size;
/* If we have an odd 4 byte hunk because we ran out of FR regs,
else
{
/* See comment above. */
- enum machine_mode inner_mode =
+ machine_mode inner_mode =
(BYTES_BIG_ENDIAN && mode == SFmode) ? DImode : mode;
rtx fp_reg = gen_rtx_EXPR_LIST (VOIDmode,
/* Implement TARGET_FUNCION_ARG target hook. */
static rtx
-ia64_function_arg (cumulative_args_t cum, enum machine_mode mode,
+ia64_function_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
return ia64_function_arg_1 (cum, mode, type, named, false);
static rtx
ia64_function_incoming_arg (cumulative_args_t cum,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named)
{
return ia64_function_arg_1 (cum, mode, type, named, true);
in memory. */
static int
-ia64_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+ia64_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Return ivms_arg_type based on machine_mode. */
static enum ivms_arg_type
-ia64_arg_type (enum machine_mode mode)
+ia64_arg_type (machine_mode mode)
{
switch (mode)
{
ia64_function_arg. */
static void
-ia64_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+ia64_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int words = ia64_function_arg_words (type, mode);
int offset = ia64_function_arg_offset (cum, type, words);
- enum machine_mode hfa_mode = VOIDmode;
+ machine_mode hfa_mode = VOIDmode;
/* If all arg slots are already full, then there is nothing to do. */
if (cum->words >= MAX_ARGUMENT_SLOTS)
even though their normal alignment is 8 bytes. See ia64_function_arg. */
static unsigned int
-ia64_function_arg_boundary (enum machine_mode mode, const_tree type)
+ia64_function_arg_boundary (machine_mode mode, const_tree type)
{
if (mode == TFmode && TARGET_HPUX && TARGET_ILP32)
return PARM_BOUNDARY * 2;
static bool
ia64_return_in_memory (const_tree valtype, const_tree fntype ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
- enum machine_mode hfa_mode;
+ machine_mode mode;
+ machine_mode hfa_mode;
HOST_WIDE_INT byte_size;
mode = TYPE_MODE (valtype);
const_tree fn_decl_or_type,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
- enum machine_mode hfa_mode;
+ machine_mode mode;
+ machine_mode hfa_mode;
int unsignedp;
const_tree func = fn_decl_or_type;
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-ia64_libcall_value (enum machine_mode mode,
+ia64_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode,
one in class TO, using MODE. */
static int
-ia64_register_move_cost (enum machine_mode mode, reg_class_t from,
+ia64_register_move_cost (machine_mode mode, reg_class_t from,
reg_class_t to)
{
/* ADDL_REGS is the same as GR_REGS for movement purposes. */
memory. */
static int
-ia64_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+ia64_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass,
bool in ATTRIBUTE_UNUSED)
{
enum reg_class
ia64_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+ machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
int regno = -1;
return gen_ld[mode_no];
}
-/* Constants that help mapping 'enum machine_mode' to int. */
+/* Constants that help mapping 'machine_mode' to int. */
enum SPEC_MODES
{
SPEC_MODE_INVALID = -1,
/* Return index of the MODE. */
static int
-ia64_mode_to_int (enum machine_mode mode)
+ia64_mode_to_int (machine_mode mode)
{
switch (mode)
{
rtx
ia64_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
case IA64_BUILTIN_INFQ:
case IA64_BUILTIN_HUGE_VALQ:
{
- enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
+ machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
REAL_VALUE_TYPE inf;
rtx tmp;
most significant bits of the stack slot. */
enum direction
-ia64_hpux_function_arg_padding (enum machine_mode mode, const_tree type)
+ia64_hpux_function_arg_padding (machine_mode mode, const_tree type)
{
/* Exception to normal case for structures/unions/etc. */
}
static bool
-ia64_vms_valid_pointer_mode (enum machine_mode mode)
+ia64_vms_valid_pointer_mode (machine_mode mode)
{
return (mode == SImode || mode == DImode);
}
is to honor small data. */
static section *
-ia64_select_rtx_section (enum machine_mode mode, rtx x,
+ia64_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
if (GET_MODE_SIZE (mode) > 0
}
static bool
-ia64_scalar_mode_supported_p (enum machine_mode mode)
+ia64_scalar_mode_supported_p (machine_mode mode)
{
switch (mode)
{
}
static bool
-ia64_vector_mode_supported_p (enum machine_mode mode)
+ia64_vector_mode_supported_p (machine_mode mode)
{
switch (mode)
{
/* Implement TARGET_LIBGCC_FLOATING_MODE_SUPPORTED_P. */
static bool
-ia64_libgcc_floating_mode_supported_p (enum machine_mode mode)
+ia64_libgcc_floating_mode_supported_p (machine_mode mode)
{
switch (mode)
{
/* Target hook for c_mode_for_suffix. */
-static enum machine_mode
+static machine_mode
ia64_c_mode_for_suffix (char suffix)
{
if (suffix == 'q')
return ia64_dconst_0_375_rtx;
}
-static enum machine_mode
+static machine_mode
ia64_get_reg_raw_mode (int regno)
{
if (FR_REGNO_P (regno))
anymore. */
bool
-ia64_member_type_forces_blk (const_tree, enum machine_mode mode)
+ia64_member_type_forces_blk (const_tree, machine_mode mode)
{
return TARGET_HPUX && mode == TFmode;
}
expand_vselect_vconcat (rtx target, rtx op0, rtx op1,
const unsigned char *perm, unsigned nelt)
{
- enum machine_mode v2mode;
+ machine_mode v2mode;
rtx x;
v2mode = GET_MODE_2XWIDER_MODE (GET_MODE (op0));
/* Implement targetm.vectorize.vec_perm_const_ok. */
static bool
-ia64_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+ia64_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel)
{
struct expand_vec_perm_d d;
ia64_expand_vec_perm_even_odd (rtx target, rtx op0, rtx op1, int odd)
{
struct expand_vec_perm_d d;
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
unsigned int i, nelt = GET_MODE_NUNITS (vmode);
bool ok;
#ifndef GCC_IQ2000_PROTOS_H
#define GCC_IQ2000_PROTOS_H
-extern int iq2000_check_split (rtx, enum machine_mode);
-extern int iq2000_reg_mode_ok_for_base_p (rtx, enum machine_mode, int);
+extern int iq2000_check_split (rtx, machine_mode);
+extern int iq2000_reg_mode_ok_for_base_p (rtx, machine_mode, int);
extern const char * iq2000_fill_delay_slot (const char *, enum delay_type, rtx *, rtx_insn *);
extern const char * iq2000_move_1word (rtx *, rtx_insn *, int);
extern HOST_WIDE_INT iq2000_debugger_offset (rtx, HOST_WIDE_INT);
#ifdef RTX_CODE
extern rtx gen_int_relational (enum rtx_code, rtx, rtx, rtx, int *);
-extern void gen_conditional_branch (rtx *, enum machine_mode);
+extern void gen_conditional_branch (rtx *, machine_mode);
#endif
#ifdef TREE_CODE
static rtx iq2000_load_reg4;
/* Mode used for saving/restoring general purpose registers. */
-static enum machine_mode gpr_mode;
+static machine_mode gpr_mode;
\f
/* Initialize the GCC target structure. */
static struct machine_function* iq2000_init_machine_status (void);
static void iq2000_option_override (void);
-static section *iq2000_select_rtx_section (enum machine_mode, rtx,
+static section *iq2000_select_rtx_section (machine_mode, rtx,
unsigned HOST_WIDE_INT);
static void iq2000_init_builtins (void);
-static rtx iq2000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx iq2000_expand_builtin (tree, rtx, rtx, machine_mode, int);
static bool iq2000_return_in_memory (const_tree, const_tree);
static void iq2000_setup_incoming_varargs (cumulative_args_t,
- enum machine_mode, tree, int *,
+ machine_mode, tree, int *,
int);
static bool iq2000_rtx_costs (rtx, int, int, int, int *, bool);
-static int iq2000_address_cost (rtx, enum machine_mode, addr_space_t,
+static int iq2000_address_cost (rtx, machine_mode, addr_space_t,
bool);
static section *iq2000_select_section (tree, int, unsigned HOST_WIDE_INT);
-static rtx iq2000_legitimize_address (rtx, rtx, enum machine_mode);
-static bool iq2000_pass_by_reference (cumulative_args_t, enum machine_mode,
+static rtx iq2000_legitimize_address (rtx, rtx, machine_mode);
+static bool iq2000_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static int iq2000_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int iq2000_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
static rtx iq2000_function_arg (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static void iq2000_function_arg_advance (cumulative_args_t,
- enum machine_mode, const_tree, bool);
-static unsigned int iq2000_function_arg_boundary (enum machine_mode,
+ machine_mode, const_tree, bool);
+static unsigned int iq2000_function_arg_boundary (machine_mode,
const_tree);
static void iq2000_va_start (tree, rtx);
-static bool iq2000_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool iq2000_legitimate_address_p (machine_mode, rtx, bool);
static bool iq2000_can_eliminate (const int, const int);
static void iq2000_asm_trampoline_template (FILE *);
static void iq2000_trampoline_init (rtx, tree, rtx);
static rtx iq2000_function_value (const_tree, const_tree, bool);
-static rtx iq2000_libcall_value (enum machine_mode, const_rtx);
+static rtx iq2000_libcall_value (machine_mode, const_rtx);
static void iq2000_print_operand (FILE *, rtx, int);
static void iq2000_print_operand_address (FILE *, rtx);
static bool iq2000_print_operand_punct_valid_p (unsigned char code);
/* Return nonzero if we split the address into high and low parts. */
int
-iq2000_check_split (rtx address, enum machine_mode mode)
+iq2000_check_split (rtx address, machine_mode mode)
{
/* This is the same check used in simple_memory_operand.
We use it here because LO_SUM is not offsettable. */
int
iq2000_reg_mode_ok_for_base_p (rtx reg,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int strict)
{
return (strict
function is called during reload. */
bool
-iq2000_legitimate_address_p (enum machine_mode mode, rtx xinsn, bool strict)
+iq2000_legitimate_address_p (machine_mode mode, rtx xinsn, bool strict)
{
if (TARGET_DEBUG_A_MODE)
{
}
if (TARGET_DEBUG_A_MODE)
- GO_PRINTF ("Not a enum machine_mode mode, legitimate address\n");
+ GO_PRINTF ("Not a machine_mode mode, legitimate address\n");
/* The address was not legitimate. */
return 0;
rtx_insn *cur_insn)
{
rtx set_reg;
- enum machine_mode mode;
+ machine_mode mode;
rtx_insn *next_insn = cur_insn ? NEXT_INSN (cur_insn) : NULL;
int num_nops;
rtx op1 = operands[1];
enum rtx_code code0 = GET_CODE (op0);
enum rtx_code code1 = GET_CODE (op1);
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
int subreg_offset0 = 0;
int subreg_offset1 = 0;
enum delay_type delay = DELAY_NONE;
/* Provide the costs of an addressing mode that contains ADDR. */
static int
-iq2000_address_cost (rtx addr, enum machine_mode mode, addr_space_t as,
+iq2000_address_cost (rtx addr, machine_mode mode, addr_space_t as,
bool speed)
{
switch (GET_CODE (addr))
};
enum internal_test test;
- enum machine_mode mode;
+ machine_mode mode;
struct cmp_info *p_info;
int branch_p;
int eqne_p;
The comparison operands are saved away by cmp{si,di,sf,df}. */
void
-gen_conditional_branch (rtx operands[], enum machine_mode mode)
+gen_conditional_branch (rtx operands[], machine_mode mode)
{
enum rtx_code test_code = GET_CODE (operands[0]);
rtx cmp0 = operands[1];
position in CUM. */
static void
-iq2000_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+iq2000_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
and type TYPE in CUM, or 0 if the argument is to be passed on the stack. */
static rtx
-iq2000_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+iq2000_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
if (mode == VOIDmode)
{
if (cum->num_adjusts > 0)
- ret = gen_rtx_PARALLEL ((enum machine_mode) cum->fp_code,
+ ret = gen_rtx_PARALLEL ((machine_mode) cum->fp_code,
gen_rtvec_v (cum->num_adjusts, cum->adjust));
}
}
static unsigned int
-iq2000_function_arg_boundary (enum machine_mode mode, const_tree type)
+iq2000_function_arg_boundary (machine_mode mode, const_tree type)
{
return (type != NULL_TREE
? (TYPE_ALIGN (type) <= PARM_BOUNDARY
}
static int
-iq2000_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+iq2000_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
for (cur_arg = fnargs; cur_arg != 0; cur_arg = next_arg)
{
tree passed_type = DECL_ARG_TYPE (cur_arg);
- enum machine_mode passed_mode = TYPE_MODE (passed_type);
+ machine_mode passed_mode = TYPE_MODE (passed_type);
rtx entry_parm;
if (TREE_ADDRESSABLE (passed_type))
mode MODE. */
static section *
-iq2000_select_rtx_section (enum machine_mode mode, rtx x ATTRIBUTE_UNUSED,
+iq2000_select_rtx_section (machine_mode mode, rtx x ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT align)
{
/* For embedded applications, always put constants in read-only data,
bool outgoing ATTRIBUTE_UNUSED)
{
int reg = GP_RETURN;
- enum machine_mode mode = TYPE_MODE (valtype);
+ machine_mode mode = TYPE_MODE (valtype);
int unsignedp = TYPE_UNSIGNED (valtype);
const_tree func = fn_decl_or_type;
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-iq2000_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+iq2000_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (((GET_MODE_CLASS (mode) != MODE_INT
|| GET_MODE_SIZE (mode) >= 4)
/* Return true when an argument must be passed by reference. */
static bool
-iq2000_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
+iq2000_pass_by_reference (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
rtx pat;
tree arg [5];
rtx op [5];
- enum machine_mode mode [5];
+ machine_mode mode [5];
int i;
mode[0] = insn_data[icode].operand[0].mode;
static rtx
iq2000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
static void
iq2000_setup_incoming_varargs (cumulative_args_t cum_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, int * pretend_size,
int no_rtl)
{
rtx
iq2000_legitimize_address (rtx xinsn, rtx old_x ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (TARGET_DEBUG_B_MODE)
{
int opno ATTRIBUTE_UNUSED, int * total,
bool speed ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
switch (code)
{
{
if (iq2000_check_split (operands[1], SImode))
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtx tem = ((reload_in_progress | reload_completed)
? operands[0] : gen_reg_rtx (mode));
extern rtx lm32_return_addr_rtx (int count, rtx frame);
extern int lm32_expand_block_move (rtx *);
extern int nonpic_symbol_mentioned_p (rtx);
-extern rtx lm32_legitimize_pic_address (rtx, enum machine_mode, rtx);
+extern rtx lm32_legitimize_pic_address (rtx, machine_mode, rtx);
extern void lm32_expand_scc (rtx operands[]);
extern void lm32_expand_conditional_branch (rtx operands[]);
-extern bool lm32_move_ok (enum machine_mode, rtx operands[2]);
+extern bool lm32_move_ok (machine_mode, rtx operands[2]);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
- enum machine_mode mode, tree type,
+ machine_mode mode, tree type,
int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
-lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
+lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named);
#undef TARGET_OPTION_OVERRIDE
rtx cmp1,
rtx destination)
{
- enum machine_mode mode;
+ machine_mode mode;
int branch_p;
mode = GET_MODE (cmp0);
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-lm32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static void
-lm32_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
+lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
*get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
}
static void
-lm32_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
+lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
HOST_WIDE_INT offset, delta;
unsigned HOST_WIDE_INT bits;
int i;
- enum machine_mode mode;
+ machine_mode mode;
rtx *regs;
/* Work out how many bits to move at a time. */
lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
bool small_mode;
const int arithmetic_latency = 1;
/* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
+lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
{
/* (rM) */
if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
/* Check a move is not memory to memory. */
bool
-lm32_move_ok (enum machine_mode mode, rtx operands[2]) {
+lm32_move_ok (machine_mode mode, rtx operands[2]) {
if (memory_operand (operands[0], mode))
return register_or_zero_operand (operands[1], mode);
return true;
#ifdef RTX_CODE
-int m32c_cannot_change_mode_class (enum machine_mode, enum machine_mode, int);
+int m32c_cannot_change_mode_class (machine_mode, machine_mode, int);
rtx m32c_eh_return_stackadj_rtx (void);
void m32c_emit_eh_epilogue (rtx);
int m32c_expand_cmpstr (rtx *);
void m32c_expand_neg_mulpsi3 (rtx *);
int m32c_expand_setmemhi (rtx *);
bool m32c_matches_constraint_p (rtx, int);
-int m32c_hard_regno_nregs (int, enum machine_mode);
-int m32c_hard_regno_ok (int, enum machine_mode);
+int m32c_hard_regno_nregs (int, machine_mode);
+int m32c_hard_regno_ok (int, machine_mode);
bool m32c_illegal_subreg_p (rtx);
-bool m32c_immd_dbl_mov (rtx *, enum machine_mode);
+bool m32c_immd_dbl_mov (rtx *, machine_mode);
rtx m32c_incoming_return_addr_rtx (void);
-int m32c_legitimize_reload_address (rtx *, enum machine_mode, int, int, int);
-int m32c_limit_reload_class (enum machine_mode, int);
-int m32c_modes_tieable_p (enum machine_mode, enum machine_mode);
-bool m32c_mov_ok (rtx *, enum machine_mode);
+int m32c_legitimize_reload_address (rtx *, machine_mode, int, int, int);
+int m32c_limit_reload_class (machine_mode, int);
+int m32c_modes_tieable_p (machine_mode, machine_mode);
+bool m32c_mov_ok (rtx *, machine_mode);
char * m32c_output_compare (rtx_insn *, rtx *);
-int m32c_prepare_move (rtx *, enum machine_mode);
+int m32c_prepare_move (rtx *, machine_mode);
int m32c_prepare_shift (rtx *, int, int);
int m32c_reg_ok_for_base_p (rtx, int);
enum reg_class m32c_regno_reg_class (int);
rtx m32c_return_addr_rtx (int);
const char *m32c_scc_pattern (rtx *, RTX_CODE);
-int m32c_secondary_reload_class (int, enum machine_mode, rtx);
-int m32c_split_move (rtx *, enum machine_mode, int);
+int m32c_secondary_reload_class (int, machine_mode, rtx);
+int m32c_split_move (rtx *, machine_mode, int);
int m32c_split_psi_p (rtx *);
int current_function_special_page_vector (rtx);
static bool m32c_fixed_condition_code_regs (unsigned int *, unsigned int *);
static struct machine_function *m32c_init_machine_status (void);
static void m32c_insert_attributes (tree, tree *);
-static bool m32c_legitimate_address_p (enum machine_mode, rtx, bool);
-static bool m32c_addr_space_legitimate_address_p (enum machine_mode, rtx, bool, addr_space_t);
-static rtx m32c_function_arg (cumulative_args_t, enum machine_mode,
+static bool m32c_legitimate_address_p (machine_mode, rtx, bool);
+static bool m32c_addr_space_legitimate_address_p (machine_mode, rtx, bool, addr_space_t);
+static rtx m32c_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static bool m32c_pass_by_reference (cumulative_args_t, enum machine_mode,
+static bool m32c_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static void m32c_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void m32c_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static unsigned int m32c_function_arg_boundary (enum machine_mode, const_tree);
+static unsigned int m32c_function_arg_boundary (machine_mode, const_tree);
static int m32c_pushm_popm (Push_Pop_Type);
static bool m32c_strict_argument_naming (cumulative_args_t);
static rtx m32c_struct_value_rtx (tree, int);
-static rtx m32c_subreg (enum machine_mode, rtx, enum machine_mode, int);
+static rtx m32c_subreg (machine_mode, rtx, machine_mode, int);
static int need_to_save (int);
static rtx m32c_function_value (const_tree, const_tree, bool);
-static rtx m32c_libcall_value (enum machine_mode, const_rtx);
+static rtx m32c_libcall_value (machine_mode, const_rtx);
/* Returns true if an address is specified, else false. */
static bool m32c_get_pragma_address (const char *varname, unsigned *addr);
by print_operand(). */
static const char *
-reg_name_with_mode (int regno, enum machine_mode mode)
+reg_name_with_mode (int regno, machine_mode mode)
{
int mlen = GET_MODE_SIZE (mode);
if (regno == R0_REGNO && mlen == 1)
/* Used by m32c_register_move_cost to determine if a move is
impossibly expensive. */
static bool
-class_can_hold_mode (reg_class_t rclass, enum machine_mode mode)
+class_can_hold_mode (reg_class_t rclass, machine_mode mode)
{
/* Cache the results: 0=untested 1=no 2=yes */
static char results[LIM_REG_CLASSES][MAX_MACHINE_MODE];
different registers are different sizes from each other, *and* may
be different sizes in different chip families. */
static int
-m32c_hard_regno_nregs_1 (int regno, enum machine_mode mode)
+m32c_hard_regno_nregs_1 (int regno, machine_mode mode)
{
if (regno == FLG_REGNO && mode == CCmode)
return 1;
}
int
-m32c_hard_regno_nregs (int regno, enum machine_mode mode)
+m32c_hard_regno_nregs (int regno, machine_mode mode)
{
int rv = m32c_hard_regno_nregs_1 (regno, mode);
return rv ? rv : 1;
/* Implements HARD_REGNO_MODE_OK. The above function does the work
already; just test its return value. */
int
-m32c_hard_regno_ok (int regno, enum machine_mode mode)
+m32c_hard_regno_ok (int regno, machine_mode mode)
{
return m32c_hard_regno_nregs_1 (regno, mode) != 0;
}
bigger than our registers anyway, it's easier to implement this
function that way, leaving QImode as the only unique case. */
int
-m32c_modes_tieable_p (enum machine_mode m1, enum machine_mode m2)
+m32c_modes_tieable_p (machine_mode m1, machine_mode m2)
{
if (GET_MODE_SIZE (m1) == GET_MODE_SIZE (m2))
return 1;
address registers for reloads since they're needed for address
reloads. */
int
-m32c_limit_reload_class (enum machine_mode mode, int rclass)
+m32c_limit_reload_class (machine_mode mode, int rclass)
{
#if DEBUG_RELOAD
fprintf (stderr, "limit_reload_class for %s: %s ->",
reloaded through appropriately sized general or address
registers. */
int
-m32c_secondary_reload_class (int rclass, enum machine_mode mode, rtx x)
+m32c_secondary_reload_class (int rclass, machine_mode mode, rtx x)
{
int cc = class_contents[rclass][0];
#if DEBUG0
#define TARGET_CLASS_MAX_NREGS m32c_class_max_nregs
static unsigned char
-m32c_class_max_nregs (reg_class_t regclass, enum machine_mode mode)
+m32c_class_max_nregs (reg_class_t regclass, machine_mode mode)
{
int rn;
unsigned char max = 0;
registers (well, it does on a0/a1 but if we let gcc do that, reload
suffers). Otherwise, we allow changes to larger modes. */
int
-m32c_cannot_change_mode_class (enum machine_mode from,
- enum machine_mode to, int rclass)
+m32c_cannot_change_mode_class (machine_mode from,
+ machine_mode to, int rclass)
{
int rn;
#if DEBUG0
rtx
m32c_return_addr_rtx (int count)
{
- enum machine_mode mode;
+ machine_mode mode;
int offset;
rtx ra_mem;
if (ppt == PP_pushm)
{
- enum machine_mode mode = (bytes == 2) ? HImode : SImode;
+ machine_mode mode = (bytes == 2) ? HImode : SImode;
rtx addr;
/* Always use stack_pointer_rtx instead of calling
#define TARGET_FUNCTION_ARG m32c_function_arg
static rtx
m32c_function_arg (cumulative_args_t ca_v,
- enum machine_mode mode, const_tree type, bool named)
+ machine_mode mode, const_tree type, bool named)
{
CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
#define TARGET_PASS_BY_REFERENCE m32c_pass_by_reference
static bool
m32c_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
#define TARGET_FUNCTION_ARG_ADVANCE m32c_function_arg_advance
static void
m32c_function_arg_advance (cumulative_args_t ca_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY m32c_function_arg_boundary
static unsigned int
-m32c_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+m32c_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
return (TARGET_A16 ? 8 : 16);
#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE m32c_valid_pointer_mode
static bool
-m32c_valid_pointer_mode (enum machine_mode mode)
+m32c_valid_pointer_mode (machine_mode mode)
{
if (mode == HImode
|| mode == PSImode
#define TARGET_LIBCALL_VALUE m32c_libcall_value
static rtx
-m32c_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+m32c_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
/* return reg or parallel */
#if 0
bool outgoing ATTRIBUTE_UNUSED)
{
/* return reg or parallel */
- const enum machine_mode mode = TYPE_MODE (valtype);
+ const machine_mode mode = TYPE_MODE (valtype);
return m32c_libcall_value (mode, NULL_RTX);
}
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P m32c_legitimate_address_p
bool
-m32c_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+m32c_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
int mode_adjust;
if (CONSTANT_P (x))
#define TARGET_LEGITIMIZE_ADDRESS m32c_legitimize_address
static rtx
m32c_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
#if DEBUG0
fprintf (stderr, "m32c_legitimize_address for mode %s\n", mode_name[mode]);
/* Implements LEGITIMIZE_RELOAD_ADDRESS. See comment above. */
int
m32c_legitimize_reload_address (rtx * x,
- enum machine_mode mode,
+ machine_mode mode,
int opnum,
int type, int ind_levels ATTRIBUTE_UNUSED)
{
/* Return the appropriate mode for a named address pointer. */
#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE m32c_addr_space_pointer_mode
-static enum machine_mode
+static machine_mode
m32c_addr_space_pointer_mode (addr_space_t addrspace)
{
switch (addrspace)
/* Return the appropriate mode for a named address address. */
#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE m32c_addr_space_address_mode
-static enum machine_mode
+static machine_mode
m32c_addr_space_address_mode (addr_space_t addrspace)
{
switch (addrspace)
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
m32c_addr_space_legitimate_address_p
static bool
-m32c_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
+m32c_addr_space_legitimate_address_p (machine_mode mode, rtx x,
bool strict, addr_space_t as)
{
if (as == ADDR_SPACE_FAR)
#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS m32c_addr_space_legitimize_address
static rtx
-m32c_addr_space_legitimize_address (rtx x, rtx oldx, enum machine_mode mode,
+m32c_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
addr_space_t as)
{
if (as != ADDR_SPACE_GENERIC)
#define TARGET_REGISTER_MOVE_COST m32c_register_move_cost
static int
-m32c_register_move_cost (enum machine_mode mode, reg_class_t from,
+m32c_register_move_cost (machine_mode mode, reg_class_t from,
reg_class_t to)
{
int cost = COSTS_N_INSNS (3);
#define TARGET_MEMORY_MOVE_COST m32c_memory_move_cost
static int
-m32c_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+m32c_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST m32c_address_cost
static int
-m32c_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
+m32c_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
{
int offset;
unsigned int i;
- enum machine_mode src_mode, dest_mode;
+ machine_mode src_mode, dest_mode;
if (GET_CODE (op) == MEM
&& ! m32c_legitimate_address_p (Pmode, XEXP (op, 0), false))
number of address registers, and we can get into a situation where
we need three of them when we only have two. */
bool
-m32c_mov_ok (rtx * operands, enum machine_mode mode ATTRIBUTE_UNUSED)
+m32c_mov_ok (rtx * operands, machine_mode mode ATTRIBUTE_UNUSED)
{
rtx op0 = operands[0];
rtx op1 = operands[1];
location, can be combined into single SImode mov instruction. */
bool
m32c_immd_dbl_mov (rtx * operands ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
/* ??? This relied on the now-defunct MEM_SCALAR and MEM_IN_STRUCT_P
flags. */
/* Subregs are non-orthogonal for us, because our registers are all
different sizes. */
static rtx
-m32c_subreg (enum machine_mode outer,
- rtx x, enum machine_mode inner, int byte)
+m32c_subreg (machine_mode outer,
+ rtx x, machine_mode inner, int byte)
{
int r, nr = -1;
/* Used to emit move instructions. We split some moves,
and avoid mem-mem moves. */
int
-m32c_prepare_move (rtx * operands, enum machine_mode mode)
+m32c_prepare_move (rtx * operands, machine_mode mode)
{
if (far_addr_space_p (operands[0])
&& CONSTANT_P (operands[1]))
(define_expand), 1 if it is not optional (define_insn_and_split),
and 3 for define_split (alternate api). */
int
-m32c_split_move (rtx * operands, enum machine_mode mode, int split_all)
+m32c_split_move (rtx * operands, machine_mode mode, int split_all)
{
rtx s[4], d[4];
int parts, si, di, rev = 0;
int rv = 0, opi = 2;
- enum machine_mode submode = HImode;
+ machine_mode submode = HImode;
rtx *ops, local_ops[10];
/* define_split modifies the existing operands, but the other two
int
m32c_prepare_shift (rtx * operands, int scale, int shift_code)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
shift_gen_func func = shift_gen_func_for (mode, shift_code);
rtx temp;
if (cfun->machine->is_interrupt)
{
- enum machine_mode spmode = TARGET_A16 ? HImode : PSImode;
+ machine_mode spmode = TARGET_A16 ? HImode : PSImode;
/* REIT clears B flag and restores $fp for us, but we still
have to fix up the stack. USE_RTS just means we didn't
extern rtx m32r_function_symbol (const char *);
#ifdef HAVE_MACHINE_MODES
-extern int call_operand (rtx, enum machine_mode);
-extern int small_data_operand (rtx, enum machine_mode);
-extern int addr24_operand (rtx, enum machine_mode);
-extern int addr32_operand (rtx, enum machine_mode);
-extern int call26_operand (rtx, enum machine_mode);
-extern int memreg_operand (rtx, enum machine_mode);
-extern int small_insn_p (rtx, enum machine_mode);
+extern int call_operand (rtx, machine_mode);
+extern int small_data_operand (rtx, machine_mode);
+extern int addr24_operand (rtx, machine_mode);
+extern int addr32_operand (rtx, machine_mode);
+extern int call26_operand (rtx, machine_mode);
+extern int memreg_operand (rtx, machine_mode);
+extern int small_insn_p (rtx, machine_mode);
#endif /* HAVE_MACHINE_MODES */
static void init_reg_tables (void);
static void block_move_call (rtx, rtx, rtx);
static int m32r_is_insn (rtx);
-static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
-static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
+static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
+static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
static void m32r_print_operand (FILE *, rtx, int);
static bool m32r_in_small_data_p (const_tree);
static bool m32r_return_in_memory (const_tree, const_tree);
static rtx m32r_function_value (const_tree, const_tree, bool);
-static rtx m32r_libcall_value (enum machine_mode, const_rtx);
+static rtx m32r_libcall_value (machine_mode, const_rtx);
static bool m32r_function_value_regno_p (const unsigned int);
-static void m32r_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree, int *, int);
static void init_idents (void);
static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
-static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
-static bool m32r_pass_by_reference (cumulative_args_t, enum machine_mode,
+static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
+static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static int m32r_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static rtx m32r_function_arg (cumulative_args_t, enum machine_mode,
+static rtx m32r_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void m32r_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
static bool m32r_can_eliminate (const int, const int);
static void m32r_conditional_register_usage (void);
static void m32r_trampoline_init (rtx, tree, rtx);
-static bool m32r_legitimate_constant_p (enum machine_mode, rtx);
+static bool m32r_legitimate_constant_p (machine_mode, rtx);
\f
/* M32R specific attributes. */
for (i = 0; i < NUM_MACHINE_MODES; i++)
{
- enum machine_mode m = (enum machine_mode) i;
+ machine_mode m = (machine_mode) i;
switch (GET_MODE_CLASS (m))
{
}
\f
int
-call_operand (rtx op, enum machine_mode mode)
+call_operand (rtx op, machine_mode mode)
{
if (!MEM_P (op))
return 0;
/* Return 1 if OP is a reference to an object in .sdata/.sbss. */
int
-small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
if (! TARGET_SDATA_USE)
return 0;
/* Return 1 if OP is a symbol that can use 24-bit addressing. */
int
-addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
rtx sym;
/* Return 1 if OP is a symbol that needs 32-bit addressing. */
int
-addr32_operand (rtx op, enum machine_mode mode)
+addr32_operand (rtx op, machine_mode mode)
{
rtx sym;
/* Return 1 if OP is a function that can be called with the `bl' insn. */
int
-call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
if (flag_pic)
return 1;
This is used in insn length calcs. */
int
-memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
return MEM_P (op) && REG_P (XEXP (op, 0));
}
static bool
m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
int size;
bool
gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
gcc_assert (mode == SImode);
switch (code)
rtx
gen_split_move_double (rtx operands[])
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtx dest = operands[0];
rtx src = operands[1];
rtx val;
\f
static int
-m32r_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
and the rest are pushed. */
static rtx
-m32r_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
(TYPE is null for libcalls where that information may not be available.) */
static void
-m32r_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-m32r_libcall_value (enum machine_mode mode,
+m32r_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, 0);
and mode MODE, and we rely on this fact. */
static void
-m32r_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
int first_anon_arg;
??? Is that the right way to look at it? */
static int
-m32r_memory_move_cost (enum machine_mode mode,
+m32r_memory_move_cost (machine_mode mode,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
static rtx
m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (flag_pic)
return m32r_legitimize_pic_address (x, NULL_RTX);
}
static inline bool
-m32r_legitimate_offset_addres_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx x, bool strict)
{
if (GET_CODE (x) == PLUS
since more than one instruction will be required. */
static inline bool
-m32r_legitimate_lo_sum_addres_p (enum machine_mode mode, const_rtx x,
+m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
bool strict)
{
if (GET_CODE (x) == LO_SUM
/* Is this a load and increment operation. */
static inline bool
-m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
+m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
{
if ((mode == SImode || mode == SFmode)
&& GET_CODE (x) == POST_INC
/* Is this an increment/decrement and store operation. */
static inline bool
-m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
+m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
{
if ((mode == SImode || mode == SFmode)
&& (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
/* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
if (m32r_rtx_ok_for_base_p (x, strict)
|| m32r_legitimate_offset_addres_p (mode, x, strict)
constant to memory if they can't handle them. */
static bool
-m32r_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return !(GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
extern const char *output_sibcall (rtx);
extern void output_dbcc_and_branch (rtx *);
extern int floating_exact_log2 (rtx);
-extern bool strict_low_part_peephole_ok (enum machine_mode mode,
+extern bool strict_low_part_peephole_ok (machine_mode mode,
rtx_insn *first_insn, rtx target);
/* Functions from m68k.c used in macros. */
extern bool m68k_legitimate_base_reg_p (rtx, bool);
extern bool m68k_legitimate_index_reg_p (rtx, bool);
extern bool m68k_illegitimate_symbolic_constant_p (rtx);
-extern bool m68k_legitimate_constant_p (enum machine_mode, rtx);
+extern bool m68k_legitimate_constant_p (machine_mode, rtx);
extern bool m68k_matches_q_p (rtx);
extern bool m68k_matches_u_p (rtx);
-extern rtx legitimize_pic_address (rtx, enum machine_mode, rtx);
+extern rtx legitimize_pic_address (rtx, machine_mode, rtx);
extern rtx m68k_legitimize_tls_address (rtx);
extern bool m68k_tls_reference_p (rtx, bool);
-extern int valid_dbcc_comparison_p_2 (rtx, enum machine_mode);
-extern rtx m68k_libcall_value (enum machine_mode);
+extern int valid_dbcc_comparison_p_2 (rtx, machine_mode);
+extern rtx m68k_libcall_value (machine_mode);
extern rtx m68k_function_value (const_tree, const_tree);
-extern int emit_move_sequence (rtx *, enum machine_mode, rtx);
+extern int emit_move_sequence (rtx *, machine_mode, rtx);
extern bool m68k_movem_pattern_p (rtx, rtx, HOST_WIDE_INT, bool);
extern const char *m68k_output_movem (rtx *, rtx, HOST_WIDE_INT, bool);
extern void m68k_final_prescan_insn (rtx_insn *, rtx *, int);
#endif /* RTX_CODE */
-extern bool m68k_regno_mode_ok (int, enum machine_mode);
+extern bool m68k_regno_mode_ok (int, machine_mode);
extern enum reg_class m68k_secondary_reload_class (enum reg_class,
- enum machine_mode, rtx);
+ machine_mode, rtx);
extern enum reg_class m68k_preferred_reload_class (rtx, enum reg_class);
extern int flags_in_68881 (void);
extern void m68k_expand_prologue (void);
static bool m68k_can_eliminate (const int, const int);
static void m68k_conditional_register_usage (void);
-static bool m68k_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool m68k_legitimate_address_p (machine_mode, rtx, bool);
static void m68k_option_override (void);
static void m68k_override_options_after_change (void);
static rtx find_addr_reg (rtx);
static bool m68k_save_reg (unsigned int regno, bool interrupt_handler);
static bool m68k_ok_for_sibcall_p (tree, tree);
static bool m68k_tls_symbol_p (rtx);
-static rtx m68k_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx m68k_legitimize_address (rtx, rtx, machine_mode);
static bool m68k_rtx_costs (rtx, int, int, int, int *, bool);
#if M68K_HONOR_TARGET_STRICT_ALIGNMENT
static bool m68k_return_in_memory (const_tree, const_tree);
static void m68k_trampoline_init (rtx, tree, rtx);
static int m68k_return_pops_args (tree, tree, int);
static rtx m68k_delegitimize_address (rtx);
-static void m68k_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void m68k_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx m68k_function_arg (cumulative_args_t, enum machine_mode,
+static rtx m68k_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static bool m68k_cannot_force_const_mem (enum machine_mode mode, rtx x);
+static bool m68k_cannot_force_const_mem (machine_mode mode, rtx x);
static bool m68k_output_addr_const_extra (FILE *, rtx);
static void m68k_init_sync_libfuncs (void) ATTRIBUTE_UNUSED;
\f
{
int i;
rtx body, addr, src, operands[2];
- enum machine_mode mode;
+ machine_mode mode;
body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (adjust_stack_p + count));
mode = reg_raw_mode[regno];
It also rejects some comparisons when CC_NO_OVERFLOW is set. */
int
-valid_dbcc_comparison_p_2 (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
+valid_dbcc_comparison_p_2 (rtx x, machine_mode mode ATTRIBUTE_UNUSED)
{
switch (GET_CODE (x))
{
static rtx
m68k_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
}
static void
-m68k_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+m68k_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
nothing needs to be done because REG can certainly go in an address reg. */
static rtx
-m68k_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
+m68k_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
if (m68k_tls_symbol_p (x))
return m68k_legitimize_tls_address (x);
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-m68k_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+m68k_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return m68k_illegitimate_symbolic_constant_p (x);
}
is valid, describe its components in *ADDRESS. */
static bool
-m68k_decompose_address (enum machine_mode mode, rtx x,
+m68k_decompose_address (machine_mode mode, rtx x,
bool strict_p, struct m68k_address *address)
{
unsigned int reach;
STRICT_P says whether strict checking is needed. */
bool
-m68k_legitimate_address_p (enum machine_mode mode, rtx x, bool strict_p)
+m68k_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
{
struct m68k_address address;
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
bool
-m68k_legitimate_constant_p (enum machine_mode mode, rtx x)
+m68k_legitimate_constant_p (machine_mode mode, rtx x)
{
return mode != XFmode && !m68k_illegitimate_symbolic_constant_p (x);
}
handled. */
rtx
-legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
+legitimize_pic_address (rtx orig, machine_mode mode ATTRIBUTE_UNUSED,
rtx reg)
{
rtx pic_ref = orig;
/* Copy OP and change its mode to MODE. */
static rtx
-copy_operand (rtx op, enum machine_mode mode)
+copy_operand (rtx op, machine_mode mode)
{
/* ??? This looks really ugly. There must be a better way
to change a mode on the operand. */
new rtx with the correct mode. */
static rtx
-force_mode (enum machine_mode mode, rtx orig)
+force_mode (machine_mode mode, rtx orig)
{
if (mode == GET_MODE (orig))
return orig;
}
static int
-fp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+fp_reg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
return reg_renumber && FP_REG_P (op);
}
of SCRATCH_REG in the proper mode. */
int
-emit_move_sequence (rtx *operands, enum machine_mode mode, rtx scratch_reg)
+emit_move_sequence (rtx *operands, machine_mode mode, rtx scratch_reg)
{
register rtx operand0 = operands[0];
register rtx operand1 = operands[1];
{
int i;
REAL_VALUE_TYPE r;
- enum machine_mode mode;
+ machine_mode mode;
mode = SFmode;
for (i = 0; i < 7; i++)
clear insn. */
bool
-strict_low_part_peephole_ok (enum machine_mode mode, rtx_insn *first_insn,
+strict_low_part_peephole_ok (machine_mode mode, rtx_insn *first_insn,
rtx target)
{
rtx_insn *p = first_insn;
restrict the 68881 registers to floating-point modes. */
bool
-m68k_regno_mode_ok (int regno, enum machine_mode mode)
+m68k_regno_mode_ok (int regno, machine_mode mode)
{
if (DATA_REGNO_P (regno))
{
enum reg_class
m68k_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode, rtx x)
+ machine_mode mode, rtx x)
{
int regno;
If there is need for a hard-float ABI it is probably worth doing it
properly and also passing function arguments in FP registers. */
rtx
-m68k_libcall_value (enum machine_mode mode)
+m68k_libcall_value (machine_mode mode)
{
switch (mode) {
case SFmode:
rtx
m68k_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
mode = TYPE_MODE (valtype);
switch (mode) {
static bool
m68k_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (mode == BLKmode)
return true;
/* Return type of memory ADDR_RTX refers to. */
static enum attr_op_type
-sched_address_type (enum machine_mode mode, rtx addr_rtx)
+sched_address_type (machine_mode mode, rtx addr_rtx)
{
struct m68k_address address;
#ifdef TREE_CODE
#ifdef HAVE_MACHINE_MODES
-extern int mcore_num_arg_regs (enum machine_mode, const_tree);
+extern int mcore_num_arg_regs (machine_mode, const_tree);
#endif /* HAVE_MACHINE_MODES */
#ifdef RTX_CODE
extern bool mcore_gen_compare (RTX_CODE, rtx, rtx);
extern int mcore_symbolic_address_p (rtx);
extern bool mcore_r15_operand_p (rtx);
-extern enum reg_class mcore_secondary_reload_class (enum reg_class, enum machine_mode, rtx);
+extern enum reg_class mcore_secondary_reload_class (enum reg_class, machine_mode, rtx);
extern enum reg_class mcore_reload_class (rtx, enum reg_class);
extern int mcore_is_same_reg (rtx, rtx);
extern int mcore_arith_S_operand (rtx);
#ifdef HAVE_MACHINE_MODES
-extern const char * mcore_output_move (rtx, rtx *, enum machine_mode);
-extern const char * mcore_output_movedouble (rtx *, enum machine_mode);
+extern const char * mcore_output_move (rtx, rtx *, machine_mode);
+extern const char * mcore_output_movedouble (rtx *, machine_mode);
extern int const_ok_for_mcore (HOST_WIDE_INT);
#endif /* HAVE_MACHINE_MODES */
#endif /* RTX_CODE */
static void output_stack_adjust (int, int);
static int calc_live_regs (int *);
static int try_constant_tricks (HOST_WIDE_INT, HOST_WIDE_INT *, HOST_WIDE_INT *);
-static const char * output_inline_const (enum machine_mode, rtx *);
+static const char * output_inline_const (machine_mode, rtx *);
static void layout_mcore_frame (struct mcore_frame *);
-static void mcore_setup_incoming_varargs (cumulative_args_t, enum machine_mode, tree, int *, int);
+static void mcore_setup_incoming_varargs (cumulative_args_t, machine_mode, tree, int *, int);
static cond_type is_cond_candidate (rtx);
static rtx_insn *emit_new_cond_insn (rtx, int);
static rtx_insn *conditionalize_block (rtx_insn *);
static void conditionalize_optimization (void);
static void mcore_reorg (void);
-static rtx handle_structs_in_regs (enum machine_mode, const_tree, int);
+static rtx handle_structs_in_regs (machine_mode, const_tree, int);
static void mcore_mark_dllexport (tree);
static void mcore_mark_dllimport (tree);
static int mcore_dllexport_p (tree);
static void mcore_external_libcall (rtx);
static bool mcore_return_in_memory (const_tree, const_tree);
static int mcore_arg_partial_bytes (cumulative_args_t,
- enum machine_mode,
+ machine_mode,
tree, bool);
static rtx mcore_function_arg (cumulative_args_t,
- enum machine_mode,
+ machine_mode,
const_tree, bool);
static void mcore_function_arg_advance (cumulative_args_t,
- enum machine_mode,
+ machine_mode,
const_tree, bool);
-static unsigned int mcore_function_arg_boundary (enum machine_mode,
+static unsigned int mcore_function_arg_boundary (machine_mode,
const_tree);
static void mcore_asm_trampoline_template (FILE *);
static void mcore_trampoline_init (rtx, tree, rtx);
static bool mcore_warn_func_return (tree);
static void mcore_option_override (void);
-static bool mcore_legitimate_constant_p (enum machine_mode, rtx);
+static bool mcore_legitimate_constant_p (machine_mode, rtx);
\f
/* MCore specific attributes. */
/* Output an inline constant. */
static const char *
-output_inline_const (enum machine_mode mode, rtx operands[])
+output_inline_const (machine_mode mode, rtx operands[])
{
HOST_WIDE_INT x = 0, y = 0;
int trick_no;
const char *
mcore_output_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
rtx dst = operands[0];
rtx src = operands[1];
to take care when we see overlapping source and dest registers. */
const char *
-mcore_output_movedouble (rtx operands[], enum machine_mode mode ATTRIBUTE_UNUSED)
+mcore_output_movedouble (rtx operands[], machine_mode mode ATTRIBUTE_UNUSED)
{
rtx dst = operands[0];
rtx src = operands[1];
known constants. DEST and SRC are registers. OFFSET is the known
starting point for the output pattern. */
-static const enum machine_mode mode_from_align[] =
+static const machine_mode mode_from_align[] =
{
VOIDmode, QImode, HImode, VOIDmode, SImode,
};
block_move_sequence (rtx dst_mem, rtx src_mem, int size, int align)
{
rtx temp[2];
- enum machine_mode mode[2];
+ machine_mode mode[2];
int amount[2];
bool active[2];
int phase = 0;
static void
mcore_setup_incoming_varargs (cumulative_args_t args_so_far_v,
- enum machine_mode mode, tree type,
+ machine_mode mode, tree type,
int * ptr_pretend_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED)
{
enum reg_class
mcore_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+ machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
if (TEST_HARD_REG_BIT (reg_class_contents[rclass], 15)
&& !mcore_r15_operand_p (x))
hold a function argument of mode MODE and type TYPE. */
int
-mcore_num_arg_regs (enum machine_mode mode, const_tree type)
+mcore_num_arg_regs (machine_mode mode, const_tree type)
{
int size;
}
static rtx
-handle_structs_in_regs (enum machine_mode mode, const_tree type, int reg)
+handle_structs_in_regs (machine_mode mode, const_tree type, int reg)
{
int size;
rtx
mcore_function_value (const_tree valtype, const_tree func)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsigned_p;
mode = TYPE_MODE (valtype);
its data type forbids. */
static rtx
-mcore_function_arg (cumulative_args_t cum, enum machine_mode mode,
+mcore_function_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
int arg_reg;
}
static void
-mcore_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+mcore_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static unsigned int
-mcore_function_arg_boundary (enum machine_mode mode,
+mcore_function_arg_boundary (machine_mode mode,
const_tree type ATTRIBUTE_UNUSED)
{
/* Doubles must be aligned to an 8 byte boundary. */
the function. */
static int
-mcore_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
+mcore_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
tree type, bool named)
{
int reg = ROUND_REG (*get_cumulative_args (cum), mode);
On the MCore, allow anything but a double. */
static bool
-mcore_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+mcore_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return GET_CODE (x) != CONST_DOUBLE;
}
"*
{
int ofs;
- enum machine_mode mode;
+ machine_mode mode;
rtx base_reg = XEXP (operands[4], 0);
if ((ofs = mcore_byte_offset (INTVAL (operands[3]))) > -1)
extern bool mep_vliw_mode_match (rtx);
extern bool mep_vliw_jmp_match (rtx);
extern bool mep_multi_slot (rtx_insn *);
-extern bool mep_legitimate_address (enum machine_mode, rtx, int);
-extern int mep_legitimize_address (rtx *, rtx, enum machine_mode);
-extern int mep_legitimize_reload_address (rtx *, enum machine_mode, int, /*enum reload_type*/ int, int);
+extern bool mep_legitimate_address (machine_mode, rtx, int);
+extern int mep_legitimize_address (rtx *, rtx, machine_mode);
+extern int mep_legitimize_reload_address (rtx *, machine_mode, int, /*enum reload_type*/ int, int);
extern int mep_core_address_length (rtx_insn *, int);
extern int mep_cop_address_length (rtx_insn *, int);
-extern bool mep_expand_mov (rtx *, enum machine_mode);
-extern bool mep_mov_ok (rtx *, enum machine_mode);
-extern void mep_split_wide_move (rtx *, enum machine_mode);
+extern bool mep_expand_mov (rtx *, machine_mode);
+extern bool mep_mov_ok (rtx *, machine_mode);
+extern void mep_split_wide_move (rtx *, machine_mode);
#ifdef RTX_CODE
extern bool mep_expand_setcc (rtx *);
extern rtx mep_expand_cbranch (rtx *);
extern const char *mep_emit_cbranch (rtx *, int);
extern void mep_expand_call (rtx *, int);
extern rtx mep_find_base_term (rtx);
-extern enum reg_class mep_secondary_input_reload_class (enum reg_class, enum machine_mode, rtx);
-extern enum reg_class mep_secondary_output_reload_class (enum reg_class, enum machine_mode, rtx);
+extern enum reg_class mep_secondary_input_reload_class (enum reg_class, machine_mode, rtx);
+extern enum reg_class mep_secondary_output_reload_class (enum reg_class, machine_mode, rtx);
extern bool mep_secondary_memory_needed (enum reg_class, enum reg_class,
- enum machine_mode);
-extern void mep_expand_reload (rtx *, enum machine_mode);
+ machine_mode);
+extern void mep_expand_reload (rtx *, machine_mode);
extern enum reg_class mep_preferred_reload_class (rtx, enum reg_class);
-extern int mep_register_move_cost (enum machine_mode, enum reg_class, enum reg_class);
+extern int mep_register_move_cost (machine_mode, enum reg_class, enum reg_class);
extern void mep_init_expanders (void);
extern rtx mep_return_addr_rtx (int);
extern bool mep_epilogue_uses (int);
extern void mep_init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree);
extern bool mep_return_in_memory (const_tree, const_tree);
extern rtx mep_function_value (const_tree, const_tree);
-extern rtx mep_libcall_value (enum machine_mode);
+extern rtx mep_libcall_value (machine_mode);
extern void mep_asm_output_opcode (FILE *, const char *);
extern void mep_note_pragma_disinterrupt (const char *);
extern void mep_note_pragma_call (const char *);
extern bool mep_have_core_copro_moves_p;
extern bool mep_have_copro_copro_moves_p;
-extern bool mep_cannot_change_mode_class (enum machine_mode, enum machine_mode,
+extern bool mep_cannot_change_mode_class (machine_mode, machine_mode,
enum reg_class);
/* These are called from mep-pragmas (front end) and then call into
extern void mep_init_regs (void);
-extern int cgen_h_uint_6a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_7a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_8a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_6a2_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_22a4_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_2a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_24a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_6a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_5a4_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_2a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_16a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_3a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_5a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_16a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_8a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_7a2_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_6a4_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_5a8_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_4a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_10a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_sint_12a1_immediate (rtx, enum machine_mode);
-extern int cgen_h_uint_20a1_immediate (rtx, enum machine_mode);
+extern int cgen_h_uint_6a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_7a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_8a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_6a2_immediate (rtx, machine_mode);
+extern int cgen_h_uint_22a4_immediate (rtx, machine_mode);
+extern int cgen_h_sint_2a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_24a1_immediate (rtx, machine_mode);
+extern int cgen_h_sint_6a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_5a4_immediate (rtx, machine_mode);
+extern int cgen_h_uint_2a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_16a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_3a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_5a1_immediate (rtx, machine_mode);
+extern int cgen_h_sint_16a1_immediate (rtx, machine_mode);
+extern int cgen_h_sint_8a1_immediate (rtx, machine_mode);
+extern int cgen_h_sint_7a2_immediate (rtx, machine_mode);
+extern int cgen_h_sint_6a4_immediate (rtx, machine_mode);
+extern int cgen_h_sint_5a8_immediate (rtx, machine_mode);
+extern int cgen_h_uint_4a1_immediate (rtx, machine_mode);
+extern int cgen_h_sint_10a1_immediate (rtx, machine_mode);
+extern int cgen_h_sint_12a1_immediate (rtx, machine_mode);
+extern int cgen_h_uint_20a1_immediate (rtx, machine_mode);
static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
-static bool move_needs_splitting (rtx, rtx, enum machine_mode);
+static bool move_needs_splitting (rtx, rtx, machine_mode);
static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
static bool mep_nongeneral_reg (rtx);
static bool mep_general_copro_reg (rtx);
static bool mep_function_ok_for_sibcall (tree, tree);
static int unique_bit_in (HOST_WIDE_INT);
static int bit_size_for_clip (HOST_WIDE_INT);
-static int bytesize (const_tree, enum machine_mode);
+static int bytesize (const_tree, machine_mode);
static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
static void mep_intrinsic_unavailable (int);
static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
static bool mep_get_move_insn (int, const struct cgen_insn **);
-static rtx mep_convert_arg (enum machine_mode, rtx);
+static rtx mep_convert_arg (machine_mode, rtx);
static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
-static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
static int mep_issue_rate (void);
static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
static void mep_bundle_insns (rtx_insn *);
static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
-static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
-static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
+static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree, int *, int);
-static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
+static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
const_tree, bool);
-static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
+static rtx mep_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void mep_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static bool mep_vector_mode_supported_p (enum machine_mode);
+static bool mep_vector_mode_supported_p (machine_mode);
static rtx mep_allocate_initial_value (rtx);
static void mep_asm_init_sections (void);
static int mep_comp_type_attributes (const_tree, const_tree);
{
rtx *reg, *mem;
unsigned int reg_bytes, mem_bytes;
- enum machine_mode reg_mode, mem_mode;
+ machine_mode reg_mode, mem_mode;
/* Only simple SETs can be converted. */
if (GET_CODE (set) != SET)
static bool
move_needs_splitting (rtx dest, rtx src,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
int s = mep_section_tag (src);
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
/* We can't convert symbol values to gp- or tp-rel values after
reload, as reload might have used $gp or $tp for other
strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
bool
-mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
+mep_legitimate_address (machine_mode mode, rtx x, int strict)
{
int the_tag;
}
int
-mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
+mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
int type_i,
int ind_levels ATTRIBUTE_UNUSED)
{
#define DEBUG_EXPAND_MOV 0
bool
-mep_expand_mov (rtx *operands, enum machine_mode mode)
+mep_expand_mov (rtx *operands, machine_mode mode)
{
int i, t;
int tag[2];
/* Cases where the pattern can't be made to use at all. */
bool
-mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
+mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
{
int i;
#define DEBUG_SPLIT_WIDE_MOVE 0
void
-mep_split_wide_move (rtx *operands, enum machine_mode mode)
+mep_split_wide_move (rtx *operands, machine_mode mode)
{
int i;
modes FROM to TO. */
bool
-mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
+mep_cannot_change_mode_class (machine_mode from, machine_mode to,
enum reg_class regclass)
{
if (from == to)
enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx x)
{
int rv = NO_REGS;
enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx x)
{
int rv = NO_REGS;
bool
mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (!mep_have_core_copro_moves_p)
{
}
void
-mep_expand_reload (rtx *operands, enum machine_mode mode)
+mep_expand_reload (rtx *operands, machine_mode mode)
{
/* There are three cases for each direction:
register, farsym
that requires a temporary register or temporary stack slot. */
int
-mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
+mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
{
if (mep_have_copro_copro_moves_p
&& reg_class_subset_p (from, CR_REGS)
{
rtx mem;
bool maybe_dead_p;
- enum machine_mode rmode;
+ machine_mode rmode;
rss = cfun->machine->reg_save_slot[i];
for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
if (mep_call_saves_register (i))
{
- enum machine_mode rmode;
+ machine_mode rmode;
int rss = cfun->machine->reg_save_slot[i];
if (mep_reg_size (i) == 8)
static void
mep_setup_incoming_varargs (cumulative_args_t cum,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, int *pretend_size,
int second_time ATTRIBUTE_UNUSED)
{
}
static int
-bytesize (const_tree type, enum machine_mode mode)
+bytesize (const_tree type, machine_mode mode)
{
if (mode == BLKmode)
return int_size_in_bytes (type);
first arg. For varargs, we copy $1..$4 to the stack. */
static rtx
-mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
static bool
mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED)
{
static void
mep_function_arg_advance (cumulative_args_t pcum,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
rtx
-mep_libcall_value (enum machine_mode mode)
+mep_libcall_value (machine_mode mode)
{
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
to MODE using a subreg. Otherwise return ARG as-is. */
static rtx
-mep_convert_arg (enum machine_mode mode, rtx arg)
+mep_convert_arg (machine_mode mode, rtx arg)
{
if (GET_MODE (arg) != mode
&& register_operand (arg, VOIDmode)
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
rtx pat, op[10], arg[10];
if (cgen_insn->regnums[a].reference_p)
{
tree pointed_to = TREE_TYPE (TREE_TYPE (value));
- enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
+ machine_mode pointed_mode = TYPE_MODE (pointed_to);
arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
}
}
static bool
-mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
+mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
{
return false;
}
static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool ATTRIBUTE_UNUSED speed_p)
{
extern void microblaze_expand_epilogue (void);
extern void override_options (void);
extern int microblaze_expand_shift (rtx *);
-extern bool microblaze_expand_move (enum machine_mode, rtx *);
+extern bool microblaze_expand_move (machine_mode, rtx *);
extern bool microblaze_expand_block_move (rtx, rtx, rtx, rtx);
extern void microblaze_expand_divide (rtx *);
-extern void microblaze_expand_conditional_branch (enum machine_mode, rtx *);
+extern void microblaze_expand_conditional_branch (machine_mode, rtx *);
extern void microblaze_expand_conditional_branch_sf (rtx *);
extern int microblaze_can_use_return_insn (void);
extern void print_operand (FILE *, rtx, int);
extern void print_operand_address (FILE *, rtx);
extern void init_cumulative_args (CUMULATIVE_ARGS *,tree, rtx);
-extern bool microblaze_legitimate_address_p (enum machine_mode, rtx, bool);
+extern bool microblaze_legitimate_address_p (machine_mode, rtx, bool);
extern int microblaze_is_interrupt_variant (void);
extern int microblaze_is_break_handler (void);
extern int microblaze_break_function_p (tree func);
extern rtx microblaze_return_addr (int, rtx);
-extern int simple_memory_operand (rtx, enum machine_mode);
-extern int double_memory_operand (rtx, enum machine_mode);
+extern int simple_memory_operand (rtx, machine_mode);
+extern int double_memory_operand (rtx, machine_mode);
extern void microblaze_order_regs_for_local_alloc (void);
extern int microblaze_regno_ok_for_base_p (int, int);
extern HOST_WIDE_INT microblaze_initial_elimination_offset (int, int);
extern bool microblaze_tls_referenced_p (rtx);
extern int symbol_mentioned_p (rtx);
extern int label_mentioned_p (rtx);
-extern bool microblaze_cannot_force_const_mem (enum machine_mode, rtx);
+extern bool microblaze_cannot_force_const_mem (machine_mode, rtx);
#endif /* RTX_CODE */
/* Declare functions in microblaze-c.c. */
/* Return truth value if a CONST_DOUBLE is ok to be a legitimate constant. */
static bool
-microblaze_const_double_ok (rtx op, enum machine_mode mode)
+microblaze_const_double_ok (rtx op, machine_mode mode)
{
REAL_VALUE_TYPE d;
(ie, register + small offset) or (register + register). */
int
-simple_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+simple_memory_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
rtx addr, plus0, plus1;
a doubleword. */
int
-double_memory_operand (rtx op, enum machine_mode mode)
+double_memory_operand (rtx op, machine_mode mode)
{
rtx addr;
static bool
microblaze_valid_base_register_p (rtx x,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int strict)
{
if (!strict && GET_CODE (x) == SUBREG)
}
bool
-microblaze_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+microblaze_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return microblaze_tls_referenced_p(x);
}
static bool
microblaze_valid_index_register_p (rtx x,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int strict)
{
if (!strict && GET_CODE (x) == SUBREG)
static bool
microblaze_classify_address (struct microblaze_address_info *info, rtx x,
- enum machine_mode mode, int strict)
+ machine_mode mode, int strict)
{
rtx xplus0;
rtx xplus1;
is called during reload. */
bool
-microblaze_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+microblaze_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
struct microblaze_address_info addr;
static rtx
microblaze_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
register rtx xinsn = x, result;
HOST_WIDE_INT offset, delta;
unsigned HOST_WIDE_INT bits;
int i;
- enum machine_mode mode;
+ machine_mode mode;
rtx *regs;
bits = BITS_PER_WORD;
int opno ATTRIBUTE_UNUSED, int *total,
bool speed ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
switch (code)
{
of mode MODE at X. Return 0 if X isn't valid for MODE. */
static int
-microblaze_address_insns (rtx x, enum machine_mode mode)
+microblaze_address_insns (rtx x, machine_mode mode)
{
struct microblaze_address_info addr;
/* Provide the costs of an addressing mode that contains ADDR.
If ADDR is not a valid address, its cost is irrelevant. */
static int
-microblaze_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
+microblaze_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
static void
microblaze_function_arg_advance (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
or 0 if the argument is to be passed on the stack. */
static rtx
-microblaze_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+microblaze_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
if (mode == VOIDmode)
{
if (cum->num_adjusts > 0)
- ret = gen_rtx_PARALLEL ((enum machine_mode) cum->fp_code,
+ ret = gen_rtx_PARALLEL ((machine_mode) cum->fp_code,
gen_rtvec_v (cum->num_adjusts, cum->adjust));
}
/* Return number of bytes of argument to put in registers. */
static int
-function_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+function_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
{
register int i, start;
register int regno;
- register enum machine_mode mode;
+ register machine_mode mode;
int ver;
microblaze_section_threshold = (global_options_set.x_g_switch_value
/* Set up array giving whether a given register can hold a given mode. */
for (mode = VOIDmode;
- mode != MAX_MACHINE_MODE; mode = (enum machine_mode) ((int) mode + 1))
+ mode != MAX_MACHINE_MODE; mode = (machine_mode) ((int) mode + 1))
{
register int size = GET_MODE_SIZE (mode);
for (cur_arg = fnargs; cur_arg != 0; cur_arg = next_arg)
{
tree passed_type = DECL_ARG_TYPE (cur_arg);
- enum machine_mode passed_mode = TYPE_MODE (passed_type);
+ machine_mode passed_mode = TYPE_MODE (passed_type);
rtx entry_parm;
if (TREE_ADDRESSABLE (passed_type))
static reg_class_t
microblaze_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
- reg_class_t rclass, enum machine_mode mode ATTRIBUTE_UNUSED,
+ reg_class_t rclass, machine_mode mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri ATTRIBUTE_UNUSED)
{
if (rclass == ST_REGS)
}
static rtx
-expand_pic_symbol_ref (enum machine_mode mode ATTRIBUTE_UNUSED, rtx op)
+expand_pic_symbol_ref (machine_mode mode ATTRIBUTE_UNUSED, rtx op)
{
rtx result;
result = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op), UNSPEC_GOTOFF);
}
bool
-microblaze_expand_move (enum machine_mode mode, rtx operands[])
+microblaze_expand_move (machine_mode mode, rtx operands[])
{
rtx op0, op1;
second, generate correct branch instruction. */
void
-microblaze_expand_conditional_branch (enum machine_mode mode, rtx operands[])
+microblaze_expand_conditional_branch (machine_mode mode, rtx operands[])
{
enum rtx_code code = GET_CODE (operands[0]);
rtx cmp_op0 = operands[1];
At present, GAS doesn't understand li.[sd], so don't allow it
to be generated at present. */
static bool
-microblaze_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+microblaze_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
if (microblaze_cannot_force_const_mem(mode, x))
extern bool mips_symbolic_constant_p (rtx, enum mips_symbol_context,
enum mips_symbol_type *);
-extern int mips_regno_mode_ok_for_base_p (int, enum machine_mode, bool);
-extern bool mips_stack_address_p (rtx, enum machine_mode);
-extern int mips_address_insns (rtx, enum machine_mode, bool);
+extern int mips_regno_mode_ok_for_base_p (int, machine_mode, bool);
+extern bool mips_stack_address_p (rtx, machine_mode);
+extern int mips_address_insns (rtx, machine_mode, bool);
extern int mips_const_insns (rtx);
extern int mips_split_const_insns (rtx);
extern int mips_load_store_insns (rtx, rtx_insn *);
#endif
extern rtx mips_pic_base_register (rtx);
extern rtx mips_got_load (rtx, rtx, enum mips_symbol_type);
-extern bool mips_split_symbol (rtx, rtx, enum machine_mode, rtx *);
+extern bool mips_split_symbol (rtx, rtx, machine_mode, rtx *);
extern rtx mips_unspec_address (rtx, enum mips_symbol_type);
extern rtx mips_strip_unspec_address (rtx);
extern void mips_move_integer (rtx, rtx, unsigned HOST_WIDE_INT);
-extern bool mips_legitimize_move (enum machine_mode, rtx, rtx);
+extern bool mips_legitimize_move (machine_mode, rtx, rtx);
extern rtx mips_subword (rtx, bool);
extern bool mips_split_move_p (rtx, rtx, enum mips_split_type);
extern void mips_expand_synci_loop (rtx, rtx);
extern void mips_init_cumulative_args (CUMULATIVE_ARGS *, tree);
-extern bool mips_pad_arg_upward (enum machine_mode, const_tree);
-extern bool mips_pad_reg_upward (enum machine_mode, tree);
+extern bool mips_pad_arg_upward (machine_mode, const_tree);
+extern bool mips_pad_reg_upward (machine_mode, tree);
extern bool mips_expand_ext_as_unaligned_load (rtx, rtx, HOST_WIDE_INT,
HOST_WIDE_INT, bool);
extern bool mips_expand_ins_as_unaligned_store (rtx, rtx, HOST_WIDE_INT,
HOST_WIDE_INT);
-extern bool mips_mem_fits_mode_p (enum machine_mode mode, rtx x);
+extern bool mips_mem_fits_mode_p (machine_mode mode, rtx x);
extern HOST_WIDE_INT mips_debugger_offset (rtx, HOST_WIDE_INT);
extern void mips_push_asm_switch (struct mips_asm_switch *);
extern void mips_expand_epilogue (bool);
extern bool mips_can_use_return_insn (void);
-extern bool mips_cannot_change_mode_class (enum machine_mode,
- enum machine_mode, enum reg_class);
+extern bool mips_cannot_change_mode_class (machine_mode,
+ machine_mode, enum reg_class);
extern bool mips_dangerous_for_la25_p (rtx);
-extern bool mips_modes_tieable_p (enum machine_mode, enum machine_mode);
+extern bool mips_modes_tieable_p (machine_mode, machine_mode);
extern enum reg_class mips_secondary_reload_class (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx, bool);
-extern int mips_class_max_nregs (enum reg_class, enum machine_mode);
+extern int mips_class_max_nregs (enum reg_class, machine_mode);
extern int mips_adjust_insn_length (rtx_insn *, int);
extern void mips_output_load_label (rtx);
extern unsigned int mips_sync_loop_insns (rtx_insn *, rtx *);
extern const char *mips_output_division (const char *, rtx *);
extern const char *mips_output_probe_stack_range (rtx, rtx);
-extern unsigned int mips_hard_regno_nregs (int, enum machine_mode);
+extern unsigned int mips_hard_regno_nregs (int, machine_mode);
extern bool mips_linked_madd_p (rtx_insn *, rtx_insn *);
extern bool mips_store_data_bypass_p (rtx_insn *, rtx_insn *);
extern int mips_dspalu_bypass_p (rtx, rtx);
extern bool mips16e_save_restore_pattern_p (rtx, HOST_WIDE_INT,
struct mips16e_save_restore_info *);
-extern bool mask_low_and_shift_p (enum machine_mode, rtx, rtx, int);
-extern int mask_low_and_shift_len (enum machine_mode, rtx, rtx);
-extern bool and_operands_ok (enum machine_mode, rtx, rtx);
+extern bool mask_low_and_shift_p (machine_mode, rtx, rtx, int);
+extern int mask_low_and_shift_len (machine_mode, rtx, rtx);
+extern bool and_operands_ok (machine_mode, rtx, rtx);
extern bool mips_fmadd_bypass (rtx_insn *, rtx_insn *);
union mips_gen_fn_ptrs
extern bool umips_load_store_pair_p (bool, rtx *);
extern void umips_output_load_store_pair (bool, rtx *);
extern bool umips_movep_target_p (rtx, rtx);
-extern bool umips_12bit_offset_address_p (rtx, enum machine_mode);
-extern bool lwsp_swsp_address_p (rtx, enum machine_mode);
-extern bool m16_based_address_p (rtx, enum machine_mode,
+extern bool umips_12bit_offset_address_p (rtx, machine_mode);
+extern bool lwsp_swsp_address_p (rtx, machine_mode);
+extern bool m16_based_address_p (rtx, machine_mode,
int (*)(rtx_def*, machine_mode));
extern rtx mips_expand_thread_pointer (rtx);
extern void mips16_expand_get_fcsr (rtx);
};
\f
static rtx mips_find_pic_call_symbol (rtx_insn *, rtx, bool);
-static int mips_register_move_cost (enum machine_mode, reg_class_t,
+static int mips_register_move_cost (machine_mode, reg_class_t,
reg_class_t);
-static unsigned int mips_function_arg_boundary (enum machine_mode, const_tree);
+static unsigned int mips_function_arg_boundary (machine_mode, const_tree);
\f
struct mips16_flip_traits : default_hashmap_traits
{
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-mips_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+mips_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return mips_const_insns (x) > 0;
}
data section. */
static bool
-mips_rtx_constant_in_small_data_p (enum machine_mode mode)
+mips_rtx_constant_in_small_data_p (machine_mode mode)
{
return (!TARGET_EMBEDDED_DATA
&& TARGET_LOCAL_SDATA
extended ones. */
static int
-mips_symbol_insns_1 (enum mips_symbol_type type, enum machine_mode mode)
+mips_symbol_insns_1 (enum mips_symbol_type type, machine_mode mode)
{
if (mips_use_pcrel_pool_p[(int) type])
{
In both cases, instruction counts are based off BASE_INSN_LENGTH. */
static int
-mips_symbol_insns (enum mips_symbol_type type, enum machine_mode mode)
+mips_symbol_insns (enum mips_symbol_type type, machine_mode mode)
{
return mips_symbol_insns_1 (type, mode) * (TARGET_MIPS16 ? 2 : 1);
}
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-mips_cannot_force_const_mem (enum machine_mode mode, rtx x)
+mips_cannot_force_const_mem (machine_mode mode, rtx x)
{
enum mips_symbol_type type;
rtx base, offset;
constants when we're using a per-function constant pool. */
static bool
-mips_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+mips_use_blocks_for_constant_p (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx x ATTRIBUTE_UNUSED)
{
return !TARGET_MIPS16_PCREL_LOADS;
STRICT_P is true if REG_OK_STRICT is in effect. */
int
-mips_regno_mode_ok_for_base_p (int regno, enum machine_mode mode,
+mips_regno_mode_ok_for_base_p (int regno, machine_mode mode,
bool strict_p)
{
if (!HARD_REGISTER_NUM_P (regno))
STRICT_P is true if REG_OK_STRICT is in effect. */
static bool
-mips_valid_base_register_p (rtx x, enum machine_mode mode, bool strict_p)
+mips_valid_base_register_p (rtx x, machine_mode mode, bool strict_p)
{
if (!strict_p && GET_CODE (x) == SUBREG)
x = SUBREG_REG (x);
can address a value of mode MODE. */
static bool
-mips_valid_offset_p (rtx x, enum machine_mode mode)
+mips_valid_offset_p (rtx x, machine_mode mode)
{
/* Check that X is a signed 16-bit number. */
if (!const_arith_operand (x, Pmode))
LO_SUM symbol has type SYMBOL_TYPE. */
static bool
-mips_valid_lo_sum_p (enum mips_symbol_type symbol_type, enum machine_mode mode)
+mips_valid_lo_sum_p (enum mips_symbol_type symbol_type, machine_mode mode)
{
/* Check that symbols of type SYMBOL_TYPE can be used to access values
of mode MODE. */
static bool
mips_classify_address (struct mips_address_info *info, rtx x,
- enum machine_mode mode, bool strict_p)
+ machine_mode mode, bool strict_p)
{
switch (GET_CODE (x))
{
/* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-mips_legitimate_address_p (enum machine_mode mode, rtx x, bool strict_p)
+mips_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
{
struct mips_address_info addr;
/* Return true if X is a legitimate $sp-based address for mode MDOE. */
bool
-mips_stack_address_p (rtx x, enum machine_mode mode)
+mips_stack_address_p (rtx x, machine_mode mode)
{
struct mips_address_info addr;
sense, because their use is so restricted. */
static bool
-mips_lx_address_p (rtx addr, enum machine_mode mode)
+mips_lx_address_p (rtx addr, machine_mode mode)
{
if (GET_CODE (addr) != PLUS
|| !REG_P (XEXP (addr, 0))
an 8-bit immediate field that's shifted left twice. */
static bool
-mips16_unextended_reference_p (enum machine_mode mode, rtx base,
+mips16_unextended_reference_p (machine_mode mode, rtx base,
unsigned HOST_WIDE_INT offset)
{
if (mode != BLKmode && offset % GET_MODE_SIZE (mode) == 0)
enough. */
int
-mips_address_insns (rtx x, enum machine_mode mode, bool might_split_p)
+mips_address_insns (rtx x, machine_mode mode, bool might_split_p)
{
struct mips_address_info addr;
int factor;
OFFSET_PREDICATE. */
bool
-m16_based_address_p (rtx x, enum machine_mode mode,
+m16_based_address_p (rtx x, machine_mode mode,
insn_operand_predicate_fn offset_predicate)
{
struct mips_address_info addr;
for a microMIPS LWSP or SWSP insn. */
bool
-lwsp_swsp_address_p (rtx x, enum machine_mode mode)
+lwsp_swsp_address_p (rtx x, machine_mode mode)
{
struct mips_address_info addr;
MODE is the mode of the value being accessed. */
bool
-umips_12bit_offset_address_p (rtx x, enum machine_mode mode)
+umips_12bit_offset_address_p (rtx x, machine_mode mode)
{
struct mips_address_info addr;
int
mips_load_store_insns (rtx mem, rtx_insn *insn)
{
- enum machine_mode mode;
+ machine_mode mode;
bool might_split_p;
rtx set;
Return that new register. */
static rtx
-mips_force_unary (enum machine_mode mode, enum rtx_code code, rtx op0)
+mips_force_unary (machine_mode mode, enum rtx_code code, rtx op0)
{
rtx reg;
of mode MODE. Return that new register. */
static rtx
-mips_force_binary (enum machine_mode mode, enum rtx_code code, rtx op0, rtx op1)
+mips_force_binary (machine_mode mode, enum rtx_code code, rtx op0, rtx op1)
{
rtx reg;
is guaranteed to be a legitimate address for mode MODE. */
bool
-mips_split_symbol (rtx temp, rtx addr, enum machine_mode mode, rtx *low_out)
+mips_split_symbol (rtx temp, rtx addr, machine_mode mode, rtx *low_out)
{
enum mips_symbol_context context;
enum mips_symbol_type symbol_type;
/* If X is not a valid address for mode MODE, force it into a register. */
static rtx
-mips_force_address (rtx x, enum machine_mode mode)
+mips_force_address (rtx x, machine_mode mode)
{
if (!mips_legitimate_address_p (mode, x, false))
x = force_reg (Pmode, x);
static rtx
mips_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx base, addr;
HOST_WIDE_INT offset;
mips_move_integer (rtx temp, rtx dest, unsigned HOST_WIDE_INT value)
{
struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
- enum machine_mode mode;
+ machine_mode mode;
unsigned int i, num_ops;
rtx x;
move_operand. */
static void
-mips_legitimize_const_move (enum machine_mode mode, rtx dest, rtx src)
+mips_legitimize_const_move (machine_mode mode, rtx dest, rtx src)
{
rtx base, offset;
sequence that is valid. */
bool
-mips_legitimize_move (enum machine_mode mode, rtx dest, rtx src)
+mips_legitimize_move (machine_mode mode, rtx dest, rtx src)
{
if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
{
/* Return the cost of floating-point multiplications of mode MODE. */
static int
-mips_fp_mult_cost (enum machine_mode mode)
+mips_fp_mult_cost (machine_mode mode)
{
return mode == DFmode ? mips_cost->fp_mult_df : mips_cost->fp_mult_sf;
}
/* Return the cost of floating-point divisions of mode MODE. */
static int
-mips_fp_div_cost (enum machine_mode mode)
+mips_fp_div_cost (machine_mode mode)
{
return mode == DFmode ? mips_cost->fp_div_df : mips_cost->fp_div_sf;
}
cost of OP itself. */
static int
-mips_sign_extend_cost (enum machine_mode mode, rtx op)
+mips_sign_extend_cost (machine_mode mode, rtx op)
{
if (MEM_P (op))
/* Extended loads are as cheap as unextended ones. */
cost of OP itself. */
static int
-mips_zero_extend_cost (enum machine_mode mode, rtx op)
+mips_zero_extend_cost (machine_mode mode, rtx op)
{
if (MEM_P (op))
/* Extended loads are as cheap as unextended ones. */
assuming that the move will be in pieces of at most UNITS bytes. */
static int
-mips_set_reg_reg_piece_cost (enum machine_mode mode, unsigned int units)
+mips_set_reg_reg_piece_cost (machine_mode mode, unsigned int units)
{
return COSTS_N_INSNS ((GET_MODE_SIZE (mode) + units - 1) / units);
}
/* Return the cost of moving between two registers of mode MODE. */
static int
-mips_set_reg_reg_cost (enum machine_mode mode)
+mips_set_reg_reg_cost (machine_mode mode)
{
switch (GET_MODE_CLASS (mode))
{
mips_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
bool float_mode_p = FLOAT_MODE_P (mode);
int cost;
rtx addr;
/* Implement TARGET_ADDRESS_COST. */
static int
-mips_address_cost (rtx addr, enum machine_mode mode,
+mips_address_cost (rtx addr, machine_mode mode,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
mips_subword (rtx op, bool high_p)
{
unsigned int byte, offset;
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (op);
if (mode == VOIDmode)
mips_output_move (rtx dest, rtx src)
{
enum rtx_code dest_code, src_code;
- enum machine_mode mode;
+ machine_mode mode;
enum mips_symbol_type symbol_type;
bool dbl_p;
static bool
mips_canonicalize_int_order_test (enum rtx_code *code, rtx *cmp1,
- enum machine_mode mode)
+ machine_mode mode)
{
HOST_WIDE_INT plus_one;
mips_emit_int_order_test (enum rtx_code code, bool *invert_ptr,
rtx target, rtx cmp0, rtx cmp1)
{
- enum machine_mode mode;
+ machine_mode mode;
/* First see if there is a MIPS instruction that can do this operation.
If not, try doing the same for the inverse operation. If that also
a simple round-robin allocation scheme. */
static rtx
-mips_allocate_fcc (enum machine_mode mode)
+mips_allocate_fcc (machine_mode mode)
{
unsigned int regno, count;
mips_expand_conditional_trap (rtx comparison)
{
rtx op0, op1;
- enum machine_mode mode;
+ machine_mode mode;
enum rtx_code code;
/* MIPS conditional trap instructions don't have GT or LE flavors,
static void
mips_get_arg_info (struct mips_arg_info *info, const CUMULATIVE_ARGS *cum,
- enum machine_mode mode, const_tree type, bool named)
+ machine_mode mode, const_tree type, bool named)
{
bool doubleword_aligned_p;
unsigned int num_bytes, num_words, max_regs;
/* Implement TARGET_FUNCTION_ARG. */
static rtx
-mips_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+mips_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
if (mode == VOIDmode)
{
if (TARGET_MIPS16 && cum->fp_code != 0)
- return gen_rtx_REG ((enum machine_mode) cum->fp_code, 0);
+ return gen_rtx_REG ((machine_mode) cum->fp_code, 0);
else
return NULL;
}
&& GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
{
rtx real, imag;
- enum machine_mode inner;
+ machine_mode inner;
unsigned int regno;
inner = GET_MODE_INNER (mode);
/* Implement TARGET_FUNCTION_ARG_ADVANCE. */
static void
-mips_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+mips_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static int
mips_arg_partial_bytes (cumulative_args_t cum,
- enum machine_mode mode, tree type, bool named)
+ machine_mode mode, tree type, bool named)
{
struct mips_arg_info info;
to STACK_BOUNDARY bits if the type requires it. */
static unsigned int
-mips_function_arg_boundary (enum machine_mode mode, const_tree type)
+mips_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int alignment;
byte does. */
bool
-mips_pad_arg_upward (enum machine_mode mode, const_tree type)
+mips_pad_arg_upward (machine_mode mode, const_tree type)
{
/* On little-endian targets, the first byte of every stack argument
is passed in the first byte of the stack slot. */
the opposite if the most significant byte does. */
bool
-mips_pad_reg_upward (enum machine_mode mode, tree type)
+mips_pad_reg_upward (machine_mode mode, tree type)
{
/* No shifting is required for floating-point arguments. */
if (type != 0 ? FLOAT_TYPE_P (type) : GET_MODE_CLASS (mode) == MODE_FLOAT)
static bool
mips_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
if (mips_abi == ABI_EABI)
static bool
mips_callee_copies (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named)
{
return mips_abi == ABI_EABI && named;
floating-point register. */
static bool
-mips_return_mode_in_fpr_p (enum machine_mode mode)
+mips_return_mode_in_fpr_p (machine_mode mode)
{
return ((GET_MODE_CLASS (mode) == MODE_FLOAT
|| mode == V2SFmode
the structure itself has mode BLKmode. */
static rtx
-mips_return_fpr_single (enum machine_mode type_mode,
- enum machine_mode value_mode)
+mips_return_fpr_single (machine_mode type_mode,
+ machine_mode value_mode)
{
rtx x;
Otherwise the values are packed together as closely as possible. */
static rtx
-mips_return_fpr_pair (enum machine_mode mode,
- enum machine_mode mode1, HOST_WIDE_INT offset1,
- enum machine_mode mode2, HOST_WIDE_INT offset2)
+mips_return_fpr_pair (machine_mode mode,
+ machine_mode mode1, HOST_WIDE_INT offset1,
+ machine_mode mode2, HOST_WIDE_INT offset2)
{
int inc;
static rtx
mips_function_value_1 (const_tree valtype, const_tree fn_decl_or_type,
- enum machine_mode mode)
+ machine_mode mode)
{
if (valtype)
{
/* Implement TARGET_LIBCALL_VALUE. */
static rtx
-mips_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+mips_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return mips_function_value_1 (NULL_TREE, NULL_TREE, mode);
}
/* Implement TARGET_SETUP_INCOMING_VARARGS. */
static void
-mips_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+mips_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
{
/* We can't use move_block_from_reg, because it will use
the wrong mode. */
- enum machine_mode mode;
+ machine_mode mode;
int off, i;
/* Set OFF to the offset from virtual_incoming_args_rtx of
return mode MODE in the name of a MIPS16 function stub. */
static const char *
-mips16_call_stub_mode_suffix (enum machine_mode mode)
+mips16_call_stub_mode_suffix (machine_mode mode)
{
if (mode == SFmode)
return "sf";
for (f = (unsigned int) fp_code; f != 0; f >>= 2)
{
- enum machine_mode mode;
+ machine_mode mode;
struct mips_arg_info info;
if ((f & 3) == 1)
{
rtx fn, insn, retval;
tree return_type;
- enum machine_mode return_mode;
+ machine_mode return_mode;
const char *name;
return_type = DECL_RESULT (current_function_decl);
HOST_WIDE_INT offset, delta;
unsigned HOST_WIDE_INT bits;
int i;
- enum machine_mode mode;
+ machine_mode mode;
rtx *regs;
/* Work out how many bits to move at a time. If both operands have
rtx orig_addr, memsi_addr, memsi, shift, shiftsi, unshifted_mask;
rtx unshifted_mask_reg, mask, inverted_mask, si_op;
rtx res = NULL;
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (mem);
HOST_WIDE_INT bitpos)
{
rtx left, right;
- enum machine_mode mode;
+ machine_mode mode;
if (!mips_get_unaligned_mem (dest, width, bitpos, &left, &right))
return false;
/* Return true if X is a MEM with the same size as MODE. */
bool
-mips_mem_fits_mode_p (enum machine_mode mode, rtx x)
+mips_mem_fits_mode_p (machine_mode mode, rtx x)
{
return (MEM_P (x)
&& MEM_SIZE_KNOWN_P (x)
mask_low_and_shift_len for the actual definition. */
bool
-mask_low_and_shift_p (enum machine_mode mode, rtx mask, rtx shift, int maxlen)
+mask_low_and_shift_p (machine_mode mode, rtx mask, rtx shift, int maxlen)
{
return IN_RANGE (mask_low_and_shift_len (mode, mask, shift), 1, maxlen);
}
see the table in the comment before the pattern. */
bool
-and_operands_ok (enum machine_mode mode, rtx op1, rtx op2)
+and_operands_ok (machine_mode mode, rtx op1, rtx op2)
{
return (memory_operand (op1, mode)
? and_load_operand (op2, mode)
return the length of the mask, otherwise return -1. */
int
-mask_low_and_shift_len (enum machine_mode mode, rtx mask, rtx shift)
+mask_low_and_shift_len (machine_mode mode, rtx mask, rtx shift)
{
HOST_WIDE_INT shval;
/* Implement TARGET_SELECT_RTX_SECTION. */
static section *
-mips_select_rtx_section (enum machine_mode mode, rtx x,
+mips_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
/* ??? Consider using mergeable small data sections. */
mips_dwarf_register_span (rtx reg)
{
rtx high, low;
- enum machine_mode mode;
+ machine_mode mode;
/* By default, GCC maps increasing register numbers to increasing
memory locations, but paired FPRs are always little-endian,
stack pointer. */
static void
-mips_save_restore_reg (enum machine_mode mode, int regno,
+mips_save_restore_reg (machine_mode mode, int regno,
HOST_WIDE_INT offset, mips_save_restore_fn fn)
{
rtx mem;
mips_for_each_saved_gpr_and_fpr (HOST_WIDE_INT sp_offset,
mips_save_restore_fn fn)
{
- enum machine_mode fpr_mode;
+ machine_mode fpr_mode;
int regno;
const struct mips_frame_info *frame = &cfun->machine->frame;
HOST_WIDE_INT offset;
The result of this function is cached in mips_hard_regno_mode_ok. */
static bool
-mips_hard_regno_mode_ok_p (unsigned int regno, enum machine_mode mode)
+mips_hard_regno_mode_ok_p (unsigned int regno, machine_mode mode)
{
unsigned int size;
enum mode_class mclass;
/* Implement HARD_REGNO_NREGS. */
unsigned int
-mips_hard_regno_nregs (int regno, enum machine_mode mode)
+mips_hard_regno_nregs (int regno, machine_mode mode)
{
if (ST_REG_P (regno))
/* The size of FP status registers is always 4, because they only hold
in mips_hard_regno_nregs. */
int
-mips_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
+mips_class_max_nregs (enum reg_class rclass, machine_mode mode)
{
int size;
HARD_REG_SET left;
/* Implement CANNOT_CHANGE_MODE_CLASS. */
bool
-mips_cannot_change_mode_class (enum machine_mode from,
- enum machine_mode to,
+mips_cannot_change_mode_class (machine_mode from,
+ machine_mode to,
enum reg_class rclass)
{
/* Allow conversions between different Loongson integer vectors,
/* Implement target hook small_register_classes_for_mode_p. */
static bool
-mips_small_register_classes_for_mode_p (enum machine_mode mode
+mips_small_register_classes_for_mode_p (machine_mode mode
ATTRIBUTE_UNUSED)
{
return TARGET_MIPS16;
/* Return true if moves in mode MODE can use the FPU's mov.fmt instruction. */
static bool
-mips_mode_ok_for_mov_fmt_p (enum machine_mode mode)
+mips_mode_ok_for_mov_fmt_p (machine_mode mode)
{
switch (mode)
{
/* Implement MODES_TIEABLE_P. */
bool
-mips_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+mips_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
/* FPRs allow no mode punning, so it's not worth tying modes if we'd
prefer to put one of them in FPRs. */
the maximum for us. */
static int
-mips_register_move_cost (enum machine_mode mode,
+mips_register_move_cost (machine_mode mode,
reg_class_t from, reg_class_t to)
{
reg_class_t dregs;
/* Implement TARGET_MEMORY_MOVE_COST. */
static int
-mips_memory_move_cost (enum machine_mode mode, reg_class_t rclass, bool in)
+mips_memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
{
return (mips_cost->memory_latency
+ memory_move_secondary_cost (mode, rclass, in));
enum reg_class
mips_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode, rtx x, bool)
+ machine_mode mode, rtx x, bool)
{
int regno;
/* Implement TARGET_MODE_REP_EXTENDED. */
static int
-mips_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
+mips_mode_rep_extended (machine_mode mode, machine_mode mode_rep)
{
/* On 64-bit targets, SImode register values are sign-extended to DImode. */
if (TARGET_64BIT && mode == SImode && mode_rep == DImode)
/* Implement TARGET_VALID_POINTER_MODE. */
static bool
-mips_valid_pointer_mode (enum machine_mode mode)
+mips_valid_pointer_mode (machine_mode mode)
{
return mode == SImode || (TARGET_64BIT && mode == DImode);
}
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
static bool
-mips_vector_mode_supported_p (enum machine_mode mode)
+mips_vector_mode_supported_p (machine_mode mode)
{
switch (mode)
{
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
static bool
-mips_scalar_mode_supported_p (enum machine_mode mode)
+mips_scalar_mode_supported_p (machine_mode mode)
{
if (ALL_FIXED_POINT_MODE_P (mode)
&& GET_MODE_PRECISION (mode) <= 2 * BITS_PER_WORD)
\f
/* Implement TARGET_VECTORIZE_PREFERRED_SIMD_MODE. */
-static enum machine_mode
-mips_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
+static machine_mode
+mips_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
if (TARGET_PAIRED_SINGLE_FLOAT
&& mode == SFmode)
of the vector itself. */
static tree
-mips_builtin_vector_type (tree type, enum machine_mode mode)
+mips_builtin_vector_type (tree type, machine_mode mode)
{
static tree types[2 * (int) MAX_MACHINE_MODE];
int mode_index;
static rtx
mips_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode, int ignore)
+ machine_mode mode, int ignore)
{
tree fndecl;
unsigned int fcode, avail;
struct mips16_constant *next;
rtx value;
rtx_code_label *label;
- enum machine_mode mode;
+ machine_mode mode;
};
/* Information about an incomplete MIPS16 constant pool. FIRST is the
static rtx_code_label *
mips16_add_constant (struct mips16_constant_pool *pool,
- rtx value, enum machine_mode mode)
+ rtx value, machine_mode mode)
{
struct mips16_constant **p, *c;
bool first_of_size_p;
instruction emitted. MODE is the mode of the constant. */
static rtx_insn *
-mips16_emit_constants_1 (enum machine_mode mode, rtx value, rtx_insn *insn)
+mips16_emit_constants_1 (machine_mode mode, rtx value, rtx_insn *insn)
{
if (SCALAR_INT_MODE_P (mode) || ALL_SCALAR_FIXED_POINT_MODE_P (mode))
{
{
HOST_WIDE_INT bitoffset, bitsize;
tree inner, var_offset;
- enum machine_mode mode;
+ machine_mode mode;
int unsigned_p, volatile_p;
inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
mips_tuning_info.fast_mult_zero_zero_p = setting;
start_sequence ();
- enum machine_mode dword_mode = TARGET_64BIT ? TImode : DImode;
+ machine_mode dword_mode = TARGET_64BIT ? TImode : DImode;
rtx hilo = gen_rtx_REG (dword_mode, MD_REG_FIRST);
mips_emit_move_or_split (hilo, const0_rtx, SPLIT_FOR_SPEED);
for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
mips_hard_regno_mode_ok[mode][regno]
- = mips_hard_regno_mode_ok_p (regno, (enum machine_mode) mode);
+ = mips_hard_regno_mode_ok_p (regno, (machine_mode) mode);
/* Function to allocate machine-dependent function status. */
init_machine_status = &mips_init_machine_status;
when TARGET_LOONGSON_VECTORS is true. */
static unsigned HOST_WIDE_INT
-mips_shift_truncation_mask (enum machine_mode mode)
+mips_shift_truncation_mask (machine_mode mode)
{
if (TARGET_LOONGSON_VECTORS && VECTOR_MODE_P (mode))
return 0;
{
rtx target, op0, op1;
unsigned char perm[MAX_VECT_LEN];
- enum machine_mode vmode;
+ machine_mode vmode;
unsigned char nelt;
bool one_vector_p;
bool testing_p;
mips_expand_vselect_vconcat (rtx target, rtx op0, rtx op1,
const unsigned char *perm, unsigned nelt)
{
- enum machine_mode v2mode;
+ machine_mode v2mode;
rtx x;
v2mode = GET_MODE_2XWIDER_MODE (GET_MODE (op0));
/* Implement TARGET_VECTORIZE_VEC_PERM_CONST_OK. */
static bool
-mips_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+mips_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel)
{
struct expand_vec_perm_d d;
void
mips_expand_vec_unpack (rtx operands[2], bool unsigned_p, bool high_p)
{
- enum machine_mode imode = GET_MODE (operands[1]);
+ machine_mode imode = GET_MODE (operands[1]);
rtx (*unpack) (rtx, rtx, rtx);
rtx (*cmpgt) (rtx, rtx, rtx);
rtx tmp, dest, zero;
/* A subroutine of mips_expand_vec_init, expand via broadcast. */
static void
-mips_expand_vi_broadcast (enum machine_mode vmode, rtx target, rtx elt)
+mips_expand_vi_broadcast (machine_mode vmode, rtx target, rtx elt)
{
struct expand_vec_perm_d d;
rtx t1;
elements of VALS with zeros, copy the constant vector to TARGET. */
static void
-mips_expand_vi_constant (enum machine_mode vmode, unsigned nelt,
+mips_expand_vi_constant (machine_mode vmode, unsigned nelt,
rtx target, rtx vals)
{
rtvec vec = shallow_copy_rtvec (XVEC (vals, 0));
/* A subroutine of mips_expand_vec_init, expand anything via memory. */
static void
-mips_expand_vi_general (enum machine_mode vmode, enum machine_mode imode,
+mips_expand_vi_general (machine_mode vmode, machine_mode imode,
unsigned nelt, unsigned nvar, rtx target, rtx vals)
{
rtx mem = assign_stack_temp (vmode, GET_MODE_SIZE (vmode));
void
mips_expand_vector_init (rtx target, rtx vals)
{
- enum machine_mode vmode = GET_MODE (target);
- enum machine_mode imode = GET_MODE_INNER (vmode);
+ machine_mode vmode = GET_MODE (target);
+ machine_mode imode = GET_MODE_INNER (vmode);
unsigned i, nelt = GET_MODE_NUNITS (vmode);
unsigned nvar = 0, one_var = -1u;
bool all_same = true;
void
mips_expand_vec_reduc (rtx target, rtx in, rtx (*gen)(rtx, rtx, rtx))
{
- enum machine_mode vmode = GET_MODE (in);
+ machine_mode vmode = GET_MODE (in);
unsigned char perm2[2];
rtx last, next, fold, x;
bool ok;
mips_expand_vec_minmax (rtx target, rtx op0, rtx op1,
rtx (*cmp) (rtx, rtx, rtx), bool min_p)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
rtx tc, t0, t1, x;
tc = gen_reg_rtx (vmode);
static reg_class_t
mips_spill_class (reg_class_t rclass ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (TARGET_MIPS16)
return SPILL_REGS;
extern int mmix_starting_frame_offset (void);
extern int mmix_function_arg_regno_p (int, int);
extern void mmix_function_profiler (FILE *, int);
-extern int mmix_reversible_cc_mode (enum machine_mode);
+extern int mmix_reversible_cc_mode (machine_mode);
extern const char *mmix_text_section_asm_op (void);
extern const char *mmix_data_section_asm_op (void);
extern void mmix_output_quoted_string (FILE *, const char *, int);
extern void mmix_asm_output_addr_diff_elt (FILE *, rtx, int, int);
extern void mmix_asm_output_addr_vec_elt (FILE *, int);
extern enum reg_class mmix_secondary_reload_class
- (enum reg_class, enum machine_mode, rtx, int);
+ (enum reg_class, machine_mode, rtx, int);
extern rtx mmix_dynamic_chain_address (rtx);
extern rtx mmix_return_addr_rtx (int, rtx);
extern rtx mmix_eh_return_stackadj_rtx (void);
extern int mmix_constant_address_p (rtx);
extern void mmix_expand_prologue (void);
extern void mmix_expand_epilogue (void);
-extern rtx mmix_get_hard_reg_initial_val (enum machine_mode, int);
+extern rtx mmix_get_hard_reg_initial_val (machine_mode, int);
extern int mmix_asm_preferred_eh_data_format (int, int);
extern void mmix_setup_frame_addresses (void);
#ifdef RTX_CODE
/* Needs to be ifdef:d for sake of enum rtx_code. */
-extern enum machine_mode mmix_select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode mmix_select_cc_mode (enum rtx_code, rtx, rtx);
extern void mmix_canonicalize_comparison (enum rtx_code *, rtx *, rtx *);
extern rtx mmix_gen_compare_reg (enum rtx_code, rtx, rtx);
#endif
static void mmix_target_asm_function_epilogue (FILE *, HOST_WIDE_INT);
static reg_class_t mmix_preferred_reload_class (rtx, reg_class_t);
static reg_class_t mmix_preferred_output_reload_class (rtx, reg_class_t);
-static bool mmix_legitimate_address_p (enum machine_mode, rtx, bool);
-static bool mmix_legitimate_constant_p (enum machine_mode, rtx);
+static bool mmix_legitimate_address_p (machine_mode, rtx, bool);
+static bool mmix_legitimate_constant_p (machine_mode, rtx);
static void mmix_reorg (void);
static void mmix_asm_output_mi_thunk
(FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
static void mmix_setup_incoming_varargs
- (cumulative_args_t, enum machine_mode, tree, int *, int);
+ (cumulative_args_t, machine_mode, tree, int *, int);
static void mmix_file_start (void);
static void mmix_file_end (void);
static bool mmix_rtx_costs (rtx, int, int, int, int *, bool);
-static int mmix_register_move_cost (enum machine_mode,
+static int mmix_register_move_cost (machine_mode,
reg_class_t, reg_class_t);
static rtx mmix_struct_value_rtx (tree, int);
-static enum machine_mode mmix_promote_function_mode (const_tree,
- enum machine_mode,
+static machine_mode mmix_promote_function_mode (const_tree,
+ machine_mode,
int *, const_tree, int);
-static void mmix_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void mmix_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx mmix_function_arg_1 (const cumulative_args_t, enum machine_mode,
+static rtx mmix_function_arg_1 (const cumulative_args_t, machine_mode,
const_tree, bool, bool);
-static rtx mmix_function_incoming_arg (cumulative_args_t, enum machine_mode,
+static rtx mmix_function_incoming_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx mmix_function_arg (cumulative_args_t, enum machine_mode,
+static rtx mmix_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
static rtx mmix_function_value (const_tree, const_tree, bool);
-static rtx mmix_libcall_value (enum machine_mode, const_rtx);
+static rtx mmix_libcall_value (machine_mode, const_rtx);
static bool mmix_function_value_regno_p (const unsigned int);
static bool mmix_pass_by_reference (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static bool mmix_frame_pointer_required (void);
static void mmix_asm_trampoline_template (FILE *);
static void mmix_trampoline_init (rtx, tree, rtx);
enum reg_class
mmix_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED,
int in_p ATTRIBUTE_UNUSED)
{
}
static void
-mmix_function_arg_advance (cumulative_args_t argsp_v, enum machine_mode mode,
+mmix_function_arg_advance (cumulative_args_t argsp_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *argsp = get_cumulative_args (argsp_v);
static rtx
mmix_function_arg_1 (const cumulative_args_t argsp_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED,
bool incoming)
static rtx
mmix_function_arg (cumulative_args_t argsp,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named)
{
static rtx
mmix_function_incoming_arg (cumulative_args_t argsp,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named)
{
everything that goes by value. */
static bool
-mmix_pass_by_reference (cumulative_args_t argsp_v, enum machine_mode mode,
+mmix_pass_by_reference (cumulative_args_t argsp_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *argsp = get_cumulative_args (argsp_v);
const_tree func ATTRIBUTE_UNUSED,
bool outgoing)
{
- enum machine_mode mode = TYPE_MODE (valtype);
- enum machine_mode cmode;
+ machine_mode mode = TYPE_MODE (valtype);
+ machine_mode cmode;
int first_val_regnum = MMIX_OUTGOING_RETURN_VALUE_REGNUM;
rtx vec[MMIX_MAX_REGS_FOR_VALUE];
int i;
/* Implements TARGET_LIBCALL_VALUE. */
static rtx
-mmix_libcall_value (enum machine_mode mode,
+mmix_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, MMIX_RETURN_VALUE_REGNUM);
static void
mmix_setup_incoming_varargs (cumulative_args_t args_so_farp_v,
- enum machine_mode mode,
+ machine_mode mode,
tree vartype,
int *pretend_sizep,
int second_time ATTRIBUTE_UNUSED)
/* Return 1 if the address is OK, otherwise 0. */
bool
-mmix_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+mmix_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x,
bool strict_checking)
{
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-mmix_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+mmix_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
RTX_CODE code = GET_CODE (x);
/* SELECT_CC_MODE. */
-enum machine_mode
+machine_mode
mmix_select_cc_mode (RTX_CODE op, rtx x, rtx y ATTRIBUTE_UNUSED)
{
/* We use CCmode, CC_UNSmode, CC_FPmode, CC_FPEQmode and CC_FUNmode to
/* REVERSIBLE_CC_MODE. */
int
-mmix_reversible_cc_mode (enum machine_mode mode)
+mmix_reversible_cc_mode (machine_mode mode)
{
/* That is, all integer and the EQ, NE, ORDERED and UNORDERED float
compares. */
need to check that their constraints match, so say 3 for them. */
static int
-mmix_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+mmix_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from,
reg_class_t to)
{
from insn-emit.c. */
rtx
-mmix_get_hard_reg_initial_val (enum machine_mode mode, int regno)
+mmix_get_hard_reg_initial_val (machine_mode mode, int regno)
{
return get_hard_reg_initial_val (mode, regno);
}
rtx
mmix_gen_compare_reg (RTX_CODE code, rtx x, rtx y)
{
- enum machine_mode ccmode = SELECT_CC_MODE (code, x, y);
+ machine_mode ccmode = SELECT_CC_MODE (code, x, y);
return gen_reg_rtx (ccmode);
}
struct cc_type_conv
{
- enum machine_mode cc_mode;
+ machine_mode cc_mode;
/* Terminated with {UNKNOWN, NULL, NULL} */
const struct cc_conv *const convs;
size_t i;
int j;
- enum machine_mode mode = GET_MODE (XEXP (x, 0));
+ machine_mode mode = GET_MODE (XEXP (x, 0));
RTX_CODE cc = GET_CODE (x);
for (i = 0; i < ARRAY_SIZE (cc_convs); i++)
/* Worker function for TARGET_PROMOTE_FUNCTION_MODE. */
-enum machine_mode
+machine_mode
mmix_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree fntype ATTRIBUTE_UNUSED,
int for_return)
#ifdef RTX_CODE
extern rtx mn10300_legitimize_pic_address (rtx, rtx);
extern int mn10300_legitimate_pic_operand_p (rtx);
-extern rtx mn10300_legitimize_reload_address (rtx, enum machine_mode,
+extern rtx mn10300_legitimize_reload_address (rtx, machine_mode,
int, int, int);
extern bool mn10300_function_value_regno_p (const unsigned int);
extern unsigned int mn10300_get_live_callee_saved_regs (unsigned int *);
-extern bool mn10300_hard_regno_mode_ok (unsigned int, enum machine_mode);
-extern bool mn10300_modes_tieable (enum machine_mode, enum machine_mode);
+extern bool mn10300_hard_regno_mode_ok (unsigned int, machine_mode);
+extern bool mn10300_modes_tieable (machine_mode, machine_mode);
extern const char *mn10300_output_add (rtx[3], bool);
extern void mn10300_print_operand (FILE *, rtx, int);
extern void mn10300_print_operand_address (FILE *, rtx);
extern void mn10300_print_reg_list (FILE *, int);
-extern enum machine_mode mn10300_select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode mn10300_select_cc_mode (enum rtx_code, rtx, rtx);
extern unsigned int mn10300_store_multiple_regs (rtx);
-extern int mn10300_symbolic_operand (rtx, enum machine_mode);
-extern void mn10300_split_cbranch (enum machine_mode, rtx, rtx);
+extern int mn10300_symbolic_operand (rtx, machine_mode);
+extern void mn10300_split_cbranch (machine_mode, rtx, rtx);
extern int mn10300_split_and_operand_count (rtx);
-extern bool mn10300_match_ccmode (rtx, enum machine_mode);
+extern bool mn10300_match_ccmode (rtx, machine_mode);
#endif /* RTX_CODE */
extern bool mn10300_regno_in_class_p (unsigned, int, bool);
#define CC_FLAG_C 4
#define CC_FLAG_V 8
-static int cc_flags_for_mode(enum machine_mode);
+static int cc_flags_for_mode(machine_mode);
static int cc_flags_for_code(enum rtx_code);
\f
/* Implement TARGET_OPTION_OVERRIDE. */
case 'B':
{
enum rtx_code cmp = GET_CODE (x);
- enum machine_mode mode = GET_MODE (XEXP (x, 0));
+ machine_mode mode = GET_MODE (XEXP (x, 0));
const char *str;
int have_flags;
static reg_class_t
mn10300_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
enum reg_class rclass = (enum reg_class) rclass_i;
enum reg_class xclass = NO_REGS;
static bool
mn10300_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
unsigned HOST_WIDE_INT size;
from a function. If the result is NULL_RTX, the argument is pushed. */
static rtx
-mn10300_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+mn10300_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
(TYPE is null for libcalls where that information may not be available.) */
static void
-mn10300_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+mn10300_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
partially in registers and partially in memory. */
static int
-mn10300_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+mn10300_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
bool outgoing)
{
rtx rv;
- enum machine_mode mode = TYPE_MODE (valtype);
+ machine_mode mode = TYPE_MODE (valtype);
if (! POINTER_TYPE_P (valtype))
return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
/* Implements TARGET_LIBCALL_VALUE. */
static rtx
-mn10300_libcall_value (enum machine_mode mode,
+mn10300_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
int
mn10300_symbolic_operand (rtx op,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
switch (GET_CODE (op))
{
static rtx
mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
x = mn10300_legitimize_pic_address (oldx, NULL_RTX);
function record_unscaled_index_insn_codes. */
static bool
-mn10300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+mn10300_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
rtx base, index;
rtx
mn10300_legitimize_reload_address (rtx x,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
those here. */
static bool
-mn10300_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+mn10300_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
switch (GET_CODE (x))
{
with an address register. */
static int
-mn10300_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
+mn10300_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
HOST_WIDE_INT i;
early exit from reload meaning no work is required. */
static int
-mn10300_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+mn10300_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t ifrom, reg_class_t ito)
{
enum reg_class from = (enum reg_class) ifrom;
move cost above. This is not a problem. */
static int
-mn10300_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+mn10300_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t iclass, bool in ATTRIBUTE_UNUSED)
{
enum reg_class rclass = (enum reg_class) iclass;
}
bool
-mn10300_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
+mn10300_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
if (REGNO_REG_CLASS (regno) == FP_REGS
|| REGNO_REG_CLASS (regno) == FP_ACC_REGS)
}
bool
-mn10300_modes_tieable (enum machine_mode mode1, enum machine_mode mode2)
+mn10300_modes_tieable (machine_mode mode1, machine_mode mode2)
{
if (GET_MODE_CLASS (mode1) == MODE_FLOAT
&& GET_MODE_CLASS (mode2) != MODE_FLOAT)
}
static int
-cc_flags_for_mode (enum machine_mode mode)
+cc_flags_for_mode (machine_mode mode)
{
switch (mode)
{
}
}
-enum machine_mode
+machine_mode
mn10300_select_cc_mode (enum rtx_code code, rtx x, rtx y ATTRIBUTE_UNUSED)
{
int req;
/* A helper function for splitting cbranch patterns after reload. */
void
-mn10300_split_cbranch (enum machine_mode cmp_mode, rtx cmp_op, rtx label_ref)
+mn10300_split_cbranch (machine_mode cmp_mode, rtx cmp_op, rtx label_ref)
{
rtx flags, x;
/* A helper function for matching parallels that set the flags. */
bool
-mn10300_match_ccmode (rtx insn, enum machine_mode cc_mode)
+mn10300_match_ccmode (rtx insn, machine_mode cc_mode)
{
rtx op1, flags;
- enum machine_mode flags_mode;
+ machine_mode flags_mode;
gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
We always return values in register $r0 for moxie. */
static rtx
-moxie_libcall_value (enum machine_mode mode,
+moxie_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, MOXIE_R0);
static void
moxie_setup_incoming_varargs (cumulative_args_t cum_v,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_size, int no_rtl)
{
NULL_RTX if there's no more space. */
static rtx
-moxie_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+moxie_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
: (unsigned) int_size_in_bytes (TYPE))
static void
-moxie_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+moxie_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
static bool
moxie_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
unsigned HOST_WIDE_INT size;
static int
moxie_arg_partial_bytes (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
void msp430_expand_helper (rtx *operands, const char *, bool);
void msp430_expand_prologue (void);
const char * msp430x_extendhisi (rtx *);
-void msp430_fixup_compare_operands (enum machine_mode, rtx *);
-int msp430_hard_regno_mode_ok (int, enum machine_mode);
-int msp430_hard_regno_nregs (int, enum machine_mode);
-int msp430_hard_regno_nregs_has_padding (int, enum machine_mode);
-int msp430_hard_regno_nregs_with_padding (int, enum machine_mode);
+void msp430_fixup_compare_operands (machine_mode, rtx *);
+int msp430_hard_regno_mode_ok (int, machine_mode);
+int msp430_hard_regno_nregs (int, machine_mode);
+int msp430_hard_regno_nregs_has_padding (int, machine_mode);
+int msp430_hard_regno_nregs_with_padding (int, machine_mode);
bool msp430_hwmult_enabled (void);
rtx msp430_incoming_return_addr_rtx (void);
void msp430_init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree, int);
bool msp430_is_interrupt_func (void);
const char * msp430x_logical_shift_right (rtx);
const char * msp430_mcu_name (void);
-bool msp430_modes_tieable_p (enum machine_mode, enum machine_mode);
+bool msp430_modes_tieable_p (machine_mode, machine_mode);
void msp430_output_labelref (FILE *, const char *);
void msp430_register_pragmas (void);
rtx msp430_return_addr_rtx (int);
void msp430_split_movsi (rtx *);
void msp430_start_function (FILE *, const char *, tree);
-rtx msp430_subreg (enum machine_mode, rtx, enum machine_mode, int);
+rtx msp430_subreg (machine_mode, rtx, machine_mode, int);
bool msp430_use_f5_series_hwmult (void);
#endif /* GCC_MSP430_PROTOS_H */
#define TARGET_SCALAR_MODE_SUPPORTED_P msp430_scalar_mode_supported_p
static bool
-msp430_scalar_mode_supported_p (enum machine_mode m)
+msp430_scalar_mode_supported_p (machine_mode m)
{
if (m == PSImode && msp430x)
return true;
PSImode value, but not an SImode value. */
int
msp430_hard_regno_nregs (int regno ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (mode == PSImode && msp430x)
return 1;
/* Implements HARD_REGNO_NREGS_HAS_PADDING. */
int
msp430_hard_regno_nregs_has_padding (int regno ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (mode == PSImode && msp430x)
return 1;
/* Implements HARD_REGNO_NREGS_WITH_PADDING. */
int
msp430_hard_regno_nregs_with_padding (int regno ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (mode == PSImode)
return 2;
/* Implements HARD_REGNO_MODE_OK. */
int
msp430_hard_regno_mode_ok (int regno ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
return regno <= (ARG_POINTER_REGNUM - msp430_hard_regno_nregs (regno, mode));
}
/* Implements MODES_TIEABLE_P. */
bool
-msp430_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+msp430_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
if ((mode1 == PSImode || mode2 == SImode)
|| (mode1 == SImode || mode2 == PSImode))
#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE msp430_addr_space_pointer_mode
-static enum machine_mode
+static machine_mode
msp430_addr_space_pointer_mode (addr_space_t addrspace)
{
switch (addrspace)
#undef TARGET_UNWIND_WORD_MODE
#define TARGET_UNWIND_WORD_MODE msp430_unwind_word_mode
-static enum machine_mode
+static machine_mode
msp430_unwind_word_mode (void)
{
return TARGET_LARGE ? PSImode : HImode;
#define TARGET_LIBCALL_VALUE msp430_libcall_value
rtx
-msp430_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+msp430_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, 12);
}
code that determines where an argument will be passed. */
static void
msp430_evaluate_arg (cumulative_args_t cap,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named)
{
rtx
msp430_function_arg (cumulative_args_t cap,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named)
{
int
msp430_arg_partial_bytes (cumulative_args_t cap,
- enum machine_mode mode,
+ machine_mode mode,
tree type,
bool named)
{
static bool
msp430_pass_by_reference (cumulative_args_t cap ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED)
{
static bool
msp430_callee_copies (cumulative_args_t cap ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
void
msp430_function_arg_advance (cumulative_args_t cap,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type,
bool named)
{
#define TARGET_FUNCTION_ARG_BOUNDARY msp430_function_arg_boundary
static unsigned int
-msp430_function_arg_boundary (enum machine_mode mode, const_tree type)
+msp430_function_arg_boundary (machine_mode mode, const_tree type)
{
if (mode == BLKmode
&& int_size_in_bytes (type) > 1)
static bool
msp430_return_in_memory (const_tree ret_type, const_tree fntype ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (ret_type);
+ machine_mode mode = TYPE_MODE (ret_type);
if (mode == BLKmode
|| (fntype && TREE_CODE (TREE_TYPE (fntype)) == RECORD_TYPE)
#undef TARGET_GET_RAW_ARG_MODE
#define TARGET_GET_RAW_ARG_MODE msp430_get_raw_arg_mode
-static enum machine_mode
+static machine_mode
msp430_get_raw_arg_mode (int regno)
{
return (regno == ARG_POINTER_REGNUM) ? VOIDmode : Pmode;
#undef TARGET_GET_RAW_RESULT_MODE
#define TARGET_GET_RAW_RESULT_MODE msp430_get_raw_result_mode
-static enum machine_mode
+static machine_mode
msp430_get_raw_result_mode (int regno ATTRIBUTE_UNUSED)
{
return Pmode;
}
bool
-msp430_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+msp430_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED,
bool strict ATTRIBUTE_UNUSED)
{
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P msp430_addr_space_legitimate_address_p
bool
-msp430_addr_space_legitimate_address_p (enum machine_mode mode,
+msp430_addr_space_legitimate_address_p (machine_mode mode,
rtx x,
bool strict,
addr_space_t as ATTRIBUTE_UNUSED)
#define TARGET_LEGITIMATE_CONSTANT_P msp430_legitimate_constant
static bool
-msp430_legitimate_constant (enum machine_mode mode, rtx x)
+msp430_legitimate_constant (machine_mode mode, rtx x)
{
return ! CONST_INT_P (x)
|| mode != PSImode
msp430_expand_builtin (tree exp,
rtx target ATTRIBUTE_UNUSED,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
char *helper_const = NULL;
int arg2 = 13;
int arg1sz = 1;
- enum machine_mode arg0mode = GET_MODE (operands[0]);
- enum machine_mode arg1mode = GET_MODE (operands[1]);
- enum machine_mode arg2mode = GET_MODE (operands[2]);
+ machine_mode arg0mode = GET_MODE (operands[0]);
+ machine_mode arg1mode = GET_MODE (operands[1]);
+ machine_mode arg2mode = GET_MODE (operands[2]);
int have_430x = msp430x ? 1 : 0;
if (CONST_INT_P (operands[2]))
/* Called by cbranch<mode>4 to coerce operands into usable forms. */
void
-msp430_fixup_compare_operands (enum machine_mode my_mode, rtx * operands)
+msp430_fixup_compare_operands (machine_mode my_mode, rtx * operands)
{
/* constants we're looking for, not constants which are allowed. */
int const_op_idx = 1;
need it to below, so we use this function for when we must get a
valid subreg in a "natural" state. */
rtx
-msp430_subreg (enum machine_mode mode, rtx r, enum machine_mode omode, int byte)
+msp430_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
{
rtx rv;
&& SUBREG_BYTE (r) == 0)
{
rtx ireg = SUBREG_REG (r);
- enum machine_mode imode = GET_MODE (ireg);
+ machine_mode imode = GET_MODE (ireg);
/* special case for (HI (SI (PSI ...), 0)) */
if (imode == PSImode
int
nds32_address_cost_impl (rtx address,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed)
{
nds32_expand_builtin_impl (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
enum nds32_16bit_address_type
nds32_mem_format (rtx op)
{
- enum machine_mode mode_test;
+ machine_mode mode_test;
int val;
int regno;
const char *
nds32_output_casesi_pc_relative (rtx *operands)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx diff_vec;
diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[1])));
/* -- How Values Fit in Registers. */
-extern int nds32_hard_regno_nregs (int, enum machine_mode);
-extern int nds32_hard_regno_mode_ok (int, enum machine_mode);
+extern int nds32_hard_regno_nregs (int, machine_mode);
+extern int nds32_hard_regno_mode_ok (int, machine_mode);
\f
/* Register Classes. */
/* Auxiliary functions for auxiliary macros in nds32.h. */
-extern bool nds32_ls_333_p (rtx, rtx, rtx, enum machine_mode);
+extern bool nds32_ls_333_p (rtx, rtx, rtx, machine_mode);
/* Auxiliary functions for expanding rtl used in nds32-multiple.md. */
extern void nds32_init_builtins_impl (void);
extern rtx nds32_expand_builtin_impl (tree, rtx, rtx,
- enum machine_mode, int);
+ machine_mode, int);
/* Auxiliary functions for ISR implementation. */
/* Auxiliary functions for cost calculation. */
extern bool nds32_rtx_costs_impl (rtx, int, int, int, int *, bool);
-extern int nds32_address_cost_impl (rtx, enum machine_mode, addr_space_t, bool);
+extern int nds32_address_cost_impl (rtx, machine_mode, addr_space_t, bool);
/* ------------------------------------------------------------------------ */
/* Return true if MODE/TYPE need double word alignment. */
static bool
-nds32_needs_double_word_align (enum machine_mode mode, const_tree type)
+nds32_needs_double_word_align (machine_mode mode, const_tree type)
{
unsigned int align;
INDEX : Check if this rtx is valid to be a index for address.
STRICT : If it is true, we are in reload pass or after reload pass. */
static bool
-nds32_legitimate_index_p (enum machine_mode outer_mode,
+nds32_legitimate_index_p (machine_mode outer_mode,
rtx index,
bool strict)
{
static unsigned char
nds32_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
/* Return the maximum number of consecutive registers
needed to represent "mode" in a register of "rclass". */
/* -- Passing Arguments in Registers. */
static rtx
-nds32_function_arg (cumulative_args_t ca, enum machine_mode mode,
+nds32_function_arg (cumulative_args_t ca, machine_mode mode,
const_tree type, bool named)
{
unsigned int regno;
}
static bool
-nds32_must_pass_in_stack (enum machine_mode mode, const_tree type)
+nds32_must_pass_in_stack (machine_mode mode, const_tree type)
{
/* Return true if a type must be passed in memory.
If it is NOT using hard float abi, small aggregates can be
}
static int
-nds32_arg_partial_bytes (cumulative_args_t ca, enum machine_mode mode,
+nds32_arg_partial_bytes (cumulative_args_t ca, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
/* Returns the number of bytes at the beginning of an argument that
}
static void
-nds32_function_arg_advance (cumulative_args_t ca, enum machine_mode mode,
+nds32_function_arg_advance (cumulative_args_t ca, machine_mode mode,
const_tree type, bool named)
{
- enum machine_mode sub_mode;
+ machine_mode sub_mode;
CUMULATIVE_ARGS *cum = get_cumulative_args (ca);
if (named)
}
static unsigned int
-nds32_function_arg_boundary (enum machine_mode mode, const_tree type)
+nds32_function_arg_boundary (machine_mode mode, const_tree type)
{
return (nds32_needs_double_word_align (mode, type)
? NDS32_DOUBLE_WORD_ALIGNMENT
const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp;
mode = TYPE_MODE (ret_type);
}
static rtx
-nds32_libcall_value (enum machine_mode mode,
+nds32_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, NDS32_GPR_RET_FIRST_REGNUM);
static void
nds32_setup_incoming_varargs (cumulative_args_t ca,
- enum machine_mode mode,
+ machine_mode mode,
tree type,
int *pretend_args_size,
int second_time ATTRIBUTE_UNUSED)
/* Addressing Modes. */
static bool
-nds32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+nds32_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
/* For (mem:DI addr) or (mem:DF addr) case,
we only allow 'addr' to be [reg], [symbol_ref],
/* Describing Relative Costs of Operations. */
static int
-nds32_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+nds32_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from,
reg_class_t to)
{
}
static int
-nds32_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+nds32_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
static int
nds32_address_cost (rtx address,
- enum machine_mode mode,
+ machine_mode mode,
addr_space_t as,
bool speed)
{
nds32_expand_builtin (tree exp,
rtx target,
rtx subtarget,
- enum machine_mode mode,
+ machine_mode mode,
int ignore)
{
return nds32_expand_builtin_impl (exp, target, subtarget, mode, ignore);
int
nds32_hard_regno_nregs (int regno ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
}
int
-nds32_hard_regno_mode_ok (int regno, enum machine_mode mode)
+nds32_hard_regno_mode_ok (int regno, machine_mode mode)
{
/* Restrict double-word quantities to even register pairs. */
if (HARD_REGNO_NREGS (regno, mode) == 1
This is auxiliary extern function for auxiliary macro in nds32.h.
Because it is a little complicated, we use function instead of macro. */
bool
-nds32_ls_333_p (rtx rt, rtx ra, rtx imm, enum machine_mode mode)
+nds32_ls_333_p (rtx rt, rtx ra, rtx imm, machine_mode mode)
{
if (REGNO_REG_CLASS (REGNO (rt)) == LOW_REGS
&& REGNO_REG_CLASS (REGNO (ra)) == LOW_REGS)
extern void nios2_function_profiler (FILE *, int);
#ifdef RTX_CODE
-extern int nios2_emit_move_sequence (rtx *, enum machine_mode);
-extern void nios2_emit_expensive_div (rtx *, enum machine_mode);
+extern int nios2_emit_move_sequence (rtx *, machine_mode);
+extern void nios2_emit_expensive_div (rtx *, machine_mode);
extern void nios2_adjust_call_address (rtx *);
extern rtx nios2_get_return_address (int);
extern void nios2_set_return_address (rtx, rtx);
-extern bool nios2_validate_compare (enum machine_mode, rtx *, rtx *, rtx *);
-extern bool nios2_validate_fpu_compare (enum machine_mode, rtx *, rtx *, rtx *,
+extern bool nios2_validate_compare (machine_mode, rtx *, rtx *, rtx *);
+extern bool nios2_validate_fpu_compare (machine_mode, rtx *, rtx *, rtx *,
bool);
extern bool nios2_fpu_insn_enabled (enum n2fpu_code);
#ifdef TREE_CODE
#ifdef ARGS_SIZE_RTX
/* expr.h defines both ARGS_SIZE_RTX and `enum direction' */
-extern enum direction nios2_function_arg_padding (enum machine_mode, const_tree);
-extern enum direction nios2_block_reg_padding (enum machine_mode, tree, int);
+extern enum direction nios2_function_arg_padding (machine_mode, const_tree);
+extern enum direction nios2_block_reg_padding (machine_mode, tree, int);
#endif /* ARGS_SIZE_RTX */
#endif /* TREE_CODE */
settings. */
static bool
-nios2_fpu_compare_enabled (enum rtx_code cond, enum machine_mode mode)
+nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
{
if (mode == SFmode)
switch (cond)
sdata section we can save even more cycles by doing things
gp relative. */
void
-nios2_emit_expensive_div (rtx *operands, enum machine_mode mode)
+nios2_emit_expensive_div (rtx *operands, machine_mode mode)
{
rtx or_result, shift_left_result;
rtx lookup_value;
static void
nios2_alternate_compare_const (enum rtx_code code, rtx op,
enum rtx_code *alt_code, rtx *alt_op,
- enum machine_mode mode)
+ machine_mode mode)
{
HOST_WIDE_INT opval = INTVAL (op);
enum rtx_code scode = signed_condition (code);
Returns true if FPU compare can be done. */
bool
-nios2_validate_fpu_compare (enum machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
+nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
bool modify_p)
{
bool rev_p = false;
/* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
nios2 supported form. Returns true if success. */
bool
-nios2_validate_compare (enum machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
+nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
{
enum rtx_code code = GET_CODE (*cmp);
enum rtx_code alt_code;
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-nios2_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
rtx base, offset;
split_const (x, &base, &offset);
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-nios2_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return nios2_legitimate_constant_p (mode, x) == false;
}
/* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
-nios2_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx operand, bool strict_p)
{
switch (GET_CODE (operand))
/* Implement TARGET_LEGITIMIZE_ADDRESS. */
static rtx
nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (CONSTANT_P (x))
return nios2_legitimize_constant_address (x);
/* Main expander function for RTL moves. */
int
-nios2_emit_move_sequence (rtx *operands, enum machine_mode mode)
+nios2_emit_move_sequence (rtx *operands, machine_mode mode)
{
rtx to = operands[0];
rtx from = operands[1];
int num_operands = N2FPU (code).num_operands;
const char *insn_name = N2FPU_NAME (code);
tree ftype = nios2_ftype (N2FPU_FTCODE (code));
- enum machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
- enum machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
+ machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
+ machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
/* Prepare X register for DF input operands. */
if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-nios2_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
static int
nios2_arg_partial_bytes (cumulative_args_t cum_v,
- enum machine_mode mode, tree type ATTRIBUTE_UNUSED,
+ machine_mode mode, tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
may not be available. */
static void
-nios2_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
}
enum direction
-nios2_function_arg_padding (enum machine_mode mode, const_tree type)
+nios2_function_arg_padding (machine_mode mode, const_tree type)
{
/* On little-endian targets, the first byte of every stack argument
is passed in the first byte of the stack slot. */
}
enum direction
-nios2_block_reg_padding (enum machine_mode mode, tree type,
+nios2_block_reg_padding (machine_mode mode, tree type,
int first ATTRIBUTE_UNUSED)
{
return nios2_function_arg_padding (mode, type);
/* Implement TARGET_LIBCALL_VALUE. */
static rtx
-nios2_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
}
own va_arg type. */
static void
nios2_setup_incoming_varargs (cumulative_args_t cum_v,
- enum machine_mode mode, tree type,
+ machine_mode mode, tree type,
int *pretend_size, int second_time)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
enum insn_code icode = N2FPU_ICODE (code);
int nargs, argno, opno = 0;
int num_operands = N2FPU (code).num_operands;
- enum machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
+ machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
bool has_target_p = (dst_mode != VOIDmode);
if (N2FPU_N (code) < 0)
nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
{
bool has_target_p = (TREE_TYPE (exp) != void_type_node);
- enum machine_mode tmode = VOIDmode;
+ machine_mode tmode = VOIDmode;
int nargs, argno;
rtx value, insn, unspec_args[3];
tree arg;
bool has_target_p;
rtx addr, mem, val;
struct expand_operand ops[MAX_RECOG_OPERANDS];
- enum machine_mode mode = insn_data[d->icode].operand[0].mode;
+ machine_mode mode = insn_data[d->icode].operand[0].mode;
addr = expand_normal (CALL_EXPR_ARG (exp, 0));
mem = gen_rtx_MEM (mode, addr);
static rtx
nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
extern int pa_fmpyaddoperands (rtx *);
extern int pa_fmpysuboperands (rtx *);
extern void pa_emit_bcond_fp (rtx[]);
-extern int pa_emit_move_sequence (rtx *, enum machine_mode, rtx);
+extern int pa_emit_move_sequence (rtx *, machine_mode, rtx);
extern int pa_emit_hpdiv_const (rtx *, int);
extern int pa_is_function_label_plus_const (rtx);
extern int pa_fpstore_bypass_p (rtx_insn *, rtx_insn *);
extern int pa_attr_length_millicode_call (rtx_insn *);
extern int pa_attr_length_call (rtx_insn *, int);
extern int pa_attr_length_indirect_call (rtx_insn *);
-extern rtx pa_legitimize_reload_address (rtx, enum machine_mode,
+extern rtx pa_legitimize_reload_address (rtx, machine_mode,
int, int, int);
/* Declare functions defined in pa.c and used in templates. */
#ifdef ARGS_SIZE_RTX
/* expr.h defines ARGS_SIZE_RTX and `enum direction' */
#ifdef TREE_CODE
-extern enum direction pa_function_arg_padding (enum machine_mode, const_tree);
+extern enum direction pa_function_arg_padding (machine_mode, const_tree);
#endif
#endif /* ARGS_SIZE_RTX */
extern int pa_insn_refs_are_delayed (rtx_insn *);
unsigned HOST_WIDE_INT,
unsigned int);
extern void pa_hpux_asm_output_external (FILE *, tree, const char *);
-extern bool pa_cannot_change_mode_class (enum machine_mode, enum machine_mode,
+extern bool pa_cannot_change_mode_class (machine_mode, machine_mode,
enum reg_class);
-extern bool pa_modes_tieable_p (enum machine_mode, enum machine_mode);
+extern bool pa_modes_tieable_p (machine_mode, machine_mode);
extern HOST_WIDE_INT pa_initial_elimination_offset (int, int);
extern const int pa_magic_milli[];
int
pa_fpstore_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
- enum machine_mode store_mode;
- enum machine_mode other_mode;
+ machine_mode store_mode;
+ machine_mode other_mode;
rtx set;
if (recog_memoized (in_insn) < 0
static void pa_option_override (void);
static void copy_reg_pointer (rtx, rtx);
static void fix_range (const char *);
-static int hppa_register_move_cost (enum machine_mode mode, reg_class_t,
+static int hppa_register_move_cost (machine_mode mode, reg_class_t,
reg_class_t);
-static int hppa_address_cost (rtx, enum machine_mode mode, addr_space_t, bool);
+static int hppa_address_cost (rtx, machine_mode mode, addr_space_t, bool);
static bool hppa_rtx_costs (rtx, int, int, int, int *, bool);
-static inline rtx force_mode (enum machine_mode, rtx);
+static inline rtx force_mode (machine_mode, rtx);
static void pa_reorg (void);
static void pa_combine_instructions (void);
static int pa_can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, int, rtx,
static void load_reg (int, HOST_WIDE_INT, int);
static void set_reg_plus_d (int, int, HOST_WIDE_INT, int);
static rtx pa_function_value (const_tree, const_tree, bool);
-static rtx pa_libcall_value (enum machine_mode, const_rtx);
+static rtx pa_libcall_value (machine_mode, const_rtx);
static bool pa_function_value_regno_p (const unsigned int);
static void pa_output_function_prologue (FILE *, HOST_WIDE_INT);
static void update_total_code_bytes (unsigned int);
static void pa_asm_out_destructor (rtx, int);
#endif
static void pa_init_builtins (void);
-static rtx pa_expand_builtin (tree, rtx, rtx, enum machine_mode mode, int);
+static rtx pa_expand_builtin (tree, rtx, rtx, machine_mode mode, int);
static rtx hppa_builtin_saveregs (void);
static void hppa_va_start (tree, rtx);
static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
-static bool pa_scalar_mode_supported_p (enum machine_mode);
+static bool pa_scalar_mode_supported_p (machine_mode);
static bool pa_commutative_p (const_rtx x, int outer_code);
static void copy_fp_args (rtx) ATTRIBUTE_UNUSED;
static int length_fp_args (rtx) ATTRIBUTE_UNUSED;
-static rtx hppa_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx hppa_legitimize_address (rtx, rtx, machine_mode);
static inline void pa_file_start_level (void) ATTRIBUTE_UNUSED;
static inline void pa_file_start_space (int) ATTRIBUTE_UNUSED;
static inline void pa_file_start_file (int) ATTRIBUTE_UNUSED;
#endif
static void pa_init_libfuncs (void);
static rtx pa_struct_value_rtx (tree, int);
-static bool pa_pass_by_reference (cumulative_args_t, enum machine_mode,
+static bool pa_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static int pa_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int pa_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static void pa_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void pa_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx pa_function_arg (cumulative_args_t, enum machine_mode,
+static rtx pa_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static unsigned int pa_function_arg_boundary (enum machine_mode, const_tree);
+static unsigned int pa_function_arg_boundary (machine_mode, const_tree);
static struct machine_function * pa_init_machine_status (void);
static reg_class_t pa_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
secondary_reload_info *);
static void pa_extra_live_on_entry (bitmap);
-static enum machine_mode pa_promote_function_mode (const_tree,
- enum machine_mode, int *,
+static machine_mode pa_promote_function_mode (const_tree,
+ machine_mode, int *,
const_tree, int);
static void pa_asm_trampoline_template (FILE *);
static rtx pa_internal_arg_pointer (void);
static bool pa_can_eliminate (const int, const int);
static void pa_conditional_register_usage (void);
-static enum machine_mode pa_c_mode_for_suffix (char);
+static machine_mode pa_c_mode_for_suffix (char);
static section *pa_function_section (tree, enum node_frequency, bool, bool);
-static bool pa_cannot_force_const_mem (enum machine_mode, rtx);
-static bool pa_legitimate_constant_p (enum machine_mode, rtx);
+static bool pa_cannot_force_const_mem (machine_mode, rtx);
+static bool pa_legitimate_constant_p (machine_mode, rtx);
static unsigned int pa_section_type_flags (tree, const char *, int);
-static bool pa_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool pa_legitimate_address_p (machine_mode, rtx, bool);
/* The following extra sections are only used for SOM. */
static GTY(()) section *som_readonly_data_section;
static rtx
pa_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
case PA_BUILTIN_INFQ:
case PA_BUILTIN_HUGE_VALQ:
{
- enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
+ machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
REAL_VALUE_TYPE inf;
rtx tmp;
than one register, we lose. */
static rtx
-legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
+legitimize_pic_address (rtx orig, machine_mode mode, rtx reg)
{
rtx pic_ref = orig;
rtx
hppa_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx orig = x;
Other copies are reasonably cheap. */
static int
-hppa_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+hppa_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
if (from == SHIFT_REGS)
as pa_legitimate_address_p. */
static int
-hppa_address_cost (rtx X, enum machine_mode mode ATTRIBUTE_UNUSED,
+hppa_address_cost (rtx X, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
/* Ensure mode of ORIG, a REG rtx, is MODE. Returns either ORIG or a
new rtx with the correct mode. */
static inline rtx
-force_mode (enum machine_mode mode, rtx orig)
+force_mode (machine_mode mode, rtx orig)
{
if (mode == GET_MODE (orig))
return orig;
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-pa_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+pa_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return tls_referenced_p (x);
}
of SCRATCH_REG in the proper mode. */
int
-pa_emit_move_sequence (rtx *operands, enum machine_mode mode, rtx scratch_reg)
+pa_emit_move_sequence (rtx *operands, machine_mode mode, rtx scratch_reg)
{
register rtx operand0 = operands[0];
register rtx operand1 = operands[1];
pa_output_arg_descriptor (rtx call_insn)
{
const char *arg_regs[4];
- enum machine_mode arg_mode;
+ machine_mode arg_mode;
rtx link;
int i, output_flag = 0;
int regno;
static reg_class_t
pa_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
int regno;
enum reg_class rclass = (enum reg_class) rclass_i;
static bool
pa_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
HOST_WIDE_INT size;
}
enum direction
-pa_function_arg_padding (enum machine_mode mode, const_tree type)
+pa_function_arg_padding (machine_mode mode, const_tree type)
{
if (mode == BLKmode
|| (TARGET_64BIT
2 * BITS_PER_WORD isn't equal LONG_LONG_TYPE_SIZE. */
static bool
-pa_scalar_mode_supported_p (enum machine_mode mode)
+pa_scalar_mode_supported_p (machine_mode mode)
{
int precision = GET_MODE_PRECISION (mode);
int
pa_fmpyaddoperands (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
/* Must be a floating point mode. */
if (mode != SFmode && mode != DFmode)
int
pa_fmpysuboperands (rtx *operands)
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
/* Must be a floating point mode. */
if (mode != SFmode && mode != DFmode)
/* Promote the return value, but not the arguments. */
-static enum machine_mode
+static machine_mode
pa_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree fntype ATTRIBUTE_UNUSED,
int for_return)
const_tree func ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode valmode;
+ machine_mode valmode;
if (AGGREGATE_TYPE_P (valtype)
|| TREE_CODE (valtype) == COMPLEX_TYPE
/* Implement the TARGET_LIBCALL_VALUE hook. */
static rtx
-pa_libcall_value (enum machine_mode mode,
+pa_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
if (! TARGET_SOFT_FLOAT
(TYPE is null for libcalls where that information may not be available.) */
static void
-pa_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+pa_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
??? We might want to restructure this so that it looks more like other
ports. */
static rtx
-pa_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+pa_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Arguments larger than one word are double word aligned. */
static unsigned int
-pa_function_arg_boundary (enum machine_mode mode, const_tree type)
+pa_function_arg_boundary (machine_mode mode, const_tree type)
{
bool singleword = (type
? (integer_zerop (TYPE_SIZE (type))
then this routine should return zero. */
static int
-pa_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+pa_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
in register class RCLASS is invalid. */
bool
-pa_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
+pa_cannot_change_mode_class (machine_mode from, machine_mode to,
enum reg_class rclass)
{
if (from == to)
in the floating-point registers. */
bool
-pa_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+pa_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
/* Don't tie modes in different classes. */
if (GET_MODE_CLASS (mode1) != GET_MODE_CLASS (mode2))
/* Target hook for c_mode_for_suffix. */
-static enum machine_mode
+static machine_mode
pa_c_mode_for_suffix (char suffix)
{
if (HPUX_LONG_DOUBLE_LIBRARY)
to handle CONST_DOUBLES. */
static bool
-pa_legitimate_constant_p (enum machine_mode mode, rtx x)
+pa_legitimate_constant_p (machine_mode mode, rtx x)
{
if (GET_MODE_CLASS (mode) == MODE_FLOAT && x != CONST0_RTX (mode))
return false;
output as REG+SMALLINT. */
static bool
-pa_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+pa_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
if ((REG_P (x)
&& (strict ? STRICT_REG_OK_FOR_BASE_P (x)
There may be more opportunities to improve code with this hook. */
rtx
-pa_legitimize_reload_address (rtx ad, enum machine_mode mode,
+pa_legitimize_reload_address (rtx ad, machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
/* declarations */
#ifdef RTX_CODE
-extern int simple_memory_operand (rtx, enum machine_mode);
+extern int simple_memory_operand (rtx, machine_mode);
extern int legitimate_const_double_p (rtx);
extern void notice_update_cc_on_set (rtx, rtx);
extern const char *output_block_move (rtx *);
extern const char *output_jump (enum rtx_code, int, int);
extern void print_operand_address (FILE *, rtx);
-extern bool pdp11_cannot_change_mode_class (enum machine_mode,
- enum machine_mode, enum reg_class);
+extern bool pdp11_cannot_change_mode_class (machine_mode,
+ machine_mode, enum reg_class);
extern bool pdp11_secondary_memory_needed (reg_class_t, reg_class_t,
- enum machine_mode);
+ machine_mode);
typedef enum { no_action, dec_before, inc_after } pdp11_action;
typedef enum { little, either, big } pdp11_partorder;
extern bool pdp11_expand_operands (rtx *, rtx [][2], int,
static bool pdp11_rtx_costs (rtx, int, int, int, int *, bool);
static bool pdp11_return_in_memory (const_tree, const_tree);
static rtx pdp11_function_value (const_tree, const_tree, bool);
-static rtx pdp11_libcall_value (enum machine_mode, const_rtx);
+static rtx pdp11_libcall_value (machine_mode, const_rtx);
static bool pdp11_function_value_regno_p (const unsigned int);
static void pdp11_trampoline_init (rtx, tree, rtx);
-static rtx pdp11_function_arg (cumulative_args_t, enum machine_mode,
+static rtx pdp11_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
static void pdp11_function_arg_advance (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static void pdp11_conditional_register_usage (void);
-static bool pdp11_legitimate_constant_p (enum machine_mode, rtx);
+static bool pdp11_legitimate_constant_p (machine_mode, rtx);
-static bool pdp11_scalar_mode_supported_p (enum machine_mode);
+static bool pdp11_scalar_mode_supported_p (machine_mode);
\f
/* Initialize the GCC target structure. */
#undef TARGET_ASM_BYTE_OP
-- as we do here with 10 -- or not ? */
static int
-pdp11_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+pdp11_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t c1, reg_class_t c2)
{
return move_costs[(int)c1][(int)c2];
int
-simple_memory_operand(rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+simple_memory_operand(rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
rtx addr;
/* Implement CANNOT_CHANGE_MODE_CLASS. */
bool
-pdp11_cannot_change_mode_class (enum machine_mode from,
- enum machine_mode to,
+pdp11_cannot_change_mode_class (machine_mode from,
+ machine_mode to,
enum reg_class rclass)
{
/* Also, FPU registers contain a whole float value and the parts of
pdp11_secondary_reload (bool in_p ATTRIBUTE_UNUSED,
rtx x,
reg_class_t reload_class,
- enum machine_mode reload_mode ATTRIBUTE_UNUSED,
+ machine_mode reload_mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri ATTRIBUTE_UNUSED)
{
if (reload_class != NO_LOAD_FPU_REGS || GET_CODE (x) != REG ||
*/
bool
pdp11_secondary_memory_needed (reg_class_t c1, reg_class_t c2,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
int fromfloat = (c1 == LOAD_FPU_REGS || c1 == NO_LOAD_FPU_REGS ||
c1 == FPU_REGS);
*/
static bool
-pdp11_legitimate_address_p (enum machine_mode mode,
+pdp11_legitimate_address_p (machine_mode mode,
rtx operand, bool strict)
{
rtx xfoob;
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-pdp11_libcall_value (enum machine_mode mode,
+pdp11_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, BASE_RETURN_VALUE_REG(mode));
static rtx
pdp11_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
may not be available.) */
static void
-pdp11_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+pdp11_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-pdp11_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+pdp11_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return GET_CODE (x) != CONST_DOUBLE || legitimate_const_double_p (x);
}
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
static bool
-pdp11_scalar_mode_supported_p (enum machine_mode mode)
+pdp11_scalar_mode_supported_p (machine_mode mode)
{
/* Support SFmode even with -mfloat64. */
if (mode == SFmode)
void rl78_expand_epilogue (void);
void rl78_expand_prologue (void);
int rl78_far_p (rtx x);
-int rl78_hard_regno_mode_ok (int, enum machine_mode);
-int rl78_hard_regno_nregs (int, enum machine_mode);
+int rl78_hard_regno_mode_ok (int, machine_mode);
+int rl78_hard_regno_nregs (int, machine_mode);
bool rl78_hl_b_c_addr_p (rtx);
int rl78_initial_elimination_offset (int, int);
-bool rl78_as_legitimate_address (enum machine_mode, rtx,
+bool rl78_as_legitimate_address (machine_mode, rtx,
bool, addr_space_t);
-int rl78_legitimize_reload_address (rtx *, enum machine_mode, int,int, int);
-enum reg_class rl78_mode_code_base_reg_class (enum machine_mode, addr_space_t, int, int);
+int rl78_legitimize_reload_address (rtx *, machine_mode, int,int, int);
+enum reg_class rl78_mode_code_base_reg_class (machine_mode, addr_space_t, int, int);
bool rl78_peep_movhi_p (rtx *);
bool rl78_real_insns_ok (void);
void rl78_register_pragmas (void);
-bool rl78_regno_mode_code_ok_for_base_p (int, enum machine_mode, addr_space_t, int, int);
+bool rl78_regno_mode_code_ok_for_base_p (int, machine_mode, addr_space_t, int, int);
void rl78_setup_peep_movhi (rtx *);
bool rl78_virt_insns_ok (void);
/* Implements HARD_REGNO_NREGS. */
int
-rl78_hard_regno_nregs (int regno, enum machine_mode mode)
+rl78_hard_regno_nregs (int regno, machine_mode mode)
{
int rs = register_sizes[regno];
if (rs < 1)
/* Implements HARD_REGNO_MODE_OK. */
int
-rl78_hard_regno_mode_ok (int regno, enum machine_mode mode)
+rl78_hard_regno_mode_ok (int regno, machine_mode mode)
{
int s = GET_MODE_SIZE (mode);
need it to below, so we use this function for when we must get a
valid subreg in a "natural" state. */
static rtx
-rl78_subreg (enum machine_mode mode, rtx r, enum machine_mode omode, int byte)
+rl78_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
{
if (GET_CODE (r) == MEM)
return adjust_address (r, mode, byte);
/* Return the appropriate mode for a named address pointer. */
#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE rl78_addr_space_pointer_mode
-static enum machine_mode
+static machine_mode
rl78_addr_space_pointer_mode (addr_space_t addrspace)
{
switch (addrspace)
#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE rl78_valid_pointer_mode
static bool
-rl78_valid_pointer_mode (enum machine_mode m)
+rl78_valid_pointer_mode (machine_mode m)
{
return (m == HImode || m == SImode);
}
/* Return the appropriate mode for a named address address. */
#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
-static enum machine_mode
+static machine_mode
rl78_addr_space_address_mode (addr_space_t addrspace)
{
switch (addrspace)
#define TARGET_LEGITIMATE_CONSTANT_P rl78_is_legitimate_constant
static bool
-rl78_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
+rl78_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
{
return true;
}
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P rl78_as_legitimate_address
bool
-rl78_as_legitimate_address (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x,
+rl78_as_legitimate_address (machine_mode mode ATTRIBUTE_UNUSED, rtx x,
bool strict ATTRIBUTE_UNUSED, addr_space_t as ATTRIBUTE_UNUSED)
{
rtx base, index, addend;
/* Implements REGNO_MODE_CODE_OK_FOR_BASE_P. */
bool
-rl78_regno_mode_code_ok_for_base_p (int regno, enum machine_mode mode ATTRIBUTE_UNUSED,
+rl78_regno_mode_code_ok_for_base_p (int regno, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t address_space ATTRIBUTE_UNUSED,
int outer_code ATTRIBUTE_UNUSED, int index_code)
{
/* Implements MODE_CODE_BASE_REG_CLASS. */
enum reg_class
-rl78_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
+rl78_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t address_space ATTRIBUTE_UNUSED,
int outer_code ATTRIBUTE_UNUSED,
int index_code ATTRIBUTE_UNUSED)
const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (ret_type);
+ machine_mode mode = TYPE_MODE (ret_type);
return gen_rtx_REG (mode, 8);
}
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE rl78_promote_function_mode
-static enum machine_mode
+static machine_mode
rl78_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree funtype ATTRIBUTE_UNUSED, int for_return ATTRIBUTE_UNUSED)
{
static rtx
rl78_function_arg (cumulative_args_t cum_v ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
#define TARGET_FUNCTION_ARG_ADVANCE rl78_function_arg_advance
static void
-rl78_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode, const_tree type,
+rl78_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
int rounded_size;
#define TARGET_FUNCTION_ARG_BOUNDARY rl78_function_arg_boundary
static unsigned int
-rl78_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+rl78_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
return 16;
static unsigned char saved_update_index = NOT_KNOWN;
static unsigned char saved_update_value;
-static enum machine_mode saved_update_mode;
+static machine_mode saved_update_mode;
static inline void
static unsigned char
get_content_index (rtx loc)
{
- enum machine_mode mode;
+ machine_mode mode;
if (loc == NULL_RTX)
return NOT_KNOWN;
/* Return a string describing content INDEX in mode MODE.
WARNING: Can return a pointer to a static buffer. */
static const char *
-get_content_name (unsigned char index, enum machine_mode mode)
+get_content_name (unsigned char index, machine_mode mode)
{
static char buffer [128];
#endif
static void
-update_content (unsigned char index, unsigned char val, enum machine_mode mode)
+update_content (unsigned char index, unsigned char val, machine_mode mode)
{
unsigned int i;
static void
record_content (rtx loc, rtx value)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned char index;
unsigned char val;
static rtx
gen_and_emit_move (rtx to, rtx from, rtx where, bool before)
{
- enum machine_mode mode = GET_MODE (to);
+ machine_mode mode = GET_MODE (to);
if (optimize && before && already_contains (to, from))
{
move_to_acc (int opno, rtx before)
{
rtx src = OP (opno);
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
if (REG_P (src) && REGNO (src) < 2)
return src;
static void
force_into_acc (rtx src, rtx before)
{
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
rtx move;
if (REG_P (src) && REGNO (src) < 2)
move_from_acc (unsigned int opno, rtx after)
{
rtx dest = OP (opno);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
if (REG_P (dest) && REGNO (dest) < 2)
return dest;
static rtx
move_acc_to_reg (rtx acc, int regno, rtx before)
{
- enum machine_mode mode = GET_MODE (acc);
+ machine_mode mode = GET_MODE (acc);
rtx reg;
reg = gen_rtx_REG (mode, regno);
move_to_x (int opno, rtx before)
{
rtx src = OP (opno);
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
rtx reg;
if (mode == VOIDmode)
move_to_hl (int opno, rtx before)
{
rtx src = OP (opno);
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
rtx reg;
if (mode == VOIDmode)
move_to_de (int opno, rtx before)
{
rtx src = OP (opno);
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
rtx reg;
if (mode == VOIDmode)
#undef TARGET_UNWIND_WORD_MODE
#define TARGET_UNWIND_WORD_MODE rl78_unwind_word_mode
-static enum machine_mode
+static machine_mode
rl78_unwind_word_mode (void)
{
return HImode;
[(const_int 0)]
{
rtx dest = operands[0];
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
rtvec v;
int i, num_elements;
{
rtx dup = gen_easy_altivec_constant (operands[1]);
rtx const_vec;
- enum machine_mode op_mode = <MODE>mode;
+ machine_mode op_mode = <MODE>mode;
/* Divide the operand of the resulting VEC_DUPLICATE, and use
simplify_rtx to make a CONST_VECTOR. */
tree arg1_inner_type;
tree decl, stmt;
tree innerptrtype;
- enum machine_mode mode;
+ machine_mode mode;
/* No second argument. */
if (nargs != 2)
tree arg1_inner_type;
tree decl, stmt;
tree innerptrtype;
- enum machine_mode mode;
+ machine_mode mode;
/* No second or third arguments. */
if (nargs != 3)
#ifdef TREE_CODE
extern void init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, int, int, int,
- tree, enum machine_mode);
+ tree, machine_mode);
#endif /* TREE_CODE */
-extern bool easy_altivec_constant (rtx, enum machine_mode);
+extern bool easy_altivec_constant (rtx, machine_mode);
extern HOST_WIDE_INT const_vector_elt_as_int (rtx, unsigned int);
-extern bool macho_lo_sum_memory_operand (rtx, enum machine_mode);
-extern int num_insns_constant (rtx, enum machine_mode);
+extern bool macho_lo_sum_memory_operand (rtx, machine_mode);
+extern int num_insns_constant (rtx, machine_mode);
extern int num_insns_constant_wide (HOST_WIDE_INT);
-extern int small_data_operand (rtx, enum machine_mode);
-extern bool mem_operand_gpr (rtx, enum machine_mode);
+extern int small_data_operand (rtx, machine_mode);
+extern bool mem_operand_gpr (rtx, machine_mode);
extern bool toc_relative_expr_p (const_rtx, bool);
-extern bool invalid_e500_subreg (rtx, enum machine_mode);
-extern void validate_condition_mode (enum rtx_code, enum machine_mode);
-extern bool legitimate_constant_pool_address_p (const_rtx, enum machine_mode,
+extern bool invalid_e500_subreg (rtx, machine_mode);
+extern void validate_condition_mode (enum rtx_code, machine_mode);
+extern bool legitimate_constant_pool_address_p (const_rtx, machine_mode,
bool);
extern bool legitimate_indirect_address_p (rtx, int);
extern bool legitimate_indexed_address_p (rtx, int);
-extern bool avoiding_indexed_address_p (enum machine_mode);
+extern bool avoiding_indexed_address_p (machine_mode);
extern rtx rs6000_got_register (rtx);
extern rtx find_addr_reg (rtx);
extern bool altivec_expand_vec_perm_const (rtx op[4]);
extern void altivec_expand_vec_perm_le (rtx op[4]);
extern bool rs6000_expand_vec_perm_const (rtx op[4]);
-extern void altivec_expand_lvx_be (rtx, rtx, enum machine_mode, unsigned);
-extern void altivec_expand_stvx_be (rtx, rtx, enum machine_mode, unsigned);
-extern void altivec_expand_stvex_be (rtx, rtx, enum machine_mode, unsigned);
+extern void altivec_expand_lvx_be (rtx, rtx, machine_mode, unsigned);
+extern void altivec_expand_stvx_be (rtx, rtx, machine_mode, unsigned);
+extern void altivec_expand_stvex_be (rtx, rtx, machine_mode, unsigned);
extern void rs6000_expand_extract_even (rtx, rtx, rtx);
extern void rs6000_expand_interleave (rtx, rtx, rtx, bool);
extern void rs6000_scale_v2df (rtx, rtx, int);
extern enum reg_class (*rs6000_preferred_reload_class_ptr) (rtx,
enum reg_class);
extern enum reg_class (*rs6000_secondary_reload_class_ptr) (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx);
extern bool (*rs6000_secondary_memory_needed_ptr) (enum reg_class,
enum reg_class,
- enum machine_mode);
-extern bool (*rs6000_cannot_change_mode_class_ptr) (enum machine_mode,
- enum machine_mode,
+ machine_mode);
+extern bool (*rs6000_cannot_change_mode_class_ptr) (machine_mode,
+ machine_mode,
enum reg_class);
extern void rs6000_secondary_reload_inner (rtx, rtx, rtx, bool);
extern void rs6000_secondary_reload_gpr (rtx, rtx, rtx, bool);
extern void rs6000_output_function_entry (FILE *, const char *);
extern void print_operand (FILE *, rtx, int);
extern void print_operand_address (FILE *, rtx);
-extern enum rtx_code rs6000_reverse_condition (enum machine_mode,
+extern enum rtx_code rs6000_reverse_condition (machine_mode,
enum rtx_code);
-extern void rs6000_emit_sISEL (enum machine_mode, rtx[]);
-extern void rs6000_emit_sCOND (enum machine_mode, rtx[]);
-extern void rs6000_emit_cbranch (enum machine_mode, rtx[]);
+extern void rs6000_emit_sISEL (machine_mode, rtx[]);
+extern void rs6000_emit_sCOND (machine_mode, rtx[]);
+extern void rs6000_emit_cbranch (machine_mode, rtx[]);
extern char * output_cbranch (rtx, const char *, int, rtx_insn *);
extern char * output_e500_flip_gt_bit (rtx, rtx);
extern const char * output_probe_stack_range (rtx, rtx);
extern void rs6000_expand_atomic_op (enum rtx_code, rtx, rtx, rtx, rtx, rtx);
extern void rs6000_emit_swdiv (rtx, rtx, rtx, bool);
extern void rs6000_emit_swrsqrt (rtx, rtx);
-extern void output_toc (FILE *, rtx, int, enum machine_mode);
+extern void output_toc (FILE *, rtx, int, machine_mode);
extern rtx rs6000_longcall_ref (rtx);
extern void rs6000_fatal_bad_address (rtx);
extern rtx create_TOC_reference (rtx, rtx);
extern void rs6000_split_multireg_move (rtx, rtx);
-extern void rs6000_emit_le_vsx_move (rtx, rtx, enum machine_mode);
-extern void rs6000_emit_move (rtx, rtx, enum machine_mode);
-extern rtx rs6000_secondary_memory_needed_rtx (enum machine_mode);
-extern enum machine_mode rs6000_secondary_memory_needed_mode (enum
+extern void rs6000_emit_le_vsx_move (rtx, rtx, machine_mode);
+extern void rs6000_emit_move (rtx, rtx, machine_mode);
+extern rtx rs6000_secondary_memory_needed_rtx (machine_mode);
+extern machine_mode rs6000_secondary_memory_needed_mode (enum
machine_mode);
-extern rtx (*rs6000_legitimize_reload_address_ptr) (rtx, enum machine_mode,
+extern rtx (*rs6000_legitimize_reload_address_ptr) (rtx, machine_mode,
int, int, int, int *);
-extern bool rs6000_legitimate_offset_address_p (enum machine_mode, rtx,
+extern bool rs6000_legitimate_offset_address_p (machine_mode, rtx,
bool, bool);
extern rtx rs6000_find_base_term (rtx);
extern rtx rs6000_return_addr (int, rtx);
extern void rs6000_emit_popcount (rtx, rtx);
extern void rs6000_emit_parity (rtx, rtx);
-extern rtx rs6000_machopic_legitimize_pic_address (rtx, enum machine_mode,
+extern rtx rs6000_machopic_legitimize_pic_address (rtx, machine_mode,
rtx);
extern rtx rs6000_address_for_fpconvert (rtx);
extern rtx rs6000_address_for_altivec (rtx);
-extern rtx rs6000_allocate_stack_temp (enum machine_mode, bool, bool);
+extern rtx rs6000_allocate_stack_temp (machine_mode, bool, bool);
extern int rs6000_loop_align (rtx);
extern void rs6000_split_logical (rtx [], enum rtx_code, bool, bool, bool);
#endif /* RTX_CODE */
extern unsigned int darwin_rs6000_special_round_type_align (tree, unsigned int,
unsigned int);
extern tree altivec_resolve_overloaded_builtin (location_t, tree, void *);
-extern rtx rs6000_libcall_value (enum machine_mode);
+extern rtx rs6000_libcall_value (machine_mode);
extern rtx rs6000_va_arg (tree, tree);
extern int function_ok_for_sibcall (tree);
extern int rs6000_reg_parm_stack_space (tree, bool);
extern bool rs6000_elf_in_small_data_p (const_tree);
#ifdef ARGS_SIZE_RTX
/* expr.h defines ARGS_SIZE_RTX and `enum direction' */
-extern enum direction function_arg_padding (enum machine_mode, const_tree);
+extern enum direction function_arg_padding (machine_mode, const_tree);
#endif /* ARGS_SIZE_RTX */
#endif /* TREE_CODE */
/* Helper function to say whether a mode supports PRE_INC or PRE_DEC. */
static inline bool
-mode_supports_pre_incdec_p (enum machine_mode mode)
+mode_supports_pre_incdec_p (machine_mode mode)
{
return ((reg_addr[mode].addr_mask[RELOAD_REG_ANY] & RELOAD_REG_PRE_INCDEC)
!= 0);
/* Helper function to say whether a mode supports PRE_MODIFY. */
static inline bool
-mode_supports_pre_modify_p (enum machine_mode mode)
+mode_supports_pre_modify_p (machine_mode mode)
{
return ((reg_addr[mode].addr_mask[RELOAD_REG_ANY] & RELOAD_REG_PRE_MODIFY)
!= 0);
static tree (*rs6000_veclib_handler) (tree, tree, tree);
\f
-static bool rs6000_debug_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool rs6000_debug_legitimate_address_p (machine_mode, rtx, bool);
static bool spe_func_has_64bit_regs_p (void);
static struct machine_function * rs6000_init_machine_status (void);
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_builtin_vectorized_libmass (tree, tree, tree);
static void rs6000_emit_set_long_const (rtx, HOST_WIDE_INT);
-static int rs6000_memory_move_cost (enum machine_mode, reg_class_t, bool);
+static int rs6000_memory_move_cost (machine_mode, reg_class_t, bool);
static bool rs6000_debug_rtx_costs (rtx, int, int, int, int *, bool);
-static int rs6000_debug_address_cost (rtx, enum machine_mode, addr_space_t,
+static int rs6000_debug_address_cost (rtx, machine_mode, addr_space_t,
bool);
static int rs6000_debug_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
static bool is_microcoded_insn (rtx_insn *);
static bool insn_must_be_first_in_group (rtx_insn *);
static bool insn_must_be_last_in_group (rtx_insn *);
static void altivec_init_builtins (void);
-static tree builtin_function_type (enum machine_mode, enum machine_mode,
- enum machine_mode, enum machine_mode,
+static tree builtin_function_type (machine_mode, machine_mode,
+ machine_mode, machine_mode,
enum rs6000_builtins, const char *name);
static void rs6000_common_init_builtins (void);
static void paired_init_builtins (void);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void is_altivec_return_reg (rtx, void *);
-int easy_vector_constant (rtx, enum machine_mode);
-static rtx rs6000_debug_legitimize_address (rtx, rtx, enum machine_mode);
+int easy_vector_constant (rtx, machine_mode);
+static rtx rs6000_debug_legitimize_address (rtx, rtx, machine_mode);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree,
bool, bool);
#if TARGET_MACHO
static void macho_branch_islands (void);
#endif
-static rtx rs6000_legitimize_reload_address (rtx, enum machine_mode, int, int,
+static rtx rs6000_legitimize_reload_address (rtx, machine_mode, int, int,
int, int *);
-static rtx rs6000_debug_legitimize_reload_address (rtx, enum machine_mode, int,
+static rtx rs6000_debug_legitimize_reload_address (rtx, machine_mode, int,
int, int, int *);
static bool rs6000_mode_dependent_address (const_rtx);
static bool rs6000_debug_mode_dependent_address (const_rtx);
static enum reg_class rs6000_secondary_reload_class (enum reg_class,
- enum machine_mode, rtx);
+ machine_mode, rtx);
static enum reg_class rs6000_debug_secondary_reload_class (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx);
static enum reg_class rs6000_preferred_reload_class (rtx, enum reg_class);
static enum reg_class rs6000_debug_preferred_reload_class (rtx,
enum reg_class);
static bool rs6000_secondary_memory_needed (enum reg_class, enum reg_class,
- enum machine_mode);
+ machine_mode);
static bool rs6000_debug_secondary_memory_needed (enum reg_class,
enum reg_class,
- enum machine_mode);
-static bool rs6000_cannot_change_mode_class (enum machine_mode,
- enum machine_mode,
+ machine_mode);
+static bool rs6000_cannot_change_mode_class (machine_mode,
+ machine_mode,
enum reg_class);
-static bool rs6000_debug_cannot_change_mode_class (enum machine_mode,
- enum machine_mode,
+static bool rs6000_debug_cannot_change_mode_class (machine_mode,
+ machine_mode,
enum reg_class);
static bool rs6000_save_toc_in_prologue_p (void);
-rtx (*rs6000_legitimize_reload_address_ptr) (rtx, enum machine_mode, int, int,
+rtx (*rs6000_legitimize_reload_address_ptr) (rtx, machine_mode, int, int,
int, int *)
= rs6000_legitimize_reload_address;
= rs6000_mode_dependent_address;
enum reg_class (*rs6000_secondary_reload_class_ptr) (enum reg_class,
- enum machine_mode, rtx)
+ machine_mode, rtx)
= rs6000_secondary_reload_class;
enum reg_class (*rs6000_preferred_reload_class_ptr) (rtx, enum reg_class)
= rs6000_preferred_reload_class;
bool (*rs6000_secondary_memory_needed_ptr) (enum reg_class, enum reg_class,
- enum machine_mode)
+ machine_mode)
= rs6000_secondary_memory_needed;
-bool (*rs6000_cannot_change_mode_class_ptr) (enum machine_mode,
- enum machine_mode,
+bool (*rs6000_cannot_change_mode_class_ptr) (machine_mode,
+ machine_mode,
enum reg_class)
= rs6000_cannot_change_mode_class;
static enum rs6000_reg_type register_to_reg_type (rtx, bool *);
static bool rs6000_secondary_reload_move (enum rs6000_reg_type,
enum rs6000_reg_type,
- enum machine_mode,
+ machine_mode,
secondary_reload_info *,
bool);
rtl_opt_pass *make_pass_analyze_swaps (gcc::context*);
/* `key' will satisfy CONSTANT_P; in fact, it will satisfy
ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
rtx key;
- enum machine_mode key_mode;
+ machine_mode key_mode;
int labelno;
};
struct GTY((for_user)) builtin_hash_struct
{
tree type;
- enum machine_mode mode[4]; /* return value + 3 arguments. */
+ machine_mode mode[4]; /* return value + 3 arguments. */
unsigned char uns_p[4]; /* and whether the types are unsigned. */
};
PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
static int
-rs6000_hard_regno_nregs_internal (int regno, enum machine_mode mode)
+rs6000_hard_regno_nregs_internal (int regno, machine_mode mode)
{
unsigned HOST_WIDE_INT reg_size;
/* Value is 1 if hard register REGNO can hold a value of machine-mode
MODE. */
static int
-rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
+rs6000_hard_regno_mode_ok (int regno, machine_mode mode)
{
int last_regno = regno + rs6000_hard_regno_nregs[mode][regno] - 1;
struct cl_target_option cl_opts;
/* Modes we want tieable information on. */
- static const enum machine_mode print_tieable_modes[] = {
+ static const machine_mode print_tieable_modes[] = {
QImode,
HImode,
SImode,
for (m1 = 0; m1 < ARRAY_SIZE (print_tieable_modes); m1++)
{
- enum machine_mode mode1 = print_tieable_modes[m1];
+ machine_mode mode1 = print_tieable_modes[m1];
bool first_time = true;
nl = (const char *)0;
for (m2 = 0; m2 < ARRAY_SIZE (print_tieable_modes); m2++)
{
- enum machine_mode mode2 = print_tieable_modes[m2];
+ machine_mode mode2 = print_tieable_modes[m2];
if (mode1 != mode2 && MODES_TIEABLE_P (mode1, mode2))
{
if (first_time)
for (m = 0; m < NUM_MACHINE_MODES; ++m)
{
- enum machine_mode m2 = (enum machine_mode)m;
+ machine_mode m2 = (machine_mode)m;
/* SDmode is special in that we want to access it only via REG+REG
addressing on power7 and above, since we want to use the LFIWZX and
for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
for (m = 0; m < NUM_MACHINE_MODES; ++m)
rs6000_hard_regno_nregs[m][r]
- = rs6000_hard_regno_nregs_internal (r, (enum machine_mode)m);
+ = rs6000_hard_regno_nregs_internal (r, (machine_mode)m);
/* Precalculate HARD_REGNO_MODE_OK. */
for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
for (m = 0; m < NUM_MACHINE_MODES; ++m)
- if (rs6000_hard_regno_mode_ok (r, (enum machine_mode)m))
+ if (rs6000_hard_regno_mode_ok (r, (machine_mode)m))
rs6000_hard_regno_mode_ok_p[m][r] = true;
/* Precalculate CLASS_MAX_NREGS sizes. */
for (m = 0; m < NUM_MACHINE_MODES; ++m)
{
- enum machine_mode m2 = (enum machine_mode)m;
+ machine_mode m2 = (machine_mode)m;
int reg_size2 = reg_size;
/* TFmode/TDmode always takes 2 registers, even in VSX. */
/* Return true if the vector misalignment factor is supported by the
target. */
static bool
-rs6000_builtin_support_vector_misalignment (enum machine_mode mode,
+rs6000_builtin_support_vector_misalignment (machine_mode mode,
const_tree type,
int misalignment,
bool is_packed)
/* Implement targetm.vectorize.preferred_simd_mode. */
-static enum machine_mode
-rs6000_preferred_simd_mode (enum machine_mode mode)
+static machine_mode
+rs6000_preferred_simd_mode (machine_mode mode)
{
if (TARGET_VSX)
switch (mode)
tree fntype, new_fndecl, bdecl = NULL_TREE;
int n_args = 1;
const char *bname;
- enum machine_mode el_mode, in_mode;
+ machine_mode el_mode, in_mode;
int n, in_n;
/* Libmass is suitable for unsafe math only as it does not correctly support
rs6000_builtin_vectorized_function (tree fndecl, tree type_out,
tree type_in)
{
- enum machine_mode in_mode, out_mode;
+ machine_mode in_mode, out_mode;
int in_n, out_n;
if (TARGET_DEBUG_BUILTIN)
}
int
-num_insns_constant (rtx op, enum machine_mode mode)
+num_insns_constant (rtx op, machine_mode mode)
{
HOST_WIDE_INT low, high;
static bool
vspltis_constant (rtx op, unsigned step, unsigned copies)
{
- enum machine_mode mode = GET_MODE (op);
- enum machine_mode inner = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (op);
+ machine_mode inner = GET_MODE_INNER (mode);
unsigned i;
unsigned nunits;
with a vspltisb, vspltish or vspltisw. */
bool
-easy_altivec_constant (rtx op, enum machine_mode mode)
+easy_altivec_constant (rtx op, machine_mode mode)
{
unsigned step, copies;
rtx
gen_easy_altivec_constant (rtx op)
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
int nunits = GET_MODE_NUNITS (mode);
rtx val = CONST_VECTOR_ELT (op, BYTES_BIG_ENDIAN ? nunits - 1 : 0);
unsigned step = nunits / 4;
output_vec_const_move (rtx *operands)
{
int cst, cst2;
- enum machine_mode mode;
+ machine_mode mode;
rtx dest, vec;
dest = operands[0];
void
paired_expand_vector_init (rtx target, rtx vals)
{
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
int n_elts = GET_MODE_NUNITS (mode);
int n_var = 0;
rtx x, new_rtx, tmp, constant_op, op1, op2;
void
rs6000_expand_vector_init (rtx target, rtx vals)
{
- enum machine_mode mode = GET_MODE (target);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (target);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
int n_elts = GET_MODE_NUNITS (mode);
int n_var = 0, one_var = -1;
bool all_same = true, all_const_zero = true;
void
rs6000_expand_vector_set (rtx target, rtx val, int elt)
{
- enum machine_mode mode = GET_MODE (target);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (target);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
rtx reg = gen_reg_rtx (mode);
rtx mask, mem, x;
int width = GET_MODE_SIZE (inner_mode);
void
rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
{
- enum machine_mode mode = GET_MODE (vec);
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (vec);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
rtx mem;
if (VECTOR_MEM_VSX_P (mode))
/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
bool
-invalid_e500_subreg (rtx op, enum machine_mode mode)
+invalid_e500_subreg (rtx op, machine_mode mode)
{
if (TARGET_E500_DOUBLE)
{
int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
rtx sym_ref;
of the address calculation. */
bool
-mem_operand_gpr (rtx op, enum machine_mode mode)
+mem_operand_gpr (rtx op, machine_mode mode)
{
unsigned HOST_WIDE_INT offset;
int extra;
/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address_p. */
static bool
-reg_offset_addressing_ok_p (enum machine_mode mode)
+reg_offset_addressing_ok_p (machine_mode mode)
{
switch (mode)
{
static bool
offsettable_ok_by_alignment (rtx op, HOST_WIDE_INT offset,
- enum machine_mode mode)
+ machine_mode mode)
{
tree decl, type;
unsigned HOST_WIDE_INT dsize, dalign, lsb, mask;
else if (CONSTANT_POOL_ADDRESS_P (op))
{
/* It would be nice to have get_pool_align().. */
- enum machine_mode cmode = get_pool_mode (op);
+ machine_mode cmode = get_pool_mode (op);
dalign = GET_MODE_ALIGNMENT (cmode);
}
if X is a toc-relative address known to be offsettable within MODE. */
bool
-legitimate_constant_pool_address_p (const_rtx x, enum machine_mode mode,
+legitimate_constant_pool_address_p (const_rtx x, machine_mode mode,
bool strict)
{
return (toc_relative_expr_p (x, strict)
}
static bool
-legitimate_small_data_p (enum machine_mode mode, rtx x)
+legitimate_small_data_p (machine_mode mode, rtx x)
{
return (DEFAULT_ABI == ABI_V4
&& !flag_pic && !TARGET_TOC
#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
bool
-rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x,
+rs6000_legitimate_offset_address_p (machine_mode mode, rtx x,
bool strict, bool worst_case)
{
unsigned HOST_WIDE_INT offset;
}
bool
-avoiding_indexed_address_p (enum machine_mode mode)
+avoiding_indexed_address_p (machine_mode mode)
{
/* Avoid indexed addressing for modes that have non-indexed
load/store instruction forms. */
}
bool
-macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
+macho_lo_sum_memory_operand (rtx x, machine_mode mode)
{
if (!TARGET_MACHO || !flag_pic
|| mode != SImode || GET_CODE (x) != MEM)
}
static bool
-legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
+legitimate_lo_sum_address_p (machine_mode mode, rtx x, int strict)
{
if (GET_CODE (x) != LO_SUM)
return false;
static rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
unsigned int extra;
/* Debug version of rs6000_legitimize_address. */
static rtx
-rs6000_debug_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
+rs6000_debug_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
rtx ret;
rtx_insn *insns;
&& CONSTANT_POOL_ADDRESS_P (x))
{
rtx c = get_pool_constant (x);
- enum machine_mode cmode = get_pool_mode (x);
+ machine_mode cmode = get_pool_mode (x);
if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (c, cmode))
return true;
}
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-rs6000_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+rs6000_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
if (GET_CODE (x) == HIGH
&& GET_CODE (XEXP (x, 0)) == UNSPEC)
The Darwin code is inside #if TARGET_MACHO because only then are the
machopic_* functions defined. */
static rtx
-rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
+rs6000_legitimize_reload_address (rtx x, machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED, int *win)
{
/* Debug version of rs6000_legitimize_reload_address. */
static rtx
-rs6000_debug_legitimize_reload_address (rtx x, enum machine_mode mode,
+rs6000_debug_legitimize_reload_address (rtx x, machine_mode mode,
int opnum, int type,
int ind_levels, int *win)
{
because adjacent memory cells are accessed by adding word-sized offsets
during assembly output. */
static bool
-rs6000_legitimate_address_p (enum machine_mode mode, rtx x, bool reg_ok_strict)
+rs6000_legitimate_address_p (machine_mode mode, rtx x, bool reg_ok_strict)
{
bool reg_offset_p = reg_offset_addressing_ok_p (mode);
/* Debug version of rs6000_legitimate_address_p. */
static bool
-rs6000_debug_legitimate_address_p (enum machine_mode mode, rtx x,
+rs6000_debug_legitimate_address_p (machine_mode mode, rtx x,
bool reg_ok_strict)
{
bool ret = rs6000_legitimate_address_p (mode, x, reg_ok_strict);
in 32-bit mode, that the recog predicate rejects. */
static bool
-rs6000_offsettable_memref_p (rtx op, enum machine_mode reg_mode)
+rs6000_offsettable_memref_p (rtx op, machine_mode reg_mode)
{
bool worst_case;
bool
rs6000_emit_set_const (rtx dest, rtx source)
{
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
rtx temp, set;
rtx_insn *insn;
HOST_WIDE_INT c;
/* Generate a vector of constants to permute MODE for a little-endian
storage operation by swapping the two halves of a vector. */
static rtvec
-rs6000_const_vec (enum machine_mode mode)
+rs6000_const_vec (machine_mode mode)
{
int i, subparts;
rtvec v;
/* Generate a permute rtx that represents an lxvd2x, stxvd2x, or xxpermdi
for a VSX load or store operation. */
rtx
-rs6000_gen_le_vsx_permute (rtx source, enum machine_mode mode)
+rs6000_gen_le_vsx_permute (rtx source, machine_mode mode)
{
rtx par = gen_rtx_PARALLEL (VOIDmode, rs6000_const_vec (mode));
return gen_rtx_VEC_SELECT (mode, source, par);
register DEST in mode MODE. The load is done with two permuting
insn's that represent an lxvd2x and xxpermdi. */
void
-rs6000_emit_le_vsx_load (rtx dest, rtx source, enum machine_mode mode)
+rs6000_emit_le_vsx_load (rtx dest, rtx source, machine_mode mode)
{
rtx tmp, permute_mem, permute_reg;
register SOURCE in mode MODE. The store is done with two permuting
insn's that represent an xxpermdi and an stxvd2x. */
void
-rs6000_emit_le_vsx_store (rtx dest, rtx source, enum machine_mode mode)
+rs6000_emit_le_vsx_store (rtx dest, rtx source, machine_mode mode)
{
rtx tmp, permute_src, permute_tmp;
handled with a split. The expand-time RTL generation allows
us to optimize away redundant pairs of register-permutes. */
void
-rs6000_emit_le_vsx_move (rtx dest, rtx source, enum machine_mode mode)
+rs6000_emit_le_vsx_move (rtx dest, rtx source, machine_mode mode)
{
gcc_assert (!BYTES_BIG_ENDIAN
&& VECTOR_MEM_VSX_P (mode)
/* Emit a move from SOURCE to DEST in mode MODE. */
void
-rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
+rs6000_emit_move (rtx dest, rtx source, machine_mode mode)
{
rtx operands[2];
operands[0] = dest;
fit into 1, whereas DI still needs two. */
static bool
-rs6000_member_type_forces_blk (const_tree field, enum machine_mode mode)
+rs6000_member_type_forces_blk (const_tree field, machine_mode mode)
{
return ((TARGET_SPE && TREE_CODE (TREE_TYPE (field)) == VECTOR_TYPE)
|| (TARGET_E500_DOUBLE && mode == DFmode));
sub-tree. */
static int
-rs6000_aggregate_candidate (const_tree type, enum machine_mode *modep)
+rs6000_aggregate_candidate (const_tree type, machine_mode *modep)
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT size;
switch (TREE_CODE (type))
Otherwise, set *ELT_MODE to MODE and *N_ELTS to 1, and return FALSE. */
static bool
-rs6000_discover_homogeneous_aggregate (enum machine_mode mode, const_tree type,
- enum machine_mode *elt_mode,
+rs6000_discover_homogeneous_aggregate (machine_mode mode, const_tree type,
+ machine_mode *elt_mode,
int *n_elts)
{
/* Note that we do not accept complex types at the top level as
can be elements of homogeneous aggregates, however. */
if (DEFAULT_ABI == ABI_ELFv2 && type && AGGREGATE_TYPE_P (type))
{
- enum machine_mode field_mode = VOIDmode;
+ machine_mode field_mode = VOIDmode;
int field_count = rs6000_aggregate_candidate (type, &field_mode);
if (field_count > 0)
rtx libname ATTRIBUTE_UNUSED, int incoming,
int libcall, int n_named_args,
tree fndecl ATTRIBUTE_UNUSED,
- enum machine_mode return_mode ATTRIBUTE_UNUSED)
+ machine_mode return_mode ATTRIBUTE_UNUSED)
{
static CUMULATIVE_ARGS zero_cumulative;
/* Return true if TYPE must be passed on the stack and not in registers. */
static bool
-rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
+rs6000_must_pass_in_stack (machine_mode mode, const_tree type)
{
if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2 || TARGET_64BIT)
return must_pass_in_stack_var_size (mode, type);
argument slot. */
enum direction
-function_arg_padding (enum machine_mode mode, const_tree type)
+function_arg_padding (machine_mode mode, const_tree type)
{
#ifndef AGGREGATE_PADDING_FIXED
#define AGGREGATE_PADDING_FIXED 0
Quadword align large synthetic vector types. */
static unsigned int
-rs6000_function_arg_boundary (enum machine_mode mode, const_tree type)
+rs6000_function_arg_boundary (machine_mode mode, const_tree type)
{
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int n_elts;
rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
the parameter area. NWORDS of the parameter area are already used. */
static unsigned int
-rs6000_parm_start (enum machine_mode mode, const_tree type,
+rs6000_parm_start (machine_mode mode, const_tree type,
unsigned int nwords)
{
unsigned int align;
/* Compute the size (in words) of a function argument. */
static unsigned long
-rs6000_arg_size (enum machine_mode mode, const_tree type)
+rs6000_arg_size (machine_mode mode, const_tree type)
{
unsigned long size;
{
unsigned int startbit, endbit;
int intregs, intoffset;
- enum machine_mode mode;
+ machine_mode mode;
/* Handle the situations where a float is taking up the first half
of the GPR, and the other half is empty (typically due to
{
HOST_WIDE_INT bitpos = startbitpos;
tree ftype = TREE_TYPE (f);
- enum machine_mode mode;
+ machine_mode mode;
if (ftype == error_mark_node)
continue;
mode = TYPE_MODE (ftype);
bit ABI. These are record types where the mode is BLK or the structure is
8 bytes in size. */
static int
-rs6000_darwin64_struct_check_p (enum machine_mode mode, const_tree type)
+rs6000_darwin64_struct_check_p (machine_mode mode, const_tree type)
{
return rs6000_darwin64_abi
&& ((mode == BLKmode
itself. */
static void
-rs6000_function_arg_advance_1 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+rs6000_function_arg_advance_1 (CUMULATIVE_ARGS *cum, machine_mode mode,
const_tree type, bool named, int depth)
{
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int n_elts;
rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
}
static void
-rs6000_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
+rs6000_function_arg_advance (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
rs6000_function_arg_advance_1 (get_cumulative_args (cum), mode, type, named,
}
static rtx
-spe_build_register_parallel (enum machine_mode mode, int gregno)
+spe_build_register_parallel (machine_mode mode, int gregno)
{
rtx r1, r3, r5, r7;
/* Determine where to put a SIMD argument on the SPE. */
static rtx
-rs6000_spe_function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
+rs6000_spe_function_arg (const CUMULATIVE_ARGS *cum, machine_mode mode,
const_tree type)
{
int gregno = cum->sysv_gregno;
if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
{
rtx r1, r2;
- enum machine_mode m = SImode;
+ machine_mode m = SImode;
r1 = gen_rtx_REG (m, gregno);
r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned int regno;
unsigned int startbit, endbit;
int this_regno, intregs, intoffset;
{
HOST_WIDE_INT bitpos = startbitpos;
tree ftype = TREE_TYPE (f);
- enum machine_mode mode;
+ machine_mode mode;
if (ftype == error_mark_node)
continue;
mode = TYPE_MODE (ftype);
/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
static rtx
-rs6000_mixed_function_arg (enum machine_mode mode, const_tree type,
+rs6000_mixed_function_arg (machine_mode mode, const_tree type,
int align_words)
{
int n_units;
to the GPRs and/or memory. Return the number of elements used. */
static int
-rs6000_psave_function_arg (enum machine_mode mode, const_tree type,
+rs6000_psave_function_arg (machine_mode mode, const_tree type,
int align_words, rtx *rvec)
{
int k = 0;
{
/* If this is partially on the stack, then we only
include the portion actually in registers here. */
- enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
+ machine_mode rmode = TARGET_32BIT ? SImode : DImode;
int i = 0;
if (align_words + n_words > GP_ARG_NUM_REG)
Construct the final function_arg return value from it. */
static rtx
-rs6000_finish_function_arg (enum machine_mode mode, rtx *rvec, int k)
+rs6000_finish_function_arg (machine_mode mode, rtx *rvec, int k)
{
gcc_assert (k >= 1);
itself. */
static rtx
-rs6000_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+rs6000_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
enum rs6000_abi abi = DEFAULT_ABI;
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int n_elts;
/* Return a marker to indicate whether CR1 needs to set or clear the
/* Vector parameters to varargs functions under AIX or Darwin
get passed in memory and possibly also in GPRs. */
int align, align_words, n_words;
- enum machine_mode part_mode;
+ machine_mode part_mode;
/* Vector parameters must be 16-byte aligned. In 32-bit
mode this means we need to take into account the offset
/* Check if the argument is split over registers and memory.
This can only ever happen for long double or _Decimal128;
complex types are handled via split_complex_arg. */
- enum machine_mode fmode = elt_mode;
+ machine_mode fmode = elt_mode;
if (cum->fregno + (i + 1) * n_fpreg > FP_ARG_MAX_REG + 1)
{
gcc_assert (fmode == TFmode || fmode == TDmode);
{
static bool warned;
- enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
+ machine_mode rmode = TARGET_32BIT ? SImode : DImode;
int n_words = rs6000_arg_size (mode, type);
align_words += fpr_words;
returns the number of bytes used by the first element of the PARALLEL. */
static int
-rs6000_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+rs6000_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
bool passed_in_gprs = true;
int ret = 0;
int align_words;
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int n_elts;
rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
static bool
rs6000_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
static bool
rs6000_parm_needs_stack (cumulative_args_t args_so_far, tree type)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp;
rtx entry_parm;
rs6000_move_block_from_reg (int regno, rtx x, int nregs)
{
int i;
- enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
+ machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
if (nregs == 0)
return;
stack and set PRETEND_SIZE to the length of the registers pushed. */
static void
-setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
{
tree elem_type = TREE_TYPE (type);
- enum machine_mode elem_mode = TYPE_MODE (elem_type);
+ machine_mode elem_mode = TYPE_MODE (elem_type);
int elem_size = GET_MODE_SIZE (elem_mode);
if (elem_size < UNITS_PER_WORD)
rs6000_expand_zeroop_builtin (enum insn_code icode, rtx target)
{
rtx pat;
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (icode == CODE_FOR_nothing)
/* Builtin not supported on this processor. */
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode mode0 = insn_data[icode].operand[0].mode;
- enum machine_mode mode1 = insn_data[icode].operand[1].mode;
+ machine_mode mode0 = insn_data[icode].operand[0].mode;
+ machine_mode mode1 = insn_data[icode].operand[1].mode;
if (icode == CODE_FOR_nothing)
/* Builtin not supported on this processor. */
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op0 = expand_normal (arg0);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
if (icode == CODE_FOR_nothing)
/* Builtin not supported on this processor. */
rtx pat, scratch1, scratch2;
tree arg0 = CALL_EXPR_ARG (exp, 0);
rtx op0 = expand_normal (arg0);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
/* If we have invalid arguments, bail out before generating bad rtl. */
if (arg0 == error_mark_node)
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
if (icode == CODE_FOR_nothing)
/* Builtin not supported on this processor. */
tree arg1 = CALL_EXPR_ARG (exp, 2);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode tmode = SImode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode tmode = SImode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
int cr6_form_int;
if (TREE_CODE (cr6_form) != INTEGER_CST)
rtx pat, addr;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = Pmode;
- enum machine_mode mode1 = Pmode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = Pmode;
+ machine_mode mode1 = Pmode;
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
/* Return a constant vector for use as a little-endian permute control vector
to reverse the order of elements of the given vector mode. */
static rtx
-swap_selector_for_mode (enum machine_mode mode)
+swap_selector_for_mode (machine_mode mode)
{
/* These are little endian vectors, so their elements are reversed
from what you would normally expect for a permute control vector. */
with -maltivec=be specified. Issue the load followed by an element-reversing
permute. */
void
-altivec_expand_lvx_be (rtx op0, rtx op1, enum machine_mode mode, unsigned unspec)
+altivec_expand_lvx_be (rtx op0, rtx op1, machine_mode mode, unsigned unspec)
{
rtx tmp = gen_reg_rtx (mode);
rtx load = gen_rtx_SET (VOIDmode, tmp, op1);
with -maltivec=be specified. Issue the store preceded by an element-reversing
permute. */
void
-altivec_expand_stvx_be (rtx op0, rtx op1, enum machine_mode mode, unsigned unspec)
+altivec_expand_stvx_be (rtx op0, rtx op1, machine_mode mode, unsigned unspec)
{
rtx tmp = gen_reg_rtx (mode);
rtx store = gen_rtx_SET (VOIDmode, op0, tmp);
/* Generate code for a "stve*x" built-in for a little endian target with -maltivec=be
specified. Issue the store preceded by an element-reversing permute. */
void
-altivec_expand_stvex_be (rtx op0, rtx op1, enum machine_mode mode, unsigned unspec)
+altivec_expand_stvex_be (rtx op0, rtx op1, machine_mode mode, unsigned unspec)
{
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
rtx tmp = gen_reg_rtx (mode);
rtx stvx = gen_rtx_UNSPEC (inner_mode, gen_rtvec (1, tmp), unspec);
rtx sel = swap_selector_for_mode (mode);
rtx pat, addr;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = Pmode;
- enum machine_mode mode1 = Pmode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = Pmode;
+ machine_mode mode1 = Pmode;
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
rtx pat;
- enum machine_mode mode0 = insn_data[icode].operand[0].mode;
- enum machine_mode mode1 = insn_data[icode].operand[1].mode;
- enum machine_mode mode2 = insn_data[icode].operand[2].mode;
+ machine_mode mode0 = insn_data[icode].operand[0].mode;
+ machine_mode mode1 = insn_data[icode].operand[1].mode;
+ machine_mode mode2 = insn_data[icode].operand[2].mode;
/* Invalid arguments. Bail before doing anything stoopid! */
if (arg0 == error_mark_node
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
rtx pat, addr;
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode1 = Pmode;
- enum machine_mode mode2 = Pmode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode1 = Pmode;
+ machine_mode mode2 = Pmode;
/* Invalid arguments. Bail before doing anything stoopid! */
if (arg0 == error_mark_node
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
rtx pat, addr;
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode smode = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = Pmode;
- enum machine_mode mode2 = Pmode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode smode = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = Pmode;
+ machine_mode mode2 = Pmode;
/* Invalid arguments. Bail before doing anything stoopid! */
if (arg0 == error_mark_node
if (nonvoid)
{
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (!target
|| GET_MODE (target) != tmode
|| !(*insn_data[icode].operand[0].predicate) (target, tmode))
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
- enum machine_mode mode2 = insn_data[icode].operand[3].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode mode2 = insn_data[icode].operand[3].mode;
if (icode == CODE_FOR_nothing)
/* Builtin not supported on this processor. */
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
tree arg0;
- enum machine_mode tmode, mode0;
+ machine_mode tmode, mode0;
rtx pat, op0;
enum insn_code icode;
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
tree arg0, arg1;
- enum machine_mode mode0, mode1;
+ machine_mode mode0, mode1;
rtx pat, op0, op1;
enum insn_code icode;
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
enum rs6000_builtins fcode = (enum rs6000_builtins) DECL_FUNCTION_CODE (fndecl);
tree arg0, arg1, arg2;
- enum machine_mode mode0, mode1;
+ machine_mode mode0, mode1;
rtx pat, op0, op1, op2;
const struct builtin_description *d;
size_t i;
static rtx
altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
{
- enum machine_mode tmode = TYPE_MODE (type);
- enum machine_mode inner_mode = GET_MODE_INNER (tmode);
+ machine_mode tmode = TYPE_MODE (type);
+ machine_mode inner_mode = GET_MODE_INNER (tmode);
int i, n_elt = GET_MODE_NUNITS (tmode);
gcc_assert (VECTOR_MODE_P (tmode));
static rtx
altivec_expand_vec_set_builtin (tree exp)
{
- enum machine_mode tmode, mode1;
+ machine_mode tmode, mode1;
tree arg0, arg1, arg2;
int elt;
rtx op0, op1;
static rtx
altivec_expand_vec_ext_builtin (tree exp, rtx target)
{
- enum machine_mode tmode, mode0;
+ machine_mode tmode, mode0;
tree arg0, arg1;
int elt;
rtx op0;
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
tree arg0;
rtx op0, pat;
- enum machine_mode tmode, mode0;
+ machine_mode tmode, mode0;
enum rs6000_builtins fcode
= (enum rs6000_builtins) DECL_FUNCTION_CODE (fndecl);
tree arg1, arg0;
enum rs6000_builtins fcode = (enum rs6000_builtins) DECL_FUNCTION_CODE (fndecl);
enum insn_code icode;
- enum machine_mode tmode, mode0;
+ machine_mode tmode, mode0;
rtx pat, op0;
const struct builtin_description *d;
size_t i;
tree arg1 = CALL_EXPR_ARG (exp, 2);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
int form_int;
enum rtx_code code;
tree arg1 = CALL_EXPR_ARG (exp, 2);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
int form_int;
enum rtx_code code;
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
rtx op3 = expand_normal (arg3);
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
gcc_assert (mode0 == mode1);
static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
{
int icode = (BYTES_BIG_ENDIAN ? (int) CODE_FOR_altivec_lvsr_direct
: (int) CODE_FOR_altivec_lvsl_direct);
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
- enum machine_mode mode = insn_data[icode].operand[1].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode mode = insn_data[icode].operand[1].mode;
tree arg;
rtx op, addr, pat;
{
tree tdecl;
tree ftype;
- enum machine_mode mode;
+ machine_mode mode;
if (TARGET_DEBUG_BUILTIN)
fprintf (stderr, "rs6000_init_builtins%s%s%s%s\n",
d = bdesc_altivec_preds;
for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, d++)
{
- enum machine_mode mode1;
+ machine_mode mode1;
tree type;
if (rs6000_overloaded_builtin_p (d->code))
d = bdesc_abs;
for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
{
- enum machine_mode mode0;
+ machine_mode mode0;
tree type;
mode0 = insn_data[d->icode].operand[0].mode;
arguments. Functions with fewer than 3 arguments use VOIDmode as the type
of the argument. */
static tree
-builtin_function_type (enum machine_mode mode_ret, enum machine_mode mode_arg0,
- enum machine_mode mode_arg1, enum machine_mode mode_arg2,
+builtin_function_type (machine_mode mode_ret, machine_mode mode_arg0,
+ machine_mode mode_arg1, machine_mode mode_arg2,
enum rs6000_builtins builtin, const char *name)
{
struct builtin_hash_struct h;
d = bdesc_2arg;
for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
{
- enum machine_mode mode0, mode1, mode2;
+ machine_mode mode0, mode1, mode2;
tree type;
HOST_WIDE_INT mask = d->mask;
d = bdesc_1arg;
for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
{
- enum machine_mode mode0, mode1;
+ machine_mode mode0, mode1;
tree type;
HOST_WIDE_INT mask = d->mask;
for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
{
- enum machine_mode mode = BLKmode;
+ machine_mode mode = BLKmode;
rtx dest;
if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
rtx (*movmemsi) (rtx, rtx, rtx, rtx);
rtx (*mov) (rtx, rtx);
} gen_func;
- enum machine_mode mode = BLKmode;
+ machine_mode mode = BLKmode;
rtx src, dest;
/* Altivec first, since it will be faster than a string move
never be generated. */
void
-validate_condition_mode (enum rtx_code code, enum machine_mode mode)
+validate_condition_mode (enum rtx_code code, machine_mode mode)
{
gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
|| GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
\f
rtx
-rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
+rs6000_secondary_memory_needed_rtx (machine_mode mode)
{
static bool eliminated = false;
rtx ret;
/* Return the mode to be used for memory when a secondary memory
location is needed. For SDmode values we need to use DDmode, in
all other cases we can use the same mode. */
-enum machine_mode
-rs6000_secondary_memory_needed_mode (enum machine_mode mode)
+machine_mode
+rs6000_secondary_memory_needed_mode (machine_mode mode)
{
if (lra_in_progress && mode == SDmode)
return DDmode;
static bool
rs6000_secondary_reload_simple_move (enum rs6000_reg_type to_type,
enum rs6000_reg_type from_type,
- enum machine_mode mode)
+ machine_mode mode)
{
int size;
static bool
rs6000_secondary_reload_direct_move (enum rs6000_reg_type to_type,
enum rs6000_reg_type from_type,
- enum machine_mode mode,
+ machine_mode mode,
secondary_reload_info *sri,
bool altivec_p)
{
static bool
rs6000_secondary_reload_move (enum rs6000_reg_type to_type,
enum rs6000_reg_type from_type,
- enum machine_mode mode,
+ machine_mode mode,
secondary_reload_info *sri,
bool altivec_p)
{
rs6000_secondary_reload (bool in_p,
rtx x,
reg_class_t rclass_i,
- enum machine_mode mode,
+ machine_mode mode,
secondary_reload_info *sri)
{
enum reg_class rclass = (enum reg_class) rclass_i;
rs6000_secondary_reload_inner (rtx reg, rtx mem, rtx scratch, bool store_p)
{
int regno = true_regnum (reg);
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
enum reg_class rclass;
rtx addr;
rtx and_op2 = NULL_RTX;
static enum reg_class
rs6000_preferred_reload_class (rtx x, enum reg_class rclass)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (TARGET_VSX && x == CONST0_RTX (mode) && VSX_REG_CLASS_P (rclass))
return rclass;
static bool
rs6000_secondary_memory_needed (enum reg_class from_class,
enum reg_class to_class,
- enum machine_mode mode)
+ machine_mode mode)
{
enum rs6000_reg_type from_type, to_type;
bool altivec_p = ((from_class == ALTIVEC_REGS)
static bool
rs6000_debug_secondary_memory_needed (enum reg_class from_class,
enum reg_class to_class,
- enum machine_mode mode)
+ machine_mode mode)
{
bool ret = rs6000_secondary_memory_needed (from_class, to_class, mode);
NO_REGS is returned. */
static enum reg_class
-rs6000_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
+rs6000_secondary_reload_class (enum reg_class rclass, machine_mode mode,
rtx in)
{
int regno;
/* Debug version of rs6000_secondary_reload_class. */
static enum reg_class
rs6000_debug_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode, rtx in)
+ machine_mode mode, rtx in)
{
enum reg_class ret = rs6000_secondary_reload_class (rclass, mode, in);
fprintf (stderr,
/* Return nonzero if for CLASS a mode change from FROM to TO is invalid. */
static bool
-rs6000_cannot_change_mode_class (enum machine_mode from,
- enum machine_mode to,
+rs6000_cannot_change_mode_class (machine_mode from,
+ machine_mode to,
enum reg_class rclass)
{
unsigned from_size = GET_MODE_SIZE (from);
/* Debug version of rs6000_cannot_change_mode_class. */
static bool
-rs6000_debug_cannot_change_mode_class (enum machine_mode from,
- enum machine_mode to,
+rs6000_debug_cannot_change_mode_class (machine_mode from,
+ machine_mode to,
enum reg_class rclass)
{
bool ret = rs6000_cannot_change_mode_class (from, to, rclass);
{
rtx dest = operands[0];
rtx src = operands[1];
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
int dest_regno;
int src_regno;
bool dest_gpr_p, dest_fp_p, dest_vmx_p, dest_vsx_p;
bool
rs6000_move_128bit_ok_p (rtx operands[])
{
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
return (gpc_reg_operand (operands[0], mode)
|| gpc_reg_operand (operands[1], mode));
}
ccr_bit (rtx op, int scc_p)
{
enum rtx_code code = GET_CODE (op);
- enum machine_mode cc_mode;
+ machine_mode cc_mode;
int cc_regnum;
int base_bit;
rtx reg;
#endif
\f
enum rtx_code
-rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
+rs6000_reverse_condition (machine_mode mode, enum rtx_code code)
{
/* Reversal of FP compares takes care -- an ordered compare
becomes an unordered compare and vice versa. */
represents the result of the compare. */
static rtx
-rs6000_generate_compare (rtx cmp, enum machine_mode mode)
+rs6000_generate_compare (rtx cmp, machine_mode mode)
{
- enum machine_mode comp_mode;
+ machine_mode comp_mode;
rtx compare_result;
enum rtx_code code = GET_CODE (cmp);
rtx op0 = XEXP (cmp, 0);
&& FLOAT_MODE_P (mode))
{
rtx cmp, or_result, compare_result2;
- enum machine_mode op_mode = GET_MODE (op0);
+ machine_mode op_mode = GET_MODE (op0);
bool reverse_p;
if (op_mode == VOIDmode)
/* Emit the RTL for an sISEL pattern. */
void
-rs6000_emit_sISEL (enum machine_mode mode ATTRIBUTE_UNUSED, rtx operands[])
+rs6000_emit_sISEL (machine_mode mode ATTRIBUTE_UNUSED, rtx operands[])
{
rs6000_emit_int_cmove (operands[0], operands[1], const1_rtx, const0_rtx);
}
void
-rs6000_emit_sCOND (enum machine_mode mode, rtx operands[])
+rs6000_emit_sCOND (machine_mode mode, rtx operands[])
{
rtx condition_rtx;
- enum machine_mode op_mode;
+ machine_mode op_mode;
enum rtx_code cond_code;
rtx result = operands[0];
{
rtx not_result = gen_reg_rtx (CCEQmode);
rtx not_op, rev_cond_rtx;
- enum machine_mode cc_mode;
+ machine_mode cc_mode;
cc_mode = GET_MODE (XEXP (condition_rtx, 0));
/* Emit a branch of kind CODE to location LOC. */
void
-rs6000_emit_cbranch (enum machine_mode mode, rtx operands[])
+rs6000_emit_cbranch (machine_mode mode, rtx operands[])
{
rtx condition_rtx, loc_ref;
static char string[64];
enum rtx_code code = GET_CODE (op);
rtx cc_reg = XEXP (op, 0);
- enum machine_mode mode = GET_MODE (cc_reg);
+ machine_mode mode = GET_MODE (cc_reg);
int cc_regno = REGNO (cc_reg) - CR0_REGNO;
int need_longbranch = label != NULL && get_attr_length (insn) == 8;
int really_reversed = reversed ^ need_longbranch;
rs6000_emit_vector_compare_inner (enum rtx_code code, rtx op0, rtx op1)
{
rtx mask;
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
switch (code)
{
static rtx
rs6000_emit_vector_compare (enum rtx_code rcode,
rtx op0, rtx op1,
- enum machine_mode dmode)
+ machine_mode dmode)
{
rtx mask;
bool swap_operands = false;
rs6000_emit_vector_cond_expr (rtx dest, rtx op_true, rtx op_false,
rtx cond, rtx cc_op0, rtx cc_op1)
{
- enum machine_mode dest_mode = GET_MODE (dest);
- enum machine_mode mask_mode = GET_MODE (cc_op0);
+ machine_mode dest_mode = GET_MODE (dest);
+ machine_mode mask_mode = GET_MODE (cc_op0);
enum rtx_code rcode = GET_CODE (cond);
- enum machine_mode cc_mode = CCmode;
+ machine_mode cc_mode = CCmode;
rtx mask;
rtx cond2;
rtx tmp;
rtx op0 = XEXP (op, 0);
rtx op1 = XEXP (op, 1);
REAL_VALUE_TYPE c1;
- enum machine_mode compare_mode = GET_MODE (op0);
- enum machine_mode result_mode = GET_MODE (dest);
+ machine_mode compare_mode = GET_MODE (op0);
+ machine_mode result_mode = GET_MODE (dest);
rtx temp;
bool is_against_zero;
rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
rtx condition_rtx, cr;
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
enum rtx_code cond_code;
rtx (*isel_func) (rtx, rtx, rtx, rtx, rtx);
bool signedp;
void
rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
enum rtx_code c;
rtx target;
the zero_extend operation. */
static void
-emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
+emit_load_locked (machine_mode mode, rtx reg, rtx mem)
{
rtx (*fn) (rtx, rtx) = NULL;
instruction in MODE. */
static void
-emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
+emit_store_conditional (machine_mode mode, rtx res, rtx mem, rtx val)
{
rtx (*fn) (rtx, rtx, rtx) = NULL;
{
rtx addr, align, shift, mask, mem;
HOST_WIDE_INT shift_mask;
- enum machine_mode mode = GET_MODE (orig_mem);
+ machine_mode mode = GET_MODE (orig_mem);
/* For smaller modes, we have to implement this via SImode. */
shift_mask = (mode == QImode ? 0x18 : 0x10);
{
rtx boolval, retval, mem, oldval, newval, cond;
rtx label1, label2, x, mask, shift;
- enum machine_mode mode, orig_mode;
+ machine_mode mode, orig_mode;
enum memmodel mod_s, mod_f;
bool is_weak;
rs6000_expand_atomic_exchange (rtx operands[])
{
rtx retval, mem, val, cond;
- enum machine_mode mode;
+ machine_mode mode;
enum memmodel model;
rtx label, x, mask, shift;
rtx orig_before, rtx orig_after, rtx model_rtx)
{
enum memmodel model = (enum memmodel) INTVAL (model_rtx);
- enum machine_mode mode = GET_MODE (mem);
- enum machine_mode store_mode = mode;
+ machine_mode mode = GET_MODE (mem);
+ machine_mode store_mode = mode;
rtx label, x, cond, mask, shift;
rtx before = orig_before, after = orig_after;
/* The register number of the first register being moved. */
int reg;
/* The mode that is to be moved. */
- enum machine_mode mode;
+ machine_mode mode;
/* The mode that the move is being done in, and its size. */
- enum machine_mode reg_mode;
+ machine_mode reg_mode;
int reg_mode_size;
/* The number of registers that will be moved. */
int nregs;
i = PATTERN (insn);
if (GET_CODE (i) == SET)
{
- enum machine_mode mode = GET_MODE (SET_SRC (i));
+ machine_mode mode = GET_MODE (SET_SRC (i));
if (SPE_VECTOR_MODE (mode))
return true;
Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
static rtx
-emit_frame_save (rtx frame_reg, enum machine_mode mode,
+emit_frame_save (rtx frame_reg, machine_mode mode,
unsigned int regno, int offset, HOST_WIDE_INT frame_reg_to_sp)
{
rtx reg, insn;
converting to a valid addressing mode. */
static rtx
-gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
+gen_frame_mem_offset (machine_mode mode, rtx reg, int offset)
{
rtx int_rtx, offset_rtx;
static rtx
rs6000_emit_savres_rtx (rs6000_stack_t *info,
rtx frame_reg_rtx, int save_area_offset, int lr_offset,
- enum machine_mode reg_mode, int sel)
+ machine_mode reg_mode, int sel)
{
int i;
int offset, start_reg, end_reg, n_regs, use_reg;
rs6000_emit_prologue (void)
{
rs6000_stack_t *info = rs6000_stack_info ();
- enum machine_mode reg_mode = Pmode;
+ machine_mode reg_mode = Pmode;
int reg_size = TARGET_32BIT ? 4 : 8;
rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
rtx frame_reg_rtx = sp_reg_rtx;
rtx cfa_restores = NULL_RTX;
rtx insn;
rtx cr_save_reg = NULL_RTX;
- enum machine_mode reg_mode = Pmode;
+ machine_mode reg_mode = Pmode;
int reg_size = TARGET_32BIT ? 4 : 8;
int i;
bool exit_func;
decl; decl = DECL_CHAIN (decl))
{
rtx parameter = DECL_INCOMING_RTL (decl);
- enum machine_mode mode = GET_MODE (parameter);
+ machine_mode mode = GET_MODE (parameter);
if (GET_CODE (parameter) == REG)
{
rs6000_hash_constant (rtx k)
{
enum rtx_code code = GET_CODE (k);
- enum machine_mode mode = GET_MODE (k);
+ machine_mode mode = GET_MODE (k);
unsigned result = (code << 3) ^ mode;
const char *format;
int flen, fidx;
written. */
void
-output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
+output_toc (FILE *file, rtx x, int labelno, machine_mode mode)
{
char buf[256];
const char *name = buf;
bool *no_add_attrs)
{
tree type = *node, result = NULL_TREE;
- enum machine_mode mode;
+ machine_mode mode;
int unsigned_p;
char altivec_type
= ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
/* Implement TARGET_SELECT_RTX_SECTION. */
static section *
-rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
+rs6000_elf_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
static bool
-rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
+rs6000_use_blocks_for_constant_p (machine_mode mode, const_rtx x)
{
return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
}
#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
rtx
-rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
+rs6000_machopic_legitimize_pic_address (rtx orig, machine_mode mode,
rtx reg)
{
rtx base, offset;
toc entry. */
static section *
-rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
+rs6000_xcoff_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
rs6000_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
int *total, bool speed)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
switch (code)
{
/* Debug form of ADDRESS_COST that is selected if -mdebug=cost. */
static int
-rs6000_debug_address_cost (rtx x, enum machine_mode mode,
+rs6000_debug_address_cost (rtx x, machine_mode mode,
addr_space_t as, bool speed)
{
int ret = TARGET_ADDRESS_COST (x, mode, as, speed);
CLASS1 to one of CLASS2. */
static int
-rs6000_register_move_cost (enum machine_mode mode,
+rs6000_register_move_cost (machine_mode mode,
reg_class_t from, reg_class_t to)
{
int ret;
or from memory. */
static int
-rs6000_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
+rs6000_memory_move_cost (machine_mode mode, reg_class_t rclass,
bool in ATTRIBUTE_UNUSED)
{
int ret;
all of the vector elements. */
static rtx
-rs6000_load_constant_and_splat (enum machine_mode mode, REAL_VALUE_TYPE dconst)
+rs6000_load_constant_and_splat (machine_mode mode, REAL_VALUE_TYPE dconst)
{
rtx reg;
static void
rs6000_emit_madd (rtx target, rtx m1, rtx m2, rtx a)
{
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
rtx dst;
dst = expand_ternary_op (mode, fma_optab, m1, m2, a, target, 0);
static void
rs6000_emit_msub (rtx target, rtx m1, rtx m2, rtx a)
{
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
rtx dst;
/* Altivec does not support fms directly;
static void
rs6000_emit_nmsub (rtx dst, rtx m1, rtx m2, rtx a)
{
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
rtx r;
/* This is a tad more complicated, since the fnma_optab is for
void
rs6000_emit_swdiv (rtx dst, rtx n, rtx d, bool note_p)
{
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
rtx one, x0, e0, x1, xprev, eprev, xnext, enext, u, v;
int i;
void
rs6000_emit_swrsqrt (rtx dst, rtx src)
{
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
rtx x0 = gen_reg_rtx (mode);
rtx y = gen_reg_rtx (mode);
void
rs6000_emit_popcount (rtx dst, rtx src)
{
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
rtx tmp1, tmp2;
/* Use the PPC ISA 2.06 popcnt{w,d} instruction if we can. */
void
rs6000_emit_parity (rtx dst, rtx src)
{
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
rtx tmp;
tmp = gen_reg_rtx (mode);
rtx sel = operands[3];
rtx tmp = target;
rtx norreg = gen_reg_rtx (V16QImode);
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
/* Get everything in regs so the pattern matches. */
if (!REG_P (op0))
if (i == 16)
{
enum insn_code icode = patterns[j].impl;
- enum machine_mode omode = insn_data[icode].operand[0].mode;
- enum machine_mode imode = insn_data[icode].operand[1].mode;
+ machine_mode omode = insn_data[icode].operand[0].mode;
+ machine_mode imode = insn_data[icode].operand[1].mode;
/* For little-endian, don't use vpkuwum and vpkuhum if the
underlying vector type is not V4SI and V8HI, respectively.
/* Success! */
if (target != NULL)
{
- enum machine_mode vmode, dmode;
+ machine_mode vmode, dmode;
rtvec v;
vmode = GET_MODE (target);
/* Test whether a constant permutation is supported. */
static bool
-rs6000_vectorize_vec_perm_const_ok (enum machine_mode vmode,
+rs6000_vectorize_vec_perm_const_ok (machine_mode vmode,
const unsigned char *sel)
{
/* AltiVec (and thus VSX) can handle arbitrary permutations. */
static void
rs6000_do_expand_vec_perm (rtx target, rtx op0, rtx op1,
- enum machine_mode vmode, unsigned nelt, rtx perm[])
+ machine_mode vmode, unsigned nelt, rtx perm[])
{
- enum machine_mode imode;
+ machine_mode imode;
rtx x;
imode = vmode;
void
rs6000_expand_extract_even (rtx target, rtx op0, rtx op1)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
unsigned i, nelt = GET_MODE_NUNITS (vmode);
rtx perm[16];
void
rs6000_expand_interleave (rtx target, rtx op0, rtx op1, bool highp)
{
- enum machine_mode vmode = GET_MODE (target);
+ machine_mode vmode = GET_MODE (target);
unsigned i, high, nelt = GET_MODE_NUNITS (vmode);
rtx perm[16];
/* Return an RTX representing where to find the function value of a
function returning MODE. */
static rtx
-rs6000_complex_function_value (enum machine_mode mode)
+rs6000_complex_function_value (machine_mode mode)
{
unsigned int regno;
rtx r1, r2;
- enum machine_mode inner = GET_MODE_INNER (mode);
+ machine_mode inner = GET_MODE_INNER (mode);
unsigned int inner_bytes = GET_MODE_SIZE (inner);
if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned int regno;
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int n_elts;
/* Special handling for structs in darwin64. */
/* Define how to find the value returned by a library function
assuming the value has mode MODE. */
rtx
-rs6000_libcall_value (enum machine_mode mode)
+rs6000_libcall_value (machine_mode mode)
{
unsigned int regno;
rtx parts[8];
int i, words;
unsigned regno = REGNO (reg);
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
if (TARGET_SPE
&& regno < 32
if (TARGET_SPE)
{
int i;
- enum machine_mode mode = TYPE_MODE (char_type_node);
+ machine_mode mode = TYPE_MODE (char_type_node);
rtx addr = expand_expr (address, NULL_RTX, VOIDmode, EXPAND_NORMAL);
rtx mem = gen_rtx_MEM (BLKmode, addr);
rtx value = gen_int_mode (4, mode);
if (TARGET_MACHO && ! TARGET_ALTIVEC)
{
int i;
- enum machine_mode mode = TYPE_MODE (char_type_node);
+ machine_mode mode = TYPE_MODE (char_type_node);
rtx addr = expand_expr (address, NULL_RTX, VOIDmode, EXPAND_NORMAL);
rtx mem = gen_rtx_MEM (BLKmode, addr);
rtx value = gen_int_mode (16, mode);
}
/* target hook eh_return_filter_mode */
-static enum machine_mode
+static machine_mode
rs6000_eh_return_filter_mode (void)
{
return TARGET_32BIT ? SImode : word_mode;
/* Target hook for scalar_mode_supported_p. */
static bool
-rs6000_scalar_mode_supported_p (enum machine_mode mode)
+rs6000_scalar_mode_supported_p (machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode))
return default_decimal_float_supported_p ();
/* Target hook for vector_mode_supported_p. */
static bool
-rs6000_vector_mode_supported_p (enum machine_mode mode)
+rs6000_vector_mode_supported_p (machine_mode mode)
{
if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
memory requirements (either offetable or REG+REG addressing). */
rtx
-rs6000_allocate_stack_temp (enum machine_mode mode,
+rs6000_allocate_stack_temp (machine_mode mode,
bool offsettable_p,
bool reg_reg_p)
{
for particular insns, though. Only easy FP constants are acceptable. */
static bool
-rs6000_legitimate_constant_p (enum machine_mode mode, rtx x)
+rs6000_legitimate_constant_p (machine_mode mode, rtx x)
{
if (TARGET_ELF && tls_referenced_p (x))
return false;
rtx op1,
rtx op2,
enum rtx_code code,
- enum machine_mode mode,
+ machine_mode mode,
bool complement_final_p,
bool complement_op1_p,
bool complement_op2_p)
bool complement_op1_p,
bool complement_op2_p)
{
- enum machine_mode mode = GET_MODE (operands[0]);
- enum machine_mode sub_mode;
+ machine_mode mode = GET_MODE (operands[0]);
+ machine_mode sub_mode;
rtx op0, op1, op2;
int sub_size, regno0, regno1, nregs, i;
rtx orig_mem = operands[3];
rtx new_addr, new_mem, orig_addr, offset;
enum rtx_code plus_or_lo_sum;
- enum machine_mode target_mode = GET_MODE (target);
- enum machine_mode extend_mode = target_mode;
- enum machine_mode ptr_mode = Pmode;
+ machine_mode target_mode = GET_MODE (target);
+ machine_mode extend_mode = target_mode;
+ machine_mode ptr_mode = Pmode;
enum rtx_code extend = UNKNOWN;
if (GET_CODE (orig_mem) == ZERO_EXTEND
const char *load_str = NULL;
const char *mode_name = NULL;
char insn_template[80];
- enum machine_mode mode;
+ machine_mode mode;
const char *comment_str = ASM_COMMENT_START;
if (GET_CODE (mem) == ZERO_EXTEND)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
/* Ignore uses for addressability. */
- enum machine_mode mode = GET_MODE (DF_REF_REG (use));
+ machine_mode mode = GET_MODE (DF_REF_REG (use));
if (!VECTOR_MODE_P (mode))
continue;
rtx body = PATTERN (insn);
rtx mem_op = SET_SRC (body);
rtx tgt_reg = SET_DEST (body);
- enum machine_mode mode = GET_MODE (tgt_reg);
+ machine_mode mode = GET_MODE (tgt_reg);
int n_elts = GET_MODE_NUNITS (mode);
int half_elts = n_elts / 2;
rtx par = gen_rtx_PARALLEL (mode, rtvec_alloc (n_elts));
{
rtx body = PATTERN (insn);
rtx src_reg = SET_SRC (body);
- enum machine_mode mode = GET_MODE (src_reg);
+ machine_mode mode = GET_MODE (src_reg);
int n_elts = GET_MODE_NUNITS (mode);
int half_elts = n_elts / 2;
rtx par = gen_rtx_PARALLEL (mode, rtvec_alloc (n_elts));
FOR_EACH_INSN_INFO_USE (mention, insn_info)
{
/* We use DF_REF_REAL_REG here to get inside any subregs. */
- enum machine_mode mode = GET_MODE (DF_REF_REAL_REG (mention));
+ machine_mode mode = GET_MODE (DF_REF_REAL_REG (mention));
/* If a use gets its value from a call insn, it will be
a hard register and will look like (reg:V4SI 3 3).
FOR_EACH_INSN_INFO_DEF (mention, insn_info)
{
/* We use DF_REF_REAL_REG here to get inside any subregs. */
- enum machine_mode mode = GET_MODE (DF_REF_REAL_REG (mention));
+ machine_mode mode = GET_MODE (DF_REF_REAL_REG (mention));
/* If we're loading up a hard vector register for a call,
it looks like (set (reg:V4SI 9 9) (...)). The df
After generation of rtl, the compiler makes no further distinction
between pointers and any other objects of this machine mode. */
extern unsigned rs6000_pmode;
-#define Pmode ((enum machine_mode)rs6000_pmode)
+#define Pmode ((machine_mode)rs6000_pmode)
/* Supply definition of STACK_SIZE_MODE for allocate_dynamic_stack_space. */
#define STACK_SIZE_MODE (TARGET_32BIT ? SImode : DImode)
extern void rx_emit_stack_popm (rtx *, bool);
extern void rx_emit_stack_pushm (rtx *);
extern char * rx_gen_move_template (rtx *, bool);
-extern bool rx_is_legitimate_constant (enum machine_mode, rtx);
+extern bool rx_is_legitimate_constant (machine_mode, rtx);
extern bool rx_is_restricted_memory_address (rtx,
- enum machine_mode);
-extern bool rx_match_ccmode (rtx, enum machine_mode);
+ machine_mode);
+extern bool rx_match_ccmode (rtx, machine_mode);
extern rtx rx_maybe_pidify_operand (rtx, int);
extern void rx_notice_update_cc (rtx, rtx);
-extern void rx_split_cbranch (enum machine_mode, enum rtx_code,
+extern void rx_split_cbranch (machine_mode, enum rtx_code,
rtx, rtx, rtx);
-extern enum machine_mode rx_select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode rx_select_cc_mode (enum rtx_code, rtx, rtx);
#endif
#endif /* GCC_RX_PROTOS_H */
#define CC_FLAG_C (1 << 3)
#define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
-static unsigned int flags_from_mode (enum machine_mode mode);
+static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);
\f
/* Return true if OP is a reference to an object in a PID data area. */
static rtx
rx_legitimize_address (rtx x,
rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (rx_pid_data_operand (x) == PID_UNENCODED)
{
}
static bool
-rx_is_legitimate_address (enum machine_mode mode, rtx x,
+rx_is_legitimate_address (machine_mode mode, rtx x,
bool strict ATTRIBUTE_UNUSED)
{
if (RTX_OK_FOR_BASE (x, strict))
or pre/post increment/decrement. */
bool
-rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
+rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
if (! rx_is_legitimate_address
(mode, mem, reload_in_progress || reload_completed))
case 'B':
{
enum rtx_code code = GET_CODE (op);
- enum machine_mode mode = GET_MODE (XEXP (op, 0));
+ machine_mode mode = GET_MODE (XEXP (op, 0));
const char *ret;
if (mode == CC_Fmode)
occupied by an argument of type TYPE and mode MODE. */
static unsigned int
-rx_function_arg_size (enum machine_mode mode, const_tree type)
+rx_function_arg_size (machine_mode mode, const_tree type)
{
unsigned int num_bytes;
variable parameter list. */
static rtx
-rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
+rx_function_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
unsigned int next_reg;
}
static void
-rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
+rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
*get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
static unsigned int
-rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
/* Older versions of the RX backend aligned all on-stack arguments
const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (ret_type);
+ machine_mode mode = TYPE_MODE (ret_type);
/* RX ABI specifies that small integer types are
promoted to int when returned by a function. */
/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
regard to function returns as does TARGET_FUNCTION_VALUE. */
-static enum machine_mode
+static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int * punsignedp ATTRIBUTE_UNUSED,
const_tree funtype ATTRIBUTE_UNUSED,
int for_return)
The only special thing we do here is to honor small data. */
static section *
-rx_select_rtx_section (enum machine_mode mode,
+rx_select_rtx_section (machine_mode mode,
rtx x,
unsigned HOST_WIDE_INT align)
{
rx_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
operand on the RX. X is already known to satisfy CONSTANT_P. */
bool
-rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
switch (GET_CODE (x))
{
}
static int
-rx_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
+rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
rtx a, b;
}
\f
static int
-rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t regclass ATTRIBUTE_UNUSED,
bool in)
{
/* Convert a CC_MODE to the set of flags that it represents. */
static unsigned int
-flags_from_mode (enum machine_mode mode)
+flags_from_mode (machine_mode mode)
{
switch (mode)
{
/* Convert a set of flags to a CC_MODE that can implement it. */
-static enum machine_mode
+static machine_mode
mode_from_flags (unsigned int f)
{
if (f & CC_FLAG_FP)
/* Return a CC_MODE of which both M1 and M2 are subsets. */
-static enum machine_mode
-rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
+static machine_mode
+rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
unsigned f;
/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
-enum machine_mode
+machine_mode
rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
{
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
CC_MODE, and use that in branches based on that compare. */
void
-rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
+rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
rtx c1, rtx c2, rtx label)
{
rtx flags, x;
/* A helper function for matching parallels that set the flags. */
bool
-rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
+rx_match_ccmode (rtx insn, machine_mode cc_mode)
{
rtx op1, flags;
- enum machine_mode flags_mode;
+ machine_mode flags_mode;
gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
(define_special_predicate "load_multiple_operation"
(match_code "parallel")
{
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int count = XVECLEN (op, 0);
unsigned int dest_regno;
rtx src_addr;
(define_special_predicate "store_multiple_operation"
(match_code "parallel")
{
- enum machine_mode elt_mode;
+ machine_mode elt_mode;
int count = XVECLEN (op, 0);
unsigned int src_regno;
rtx dest_addr;
extern bool s390_can_use_return_insn (void);
extern void s390_function_profiler (FILE *, int);
extern void s390_set_has_landing_pad_p (bool);
-extern bool s390_hard_regno_mode_ok (unsigned int, enum machine_mode);
+extern bool s390_hard_regno_mode_ok (unsigned int, machine_mode);
extern bool s390_hard_regno_rename_ok (unsigned int, unsigned int);
-extern int s390_class_max_nregs (enum reg_class, enum machine_mode);
+extern int s390_class_max_nregs (enum reg_class, machine_mode);
#ifdef RTX_CODE
extern int s390_extra_constraint_str (rtx, int, const char *);
extern int s390_const_ok_for_constraint_p (HOST_WIDE_INT, int, const char *);
extern int s390_const_double_ok_for_constraint_p (rtx, int, const char *);
-extern int s390_single_part (rtx, enum machine_mode, enum machine_mode, int);
-extern unsigned HOST_WIDE_INT s390_extract_part (rtx, enum machine_mode, int);
+extern int s390_single_part (rtx, machine_mode, machine_mode, int);
+extern unsigned HOST_WIDE_INT s390_extract_part (rtx, machine_mode, int);
extern bool s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT, int, int *, int *);
-extern bool s390_split_ok_p (rtx, rtx, enum machine_mode, int);
+extern bool s390_split_ok_p (rtx, rtx, machine_mode, int);
extern bool s390_overlap_p (rtx, rtx, HOST_WIDE_INT);
extern bool s390_offset_p (rtx, rtx, rtx);
extern int tls_symbolic_operand (rtx);
-extern bool s390_match_ccmode (rtx_insn *, enum machine_mode);
-extern enum machine_mode s390_tm_ccmode (rtx, rtx, bool);
-extern enum machine_mode s390_select_ccmode (enum rtx_code, rtx, rtx);
+extern bool s390_match_ccmode (rtx_insn *, machine_mode);
+extern machine_mode s390_tm_ccmode (rtx, rtx, bool);
+extern machine_mode s390_select_ccmode (enum rtx_code, rtx, rtx);
extern rtx s390_emit_compare (enum rtx_code, rtx, rtx);
extern rtx_insn *s390_emit_jump (rtx, rtx);
extern bool symbolic_reference_mentioned_p (rtx);
extern int legitimate_pic_operand_p (rtx);
extern bool legitimate_reload_constant_p (rtx);
extern rtx legitimize_pic_address (rtx, rtx);
-extern rtx legitimize_reload_address (rtx, enum machine_mode, int, int);
+extern rtx legitimize_reload_address (rtx, machine_mode, int, int);
extern enum reg_class s390_secondary_input_reload_class (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx);
extern enum reg_class s390_secondary_output_reload_class (enum reg_class,
- enum machine_mode,
+ machine_mode,
rtx);
extern void s390_reload_larl_operand (rtx , rtx , rtx);
extern void s390_reload_symref_address (rtx , rtx , rtx , bool);
extern bool s390_expand_cmpmem (rtx, rtx, rtx, rtx);
extern bool s390_expand_addcc (enum rtx_code, rtx, rtx, rtx, rtx, rtx);
extern bool s390_expand_insv (rtx, rtx, rtx, rtx);
-extern void s390_expand_cs_hqi (enum machine_mode, rtx, rtx, rtx,
+extern void s390_expand_cs_hqi (machine_mode, rtx, rtx, rtx,
rtx, rtx, bool);
-extern void s390_expand_atomic (enum machine_mode, enum rtx_code,
+extern void s390_expand_atomic (machine_mode, enum rtx_code,
rtx, rtx, rtx, bool);
extern void s390_expand_tbegin (rtx, rtx, rtx, bool);
extern rtx s390_return_addr_rtx (int, rtx);
extern rtx s390_back_chain_rtx (void);
extern rtx_insn *s390_emit_call (rtx, rtx, rtx, rtx);
extern void s390_expand_logical_operator (enum rtx_code,
- enum machine_mode, rtx *);
+ machine_mode, rtx *);
extern bool s390_logical_operator_ok_p (rtx *);
extern void s390_narrow_logical_operator (enum rtx_code, rtx *, rtx *);
extern void s390_split_access_reg (rtx, rtx *, rtx *);
extern void print_operand_address (FILE *, rtx);
extern void print_operand (FILE *, rtx, int);
-extern void s390_output_pool_entry (rtx, enum machine_mode, unsigned int);
+extern void s390_output_pool_entry (rtx, machine_mode, unsigned int);
extern int s390_label_align (rtx);
extern int s390_agen_dep_p (rtx_insn *, rtx_insn *);
extern rtx_insn *s390_load_got (void);
return align_labels_log;
}
-static enum machine_mode
+static machine_mode
s390_libgcc_cmp_return_mode (void)
{
return TARGET_64BIT ? DImode : SImode;
}
-static enum machine_mode
+static machine_mode
s390_libgcc_shift_count_mode (void)
{
return TARGET_64BIT ? DImode : SImode;
}
-static enum machine_mode
+static machine_mode
s390_unwind_word_mode (void)
{
return TARGET_64BIT ? DImode : SImode;
/* Return true if the back end supports mode MODE. */
static bool
-s390_scalar_mode_supported_p (enum machine_mode mode)
+s390_scalar_mode_supported_p (machine_mode mode)
{
/* In contrast to the default implementation reject TImode constants on 31bit
TARGET_ZARCH for ABI compliance. */
mode which is compatible with both. Otherwise, return
VOIDmode. */
-static enum machine_mode
-s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
+static machine_mode
+s390_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
if (m1 == m2)
return m1;
CC mode is at least as constrained as REQ_MODE. */
static bool
-s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
+s390_match_ccmode_set (rtx set, machine_mode req_mode)
{
- enum machine_mode set_mode;
+ machine_mode set_mode;
gcc_assert (GET_CODE (set) == SET);
If REQ_MODE is VOIDmode, always return false. */
bool
-s390_match_ccmode (rtx_insn *insn, enum machine_mode req_mode)
+s390_match_ccmode (rtx_insn *insn, machine_mode req_mode)
{
int i;
CC1 and CC2 for mixed selected bits (TMxx), it is false
if the instruction cannot (TM). */
-enum machine_mode
+machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
int bit0, bit1;
OP0 and OP1 of a COMPARE, return the mode to be used for the
comparison. */
-enum machine_mode
+machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
switch (code)
if (GET_CODE (op0) == AND)
{
/* Check whether we can potentially do it via TM. */
- enum machine_mode ccmode;
+ machine_mode ccmode;
ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
if (ccmode != VOIDmode)
{
rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
- enum machine_mode mode = s390_select_ccmode (code, op0, op1);
+ machine_mode mode = s390_select_ccmode (code, op0, op1);
rtx cc;
/* Do not output a redundant compare instruction if a compare_and_swap
contains such a part. */
unsigned HOST_WIDE_INT
-s390_extract_part (rtx op, enum machine_mode mode, int def)
+s390_extract_part (rtx op, machine_mode mode, int def)
{
unsigned HOST_WIDE_INT value = 0;
int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
int
s390_single_part (rtx op,
- enum machine_mode mode,
- enum machine_mode part_mode,
+ machine_mode mode,
+ machine_mode part_mode,
int def)
{
unsigned HOST_WIDE_INT value = 0;
moves, moving the subword FIRST_SUBWORD first. */
bool
-s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
+s390_split_ok_p (rtx dst, rtx src, machine_mode mode, int first_subword)
{
/* Floating point registers cannot be split. */
if (FP_REG_P (src) || FP_REG_P (dst))
/* Expand logical operator CODE in mode MODE with operands OPERANDS. */
void
-s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
+s390_expand_logical_operator (enum rtx_code code, machine_mode mode,
rtx *operands)
{
- enum machine_mode wmode = mode;
+ machine_mode wmode = mode;
rtx dst = operands[0];
rtx src1 = operands[1];
rtx src2 = operands[2];
int
s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
{
- enum machine_mode mode, part_mode;
+ machine_mode mode, part_mode;
int def;
int part, part_goal;
/* Implement TARGET_REGISTER_MOVE_COST. */
static int
-s390_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+s390_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
/* On s390, copy between fprs and gprs is expensive as long as no
/* Implement TARGET_MEMORY_MOVE_COST. */
static int
-s390_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+s390_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
/* Return the cost of an address rtx ADDR. */
static int
-s390_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
+s390_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
static bool
-s390_legitimate_constant_p (enum machine_mode mode, rtx op)
+s390_legitimate_constant_p (machine_mode mode, rtx op)
{
/* Accept all non-symbolic constants. */
if (!SYMBOLIC_CONST (op))
not constant (TLS) or not known at final link time (PIC). */
static bool
-s390_cannot_force_const_mem (enum machine_mode mode, rtx x)
+s390_cannot_force_const_mem (machine_mode mode, rtx x)
{
switch (GET_CODE (x))
{
if (GET_CODE (op) == CONST_INT
&& trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
{
- enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
+ machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
rtx hi = operand_subword (op, 0, 0, dword_mode);
rtx lo = operand_subword (op, 1, 0, dword_mode);
return legitimate_reload_constant_p (hi)
static reg_class_t
s390_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
enum reg_class rclass = (enum reg_class) rclass_i;
STRICT specifies whether strict register checking applies. */
static bool
-s390_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
+s390_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
{
struct s390_address ad;
static rtx
s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
rtx constant_term = const0_rtx;
and TYPE is the reload type of the current reload. */
rtx
-legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
+legitimize_reload_address (rtx ad, machine_mode mode ATTRIBUTE_UNUSED,
int opnum, int type)
{
if (!optimize || TARGET_LONG_DISPLACEMENT)
rtx_code_label *loop_start_label = gen_label_rtx ();
rtx_code_label *loop_end_label = gen_label_rtx ();
rtx_code_label *end_label = gen_label_rtx ();
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (len);
if (mode == VOIDmode)
rtx_code_label *loop_start_label = gen_label_rtx ();
rtx_code_label *loop_end_label = gen_label_rtx ();
rtx_code_label *end_label = gen_label_rtx ();
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (len);
if (mode == VOIDmode)
rtx_code_label *loop_start_label = gen_label_rtx ();
rtx_code_label *loop_end_label = gen_label_rtx ();
rtx_code_label *end_label = gen_label_rtx ();
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (len);
if (mode == VOIDmode)
s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
rtx dst, rtx src, rtx increment)
{
- enum machine_mode cmp_mode;
- enum machine_mode cc_mode;
+ machine_mode cmp_mode;
+ machine_mode cc_mode;
rtx op_res;
rtx insn;
rtvec p;
{
int bitsize = INTVAL (op1);
int bitpos = INTVAL (op2);
- enum machine_mode mode = GET_MODE (dest);
- enum machine_mode smode;
+ machine_mode mode = GET_MODE (dest);
+ machine_mode smode;
int smode_bsize, mode_bsize;
rtx op, clobber;
while (regpos > bitpos)
{
- enum machine_mode putmode;
+ machine_mode putmode;
int putsize;
if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
/* For z10, generate ROTATE THEN INSERT SELECTED BITS (RISBG et al). */
if (TARGET_Z10 && (mode == DImode || mode == SImode))
{
- enum machine_mode mode_s = GET_MODE (src);
+ machine_mode mode_s = GET_MODE (src);
if (mode_s == VOIDmode)
{
register that holds VAL of mode MODE shifted by COUNT bits. */
static inline rtx
-s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
+s390_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
{
val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
NULL_RTX, 1, OPTAB_DIRECT);
static void
init_alignment_context (struct alignment_context *ac, rtx mem,
- enum machine_mode mode)
+ machine_mode mode)
{
ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
static rtx
s390_two_part_insv (struct alignment_context *ac, rtx *seq1, rtx *seq2,
- enum machine_mode mode, rtx val, rtx ins)
+ machine_mode mode, rtx val, rtx ins)
{
rtx tmp;
value to set if CMP == MEM. */
void
-s390_expand_cs_hqi (enum machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
+s390_expand_cs_hqi (machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
rtx cmp, rtx new_rtx, bool is_weak)
{
struct alignment_context ac;
store it to TARGET. */
void
-s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
+s390_expand_atomic (machine_mode mode, enum rtx_code code,
rtx target, rtx mem, rtx val, bool after)
{
struct alignment_context ac;
constant tables in the middle of large functions. */
#define NR_C_MODES 11
-enum machine_mode constant_modes[NR_C_MODES] =
+machine_mode constant_modes[NR_C_MODES] =
{
TFmode, TImode, TDmode,
DFmode, DImode, DDmode,
/* Add constant VAL of mode MODE to the constant pool POOL. */
static void
-s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
+s390_add_constant (struct constant_pool *pool, rtx val, machine_mode mode)
{
struct constant *c;
int i;
static rtx
s390_find_constant (struct constant_pool *pool, rtx val,
- enum machine_mode mode)
+ machine_mode mode)
{
struct constant *c;
int i;
if (pool_ref)
{
rtx constant = get_pool_constant (pool_ref);
- enum machine_mode mode = get_pool_mode (pool_ref);
+ machine_mode mode = get_pool_mode (pool_ref);
s390_add_constant (pool, constant, mode);
}
}
if (pool_ref)
{
rtx constant = get_pool_constant (pool_ref);
- enum machine_mode mode = get_pool_mode (pool_ref);
+ machine_mode mode = get_pool_mode (pool_ref);
if (!curr_pool)
curr_pool = s390_start_pool (&pool_list, insn);
/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
void
-s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
+s390_output_pool_entry (rtx exp, machine_mode mode, unsigned int align)
{
REAL_VALUE_TYPE r;
{
char *regs_ever_clobbered = (char *)data;
unsigned int i, regno;
- enum machine_mode mode = GET_MODE (setreg);
+ machine_mode mode = GET_MODE (setreg);
if (GET_CODE (setreg) == SUBREG)
{
/* Return true if it is legal to put a value with MODE into REGNO. */
bool
-s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
+s390_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
switch (REGNO_REG_CLASS (regno))
{
in a register of class RCLASS. */
int
-s390_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
+s390_class_max_nregs (enum reg_class rclass, machine_mode mode)
{
switch (rclass)
{
MODE must be specified. */
static int
-s390_function_arg_size (enum machine_mode mode, const_tree type)
+s390_function_arg_size (machine_mode mode, const_tree type)
{
if (type)
return int_size_in_bytes (type);
is to be passed in a floating-point register, if available. */
static bool
-s390_function_arg_float (enum machine_mode mode, const_tree type)
+s390_function_arg_float (machine_mode mode, const_tree type)
{
int size = s390_function_arg_size (mode, type);
if (size > 8)
registers, if available. */
static bool
-s390_function_arg_integer (enum machine_mode mode, const_tree type)
+s390_function_arg_integer (machine_mode mode, const_tree type)
{
int size = s390_function_arg_size (mode, type);
if (size > 8)
static bool
s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
int size = s390_function_arg_size (mode, type);
matching an ellipsis). */
static void
-s390_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+s390_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
are pushed to the stack. */
static rtx
-s390_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+s390_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Function arguments and return values are promoted to word size. */
-static enum machine_mode
-s390_promote_function_mode (const_tree type, enum machine_mode mode,
+static machine_mode
+s390_promote_function_mode (const_tree type, machine_mode mode,
int *punsignedp,
const_tree fntype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
value of mode MODE from a libcall. */
static rtx
-s390_function_and_libcall_value (enum machine_mode mode,
+s390_function_and_libcall_value (machine_mode mode,
const_tree ret_type,
const_tree fntype_or_decl,
bool outgoing ATTRIBUTE_UNUSED)
MODE. */
static rtx
-s390_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+s390_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return s390_function_and_libcall_value (mode, NULL_TREE,
NULL_TREE, true);
static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2
if (nonvoid)
{
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (!target
|| GET_MODE (target) != tmode
|| !(*insn_data[icode].operand[0].predicate) (target, tmode))
}
static bool
-s390_valid_pointer_mode (enum machine_mode mode)
+s390_valid_pointer_mode (machine_mode mode)
{
return (mode == SImode || (TARGET_64BIT && mode == DImode));
}
CUMULATIVE_ARGS cum_v;
cumulative_args_t cum;
tree parameter;
- enum machine_mode mode;
+ machine_mode mode;
tree type;
rtx parm_rtx;
int reg, i;
/* Specify the machine mode that pointers have.
After generation of rtl, the compiler makes no further distinction
between pointers and any other objects of this machine mode. */
-#define Pmode ((enum machine_mode) (TARGET_64BIT ? DImode : SImode))
+#define Pmode ((machine_mode) (TARGET_64BIT ? DImode : SImode))
/* This is -1 for "pointer mode" extend. See ptr_extend in s390.md. */
#define POINTERS_EXTEND_UNSIGNED -1
(use (match_operand 2 "" ""))])]
"reload_completed"
{
- enum machine_mode mode;
+ machine_mode mode;
int regno;
int count;
rtx from;
(use (match_operand 2 "" ""))])]
"reload_completed"
{
- enum machine_mode mode;
+ machine_mode mode;
int regno;
int count;
rtx to;
(clobber (reg:CC CC_REGNUM))])]
""
{
- enum machine_mode sreg_mode = TARGET_ZARCH ? DImode : SImode;
- enum machine_mode dreg_mode = TARGET_ZARCH ? TImode : DImode;
+ machine_mode sreg_mode = TARGET_ZARCH ? DImode : SImode;
+ machine_mode dreg_mode = TARGET_ZARCH ? TImode : DImode;
rtx reg0 = gen_reg_rtx (dreg_mode);
rtx reg1 = gen_reg_rtx (dreg_mode);
rtx addr0 = gen_lowpart (Pmode, gen_highpart (sreg_mode, reg0));
(clobber (reg:CC CC_REGNUM))])]
""
{
- enum machine_mode sreg_mode = TARGET_ZARCH ? DImode : SImode;
- enum machine_mode dreg_mode = TARGET_ZARCH ? TImode : DImode;
+ machine_mode sreg_mode = TARGET_ZARCH ? DImode : SImode;
+ machine_mode dreg_mode = TARGET_ZARCH ? TImode : DImode;
rtx reg0 = gen_reg_rtx (dreg_mode);
rtx reg1 = gen_reg_rtx (dreg_mode);
rtx addr0 = gen_lowpart (Pmode, gen_highpart (sreg_mode, reg0));
(use (match_dup 3))])]
""
{
- enum machine_mode sreg_mode = TARGET_ZARCH ? DImode : SImode;
- enum machine_mode dreg_mode = TARGET_ZARCH ? TImode : DImode;
+ machine_mode sreg_mode = TARGET_ZARCH ? DImode : SImode;
+ machine_mode dreg_mode = TARGET_ZARCH ? TImode : DImode;
rtx reg0 = gen_reg_rtx (dreg_mode);
rtx reg1 = gen_reg_rtx (dreg_mode);
rtx addr0 = gen_lowpart (Pmode, gen_highpart (sreg_mode, reg0));
UNSPECV_POOL_ENTRY)]
""
{
- enum machine_mode mode = GET_MODE (PATTERN (insn));
+ machine_mode mode = GET_MODE (PATTERN (insn));
unsigned int align = GET_MODE_BITSIZE (mode);
s390_output_pool_entry (operands[0], mode, align);
return "";
;;(define_special_predicate "int_gpr_dest"
;; (match_code "subreg,reg")
;;{
-;; enum machine_mode op_mode = GET_MODE (op);
+;; machine_mode op_mode = GET_MODE (op);
;;
;; if (GET_MODE_CLASS (op_mode) != MODE_INT
;; || GET_MODE_SIZE (op_mode) >= UNITS_PER_WORD)
(define_special_predicate "trunc_hi_operand"
(match_code "subreg,reg,truncate")
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (op_mode != SImode && op_mode != DImode
&& op_mode != V4HImode && op_mode != V2SImode)
extern const char *output_ieee_ccmpeq (rtx_insn *, rtx *);
extern const char *output_branchy_insn (enum rtx_code, const char *,
rtx_insn *, rtx *);
-extern const char *output_movedouble (rtx, rtx[], enum machine_mode);
-extern const char *output_movepcrel (rtx, rtx[], enum machine_mode);
+extern const char *output_movedouble (rtx, rtx[], machine_mode);
+extern const char *output_movepcrel (rtx, rtx[], machine_mode);
extern const char *output_far_jump (rtx_insn *, rtx);
extern rtx sfunc_uses_reg (rtx_insn *);
extern int sh_loop_align (rtx_insn *);
extern bool fp_zero_operand (rtx);
extern bool fp_one_operand (rtx);
-extern bool sh_legitimate_index_p (enum machine_mode, rtx, bool, bool);
-extern bool sh_legitimize_reload_address (rtx *, enum machine_mode, int, int);
-extern rtx legitimize_pic_address (rtx, enum machine_mode, rtx);
+extern bool sh_legitimate_index_p (machine_mode, rtx, bool, bool);
+extern bool sh_legitimize_reload_address (rtx *, machine_mode, int, int);
+extern rtx legitimize_pic_address (rtx, machine_mode, rtx);
extern bool nonpic_symbol_mentioned_p (rtx);
extern void output_pic_addr_const (FILE *, rtx);
extern bool expand_block_move (rtx *);
-extern void prepare_move_operands (rtx[], enum machine_mode mode);
+extern void prepare_move_operands (rtx[], machine_mode mode);
extern bool sh_expand_cmpstr (rtx *);
extern bool sh_expand_cmpnstr (rtx *);
extern bool sh_expand_strlen (rtx *);
extern void sh_expand_setmem (rtx *);
-extern enum rtx_code prepare_cbranch_operands (rtx *, enum machine_mode mode,
+extern enum rtx_code prepare_cbranch_operands (rtx *, machine_mode mode,
enum rtx_code comparison);
extern void expand_cbranchsi4 (rtx *operands, enum rtx_code comparison, int);
extern bool expand_cbranchdi4 (rtx *operands, enum rtx_code comparison);
extern void sh_emit_scc_to_t (enum rtx_code, rtx, rtx);
-extern rtx sh_emit_cheap_store_flag (enum machine_mode, enum rtx_code, rtx, rtx);
-extern void sh_emit_compare_and_branch (rtx *, enum machine_mode);
-extern void sh_emit_compare_and_set (rtx *, enum machine_mode);
+extern rtx sh_emit_cheap_store_flag (machine_mode, enum rtx_code, rtx, rtx);
+extern void sh_emit_compare_and_branch (rtx *, machine_mode);
+extern void sh_emit_compare_and_set (rtx *, machine_mode);
extern bool sh_ashlsi_clobbers_t_reg_p (rtx);
extern bool sh_lshrsi_clobbers_t_reg_p (rtx);
extern void gen_shifty_op (int, rtx *);
extern void fixup_addr_diff_vecs (rtx_insn *);
extern int get_dest_uid (rtx, int);
extern void final_prescan_insn (rtx_insn *, rtx *, int);
-extern enum tls_model tls_symbolic_operand (rtx, enum machine_mode);
-extern bool system_reg_operand (rtx, enum machine_mode);
+extern enum tls_model tls_symbolic_operand (rtx, machine_mode);
+extern bool system_reg_operand (rtx, machine_mode);
extern bool reg_unused_after (rtx, rtx_insn *);
extern int sh_insn_length_adjustment (rtx_insn *);
extern bool sh_can_redirect_branch (rtx_insn *, rtx_insn *);
extern void sh_expand_unop_v2sf (enum rtx_code, rtx, rtx);
extern void sh_expand_binop_v2sf (enum rtx_code, rtx, rtx, rtx);
extern bool sh_expand_t_scc (rtx *);
-extern rtx sh_gen_truncate (enum machine_mode, rtx, int);
-extern bool sh_vector_mode_supported_p (enum machine_mode);
+extern rtx sh_gen_truncate (machine_mode, rtx, int);
+extern bool sh_vector_mode_supported_p (machine_mode);
extern bool sh_cfun_trap_exit_p (void);
extern rtx sh_find_equiv_gbr_addr (rtx_insn* cur_insn, rtx mem);
extern int sh_eval_treg_value (rtx op);
extern bool sh_attr_renesas_p (const_tree);
extern bool sh_cfun_attr_renesas_p (void);
extern bool sh_cannot_change_mode_class
- (enum machine_mode, enum machine_mode, enum reg_class);
-extern bool sh_small_register_classes_for_mode_p (enum machine_mode);
+ (machine_mode, machine_mode, enum reg_class);
+extern bool sh_small_register_classes_for_mode_p (machine_mode);
extern void sh_mark_label (rtx, int);
extern bool check_use_sfunc_addr (rtx_insn *, rtx);
extern rtx sh_get_pr_initial_val (void);
extern void sh_init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree,
- signed int, enum machine_mode);
+ signed int, machine_mode);
extern rtx sh_dwarf_register_span (rtx);
extern rtx replace_n_hard_rtx (rtx, rtx *, int , int);
extern int sh2a_get_function_vector_number (rtx);
extern bool sh2a_is_function_vector_call (rtx);
extern void sh_fix_range (const char *);
-extern bool sh_hard_regno_mode_ok (unsigned int, enum machine_mode);
+extern bool sh_hard_regno_mode_ok (unsigned int, machine_mode);
extern bool sh_can_use_simple_return_p (void);
#endif /* ! GCC_SH_PROTOS_H */
static void split_branches (rtx_insn *);
static int branch_dest (rtx);
static void print_slot (rtx_sequence *);
-static rtx_code_label *add_constant (rtx, enum machine_mode, rtx);
+static rtx_code_label *add_constant (rtx, machine_mode, rtx);
static void dump_table (rtx_insn *, rtx_insn *);
static bool broken_move (rtx_insn *);
static bool mova_p (rtx_insn *);
static void sh_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void sh_insert_attributes (tree, tree *);
static const char *sh_check_pch_target_flags (int);
-static int sh_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
+static int sh_register_move_cost (machine_mode, reg_class_t, reg_class_t);
static int sh_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
static int sh_issue_rate (void);
static int sh_dfa_new_cycle (FILE *, int, rtx_insn *, int, int, int *sort_p);
-static short find_set_regmode_weight (rtx, enum machine_mode);
-static short find_insn_regmode_weight (rtx, enum machine_mode);
-static void find_regmode_weight (basic_block, enum machine_mode);
+static short find_set_regmode_weight (rtx, machine_mode);
+static short find_insn_regmode_weight (rtx, machine_mode);
+static void find_regmode_weight (basic_block, machine_mode);
static int find_r0_life_regions (basic_block);
static void sh_md_init_global (FILE *, int, int);
static void sh_md_finish_global (FILE *, int);
static int rank_for_reorder (const void *, const void *);
static void swap_reorder (rtx_insn **, int);
static void ready_reorder (rtx_insn **, int);
-static bool high_pressure (enum machine_mode);
+static bool high_pressure (machine_mode);
static int sh_reorder (FILE *, int, rtx_insn **, int *, int);
static int sh_reorder2 (FILE *, int, rtx_insn **, int *, int);
static void sh_md_init (FILE *, int, int);
static void sh_init_builtins (void);
static tree sh_builtin_decl (unsigned, bool);
-static rtx sh_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx sh_expand_builtin (tree, rtx, rtx, machine_mode, int);
static void sh_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
HOST_WIDE_INT, tree);
static void sh_file_start (void);
static bool unspec_caller_rtx_p (rtx);
static bool sh_cannot_copy_insn_p (rtx_insn *);
static bool sh_rtx_costs (rtx, int, int, int, int *, bool);
-static int sh_address_cost (rtx, enum machine_mode, addr_space_t, bool);
+static int sh_address_cost (rtx, machine_mode, addr_space_t, bool);
static int sh_pr_n_sets (void);
static rtx sh_allocate_initial_value (rtx);
static reg_class_t sh_preferred_reload_class (rtx, reg_class_t);
static reg_class_t sh_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
struct secondary_reload_info *);
-static bool sh_legitimate_address_p (enum machine_mode, rtx, bool);
-static rtx sh_legitimize_address (rtx, rtx, enum machine_mode);
+static bool sh_legitimate_address_p (machine_mode, rtx, bool);
+static rtx sh_legitimize_address (rtx, rtx, machine_mode);
static rtx sh_delegitimize_address (rtx);
static int shmedia_target_regs_stack_space (HARD_REG_SET *);
static int shmedia_reserve_space_for_target_registers_p (int, HARD_REG_SET *);
static rtx sh_struct_value_rtx (tree, int);
static rtx sh_function_value (const_tree, const_tree, bool);
static bool sh_function_value_regno_p (const unsigned int);
-static rtx sh_libcall_value (enum machine_mode, const_rtx);
+static rtx sh_libcall_value (machine_mode, const_rtx);
static bool sh_return_in_memory (const_tree, const_tree);
static rtx sh_builtin_saveregs (void);
-static void sh_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+static void sh_setup_incoming_varargs (cumulative_args_t, machine_mode,
tree, int *, int);
static bool sh_strict_argument_naming (cumulative_args_t);
static bool sh_pretend_outgoing_varargs_named (cumulative_args_t);
static void sh_va_start (tree, rtx);
static tree sh_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
static bool sh_promote_prototypes (const_tree);
-static enum machine_mode sh_promote_function_mode (const_tree type,
- enum machine_mode,
+static machine_mode sh_promote_function_mode (const_tree type,
+ machine_mode,
int *punsignedp,
const_tree funtype,
int for_return);
-static bool sh_pass_by_reference (cumulative_args_t, enum machine_mode,
+static bool sh_pass_by_reference (cumulative_args_t, machine_mode,
const_tree, bool);
-static bool sh_callee_copies (cumulative_args_t, enum machine_mode,
+static bool sh_callee_copies (cumulative_args_t, machine_mode,
const_tree, bool);
-static int sh_arg_partial_bytes (cumulative_args_t, enum machine_mode,
+static int sh_arg_partial_bytes (cumulative_args_t, machine_mode,
tree, bool);
-static void sh_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void sh_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx sh_function_arg (cumulative_args_t, enum machine_mode,
+static rtx sh_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static bool sh_scalar_mode_supported_p (enum machine_mode);
+static bool sh_scalar_mode_supported_p (machine_mode);
static int sh_dwarf_calling_convention (const_tree);
static void sh_encode_section_info (tree, rtx, int);
static bool sh2a_function_vector_p (tree);
static void sh_trampoline_init (rtx, tree, rtx);
static rtx sh_trampoline_adjust_address (rtx);
static void sh_conditional_register_usage (void);
-static bool sh_legitimate_constant_p (enum machine_mode, rtx);
-static int mov_insn_size (enum machine_mode, bool);
-static int mov_insn_alignment_mask (enum machine_mode, bool);
+static bool sh_legitimate_constant_p (machine_mode, rtx);
+static int mov_insn_size (machine_mode, bool);
+static int mov_insn_alignment_mask (machine_mode, bool);
static bool sequence_insn_p (rtx_insn *);
static void sh_canonicalize_comparison (int *, rtx *, rtx *, bool);
static void sh_canonicalize_comparison (enum rtx_code&, rtx&, rtx&,
- enum machine_mode, bool);
+ machine_mode, bool);
static bool sh_fixed_condition_code_regs (unsigned int* p1, unsigned int* p2);
static void sh_init_sync_libfuncs (void) ATTRIBUTE_UNUSED;
sh_print_operand (FILE *stream, rtx x, int code)
{
int regno;
- enum machine_mode mode;
+ machine_mode mode;
switch (code)
{
{
rtx inner = XEXP (x, 0);
int offset = 0;
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
/* We might see SUBREGs with vector mode registers inside. */
if (GET_CODE (inner) == SUBREG
/* Prepare operands for a move define_expand; specifically, one of the
operands must be in a register. */
void
-prepare_move_operands (rtx operands[], enum machine_mode mode)
+prepare_move_operands (rtx operands[], machine_mode mode)
{
if ((mode == SImode || mode == DImode)
&& flag_pic
canonicalize comparisons in cbranch pattern expanders. */
static void
sh_canonicalize_comparison (enum rtx_code& cmp, rtx& op0, rtx& op1,
- enum machine_mode mode,
+ machine_mode mode,
bool op0_preserve_value)
{
/* When invoked from within the combine pass the mode is not specified,
}
enum rtx_code
-prepare_cbranch_operands (rtx *operands, enum machine_mode mode,
+prepare_cbranch_operands (rtx *operands, machine_mode mode,
enum rtx_code comparison)
{
/* The scratch reg is only available when this is invoked from within
/* Emit INSN, possibly in a PARALLEL with an USE/CLOBBER of FPSCR bits in case
of floating-point comparisons. */
static void
-sh_emit_set_t_insn (rtx insn, enum machine_mode mode)
+sh_emit_set_t_insn (rtx insn, machine_mode mode)
{
if (TARGET_FPU_ANY && GET_MODE_CLASS (mode) == MODE_FLOAT
&& GET_CODE (insn) != PARALLEL)
{
rtx t_reg = get_t_reg_rtx ();
enum rtx_code oldcode = code;
- enum machine_mode mode;
+ machine_mode mode;
/* First need a compare insn. */
switch (code)
}
rtx
-sh_emit_cheap_store_flag (enum machine_mode mode, enum rtx_code code,
+sh_emit_cheap_store_flag (machine_mode mode, enum rtx_code code,
rtx op0, rtx op1)
{
rtx target = gen_reg_rtx (SImode);
/* Called from the md file, set up the operands of a compare instruction. */
void
-sh_emit_compare_and_branch (rtx *operands, enum machine_mode mode)
+sh_emit_compare_and_branch (rtx *operands, machine_mode mode)
{
enum rtx_code code = GET_CODE (operands[0]);
enum rtx_code branch_code;
}
void
-sh_emit_compare_and_set (rtx *operands, enum machine_mode mode)
+sh_emit_compare_and_set (rtx *operands, machine_mode mode)
{
enum rtx_code code = GET_CODE (operands[1]);
rtx op0 = operands[2];
to take care when we see overlapping source and dest registers. */
const char *
output_movedouble (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
- enum machine_mode mode)
+ machine_mode mode)
{
rtx dst = operands[0];
rtx src = operands[1];
&& (register_operand (SET_SRC (x), VOIDmode)
|| satisfies_constraint_Z (SET_SRC (x))))
{
- const enum machine_mode mode = GET_MODE (SET_DEST (x));
+ const machine_mode mode = GET_MODE (SET_DEST (x));
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
/ mov_insn_size (mode, TARGET_SH2A));
return true;
/* Determine the size of the fundamental move insn that will be used
for the specified mode. */
static inline int
-mov_insn_size (enum machine_mode mode, bool consider_sh2a)
+mov_insn_size (machine_mode mode, bool consider_sh2a)
{
const int mode_sz = GET_MODE_SIZE (mode);
/* Determine the alignment mask for a move insn of the
specified mode. */
static inline int
-mov_insn_alignment_mask (enum machine_mode mode, bool consider_sh2a)
+mov_insn_alignment_mask (machine_mode mode, bool consider_sh2a)
{
const int mov_insn_sz = mov_insn_size (mode, consider_sh2a);
return mov_insn_sz > 0 ? (mov_insn_sz - 1) : 0;
/* Compute the cost of an address. */
static int
-sh_address_cost (rtx x, enum machine_mode mode,
+sh_address_cost (rtx x, machine_mode mode,
addr_space_t as ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
{
/* 'GBR + 0'. Account one more because of R0 restriction. */
rtx value; /* Value in table. */
rtx_code_label *label; /* Label of value. */
label_ref_list_t wend; /* End of window. */
- enum machine_mode mode; /* Mode of value. */
+ machine_mode mode; /* Mode of value. */
/* True if this constant is accessed as part of a post-increment
sequence. Note that HImode constants are never accessed in this way. */
/* Add a constant to the pool and return its label. */
static rtx_code_label *
-add_constant (rtx x, enum machine_mode mode, rtx last_value)
+add_constant (rtx x, machine_mode mode, rtx last_value)
{
int i;
rtx_code_label *lab, *new_rtx;
if (broken_move (from))
{
rtx pat, src, dst;
- enum machine_mode mode;
+ machine_mode mode;
pat = PATTERN (from);
if (GET_CODE (pat) == PARALLEL)
rtx src, dst;
rtx lab;
rtx newsrc;
- enum machine_mode mode;
+ machine_mode mode;
if (GET_CODE (pat) == PARALLEL)
patp = &XVECEXP (pat, 0, 0), pat = *patp;
int nreg = 0;
if (crtl->return_rtx)
{
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (crtl->return_rtx);
if (BASE_RETURN_VALUE_REG (mode) == FIRST_RET_REG)
nreg = HARD_REGNO_NREGS (FIRST_RET_REG, mode);
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
if (TEST_HARD_REG_BIT (*live_regs_mask, i))
{
- enum machine_mode mode = REGISTER_NATURAL_MODE (i);
+ machine_mode mode = REGISTER_NATURAL_MODE (i);
int reg = i;
if (current_function_interrupt)
tmp_pnt = schedule.temps;
for (entry = &schedule.entries[1]; entry->mode != VOIDmode; entry++)
{
- enum machine_mode mode = (enum machine_mode) entry->mode;
+ machine_mode mode = (machine_mode) entry->mode;
unsigned int reg = entry->reg;
rtx reg_rtx, mem_rtx, pre_dec = NULL_RTX;
rtx orig_reg_rtx;
tmp_pnt = schedule.temps;
for (; entry->mode != VOIDmode; entry--)
{
- enum machine_mode mode = (enum machine_mode) entry->mode;
+ machine_mode mode = (machine_mode) entry->mode;
int reg = entry->reg;
rtx reg_rtx, mem_rtx, post_inc = NULL_RTX;
gen_rtx_REG (SFmode, regno)));
}
-static enum machine_mode
-sh_promote_function_mode (const_tree type, enum machine_mode mode,
+static machine_mode
+sh_promote_function_mode (const_tree type, machine_mode mode,
int *punsignedp, const_tree funtype,
int for_return)
{
registers are passed by reference, so that an SHmedia trampoline
loads them into the full 64-bits registers. */
static int
-shcompact_byref (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
+shcompact_byref (const CUMULATIVE_ARGS *cum, machine_mode mode,
const_tree type, bool named)
{
unsigned HOST_WIDE_INT size;
}
static bool
-sh_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
+sh_pass_by_reference (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static bool
-sh_callee_copies (cumulative_args_t cum, enum machine_mode mode,
+sh_callee_copies (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
/* ??? How can it possibly be correct to return true only on the
}
static int
-sh_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+sh_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
NPARM_REGS words is at least partially passed in a register unless
its data type forbids. */
static rtx
-sh_function_arg (cumulative_args_t ca_v, enum machine_mode mode,
+sh_function_arg (cumulative_args_t ca_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
(TYPE is null for libcalls where that information may not be
available.) */
static void
-sh_function_arg_advance (cumulative_args_t ca_v, enum machine_mode mode,
+sh_function_arg_advance (cumulative_args_t ca_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
const_tree type2 = (ca->byref && type
? TREE_TYPE (type)
: type);
- enum machine_mode mode2 = (ca->byref && type
+ machine_mode mode2 = (ca->byref && type
? TYPE_MODE (type2)
: mode);
int dwords = ((ca->byref
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-sh_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+sh_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, BASE_RETURN_VALUE_REG (mode));
}
function that tell if a function uses varargs or stdarg. */
static void
sh_setup_incoming_varargs (cumulative_args_t ca,
- enum machine_mode mode,
+ machine_mode mode,
tree type,
int *pretend_arg_size,
int second_time ATTRIBUTE_UNUSED)
/* Returns true if OP is MACL, MACH or PR. The input must be a REG rtx.
Used only in general_movsrc_operand. */
bool
-system_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+system_reg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
switch (REGNO (op))
{
/* Return the TLS type for TLS symbols. */
enum tls_model
-tls_symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+tls_symbolic_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
if (GET_CODE (op) != SYMBOL_REF)
return TLS_MODEL_NONE;
/* Return TRUE for a valid displacement for the REG+disp addressing
with MODE. */
bool
-sh_legitimate_index_p (enum machine_mode mode, rtx op, bool consider_sh2a,
+sh_legitimate_index_p (machine_mode mode, rtx op, bool consider_sh2a,
bool allow_zero)
{
if (! CONST_INT_P (op))
GBR
GBR+disp */
static bool
-sh_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+sh_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
if (! ALLOW_INDEXED_ADDRESS
&& GET_CODE (x) == PLUS && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1)))
/* Convert a non-PIC address in `orig' to a PIC address using @GOT or
@GOTOFF in `reg'. */
rtx
-legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
+legitimize_pic_address (rtx orig, machine_mode mode ATTRIBUTE_UNUSED,
rtx reg)
{
if (tls_symbolic_operand (orig, Pmode) != TLS_MODEL_NONE)
};
static struct disp_adjust
-sh_find_mov_disp_adjust (enum machine_mode mode, HOST_WIDE_INT offset)
+sh_find_mov_disp_adjust (machine_mode mode, HOST_WIDE_INT offset)
{
struct disp_adjust res = { NULL_RTX, NULL_RTX };
If we find one, return the new, valid address.
Otherwise, return the original address. */
static rtx
-sh_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
+sh_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
if (flag_pic)
x = legitimize_pic_address (oldx, mode, NULL_RTX);
Like for sh_legitimize_address, for the SH we try to get a normal form
of the address. That will allow inheritance of the address reloads. */
bool
-sh_legitimize_reload_address (rtx *p, enum machine_mode mode, int opnum,
+sh_legitimize_reload_address (rtx *p, machine_mode mode, int opnum,
int itype)
{
enum reload_type type = (enum reload_type) itype;
/* Get weight for mode for a set x. */
static short
-find_set_regmode_weight (rtx x, enum machine_mode mode)
+find_set_regmode_weight (rtx x, machine_mode mode)
{
if (GET_CODE (x) == CLOBBER && register_operand (SET_DEST (x), mode))
return 1;
/* Get regmode weight for insn. */
static short
-find_insn_regmode_weight (rtx insn, enum machine_mode mode)
+find_insn_regmode_weight (rtx insn, machine_mode mode)
{
short reg_weight = 0;
rtx x;
/* Calculate regmode weights for all insns of a basic block. */
static void
-find_regmode_weight (basic_block b, enum machine_mode mode)
+find_regmode_weight (basic_block b, machine_mode mode)
{
rtx_insn *insn, *next_tail, *head, *tail;
/* The scalar modes supported differs from the default version in TImode
for 32-bit SHMEDIA. */
static bool
-sh_scalar_mode_supported_p (enum machine_mode mode)
+sh_scalar_mode_supported_p (machine_mode mode)
{
if (TARGET_SHMEDIA32 && mode == TImode)
return false;
/* Return true if the pressure is high for MODE. */
static bool
-high_pressure (enum machine_mode mode)
+high_pressure (machine_mode mode)
{
/* Pressure on register r0 can lead to spill failures. so avoid sched1 for
functions that already have high pressure on r0. */
/* Implements target hook vector_mode_supported_p. */
bool
-sh_vector_mode_supported_p (enum machine_mode mode)
+sh_vector_mode_supported_p (machine_mode mode)
{
if (TARGET_FPU_ANY
&& ((mode == V2SFmode)
IGNORE is nonzero if the value is to be ignored. */
static rtx
sh_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED, int ignore)
+ machine_mode mode ATTRIBUTE_UNUSED, int ignore)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
if (ignore)
return NULL_RTX;
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (! target || GET_MODE (target) != tmode
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
target = gen_reg_rtx (tmode);
for (int i = 1; i <= 3; i++, nop++)
{
tree arg;
- enum machine_mode opmode, argmode;
+ machine_mode opmode, argmode;
tree optype;
if (! signature_args[signature][i])
We want to allow TImode FP regs so that when V4SFmode is loaded as TImode,
it won't be ferried through GP registers first. */
bool
-sh_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
+sh_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
if (SPECIAL_REGISTER_P (regno))
return mode == SImode;
/* Return the class of registers for which a mode change from FROM to TO
is invalid. */
bool
-sh_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
+sh_cannot_change_mode_class (machine_mode from, machine_mode to,
enum reg_class rclass)
{
/* We want to enable the use of SUBREGs as a means to
/* Return true if registers in machine mode MODE will likely be
allocated to registers in small register classes. */
bool
-sh_small_register_classes_for_mode_p (enum machine_mode mode ATTRIBUTE_UNUSED)
+sh_small_register_classes_for_mode_p (machine_mode mode ATTRIBUTE_UNUSED)
{
return (! TARGET_SHMEDIA);
}
uses this information. Hence, the general register <-> floating point
register information here is not used for SFmode. */
static int
-sh_register_move_cost (enum machine_mode mode,
+sh_register_move_cost (machine_mode mode,
reg_class_t srcclass, reg_class_t dstclass)
{
if (dstclass == T_REGS || dstclass == PR_REGS)
rtx libname ATTRIBUTE_UNUSED,
tree fndecl,
signed int n_named_args,
- enum machine_mode mode)
+ machine_mode mode)
{
pcum->arg_count [(int) SH_ARG_FLOAT] = 0;
pcum->free_single_fp_reg = 0;
}
rtx
-sh_gen_truncate (enum machine_mode mode, rtx x, int need_sign_ext)
+sh_gen_truncate (machine_mode mode, rtx x, int need_sign_ext)
{
enum rtx_code code = TRUNCATE;
if (GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
{
rtx inner = XEXP (x, 0);
- enum machine_mode inner_mode = GET_MODE (inner);
+ machine_mode inner_mode = GET_MODE (inner);
if (inner_mode == mode)
return inner;
if (GET_CODE (x) == TRUNCATE)
{
rtx reg = XEXP (x, 0);
- enum machine_mode reg_mode = GET_MODE (reg);
+ machine_mode reg_mode = GET_MODE (reg);
if (REG_P (reg) && GET_MODE_SIZE (reg_mode) > 8)
{
int offset = subreg_lowpart_offset (DImode, reg_mode);
/* Implement TARGET_SECONDARY_RELOAD. */
static reg_class_t
sh_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
enum reg_class rclass = (enum reg_class) rclass_i;
can_store_by_pieces constructs VOIDmode CONST_DOUBLEs. */
static bool
-sh_legitimate_constant_p (enum machine_mode mode, rtx x)
+sh_legitimate_constant_p (machine_mode mode, rtx x)
{
return (TARGET_SHMEDIA
? ((mode != DFmode && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
[(set (match_dup 5) (match_dup 4))
(set (match_dup 0) (sign_extend:DI (match_dup 5)))]
{
- enum machine_mode inmode = GET_MODE (operands[1]);
+ machine_mode inmode = GET_MODE (operands[1]);
int offset = 0;
if (GET_CODE (operands[0]) == SUBREG)
(pc)))]
"TARGET_SHMEDIA"
{
- enum machine_mode mode = GET_MODE (operands[1]);
+ machine_mode mode = GET_MODE (operands[1]);
if (mode == VOIDmode)
mode = GET_MODE (operands[2]);
if (GET_CODE (operands[0]) == EQ || GET_CODE (operands[0]) == NE)
(match_operand 3 "cmp_operand" "")]))]
"TARGET_SHMEDIA"
{
- enum machine_mode mode = GET_MODE (operands[2]);
+ machine_mode mode = GET_MODE (operands[2]);
enum rtx_code code = GET_CODE (operands[1]);
bool invert, swap;
if (mode == VOIDmode)
[(set (match_dup 0) (match_dup 1))]
{
rtx v = operands[1];
- enum machine_mode new_mode
+ machine_mode new_mode
= mode_for_size (GET_MODE_BITSIZE (GET_MODE (v)), MODE_INT, 0);
operands[0] = gen_rtx_REG (new_mode, true_regnum (operands[0]));
[(set (match_dup 0) (match_dup 3))]
{
rtx count = operands[2];
- enum machine_mode outer_mode = GET_MODE (operands[2]), inner_mode;
+ machine_mode outer_mode = GET_MODE (operands[2]), inner_mode;
while (GET_CODE (count) == ZERO_EXTEND || GET_CODE (count) == SIGN_EXTEND
|| (GET_CODE (count) == SUBREG && SUBREG_BYTE (count) == 0)
(define_predicate "symbolic_operand"
(match_code "symbol_ref,label_ref,const")
{
- enum machine_mode omode = GET_MODE (op);
+ machine_mode omode = GET_MODE (op);
if (omode != mode && omode != VOIDmode && mode != VOIDmode)
return false;
extern unsigned long sparc_type_code (tree);
#ifdef ARGS_SIZE_RTX
/* expr.h defines ARGS_SIZE_RTX and `enum direction' */
-extern enum direction function_arg_padding (enum machine_mode, const_tree);
+extern enum direction function_arg_padding (machine_mode, const_tree);
#endif /* ARGS_SIZE_RTX */
#endif /* TREE_CODE */
extern void sparc_target_macros (void);
#ifdef RTX_CODE
-extern enum machine_mode select_cc_mode (enum rtx_code, rtx, rtx);
+extern machine_mode select_cc_mode (enum rtx_code, rtx, rtx);
/* Define the function that build the compare insn for scc and bcc. */
extern rtx gen_compare_reg (rtx cmp);
extern rtx sparc_emit_float_lib_cmp (rtx, rtx, enum rtx_code);
-extern void sparc_emit_floatunsdi (rtx [2], enum machine_mode);
-extern void sparc_emit_fixunsdi (rtx [2], enum machine_mode);
+extern void sparc_emit_floatunsdi (rtx [2], machine_mode);
+extern void sparc_emit_fixunsdi (rtx [2], machine_mode);
extern void emit_tfmode_binop (enum rtx_code, rtx *);
extern void emit_tfmode_unop (enum rtx_code, rtx *);
extern void emit_tfmode_cvt (enum rtx_code, rtx *);
extern bool constant_address_p (rtx);
extern bool legitimate_pic_operand_p (rtx);
-extern rtx sparc_legitimize_reload_address (rtx, enum machine_mode, int, int,
+extern rtx sparc_legitimize_reload_address (rtx, machine_mode, int, int,
int, int *win);
extern void load_got_register (void);
extern void sparc_emit_call_insn (rtx, rtx);
extern void sparc_defer_case_vector (rtx, rtx, int);
-extern bool sparc_expand_move (enum machine_mode, rtx *);
+extern bool sparc_expand_move (machine_mode, rtx *);
extern void sparc_emit_set_symbolic_const64 (rtx, rtx, rtx);
extern int sparc_splitdi_legitimate (rtx, rtx);
extern int sparc_split_regreg_legitimate (rtx, rtx);
extern void emit_conditional_branch_insn (rtx []);
extern int registers_ok_for_ldd_peep (rtx, rtx);
extern int mems_ok_for_ldd_peep (rtx, rtx, rtx);
-extern rtx widen_mem_for_ldd_peep (rtx, rtx, enum machine_mode);
+extern rtx widen_mem_for_ldd_peep (rtx, rtx, machine_mode);
extern int empty_delay_slot (rtx_insn *);
extern int emit_cbcond_nop (rtx);
extern int eligible_for_call_delay (rtx_insn *);
extern int eligible_for_return_delay (rtx_insn *);
extern int eligible_for_sibcall_delay (rtx_insn *);
-extern int emit_move_sequence (rtx, enum machine_mode);
+extern int emit_move_sequence (rtx, machine_mode);
extern int fp_sethi_p (rtx);
extern int fp_mov_p (rtx);
extern int fp_high_losum_p (rtx);
extern rtx gen_df_reg (rtx, int);
extern void sparc_expand_compare_and_swap (rtx op[]);
extern void sparc_expand_vector_init (rtx, rtx);
-extern void sparc_expand_vec_perm_bmask(enum machine_mode, rtx);
-extern bool sparc_expand_conditional_move (enum machine_mode, rtx *);
-extern void sparc_expand_vcond (enum machine_mode, rtx *, int, int);
-unsigned int sparc_regmode_natural_size (enum machine_mode);
-bool sparc_modes_tieable_p (enum machine_mode, enum machine_mode);
+extern void sparc_expand_vec_perm_bmask(machine_mode, rtx);
+extern bool sparc_expand_conditional_move (machine_mode, rtx *);
+extern void sparc_expand_vcond (machine_mode, rtx *, int, int);
+unsigned int sparc_regmode_natural_size (machine_mode);
+bool sparc_modes_tieable_p (machine_mode, machine_mode);
#endif /* RTX_CODE */
extern void sparc_emit_membar_for_model (enum memmodel, int, int);
static void sparc_option_override (void);
static void sparc_init_modes (void);
static void scan_record_type (const_tree, int *, int *, int *);
-static int function_arg_slotno (const CUMULATIVE_ARGS *, enum machine_mode,
+static int function_arg_slotno (const CUMULATIVE_ARGS *, machine_mode,
const_tree, bool, bool, int *, int *);
static int supersparc_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
static void sparc_output_addr_vec (rtx);
static void sparc_output_addr_diff_vec (rtx);
static void sparc_output_deferred_case_vectors (void);
-static bool sparc_legitimate_address_p (enum machine_mode, rtx, bool);
-static bool sparc_legitimate_constant_p (enum machine_mode, rtx);
+static bool sparc_legitimate_address_p (machine_mode, rtx, bool);
+static bool sparc_legitimate_constant_p (machine_mode, rtx);
static rtx sparc_builtin_saveregs (void);
static int epilogue_renumber (rtx *, int);
static bool sparc_assemble_integer (rtx, unsigned int, int);
static void sparc_fpu_init_builtins (void);
static void sparc_vis_init_builtins (void);
static tree sparc_builtin_decl (unsigned, bool);
-static rtx sparc_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx sparc_expand_builtin (tree, rtx, rtx, machine_mode, int);
static tree sparc_fold_builtin (tree, int, tree *, bool);
static void sparc_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
HOST_WIDE_INT, tree);
static bool sparc_can_output_mi_thunk (const_tree, HOST_WIDE_INT,
HOST_WIDE_INT, const_tree);
static struct machine_function * sparc_init_machine_status (void);
-static bool sparc_cannot_force_const_mem (enum machine_mode, rtx);
+static bool sparc_cannot_force_const_mem (machine_mode, rtx);
static rtx sparc_tls_get_addr (void);
static rtx sparc_tls_got (void);
-static int sparc_register_move_cost (enum machine_mode,
+static int sparc_register_move_cost (machine_mode,
reg_class_t, reg_class_t);
static bool sparc_rtx_costs (rtx, int, int, int, int *, bool);
static rtx sparc_function_value (const_tree, const_tree, bool);
-static rtx sparc_libcall_value (enum machine_mode, const_rtx);
+static rtx sparc_libcall_value (machine_mode, const_rtx);
static bool sparc_function_value_regno_p (const unsigned int);
static rtx sparc_struct_value_rtx (tree, int);
-static enum machine_mode sparc_promote_function_mode (const_tree, enum machine_mode,
+static machine_mode sparc_promote_function_mode (const_tree, machine_mode,
int *, const_tree, int);
static bool sparc_return_in_memory (const_tree, const_tree);
static bool sparc_strict_argument_naming (cumulative_args_t);
static void sparc_va_start (tree, rtx);
static tree sparc_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
-static bool sparc_vector_mode_supported_p (enum machine_mode);
+static bool sparc_vector_mode_supported_p (machine_mode);
static bool sparc_tls_referenced_p (rtx);
static rtx sparc_legitimize_tls_address (rtx);
static rtx sparc_legitimize_pic_address (rtx, rtx);
-static rtx sparc_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx sparc_legitimize_address (rtx, rtx, machine_mode);
static rtx sparc_delegitimize_address (rtx);
static bool sparc_mode_dependent_address_p (const_rtx, addr_space_t);
static bool sparc_pass_by_reference (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static void sparc_function_arg_advance (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static rtx sparc_function_arg_1 (cumulative_args_t,
- enum machine_mode, const_tree, bool, bool);
+ machine_mode, const_tree, bool, bool);
static rtx sparc_function_arg (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static rtx sparc_function_incoming_arg (cumulative_args_t,
- enum machine_mode, const_tree, bool);
-static unsigned int sparc_function_arg_boundary (enum machine_mode,
+ machine_mode, const_tree, bool);
+static unsigned int sparc_function_arg_boundary (machine_mode,
const_tree);
static int sparc_arg_partial_bytes (cumulative_args_t,
- enum machine_mode, tree, bool);
+ machine_mode, tree, bool);
static void sparc_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static void sparc_file_end (void);
static bool sparc_frame_pointer_required (void);
static const char *sparc_mangle_type (const_tree);
#endif
static void sparc_trampoline_init (rtx, tree, rtx);
-static enum machine_mode sparc_preferred_simd_mode (enum machine_mode);
+static machine_mode sparc_preferred_simd_mode (machine_mode);
static reg_class_t sparc_preferred_reload_class (rtx x, reg_class_t rclass);
static bool sparc_print_operand_punct_valid_p (unsigned char);
static void sparc_print_operand (FILE *, rtx, int);
static void sparc_print_operand_address (FILE *, rtx);
static reg_class_t sparc_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
secondary_reload_info *);
-static enum machine_mode sparc_cstore_mode (enum insn_code icode);
+static machine_mode sparc_cstore_mode (enum insn_code icode);
static void sparc_atomic_assign_expand_fenv (tree *, tree *, tree *);
\f
#ifdef SUBTARGET_ATTRIBUTE_TABLE
/* Expand a move instruction. Return true if all work is done. */
bool
-sparc_expand_move (enum machine_mode mode, rtx *operands)
+sparc_expand_move (machine_mode mode, rtx *operands)
{
/* Handle sets of MEM first. */
if (GET_CODE (operands[0]) == MEM)
static void
sparc_emit_set_const32 (rtx op0, rtx op1)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
rtx temp = op0;
if (can_create_pseudo_p ())
is a PLUS, MINUS, NEG, or ASHIFT. CCmode should be used when no special
processing is needed. */
-enum machine_mode
+machine_mode
select_cc_mode (enum rtx_code op, rtx x, rtx y ATTRIBUTE_UNUSED)
{
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
static rtx
gen_compare_reg_1 (enum rtx_code code, rtx x, rtx y)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx cc_reg;
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
not constant (TLS) or not known at final link time (PIC). */
static bool
-sparc_cannot_force_const_mem (enum machine_mode mode, rtx x)
+sparc_cannot_force_const_mem (machine_mode mode, rtx x)
{
switch (GET_CODE (x))
{
satisfies CONSTANT_P. */
static bool
-sparc_legitimate_constant_p (enum machine_mode mode, rtx x)
+sparc_legitimate_constant_p (machine_mode mode, rtx x)
{
switch (GET_CODE (x))
{
ordinarily. This changes a bit when generating PIC. */
static bool
-sparc_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
+sparc_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
{
rtx rs1 = NULL, rs2 = NULL, imm1 = NULL;
static rtx
sparc_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx orig_x = x;
operand or requires a scratch register. */
rtx
-sparc_legitimize_reload_address (rtx x, enum machine_mode mode,
+sparc_legitimize_reload_address (rtx x, machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED, int *win)
{
for (i = 0; i < NUM_MACHINE_MODES; i++)
{
- enum machine_mode m = (enum machine_mode) i;
+ machine_mode m = (machine_mode) i;
unsigned int size = GET_MODE_SIZE (m);
switch (GET_MODE_CLASS (m))
{
bool reg0 = save_p (i, leaf_function);
bool reg1 = save_p (i + 1, leaf_function);
- enum machine_mode mode;
+ machine_mode mode;
int regno;
if (reg0 && reg1)
/* Handle promotion of pointer and integer arguments. */
-static enum machine_mode
+static machine_mode
sparc_promote_function_mode (const_tree type,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp,
const_tree fntype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
*PPADDING records the amount of padding needed in words. */
static int
-function_arg_slotno (const struct sparc_args *cum, enum machine_mode mode,
+function_arg_slotno (const struct sparc_args *cum, machine_mode mode,
const_tree type, bool named, bool incoming_p,
int *pregno, int *ppadding)
{
(const_tree, HOST_WIDE_INT, struct function_arg_record_value_parms *, bool);
static void function_arg_record_value_1
(const_tree, HOST_WIDE_INT, struct function_arg_record_value_parms *, bool);
-static rtx function_arg_record_value (const_tree, enum machine_mode, int, int, int);
-static rtx function_arg_union_value (int, enum machine_mode, int, int);
+static rtx function_arg_record_value (const_tree, machine_mode, int, int, int);
+static rtx function_arg_union_value (int, machine_mode, int, int);
/* A subroutine of function_arg_record_value. Traverse the structure
recursively and determine how many registers will be required. */
function_arg_record_value_3 (HOST_WIDE_INT bitpos,
struct function_arg_record_value_parms *parms)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned int regno;
unsigned int startbit, endbit;
int this_slotno, intslots, intoffset;
{
int this_slotno = parms->slotno + bitpos / BITS_PER_WORD;
int regno, nregs, pos;
- enum machine_mode mode = DECL_MODE (field);
+ machine_mode mode = DECL_MODE (field);
rtx reg;
function_arg_record_value_3 (bitpos, parms);
REGBASE is the regno of the base register for the parameter array. */
static rtx
-function_arg_record_value (const_tree type, enum machine_mode mode,
+function_arg_record_value (const_tree type, machine_mode mode,
int slotno, int named, int regbase)
{
HOST_WIDE_INT typesize = int_size_in_bytes (type);
REGNO is the hard register the union will be passed in. */
static rtx
-function_arg_union_value (int size, enum machine_mode mode, int slotno,
+function_arg_union_value (int size, machine_mode mode, int slotno,
int regno)
{
int nwords = ROUND_ADVANCE (size), i;
TARGET_FUNCTION_INCOMING_ARG. */
static rtx
-sparc_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
+sparc_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named, bool incoming_p)
{
const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Handle the TARGET_FUNCTION_ARG target hook. */
static rtx
-sparc_function_arg (cumulative_args_t cum, enum machine_mode mode,
+sparc_function_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
return sparc_function_arg_1 (cum, mode, type, named, false);
/* Handle the TARGET_FUNCTION_INCOMING_ARG target hook. */
static rtx
-sparc_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
+sparc_function_incoming_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named)
{
return sparc_function_arg_1 (cum, mode, type, named, true);
/* For sparc64, objects requiring 16 byte alignment are passed that way. */
static unsigned int
-sparc_function_arg_boundary (enum machine_mode mode, const_tree type)
+sparc_function_arg_boundary (machine_mode mode, const_tree type)
{
return ((TARGET_ARCH64
&& (GET_MODE_ALIGNMENT (mode) == 128
mode] will be split between that reg and memory. */
static int
-sparc_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
+sparc_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
tree type, bool named)
{
int slotno, regno, padding;
static bool
sparc_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
if (TARGET_ARCH32)
TYPE is null for libcalls where that information may not be available. */
static void
-sparc_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+sparc_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
argument slot. */
enum direction
-function_arg_padding (enum machine_mode mode, const_tree type)
+function_arg_padding (machine_mode mode, const_tree type)
{
if (TARGET_ARCH64 && type != 0 && AGGREGATE_TYPE_P (type))
return upward;
except that up to 32 bytes may be returned in registers. */
static rtx
-sparc_function_value_1 (const_tree type, enum machine_mode mode,
+sparc_function_value_1 (const_tree type, machine_mode mode,
bool outgoing)
{
/* Beware that the two values are swapped here wrt function_arg. */
/* Handle TARGET_LIBCALL_VALUE. */
static rtx
-sparc_libcall_value (enum machine_mode mode,
+sparc_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return sparc_function_value_1 (NULL_TREE, mode, false);
Specify whether the vector mode is supported by the hardware. */
static bool
-sparc_vector_mode_supported_p (enum machine_mode mode)
+sparc_vector_mode_supported_p (machine_mode mode)
{
return TARGET_VIS && VECTOR_MODE_P (mode) ? true : false;
}
\f
/* Implement the TARGET_VECTORIZE_PREFERRED_SIMD_MODE target hook. */
-static enum machine_mode
-sparc_preferred_simd_mode (enum machine_mode mode)
+static machine_mode
+sparc_preferred_simd_mode (machine_mode mode)
{
if (TARGET_VIS)
switch (mode)
static char string[64];
enum rtx_code code = GET_CODE (op);
rtx cc_reg = XEXP (op, 0);
- enum machine_mode mode = GET_MODE (cc_reg);
+ machine_mode mode = GET_MODE (cc_reg);
const char *labelno, *branch;
int spaces = 8, far;
char *p;
{
const char *qpfunc;
rtx slot0, slot1, result, tem, tem2, libfunc;
- enum machine_mode mode;
+ machine_mode mode;
enum rtx_code new_comparison;
switch (comparison)
optabs would emit if we didn't have TFmode patterns. */
void
-sparc_emit_floatunsdi (rtx *operands, enum machine_mode mode)
+sparc_emit_floatunsdi (rtx *operands, machine_mode mode)
{
rtx i0, i1, f0, in, out;
optabs would emit if we didn't have TFmode patterns. */
void
-sparc_emit_fixunsdi (rtx *operands, enum machine_mode mode)
+sparc_emit_fixunsdi (rtx *operands, machine_mode mode)
{
rtx i0, i1, f0, in, out, limit;
const char *
output_cbcond (rtx op, rtx dest, rtx_insn *insn)
{
- enum machine_mode mode = GET_MODE (XEXP (op, 0));
+ machine_mode mode = GET_MODE (XEXP (op, 0));
enum rtx_code code = GET_CODE (op);
const char *cond_str, *tmpl;
int far, emit_nop, len;
{
static char string[64];
enum rtx_code code = GET_CODE (op);
- enum machine_mode mode = GET_MODE (XEXP (op, 0));
+ machine_mode mode = GET_MODE (XEXP (op, 0));
rtx note;
int far;
char *p;
/* Return the widened memory access made of MEM1 and MEM2 in MODE. */
rtx
-widen_mem_for_ldd_peep (rtx mem1, rtx mem2, enum machine_mode mode)
+widen_mem_for_ldd_peep (rtx mem1, rtx mem2, machine_mode mode)
{
rtx x = widen_memory_access (mem1, mode, 0);
MEM_NOTRAP_P (x) = MEM_NOTRAP_P (mem1) && MEM_NOTRAP_P (mem2);
static rtx
sparc_expand_builtin (tree exp, rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode tmode ATTRIBUTE_UNUSED,
+ machine_mode tmode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
if (nonvoid)
{
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (!target
|| GET_MODE (target) != tmode
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
sparc_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
int *total, bool speed ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
bool float_mode_p = FLOAT_MODE_P (mode);
switch (code)
/* Implement TARGET_REGISTER_MOVE_COST. */
static int
-sparc_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+sparc_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
bool need_memory = false;
sparc_expand_compare_and_swap (rtx operands[])
{
rtx bval, retval, mem, oldval, newval;
- enum machine_mode mode;
+ machine_mode mode;
enum memmodel model;
bval = operands[0];
}
void
-sparc_expand_vec_perm_bmask (enum machine_mode vmode, rtx sel)
+sparc_expand_vec_perm_bmask (machine_mode vmode, rtx sel)
{
rtx t_1, t_2, t_3;
static reg_class_t
sparc_preferred_reload_class (rtx x, reg_class_t rclass)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (CONSTANT_P (x))
{
if (FP_REG_CLASS_P (rclass)
and INNER_MODE are the modes describing TARGET. */
static void
-vector_init_bshuffle (rtx target, rtx elt, enum machine_mode mode,
- enum machine_mode inner_mode)
+vector_init_bshuffle (rtx target, rtx elt, machine_mode mode,
+ machine_mode inner_mode)
{
rtx t1, final_insn, sel;
int bmask;
void
sparc_expand_vector_init (rtx target, rtx vals)
{
- const enum machine_mode mode = GET_MODE (target);
- const enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ const machine_mode mode = GET_MODE (target);
+ const machine_mode inner_mode = GET_MODE_INNER (mode);
const int n_elts = GET_MODE_NUNITS (mode);
int i, n_var = 0;
bool all_same;
static reg_class_t
sparc_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
enum reg_class rclass = (enum reg_class) rclass_i;
OPERANDS[0] in MODE. OPERANDS[1] is the operator of the condition. */
bool
-sparc_expand_conditional_move (enum machine_mode mode, rtx *operands)
+sparc_expand_conditional_move (machine_mode mode, rtx *operands)
{
enum rtx_code rc = GET_CODE (operands[1]);
- enum machine_mode cmp_mode;
+ machine_mode cmp_mode;
rtx cc_reg, dst, cmp;
cmp = operands[1];
code to be used for the condition mask. */
void
-sparc_expand_vcond (enum machine_mode mode, rtx *operands, int ccode, int fcode)
+sparc_expand_vcond (machine_mode mode, rtx *operands, int ccode, int fcode)
{
rtx mask, cop0, cop1, fcmp, cmask, bshuf, gsr;
enum rtx_code code = GET_CODE (operands[3]);
registers should return 4 here. */
unsigned int
-sparc_regmode_natural_size (enum machine_mode mode)
+sparc_regmode_natural_size (machine_mode mode)
{
int size = UNITS_PER_WORD;
point registers are 32-bit addressable. */
bool
-sparc_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
+sparc_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
enum mode_class mclass1, mclass2;
unsigned short size1, size2;
/* Implement TARGET_CSTORE_MODE. */
-static enum machine_mode
+static machine_mode
sparc_cstore_mode (enum insn_code icode ATTRIBUTE_UNUSED)
{
return (TARGET_ARCH64 ? DImode : SImode);
#if HOST_BITS_PER_WIDE_INT == 32
gcc_unreachable ();
#else
- enum machine_mode mode = GET_MODE (operands[1]);
+ machine_mode mode = GET_MODE (operands[1]);
rtx tem = simplify_subreg (DImode, operands[1], mode, 0);
emit_insn (gen_movdi (operands[0], tem));
#endif
}
else
{
- enum machine_mode mode = GET_MODE (operands[1]);
+ machine_mode mode = GET_MODE (operands[1]);
rtx hi = simplify_subreg (SImode, operands[1], mode, 0);
rtx lo = simplify_subreg (SImode, operands[1], mode, 4);
extern rtx spu_return_addr (int count, rtx frame);
#ifdef RTX_CODE
-extern rtx hwint_to_const_double (enum machine_mode mode, HOST_WIDE_INT v);
-extern rtx spu_const (enum machine_mode mode, HOST_WIDE_INT val);
-extern rtx spu_const_from_ints (enum machine_mode mode,
+extern rtx hwint_to_const_double (machine_mode mode, HOST_WIDE_INT v);
+extern rtx spu_const (machine_mode mode, HOST_WIDE_INT val);
+extern rtx spu_const_from_ints (machine_mode mode,
int a, int b, int c, int d);
extern rtx spu_float_const (const char *string,
- enum machine_mode mode);
-extern int immediate_load_p (rtx op, enum machine_mode mode);
-extern int logical_immediate_p (rtx op, enum machine_mode mode);
-extern int iohl_immediate_p (rtx op, enum machine_mode mode);
-extern int arith_immediate_p (rtx op, enum machine_mode mode,
+ machine_mode mode);
+extern int immediate_load_p (rtx op, machine_mode mode);
+extern int logical_immediate_p (rtx op, machine_mode mode);
+extern int iohl_immediate_p (rtx op, machine_mode mode);
+extern int arith_immediate_p (rtx op, machine_mode mode,
HOST_WIDE_INT low, HOST_WIDE_INT high);
-extern bool exp2_immediate_p (rtx op, enum machine_mode mode, int low,
+extern bool exp2_immediate_p (rtx op, machine_mode mode, int low,
int high);
extern int spu_constant_address_p (rtx x);
-extern bool spu_legitimate_constant_p (enum machine_mode, rtx);
+extern bool spu_legitimate_constant_p (machine_mode, rtx);
extern int spu_initial_elimination_offset (int from, int to);
extern rtx spu_function_value (const_tree type, const_tree func);
-extern int spu_expand_mov (rtx * ops, enum machine_mode mode);
+extern int spu_expand_mov (rtx * ops, machine_mode mode);
extern int spu_split_load (rtx * ops);
extern int spu_split_store (rtx * ops);
extern int fsmbi_const_p (rtx x);
-extern int cpat_const_p (rtx x, enum machine_mode mode);
+extern int cpat_const_p (rtx x, machine_mode mode);
extern rtx gen_cpat_const (rtx * ops);
-extern void constant_to_array (enum machine_mode mode, rtx x,
+extern void constant_to_array (machine_mode mode, rtx x,
unsigned char *arr);
-extern rtx array_to_constant (enum machine_mode mode, const unsigned char *arr);
-extern rtx spu_gen_exp2 (enum machine_mode mode, rtx x);
+extern rtx array_to_constant (machine_mode mode, const unsigned char *arr);
+extern rtx spu_gen_exp2 (machine_mode mode, rtx x);
extern void spu_allocate_stack (rtx op0, rtx op1);
extern void spu_restore_stack_nonlocal (rtx op0, rtx op1);
extern void spu_restore_stack_block (rtx op0, rtx op1);
-extern rtx spu_gen_subreg (enum machine_mode mode, rtx x);
+extern rtx spu_gen_subreg (machine_mode mode, rtx x);
extern int spu_safe_dma(HOST_WIDE_INT channel);
extern void spu_builtin_splats (rtx ops[]);
extern void spu_builtin_extract (rtx ops[]);
extern void spu_builtin_promote (rtx ops[]);
extern void spu_expand_sign_extend (rtx ops[]);
extern void spu_expand_vector_init (rtx target, rtx vals);
-extern rtx spu_legitimize_reload_address (rtx, enum machine_mode, int, int);
+extern rtx spu_legitimize_reload_address (rtx, machine_mode, int, int);
#endif /* RTX_CODE */
extern void spu_init_expanders (void);
extern tree spu_resolve_overloaded_builtin (location_t, tree fndecl,
void *fnargs);
extern rtx spu_expand_builtin (tree exp, rtx target, rtx subtarget,
- enum machine_mode mode, int ignore);
-extern rtx spu_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+ machine_mode mode, int ignore);
+extern rtx spu_expand_builtin (tree, rtx, rtx, machine_mode, int);
#endif /* _SPU_PROTOS_ */
static enum spu_immediate which_logical_immediate (HOST_WIDE_INT val);
static int cpat_info(unsigned char *arr, int size, int *prun, int *pstart);
static enum immediate_class classify_immediate (rtx op,
- enum machine_mode mode);
+ machine_mode mode);
/* Pointer mode for __ea references. */
#define EAmode (spu_ea_model != 32 ? DImode : SImode)
be manipulated in non-trivial ways. In particular, this means all
the arithmetic is supported. */
static bool
-spu_scalar_mode_supported_p (enum machine_mode mode)
+spu_scalar_mode_supported_p (machine_mode mode)
{
switch (mode)
{
least some operations are supported; need to check optabs or builtins
for further details. */
static bool
-spu_vector_mode_supported_p (enum machine_mode mode)
+spu_vector_mode_supported_p (machine_mode mode)
{
switch (mode)
{
int
valid_subreg (rtx op)
{
- enum machine_mode om = GET_MODE (op);
- enum machine_mode im = GET_MODE (SUBREG_REG (op));
+ machine_mode om = GET_MODE (op);
+ machine_mode im = GET_MODE (SUBREG_REG (op));
return om != VOIDmode && im != VOIDmode
&& (GET_MODE_SIZE (im) == GET_MODE_SIZE (om)
|| (GET_MODE_SIZE (im) <= 4 && GET_MODE_SIZE (om) <= 4)
static rtx
adjust_operand (rtx op, HOST_WIDE_INT * start)
{
- enum machine_mode mode;
+ machine_mode mode;
int op_size;
/* Strip any paradoxical SUBREG. */
if (GET_CODE (op) == SUBREG
HOST_WIDE_INT width = INTVAL (ops[1]);
HOST_WIDE_INT start = INTVAL (ops[2]);
HOST_WIDE_INT maskbits;
- enum machine_mode dst_mode;
+ machine_mode dst_mode;
rtx dst = ops[0], src = ops[3];
int dst_size;
rtx mask;
if (CONSTANT_P (src))
{
- enum machine_mode m =
+ machine_mode m =
(width <= 32 ? SImode : width <= 64 ? DImode : TImode);
src = force_reg (m, convert_to_mode (m, src, 0));
}
int reverse_test = 0;
rtx compare_result, eq_result;
rtx comp_rtx, eq_rtx;
- enum machine_mode comp_mode;
- enum machine_mode op_mode;
+ machine_mode comp_mode;
+ machine_mode op_mode;
enum spu_comp_code scode, eq_code;
enum insn_code ior_code;
enum rtx_code code = GET_CODE (cmp);
rtx target = operands[0];
int compare_size = GET_MODE_BITSIZE (comp_mode);
int target_size = GET_MODE_BITSIZE (GET_MODE (target));
- enum machine_mode mode = mode_for_size (target_size, MODE_INT, 0);
+ machine_mode mode = mode_for_size (target_size, MODE_INT, 0);
rtx select_mask;
rtx op_t = operands[2];
rtx op_f = operands[3];
}
rtx
-hwint_to_const_double (enum machine_mode mode, HOST_WIDE_INT v)
+hwint_to_const_double (machine_mode mode, HOST_WIDE_INT v)
{
long tv[2];
REAL_VALUE_TYPE rv;
void
print_operand (FILE * file, rtx x, int code)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
HOST_WIDE_INT val;
unsigned char arr[16];
int xcode = GET_CODE (x);
int
spu_split_immediate (rtx * ops)
{
- enum machine_mode mode = GET_MODE (ops[0]);
+ machine_mode mode = GET_MODE (ops[0]);
enum immediate_class c = classify_immediate (ops[1], mode);
switch (c)
unsigned char arrlo[16];
rtx to, temp, hi, lo;
int i;
- enum machine_mode imode = mode;
+ machine_mode imode = mode;
/* We need to do reals as ints because the constant used in the
IOR might not be a legitimate real constant. */
imode = int_mode_for_mode (mode);
unsigned char arr_andbi[16];
rtx to, reg_fsmbi, reg_and;
int i;
- enum machine_mode imode = mode;
+ machine_mode imode = mode;
/* We need to do reals as ints because the constant used in the
* AND might not be a legitimate real constant. */
imode = int_mode_for_mode (mode);
If MODE is a vector mode, every element will be VAL.
For TImode, VAL will be zero extended to 128 bits. */
rtx
-spu_const (enum machine_mode mode, HOST_WIDE_INT val)
+spu_const (machine_mode mode, HOST_WIDE_INT val)
{
rtx inner;
rtvec v;
/* Create a MODE vector constant from 4 ints. */
rtx
-spu_const_from_ints(enum machine_mode mode, int a, int b, int c, int d)
+spu_const_from_ints(machine_mode mode, int a, int b, int c, int d)
{
unsigned char arr[16];
arr[0] = (a >> 24) & 0xff;
\f
/* Create a CONST_DOUBLE from a string. */
rtx
-spu_float_const (const char *string, enum machine_mode mode)
+spu_float_const (const char *string, machine_mode mode)
{
REAL_VALUE_TYPE value;
value = REAL_VALUE_ATOF (string, mode);
/* Return true when OP can be loaded by one of the il instructions, or
when flow2 is not completed and OP can be loaded using ilhu and iohl. */
int
-immediate_load_p (rtx op, enum machine_mode mode)
+immediate_load_p (rtx op, machine_mode mode)
{
if (CONSTANT_P (op))
{
/* OP is a CONSTANT_P. Determine what instructions can be used to load
it into a register. MODE is only valid when OP is a CONST_INT. */
static enum immediate_class
-classify_immediate (rtx op, enum machine_mode mode)
+classify_immediate (rtx op, machine_mode mode)
{
HOST_WIDE_INT val;
unsigned char arr[16];
}
int
-logical_immediate_p (rtx op, enum machine_mode mode)
+logical_immediate_p (rtx op, machine_mode mode)
{
HOST_WIDE_INT val;
unsigned char arr[16];
}
int
-iohl_immediate_p (rtx op, enum machine_mode mode)
+iohl_immediate_p (rtx op, machine_mode mode)
{
HOST_WIDE_INT val;
unsigned char arr[16];
}
int
-arith_immediate_p (rtx op, enum machine_mode mode,
+arith_immediate_p (rtx op, machine_mode mode,
HOST_WIDE_INT low, HOST_WIDE_INT high)
{
HOST_WIDE_INT val;
OP is 2^scale, scale >= LOW && scale <= HIGH. When OP is a vector,
all entries must be the same. */
bool
-exp2_immediate_p (rtx op, enum machine_mode mode, int low, int high)
+exp2_immediate_p (rtx op, machine_mode mode, int low, int high)
{
- enum machine_mode int_mode;
+ machine_mode int_mode;
HOST_WIDE_INT val;
unsigned char arr[16];
int bytes, i, j;
(DImode, DFmode)
- a 128-bit constant where the four 32-bit words match. */
bool
-spu_legitimate_constant_p (enum machine_mode mode, rtx x)
+spu_legitimate_constant_p (machine_mode mode, rtx x)
{
subrtx_iterator::array_type array;
if (GET_CODE (x) == HIGH)
16 byte modes because the expand phase will change all smaller MEM
references to TImode. */
static bool
-spu_legitimate_address_p (enum machine_mode mode,
+spu_legitimate_address_p (machine_mode mode,
rtx x, bool reg_ok_strict)
{
int aligned = GET_MODE_SIZE (mode) >= 16;
/* Like spu_legitimate_address_p, except with named addresses. */
static bool
-spu_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
+spu_addr_space_legitimate_address_p (machine_mode mode, rtx x,
bool reg_ok_strict, addr_space_t as)
{
if (as == ADDR_SPACE_EA)
register. */
static rtx
spu_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
rtx op0, op1;
/* Make sure both operands are registers. */
/* Like spu_legitimate_address, except with named address support. */
static rtx
-spu_addr_space_legitimize_address (rtx x, rtx oldx, enum machine_mode mode,
+spu_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
addr_space_t as)
{
if (as != ADDR_SPACE_GENERIC)
/* Reload reg + const_int for out-of-range displacements. */
rtx
-spu_legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
+spu_legitimize_reload_address (rtx ad, machine_mode mode ATTRIBUTE_UNUSED,
int opnum, int type)
{
bool removed_and = false;
int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
{
tree type = *node, result = NULL_TREE;
- enum machine_mode mode;
+ machine_mode mode;
int unsigned_p;
while (POINTER_TYPE_P (type)
rtx
spu_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int byte_size = ((mode == BLKmode)
? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
if ((mode == BLKmode || (type && AGGREGATE_TYPE_P (type)))
&& byte_size <= UNITS_PER_WORD * MAX_REGISTER_RETURN && byte_size > 0)
{
- enum machine_mode smode;
+ machine_mode smode;
rtvec v;
int i;
int nregs = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
static rtx
spu_function_arg (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
if ((mode == BLKmode || (type && AGGREGATE_TYPE_P (type)))
&& byte_size < UNITS_PER_WORD && byte_size > 0)
{
- enum machine_mode smode;
+ machine_mode smode;
rtx gr_reg;
if (byte_size < 4)
byte_size = 4;
}
static void
-spu_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+spu_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Variable sized types are passed by reference. */
static bool
spu_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
in the stack then save no registers. Set pretend_args_size to the
amount of space needed to save the registers. */
static void
-spu_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
+spu_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
if (!no_rtl)
static int
store_with_one_insn_p (rtx mem)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx addr = XEXP (mem, 0);
if (mode == BLKmode)
return 0;
}
int
-spu_expand_mov (rtx * ops, enum machine_mode mode)
+spu_expand_mov (rtx * ops, machine_mode mode)
{
if (GET_CODE (ops[0]) == SUBREG && !valid_subreg (ops[0]))
{
if (GET_CODE (ops[1]) == SUBREG && !valid_subreg (ops[1]))
{
rtx from = SUBREG_REG (ops[1]);
- enum machine_mode imode = int_mode_for_mode (GET_MODE (from));
+ machine_mode imode = int_mode_for_mode (GET_MODE (from));
gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_CLASS (imode) == MODE_INT
static void
spu_convert_move (rtx dst, rtx src)
{
- enum machine_mode mode = GET_MODE (dst);
- enum machine_mode int_mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
+ machine_mode mode = GET_MODE (dst);
+ machine_mode int_mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
rtx reg;
gcc_assert (GET_MODE (src) == TImode);
reg = int_mode != mode ? gen_reg_rtx (int_mode) : dst;
int
spu_split_load (rtx * ops)
{
- enum machine_mode mode = GET_MODE (ops[0]);
+ machine_mode mode = GET_MODE (ops[0]);
rtx addr, load, rot;
int rot_amt;
int
spu_split_store (rtx * ops)
{
- enum machine_mode mode = GET_MODE (ops[0]);
+ machine_mode mode = GET_MODE (ops[0]);
rtx reg;
rtx addr, p0, p1, p1_lo, smem;
int aform;
/* Return TRUE if x is a CONST_INT, CONST_DOUBLE or CONST_VECTOR that
can be generated using the cbd, chd, cwd or cdd instruction. */
int
-cpat_const_p (rtx x, enum machine_mode mode)
+cpat_const_p (rtx x, machine_mode mode)
{
if (CONSTANT_P (x))
{
array. Use MODE for CONST_INT's. When the constant's mode is smaller
than 16 bytes, the value is repeated across the rest of the array. */
void
-constant_to_array (enum machine_mode mode, rtx x, unsigned char arr[16])
+constant_to_array (machine_mode mode, rtx x, unsigned char arr[16])
{
HOST_WIDE_INT val;
int i, j, first;
smaller than 16 bytes, use the bytes that would represent that value
in a register, e.g., for QImode return the value of arr[3]. */
rtx
-array_to_constant (enum machine_mode mode, const unsigned char arr[16])
+array_to_constant (machine_mode mode, const unsigned char arr[16])
{
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
rtvec v;
int units, size, i, j, k;
HOST_WIDE_INT val;
int opno ATTRIBUTE_UNUSED, int *total,
bool speed ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int cost = COSTS_N_INSNS (2);
/* Folding to a CONST_VECTOR will use extra space but there might
return true;
}
-static enum machine_mode
+static machine_mode
spu_unwind_word_mode (void)
{
return SImode;
/* Make a subreg, stripping any existing subreg. We could possibly just
call simplify_subreg, but in this case we know what we want. */
rtx
-spu_gen_subreg (enum machine_mode mode, rtx x)
+spu_gen_subreg (machine_mode mode, rtx x)
{
if (GET_CODE (x) == SUBREG)
x = SUBREG_REG (x);
void
spu_builtin_splats (rtx ops[])
{
- enum machine_mode mode = GET_MODE (ops[0]);
+ machine_mode mode = GET_MODE (ops[0]);
if (GET_CODE (ops[1]) == CONST_INT || GET_CODE (ops[1]) == CONST_DOUBLE)
{
unsigned char arr[16];
void
spu_builtin_extract (rtx ops[])
{
- enum machine_mode mode;
+ machine_mode mode;
rtx rot, from, tmp;
mode = GET_MODE (ops[1]);
void
spu_builtin_insert (rtx ops[])
{
- enum machine_mode mode = GET_MODE (ops[0]);
- enum machine_mode imode = GET_MODE_INNER (mode);
+ machine_mode mode = GET_MODE (ops[0]);
+ machine_mode imode = GET_MODE_INNER (mode);
rtx mask = gen_reg_rtx (TImode);
rtx offset;
void
spu_builtin_promote (rtx ops[])
{
- enum machine_mode mode, imode;
+ machine_mode mode, imode;
rtx rot, from, offset;
HOST_WIDE_INT pos;
void
spu_expand_vector_init (rtx target, rtx vals)
{
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
int n_elts = GET_MODE_NUNITS (mode);
int n_var = 0;
bool all_same = true;
static int
get_vec_cmp_insn (enum rtx_code code,
- enum machine_mode dest_mode,
- enum machine_mode op_mode)
+ machine_mode dest_mode,
+ machine_mode op_mode)
{
switch (code)
static rtx
spu_emit_vector_compare (enum rtx_code rcode,
rtx op0, rtx op1,
- enum machine_mode dmode)
+ machine_mode dmode)
{
int vec_cmp_insn;
rtx mask;
- enum machine_mode dest_mode;
- enum machine_mode op_mode = GET_MODE (op1);
+ machine_mode dest_mode;
+ machine_mode op_mode = GET_MODE (op1);
gcc_assert (GET_MODE (op0) == GET_MODE (op1));
spu_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
rtx cond, rtx cc_op0, rtx cc_op1)
{
- enum machine_mode dest_mode = GET_MODE (dest);
+ machine_mode dest_mode = GET_MODE (dest);
enum rtx_code rcode = GET_CODE (cond);
rtx mask;
}
static rtx
-spu_force_reg (enum machine_mode mode, rtx op)
+spu_force_reg (machine_mode mode, rtx op)
{
rtx x, r;
if (GET_MODE (op) == VOIDmode || GET_MODE (op) == BLKmode)
rtx pat;
rtx ops[8];
enum insn_code icode = (enum insn_code) d->icode;
- enum machine_mode mode, tmode;
+ machine_mode mode, tmode;
int i, p;
int n_operands;
tree return_type;
if (d->fcode == SPU_MASK_FOR_LOAD)
{
- enum machine_mode mode = insn_data[icode].operand[1].mode;
+ machine_mode mode = insn_data[icode].operand[1].mode;
tree arg;
rtx addr, op, pat;
else
{
rtx reg = gen_reg_rtx (mode);
- enum machine_mode imode = GET_MODE_INNER (mode);
+ machine_mode imode = GET_MODE_INNER (mode);
if (!spu_nonmem_operand (ops[i], GET_MODE (ops[i])))
ops[i] = force_reg (GET_MODE (ops[i]), ops[i]);
if (imode != GET_MODE (ops[i]))
spu_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
}
/* Return the appropriate mode for a named address pointer. */
-static enum machine_mode
+static machine_mode
spu_addr_space_pointer_mode (addr_space_t addrspace)
{
switch (addrspace)
}
/* Return the appropriate mode for a named address address. */
-static enum machine_mode
+static machine_mode
spu_addr_space_address_mode (addr_space_t addrspace)
{
switch (addrspace)
}
}
-static enum machine_mode
+static machine_mode
spu_libgcc_cmp_return_mode (void)
{
return SImode;
}
-static enum machine_mode
+static machine_mode
spu_libgcc_shift_count_mode (void)
{
/* For SPU word mode is TI mode so it is better to use SImode
the result is valid for MODE. Currently, MODE must be V4SFmode and
SCALE must be SImode. */
rtx
-spu_gen_exp2 (enum machine_mode mode, rtx scale)
+spu_gen_exp2 (machine_mode mode, rtx scale)
{
gcc_assert (mode == V4SFmode);
gcc_assert (GET_MODE (scale) == SImode || GET_CODE (scale) == CONST_INT);
#endif
#if defined (HAVE_MACHINE_MODES) && defined (RTX_CODE)
-extern void xstormy16_split_cbranch (enum machine_mode, rtx, rtx, rtx);
-extern int short_memory_operand (rtx, enum machine_mode);
-extern int nonimmediate_nonstack_operand (rtx, enum machine_mode);
+extern void xstormy16_split_cbranch (machine_mode, rtx, rtx, rtx);
+extern int short_memory_operand (rtx, machine_mode);
+extern int nonimmediate_nonstack_operand (rtx, machine_mode);
extern enum reg_class xstormy16_secondary_reload_class
- (enum reg_class, enum machine_mode, rtx);
-extern void xstormy16_split_move (enum machine_mode, rtx, rtx);
-extern void xstormy16_expand_move (enum machine_mode, rtx, rtx);
-extern void xstormy16_expand_arith (enum machine_mode, enum rtx_code,
+ (enum reg_class, machine_mode, rtx);
+extern void xstormy16_split_move (machine_mode, rtx, rtx);
+extern void xstormy16_expand_move (machine_mode, rtx, rtx);
+extern void xstormy16_expand_arith (machine_mode, enum rtx_code,
rtx, rtx, rtx);
-extern const char * xstormy16_output_shift (enum machine_mode, enum rtx_code,
+extern const char * xstormy16_output_shift (machine_mode, enum rtx_code,
rtx, rtx, rtx);
-extern int xstormy16_below100_symbol (rtx, enum machine_mode);
-extern int xstormy16_splittable_below100_operand (rtx, enum machine_mode);
-extern bool xstormy16_legitimate_address_p (enum machine_mode, rtx, bool);
+extern int xstormy16_below100_symbol (rtx, machine_mode);
+extern int xstormy16_splittable_below100_operand (rtx, machine_mode);
+extern bool xstormy16_legitimate_address_p (machine_mode, rtx, bool);
#endif
HOST_WIDE_INT, tree);
static void xstormy16_init_builtins (void);
-static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
-static int xstormy16_address_cost (rtx, enum machine_mode, addr_space_t, bool);
+static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);
static GTY(()) section *bss100_section;
}
static int
-xstormy16_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
+xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
/* Worker function for TARGET_MEMORY_MOVE_COST. */
static int
-xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
+xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
bool in)
{
return (5 + memory_move_secondary_cost (mode, rclass, in));
{
rtx condition_rtx, loc_ref, branch, cy_clobber;
rtvec vec;
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (op0);
gcc_assert (mode == HImode || mode == SImode);
xstormy16_expand_arith. */
void
-xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
+xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
rtx dest)
{
rtx op0 = XEXP (comparison, 0);
enum reg_class
xstormy16_secondary_reload_class (enum reg_class rclass,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx x)
{
/* This chip has the interesting property that only the first eight
int
xstormy16_below100_symbol (rtx x,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
if (GET_CODE (x) == CONST)
x = XEXP (x, 0);
MEM will get split into smaller sized accesses. */
int
-xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
+xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
{
if (MEM_P (x) && MEM_VOLATILE_P (x))
return 0;
&& (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
bool
-xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx x, bool strict)
{
if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
}
int
-short_memory_operand (rtx x, enum machine_mode mode)
+short_memory_operand (rtx x, machine_mode mode)
{
if (! memory_operand (x, mode))
return 0;
This function is only called when reload_completed. */
void
-xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
+xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
{
int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
int direction, end, i;
mode MODE from SRC to DEST. */
void
-xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
+xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
{
if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
{
the word count. */
static void
-xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static rtx
-xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
const_tree func ATTRIBUTE_UNUSED,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
mode = TYPE_MODE (valtype);
PROMOTE_MODE (mode, 0, valtype);
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-xstormy16_libcall_value (enum machine_mode mode,
+xstormy16_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
rtx call, temp;
- enum machine_mode mode;
+ machine_mode mode;
gcc_assert (MEM_P (dest));
dest = XEXP (dest, 0);
(this saves duplicating code in xstormy16_split_cbranch). */
void
-xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
+xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
rtx dest, rtx src0, rtx src1)
{
int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
SIZE_R will be a CONST_INT, X will be a hard register. */
const char *
-xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
+xstormy16_output_shift (machine_mode mode, enum rtx_code code,
rtx x, rtx size_r, rtx temp)
{
HOST_WIDE_INT size;
static rtx
xstormy16_expand_builtin (tree exp, rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
rtx op[10], args[10], pat, copyto[10], retval = 0;
{
char ao = s16builtins[i].arg_ops[o];
char c = insn_data[code].operand[o].constraint[0];
- enum machine_mode omode;
+ machine_mode omode;
copyto[o] = 0;
- omode = (enum machine_mode) insn_data[code].operand[o].mode;
+ omode = (machine_mode) insn_data[code].operand[o].mode;
if (ao == 'r')
op[o] = target ? target : gen_reg_rtx (omode);
else if (ao == 't')
unsigned int mask;
rtx cond, reg, qireg, mem;
rtx_insn *and_insn, *load;
- enum machine_mode load_mode = QImode;
- enum machine_mode and_mode = QImode;
+ machine_mode load_mode = QImode;
+ machine_mode and_mode = QImode;
rtx_insn *shift = NULL;
insn_code = recog_memoized (insn);
extern void tilegx_compute_pcrel_address (rtx, rtx);
extern void tilegx_compute_pcrel_plt_address (rtx, rtx);
extern bool tilegx_legitimate_pic_operand_p (rtx);
-extern rtx tilegx_simd_int (rtx, enum machine_mode);
+extern rtx tilegx_simd_int (rtx, machine_mode);
#ifdef RTX_CODE
extern bool tilegx_bitfield_operand_p (HOST_WIDE_INT, int *, int *);
extern void tilegx_expand_set_const64 (rtx, rtx);
-extern bool tilegx_expand_mov (enum machine_mode, rtx *);
+extern bool tilegx_expand_mov (machine_mode, rtx *);
extern void tilegx_expand_unaligned_load (rtx, rtx, HOST_WIDE_INT,
HOST_WIDE_INT, bool);
-extern void tilegx_expand_movmisalign (enum machine_mode, rtx *);
+extern void tilegx_expand_movmisalign (machine_mode, rtx *);
extern void tilegx_allocate_stack (rtx, rtx);
extern bool tilegx_expand_muldi (rtx, rtx, rtx);
extern void tilegx_expand_smuldi3_highpart (rtx, rtx, rtx);
extern void tilegx_expand_umuldi3_highpart (rtx, rtx, rtx);
-extern bool tilegx_emit_setcc (rtx[], enum machine_mode);
-extern void tilegx_emit_conditional_branch (rtx[], enum machine_mode);
+extern bool tilegx_emit_setcc (rtx[], machine_mode);
+extern void tilegx_emit_conditional_branch (rtx[], machine_mode);
extern rtx tilegx_emit_conditional_move (rtx);
extern const char *tilegx_output_cbranch_with_opcode (rtx_insn *, rtx *,
const char *,
extern const char *tilegx_output_cbranch (rtx_insn *, rtx *, bool);
extern void tilegx_expand_tablejump (rtx, rtx);
extern void tilegx_expand_builtin_vector_binop (rtx (*)(rtx, rtx, rtx),
- enum machine_mode, rtx,
- enum machine_mode, rtx, rtx,
+ machine_mode, rtx,
+ machine_mode, rtx, rtx,
bool);
extern void tilegx_pre_atomic_barrier (enum memmodel);
extern void tilegx_post_atomic_barrier (enum memmodel);
/* In case of a POST_INC or POST_DEC memory reference, we must report
the mode of the memory reference from TARGET_PRINT_OPERAND to
TARGET_PRINT_OPERAND_ADDRESS. */
-static enum machine_mode output_memory_reference_mode;
+static machine_mode output_memory_reference_mode;
/* Report whether we're printing out the first address fragment of a
POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
static bool
-tilegx_scalar_mode_supported_p (enum machine_mode mode)
+tilegx_scalar_mode_supported_p (machine_mode mode)
{
switch (mode)
{
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
static bool
-tilegx_vector_mode_supported_p (enum machine_mode mode)
+tilegx_vector_mode_supported_p (machine_mode mode)
{
return mode == V8QImode || mode == V4HImode || mode == V2SImode;
}
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-tilegx_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
+tilegx_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
return true;
passed by reference. */
static bool
tilegx_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return (type && TYPE_SIZE (type)
/* Implement TARGET_MODE_REP_EXTENDED. */
static int
-tilegx_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
+tilegx_mode_rep_extended (machine_mode mode, machine_mode mode_rep)
{
/* SImode register values are sign-extended to DImode. */
if (mode == SImode && mode_rep == DImode)
/* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
static unsigned int
-tilegx_function_arg_boundary (enum machine_mode mode, const_tree type)
+tilegx_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int alignment;
/* Implement TARGET_FUNCTION_ARG. */
static rtx
tilegx_function_arg (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
/* Implement TARGET_FUNCTION_ARG_ADVANCE. */
static void
tilegx_function_arg_advance (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
tilegx_function_value (const_tree valtype, const_tree fn_decl_or_type,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsigned_p;
mode = TYPE_MODE (valtype);
/* Implement TARGET_LIBCALL_VALUE. */
static rtx
-tilegx_libcall_value (enum machine_mode mode,
+tilegx_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, 0);
/* Implement TARGET_SETUP_INCOMING_VARARGS. */
static void
tilegx_setup_incoming_varargs (cumulative_args_t cum,
- enum machine_mode mode,
+ machine_mode mode,
tree type, int *pretend_args, int no_rtl)
{
CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
/* Create a temporary variable to hold a partial result, to enable
CSE. */
static rtx
-create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
+create_temp_reg_if_possible (machine_mode mode, rtx default_reg)
{
return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
}
modulo 64. SImode shifts sometimes use the 64 bit version so do
not hold such guarantee. */
static unsigned HOST_WIDE_INT
-tilegx_shift_truncation_mask (enum machine_mode mode)
+tilegx_shift_truncation_mask (machine_mode mode)
{
return mode == DImode ? 63 : 0;
}
pattern. TLS cannot be treated as a constant because it can
include a function call. */
static bool
-tilegx_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+tilegx_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
switch (GET_CODE (x))
{
/* Return true if the rtx X can be used as an address operand. */
static bool
-tilegx_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
+tilegx_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,
bool strict)
{
if (GET_CODE (x) == SUBREG)
nonzero, otherwise we allocate register(s) as necessary. */
static rtx
tilegx_legitimize_pic_address (rtx orig,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx reg)
{
if (GET_CODE (orig) == SYMBOL_REF)
/* Implement TARGET_LEGITIMIZE_ADDRESS. */
static rtx
tilegx_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
&& symbolic_operand (x, Pmode) && tilegx_tls_referenced_p (x))
replicating it to fill an interger of mode DImode. NUM is first
truncated to fit in MODE. */
rtx
-tilegx_simd_int (rtx num, enum machine_mode mode)
+tilegx_simd_int (rtx num, machine_mode mode)
{
HOST_WIDE_INT n = 0;
/* Expand a move instruction. Return true if all work is done. */
bool
-tilegx_expand_mov (enum machine_mode mode, rtx *operands)
+tilegx_expand_mov (machine_mode mode, rtx *operands)
{
/* Handle sets of MEM first. */
if (MEM_P (operands[0]))
tilegx_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
HOST_WIDE_INT bit_offset, bool sign)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx addr_lo, addr_hi;
rtx mem_lo, mem_hi, hi;
rtx mema, wide_result;
memory that is not naturally aligned. Emit instructions to load
it. */
void
-tilegx_expand_movmisalign (enum machine_mode mode, rtx *operands)
+tilegx_expand_movmisalign (machine_mode mode, rtx *operands)
{
if (MEM_P (operands[1]))
{
/* Produce the rtx yielding a bool for a floating point
comparison. */
static bool
-tilegx_emit_fp_setcc (rtx res, enum rtx_code code, enum machine_mode mode,
+tilegx_emit_fp_setcc (rtx res, enum rtx_code code, machine_mode mode,
rtx op0, rtx op1)
{
/* TODO: Certain compares again constants can be done using entirely
work. */
static bool
tilegx_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode cmp_mode)
+ machine_mode cmp_mode)
{
rtx tmp;
bool swap = false;
/* Implement cstore patterns. */
bool
-tilegx_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
+tilegx_emit_setcc (rtx operands[], machine_mode cmp_mode)
{
return
tilegx_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
/* Generate the comparison for a DImode conditional branch. */
static rtx
tilegx_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode cmp_mode, bool eq_ne_only)
+ machine_mode cmp_mode, bool eq_ne_only)
{
enum rtx_code branch_code;
rtx temp;
/* Generate the comparison for a conditional branch. */
void
-tilegx_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
+tilegx_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
{
rtx cmp_rtx =
tilegx_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE. */
void
tilegx_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
- enum machine_mode dest_mode,
+ machine_mode dest_mode,
rtx dest,
- enum machine_mode src_mode,
+ machine_mode src_mode,
rtx src0, rtx src1, bool do_src1)
{
dest = gen_lowpart (dest_mode, dest);
tilegx_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4
if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
{
- enum machine_mode opmode = insn_op->mode;
+ machine_mode opmode = insn_op->mode;
/* pointer_operand and pmode_register_operand operands do
not specify a mode, so use the operand's mode instead
except for constants, which are VOIDmode). */
if (opmode == VOIDmode)
{
- enum machine_mode m = GET_MODE (op[opnum]);
+ machine_mode m = GET_MODE (op[opnum]);
gcc_assert (m == Pmode || m == VOIDmode);
opmode = Pmode;
}
if (nonvoid)
{
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (!target
|| GET_MODE (target) != tmode
|| !(*insn_data[icode].operand[0].predicate) (target, tmode))
int
tilegx_adjust_insn_length (rtx_insn *insn, int length)
{
- enum machine_mode mode = GET_MODE (insn);
+ machine_mode mode = GET_MODE (insn);
/* A non-termininating instruction in a bundle has length 0. */
if (mode == SImode)
/* Machine mode of current insn, for determining curly brace
placement. */
-static enum machine_mode insn_mode;
+static machine_mode insn_mode;
/* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
rtx s0;
rtx bcomp;
rtx loc_ref;
- enum machine_mode mode = GET_MODE (operands[0]);
+ machine_mode mode = GET_MODE (operands[0]);
/* only deal with loop counters in SImode or DImode */
if (mode != SImode && mode != DImode)
extern void tilepro_init_expanders (void);
extern bool tilepro_legitimate_pic_operand_p (rtx);
-extern rtx tilepro_simd_int (rtx, enum machine_mode);
+extern rtx tilepro_simd_int (rtx, machine_mode);
#ifdef RTX_CODE
extern void split_di (rtx[], int, rtx[], rtx[]);
extern bool tilepro_bitfield_operand_p (HOST_WIDE_INT, int *, int *);
extern void tilepro_expand_set_const32 (rtx, rtx);
-extern bool tilepro_expand_mov (enum machine_mode, rtx *);
+extern bool tilepro_expand_mov (machine_mode, rtx *);
extern void tilepro_expand_insv (rtx operands[4]);
extern void tilepro_expand_unaligned_load (rtx, rtx, HOST_WIDE_INT,
HOST_WIDE_INT, bool);
-extern void tilepro_expand_movmisalign (enum machine_mode, rtx *);
+extern void tilepro_expand_movmisalign (machine_mode, rtx *);
extern bool tilepro_expand_addsi (rtx, rtx, rtx);
extern void tilepro_allocate_stack (rtx, rtx);
extern bool tilepro_expand_mulsi (rtx, rtx, rtx);
extern void tilepro_expand_smulsi3_highpart (rtx, rtx, rtx);
extern void tilepro_expand_umulsi3_highpart (rtx, rtx, rtx);
-extern bool tilepro_emit_setcc (rtx[], enum machine_mode);
-extern void tilepro_emit_conditional_branch (rtx[], enum machine_mode);
+extern bool tilepro_emit_setcc (rtx[], machine_mode);
+extern void tilepro_emit_conditional_branch (rtx[], machine_mode);
extern rtx tilepro_emit_conditional_move (rtx);
extern const char *tilepro_output_cbranch_with_opcode (rtx_insn *, rtx *,
const char *,
extern const char *tilepro_output_cbranch (rtx_insn *, rtx *, bool);
extern void tilepro_expand_tablejump (rtx, rtx);
extern void tilepro_expand_builtin_vector_binop (rtx (*)(rtx, rtx, rtx),
- enum machine_mode, rtx,
- enum machine_mode, rtx, rtx,
+ machine_mode, rtx,
+ machine_mode, rtx, rtx,
bool);
#endif /* RTX_CODE */
/* In case of a POST_INC or POST_DEC memory reference, we must report
the mode of the memory reference from TARGET_PRINT_OPERAND to
TARGET_PRINT_OPERAND_ADDRESS. */
-static enum machine_mode output_memory_reference_mode;
+static machine_mode output_memory_reference_mode;
/* Report whether we're printing out the first address fragment of a
POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
static bool
-tilepro_scalar_mode_supported_p (enum machine_mode mode)
+tilepro_scalar_mode_supported_p (machine_mode mode)
{
switch (mode)
{
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
static bool
-tile_vector_mode_supported_p (enum machine_mode mode)
+tile_vector_mode_supported_p (machine_mode mode)
{
return mode == V4QImode || mode == V2HImode;
}
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-tilepro_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
+tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED)
{
return true;
passed by reference. */
static bool
tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return (type && TYPE_SIZE (type)
/* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
static unsigned int
-tilepro_function_arg_boundary (enum machine_mode mode, const_tree type)
+tilepro_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int alignment;
/* Implement TARGET_FUNCTION_ARG. */
static rtx
tilepro_function_arg (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
/* Implement TARGET_FUNCTION_ARG_ADVANCE. */
static void
tilepro_function_arg_advance (cumulative_args_t cum_v,
- enum machine_mode mode,
+ machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
bool outgoing ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
int unsigned_p;
mode = TYPE_MODE (valtype);
/* Implement TARGET_LIBCALL_VALUE. */
static rtx
-tilepro_libcall_value (enum machine_mode mode,
+tilepro_libcall_value (machine_mode mode,
const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG (mode, 0);
/* Implement TARGET_SETUP_INCOMING_VARARGS. */
static void
tilepro_setup_incoming_varargs (cumulative_args_t cum,
- enum machine_mode mode,
+ machine_mode mode,
tree type, int *pretend_args, int no_rtl)
{
CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
/* Create a temporary variable to hold a partial result, to enable
CSE. */
static rtx
-create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
+create_temp_reg_if_possible (machine_mode mode, rtx default_reg)
{
return can_create_pseudo_p ()? gen_reg_rtx (mode) : default_reg;
}
pattern. TLS cannot be treated as a constant because it can
include a function call. */
static bool
-tilepro_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
switch (GET_CODE (x))
{
/* Return true if the rtx X can be used as an address operand. */
static bool
-tilepro_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
+tilepro_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,
bool strict)
{
if (GET_CODE (x) == SUBREG)
nonzero, otherwise we allocate register(s) as necessary. */
static rtx
tilepro_legitimize_pic_address (rtx orig,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
rtx reg)
{
if (GET_CODE (orig) == SYMBOL_REF)
/* Implement TARGET_LEGITIMIZE_ADDRESS. */
static rtx
tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
&& symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
replicating it to fill an interger of mode SImode. NUM is first
truncated to fit in MODE. */
rtx
-tilepro_simd_int (rtx num, enum machine_mode mode)
+tilepro_simd_int (rtx num, machine_mode mode)
{
HOST_WIDE_INT n = 0;
void
tilepro_expand_set_const32 (rtx op0, rtx op1)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
rtx temp;
if (CONST_INT_P (op1))
/* Expand a move instruction. Return true if all work is done. */
bool
-tilepro_expand_mov (enum machine_mode mode, rtx *operands)
+tilepro_expand_mov (machine_mode mode, rtx *operands)
{
/* Handle sets of MEM first. */
if (MEM_P (operands[0]))
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
HOST_WIDE_INT bit_offset, bool sign)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx addr_lo, addr_hi;
rtx mem_lo, mem_hi, hi;
rtx mema, wide_result;
memory that is not naturally aligned. Emit instructions to load
it. */
void
-tilepro_expand_movmisalign (enum machine_mode mode, rtx *operands)
+tilepro_expand_movmisalign (machine_mode mode, rtx *operands)
{
if (MEM_P (operands[1]))
{
work. */
static bool
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode cmp_mode)
+ machine_mode cmp_mode)
{
rtx tmp;
bool swap = false;
/* Implement cstore patterns. */
bool
-tilepro_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
+tilepro_emit_setcc (rtx operands[], machine_mode cmp_mode)
{
return
tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
/* Generate the comparison for an SImode conditional branch. */
static rtx
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode cmp_mode, bool eq_ne_only)
+ machine_mode cmp_mode, bool eq_ne_only)
{
enum rtx_code branch_code;
rtx temp;
/* Generate the comparison for a conditional branch. */
void
-tilepro_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
+tilepro_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
{
rtx cmp_rtx =
tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE. */
void
tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
- enum machine_mode dest_mode,
+ machine_mode dest_mode,
rtx dest,
- enum machine_mode src_mode,
+ machine_mode src_mode,
rtx src0, rtx src1, bool do_src1)
{
dest = gen_lowpart (dest_mode, dest);
tilepro_expand_builtin (tree exp,
rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4
if (nonvoid)
{
- enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
if (!target
|| GET_MODE (target) != tmode
|| !(*insn_data[icode].operand[0].predicate) (target, tmode))
int
tilepro_adjust_insn_length (rtx_insn *insn, int length)
{
- enum machine_mode mode = GET_MODE (insn);
+ machine_mode mode = GET_MODE (insn);
/* A non-termininating instruction in a bundle has length 0. */
if (mode == SImode)
/* Machine mode of current insn, for determining curly brace
placement. */
-static enum machine_mode insn_mode;
+static machine_mode insn_mode;
/* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
#ifdef HAVE_MACHINE_MODES
extern char * construct_dispose_instruction (rtx);
extern char * construct_prepare_instruction (rtx);
-extern int ep_memory_operand (rtx, enum machine_mode, int);
+extern int ep_memory_operand (rtx, machine_mode, int);
extern int v850_adjust_insn_length (rtx_insn *, int);
extern const char * v850_gen_movdi (rtx *);
-extern rtx v850_gen_compare (enum rtx_code, enum machine_mode,
+extern rtx v850_gen_compare (enum rtx_code, machine_mode,
rtx, rtx);
-extern enum machine_mode v850_gen_float_compare (enum rtx_code,
- enum machine_mode, rtx, rtx);
-extern enum machine_mode v850_select_cc_mode (RTX_CODE, rtx, rtx);
+extern machine_mode v850_gen_float_compare (enum rtx_code,
+ machine_mode, rtx, rtx);
+extern machine_mode v850_select_cc_mode (RTX_CODE, rtx, rtx);
#endif
#endif /* RTX_CODE */
static bool
v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode, const_tree type,
+ machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
unsigned HOST_WIDE_INT size;
is NULL_RTX, the argument will be pushed. */
static rtx
-v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Return the number of bytes which must be put into registers
for values which are part in registers and part in memory. */
static int
-v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
+v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
(TYPE is null for libcalls where that information may not be available.) */
static void
-v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
return "";
}
-enum machine_mode
+machine_mode
v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
{
if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
return CCmode;
}
-enum machine_mode
-v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
+machine_mode
+v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
{
if (GET_MODE (op0) == DFmode)
{
}
rtx
-v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
+v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
{
if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
{
MODE and signedness UNSIGNEDP. */
static int
-ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
+ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
{
int max_offset = 0;
/* Return true if OP is a valid short EP memory reference */
int
-ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
+ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
{
rtx addr, op0, op1;
int max_offset;
for the register */
if (GET_CODE (dest) == REG)
{
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
int regno;
int endregno;
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return (GET_CODE (x) == CONST_DOUBLE
|| !(GET_CODE (x) == CONST
}
static int
-v850_memory_move_cost (enum machine_mode mode,
+v850_memory_move_cost (machine_mode mode,
reg_class_t reg_class ATTRIBUTE_UNUSED,
bool in)
{
"TARGET_USE_FPU"
{
enum rtx_code cond = GET_CODE (operands[0]);
- enum machine_mode mode;
+ machine_mode mode;
rtx fcc_reg;
rtx cc_reg;
rtx tmp;
"TARGET_USE_FPU"
{
enum rtx_code cond = GET_CODE (operands[0]);
- enum machine_mode mode;
+ machine_mode mode;
rtx fcc_reg;
rtx cc_reg;
rtx tmp;
#ifdef RTX_CODE
extern const char *cond_name (rtx);
-extern bool adjacent_operands_p (rtx, rtx, enum machine_mode);
+extern bool adjacent_operands_p (rtx, rtx, machine_mode);
extern const char *rev_cond_name (rtx);
extern void print_operand_address (FILE *, rtx);
extern void print_operand (FILE *, rtx, int);
extern void vax_notice_update_cc (rtx, rtx);
extern void vax_expand_addsub_di_operands (rtx *, enum rtx_code);
-extern const char * vax_output_int_move (rtx, rtx *, enum machine_mode);
-extern const char * vax_output_int_add (rtx, rtx *, enum machine_mode);
-extern const char * vax_output_int_subtract (rtx, rtx *, enum machine_mode);
+extern const char * vax_output_int_move (rtx, rtx *, machine_mode);
+extern const char * vax_output_int_add (rtx, rtx *, machine_mode);
+extern const char * vax_output_int_subtract (rtx, rtx *, machine_mode);
extern const char * vax_output_movmemsi (rtx, rtx *);
#endif /* RTX_CODE */
#ifdef REAL_VALUE_TYPE
-extern int check_float_value (enum machine_mode, REAL_VALUE_TYPE *, int);
+extern int check_float_value (machine_mode, REAL_VALUE_TYPE *, int);
#endif /* REAL_VALUE_TYPE */
#include "builtins.h"
static void vax_option_override (void);
-static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
-static int vax_address_cost (rtx, enum machine_mode, addr_space_t, bool);
+static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
-static rtx vax_function_arg (cumulative_args_t, enum machine_mode,
+static rtx vax_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
-static void vax_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void vax_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static bool
vax_float_literal (rtx c)
{
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE r, s;
int i;
}
static int
-vax_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
+vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
addr_space_t as ATTRIBUTE_UNUSED,
bool speed ATTRIBUTE_UNUSED)
{
vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
int *total, bool speed ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
int i = 0; /* may be modified in switch */
const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
- enum machine_mode mode)
+ machine_mode mode)
{
rtx hi[3], lo[3];
const char *pattern_hi, *pattern_lo;
which are not modified very often. */
const char *
-vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
+vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
{
switch (mode)
{
}
const char *
-vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
+vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
{
switch (mode)
{
than or equal 8 bytes, or just a reg if MODE is one byte. */
static bool
-index_term_p (rtx prod, enum machine_mode mode, bool strict)
+index_term_p (rtx prod, machine_mode mode, bool strict)
{
rtx xfoo0, xfoo1;
/* Return true if X is the sum of a register
and a valid index term for mode MODE. */
static bool
-reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
+reg_plus_index_p (rtx x, machine_mode mode, bool strict)
{
rtx xfoo0, xfoo1;
/* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
static bool
-indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
+indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
{
if (!CONSTANT_ADDRESS_P (xfoo0))
return false;
The MODE argument is the machine mode for the MEM expression
that wants to use this address. */
bool
-vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
+vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
rtx xfoo0, xfoo1;
}
static rtx
-fixup_mathdi_operand (rtx x, enum machine_mode mode)
+fixup_mathdi_operand (rtx x, machine_mode mode)
{
if (illegal_addsub_di_memory_operand (x, mode))
{
}
bool
-adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
+adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
{
HOST_WIDE_INT lo_offset;
HOST_WIDE_INT hi_offset;
static rtx
vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
may not be available.) */
static void
-vax_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* #pragma __pointer_size */
-static enum machine_mode saved_pointer_mode;
+static machine_mode saved_pointer_mode;
static void
handle_pragma_pointer_size (const char *pragma_name)
extern bool xtensa_b4const_or_zero (HOST_WIDE_INT);
extern bool xtensa_b4constu (HOST_WIDE_INT);
extern bool xtensa_mask_immediate (HOST_WIDE_INT);
-extern bool xtensa_mem_offset (unsigned, enum machine_mode);
+extern bool xtensa_mem_offset (unsigned, machine_mode);
/* Functions within xtensa.c that we reference. */
#ifdef RTX_CODE
extern int xt_true_regnum (rtx);
-extern int xtensa_valid_move (enum machine_mode, rtx *);
+extern int xtensa_valid_move (machine_mode, rtx *);
extern int smalloffset_mem_p (rtx);
extern int constantpool_mem_p (rtx);
extern void xtensa_extend_reg (rtx, rtx);
-extern void xtensa_expand_conditional_branch (rtx *, enum machine_mode);
+extern void xtensa_expand_conditional_branch (rtx *, machine_mode);
extern int xtensa_expand_conditional_move (rtx *, int);
-extern int xtensa_expand_scc (rtx *, enum machine_mode);
+extern int xtensa_expand_scc (rtx *, machine_mode);
extern int xtensa_expand_block_move (rtx *);
-extern void xtensa_split_operand_pair (rtx *, enum machine_mode);
-extern int xtensa_emit_move_sequence (rtx *, enum machine_mode);
+extern void xtensa_split_operand_pair (rtx *, machine_mode);
+extern int xtensa_emit_move_sequence (rtx *, machine_mode);
extern rtx xtensa_copy_incoming_a7 (rtx);
extern void xtensa_expand_nonlocal_goto (rtx *);
extern void xtensa_expand_compare_and_swap (rtx, rtx, rtx, rtx);
extern void print_operand (FILE *, rtx, int);
extern void print_operand_address (FILE *, rtx);
-extern void xtensa_output_literal (FILE *, rtx, enum machine_mode, int);
+extern void xtensa_output_literal (FILE *, rtx, machine_mode, int);
extern rtx xtensa_return_addr (int, rtx);
#endif /* RTX_CODE */
static enum internal_test map_test_to_internal_test (enum rtx_code);
static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
static rtx gen_float_relational (enum rtx_code, rtx, rtx);
-static rtx gen_conditional_move (enum rtx_code, enum machine_mode, rtx, rtx);
+static rtx gen_conditional_move (enum rtx_code, machine_mode, rtx, rtx);
static rtx fixup_subreg_mem (rtx);
static struct machine_function * xtensa_init_machine_status (void);
static rtx xtensa_legitimize_tls_address (rtx);
-static rtx xtensa_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx xtensa_legitimize_address (rtx, rtx, machine_mode);
static bool xtensa_mode_dependent_address_p (const_rtx, addr_space_t);
static bool xtensa_return_in_msb (const_tree);
static void printx (FILE *, signed int);
static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
static rtx xtensa_builtin_saveregs (void);
-static bool xtensa_legitimate_address_p (enum machine_mode, rtx, bool);
+static bool xtensa_legitimate_address_p (machine_mode, rtx, bool);
static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
int) ATTRIBUTE_UNUSED;
-static section *xtensa_select_rtx_section (enum machine_mode, rtx,
+static section *xtensa_select_rtx_section (machine_mode, rtx,
unsigned HOST_WIDE_INT);
static bool xtensa_rtx_costs (rtx, int, int, int, int *, bool);
-static int xtensa_register_move_cost (enum machine_mode, reg_class_t,
+static int xtensa_register_move_cost (machine_mode, reg_class_t,
reg_class_t);
-static int xtensa_memory_move_cost (enum machine_mode, reg_class_t, bool);
+static int xtensa_memory_move_cost (machine_mode, reg_class_t, bool);
static tree xtensa_build_builtin_va_list (void);
static bool xtensa_return_in_memory (const_tree, const_tree);
static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
gimple_seq *);
-static void xtensa_function_arg_advance (cumulative_args_t, enum machine_mode,
+static void xtensa_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
-static rtx xtensa_function_arg (cumulative_args_t, enum machine_mode,
+static rtx xtensa_function_arg (cumulative_args_t, machine_mode,
const_tree, bool);
static rtx xtensa_function_incoming_arg (cumulative_args_t,
- enum machine_mode, const_tree, bool);
+ machine_mode, const_tree, bool);
static rtx xtensa_function_value (const_tree, const_tree, bool);
-static rtx xtensa_libcall_value (enum machine_mode, const_rtx);
+static rtx xtensa_libcall_value (machine_mode, const_rtx);
static bool xtensa_function_value_regno_p (const unsigned int);
-static unsigned int xtensa_function_arg_boundary (enum machine_mode,
+static unsigned int xtensa_function_arg_boundary (machine_mode,
const_tree);
static void xtensa_init_builtins (void);
static tree xtensa_fold_builtin (tree, int, tree *, bool);
-static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static rtx xtensa_expand_builtin (tree, rtx, rtx, machine_mode, int);
static void xtensa_va_start (tree, rtx);
static bool xtensa_frame_pointer_required (void);
static rtx xtensa_static_chain (const_tree, bool);
static void xtensa_asm_trampoline_template (FILE *);
static void xtensa_trampoline_init (rtx, tree, rtx);
static bool xtensa_output_addr_const_extra (FILE *, rtx);
-static bool xtensa_cannot_force_const_mem (enum machine_mode, rtx);
+static bool xtensa_cannot_force_const_mem (machine_mode, rtx);
static reg_class_t xtensa_preferred_reload_class (rtx, reg_class_t);
static reg_class_t xtensa_preferred_output_reload_class (rtx, reg_class_t);
static reg_class_t xtensa_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
struct secondary_reload_info *);
static bool constantpool_address_p (const_rtx addr);
-static bool xtensa_legitimate_constant_p (enum machine_mode, rtx);
+static bool xtensa_legitimate_constant_p (machine_mode, rtx);
static bool xtensa_member_type_forces_blk (const_tree,
- enum machine_mode mode);
+ machine_mode mode);
static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
REG_ALLOC_ORDER;
int
-xtensa_valid_move (enum machine_mode mode, rtx *operands)
+xtensa_valid_move (machine_mode mode, rtx *operands)
{
/* Either the destination or source must be a register, and the
MAC16 accumulator doesn't count. */
bool
-xtensa_mem_offset (unsigned v, enum machine_mode mode)
+xtensa_mem_offset (unsigned v, machine_mode mode)
{
switch (mode)
{
};
enum internal_test test;
- enum machine_mode mode;
+ machine_mode mode;
struct cmp_info *p_info;
test = map_test_to_internal_test (test_code);
void
-xtensa_expand_conditional_branch (rtx *operands, enum machine_mode mode)
+xtensa_expand_conditional_branch (rtx *operands, machine_mode mode)
{
enum rtx_code test_code = GET_CODE (operands[0]);
rtx cmp0 = operands[1];
static rtx
-gen_conditional_move (enum rtx_code code, enum machine_mode mode,
+gen_conditional_move (enum rtx_code code, machine_mode mode,
rtx op0, rtx op1)
{
if (mode == SImode)
{
rtx dest = operands[0];
rtx cmp = operands[1];
- enum machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
+ machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode,
int
-xtensa_expand_scc (rtx operands[4], enum machine_mode cmp_mode)
+xtensa_expand_scc (rtx operands[4], machine_mode cmp_mode)
{
rtx dest = operands[0];
rtx cmp;
for the output, i.e., the input operands are twice as big as MODE. */
void
-xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
+xtensa_split_operand_pair (rtx operands[4], machine_mode mode)
{
switch (GET_CODE (operands[1]))
{
normally. */
int
-xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
+xtensa_emit_move_sequence (rtx *operands, machine_mode mode)
{
rtx src = operands[1];
{
rtx entry_insns = 0;
rtx reg, tmp;
- enum machine_mode mode;
+ machine_mode mode;
if (!cfun->machine->need_a7_copy)
return opnd;
int
xtensa_expand_block_move (rtx *operands)
{
- static const enum machine_mode mode_from_align[] =
+ static const machine_mode mode_from_align[] =
{
VOIDmode, QImode, HImode, VOIDmode, SImode,
};
HOST_WIDE_INT bytes, align;
int num_pieces, move_ratio;
rtx temp[2];
- enum machine_mode mode[2];
+ machine_mode mode[2];
int amount[2];
bool active[2];
int phase = 0;
/* Shift VAL of mode MODE left by COUNT bits. */
static inline rtx
-xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
+xtensa_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
{
val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
NULL_RTX, 1, OPTAB_DIRECT);
static void
init_alignment_context (struct alignment_context *ac, rtx mem)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx byteoffset = NULL_RTX;
bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
void
xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
struct alignment_context ac;
rtx tmp, cmpv, newv, val;
rtx oldval = gen_reg_rtx (SImode);
xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
bool after)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
struct alignment_context ac;
rtx_code_label *csloop = gen_label_rtx ();
rtx cmp, tmp;
bool
-xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
+xtensa_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
{
/* Allow constant pool addresses. */
if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
rtx
xtensa_legitimize_address (rtx x,
rtx oldx ATTRIBUTE_UNUSED,
- enum machine_mode mode)
+ machine_mode mode)
{
if (xtensa_tls_symbol_p (x))
return xtensa_legitimize_tls_address (x);
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
-xtensa_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+xtensa_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return xtensa_tls_referenced_p (x);
}
/* Advance the argument to the next argument position. */
static void
-xtensa_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
+xtensa_function_arg_advance (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
int words, max;
if this is an incoming argument to the current function. */
static rtx
-xtensa_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
+xtensa_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
const_tree type, bool incoming_p)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Implement TARGET_FUNCTION_ARG. */
static rtx
-xtensa_function_arg (cumulative_args_t cum, enum machine_mode mode,
+xtensa_function_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return xtensa_function_arg_1 (cum, mode, type, false);
/* Implement TARGET_FUNCTION_INCOMING_ARG. */
static rtx
-xtensa_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
+xtensa_function_incoming_arg (cumulative_args_t cum, machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return xtensa_function_arg_1 (cum, mode, type, true);
}
static unsigned int
-xtensa_function_arg_boundary (enum machine_mode mode, const_tree type)
+xtensa_function_arg_boundary (machine_mode mode, const_tree type)
{
unsigned int alignment;
xtensa_option_override (void)
{
int regno;
- enum machine_mode mode;
+ machine_mode mode;
if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
error ("boolean registers required for the floating-point option");
/* Set up array giving whether a given register can hold a given mode. */
for (mode = VOIDmode;
mode != MAX_MACHINE_MODE;
- mode = (enum machine_mode) ((int) mode + 1))
+ mode = (machine_mode) ((int) mode + 1))
{
int size = GET_MODE_SIZE (mode);
enum mode_class mclass = GET_MODE_CLASS (mode);
void
-xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
+xtensa_output_literal (FILE *file, rtx x, machine_mode mode, int labelno)
{
long value_long[2];
REAL_VALUE_TYPE r;
uses of the register, only one of which would be replaced. */
static bool
-xtensa_member_type_forces_blk (const_tree, enum machine_mode mode)
+xtensa_member_type_forces_blk (const_tree, machine_mode mode)
{
return mode == CQImode || mode == CHImode;
}
static rtx
xtensa_expand_builtin (tree exp, rtx target,
rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
int ignore)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
static reg_class_t
xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
- enum machine_mode mode, secondary_reload_info *sri)
+ machine_mode mode, secondary_reload_info *sri)
{
int regno;
/* The literal pool stays with the function. */
static section *
-xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
+xtensa_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
rtx x ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
/* Worker function for TARGET_REGISTER_MOVE_COST. */
static int
-xtensa_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+xtensa_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from, reg_class_t to)
{
if (from == to && from != BR_REGS && to != BR_REGS)
/* Worker function for TARGET_MEMORY_MOVE_COST. */
static int
-xtensa_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+xtensa_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
case ABS:
{
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
if (xmode == SFmode)
*total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
else if (xmode == DFmode)
case PLUS:
case MINUS:
{
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
if (xmode == SFmode)
*total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
else if (xmode == DFmode || xmode == DImode)
case MULT:
{
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
if (xmode == SFmode)
*total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
else if (xmode == DFmode)
case DIV:
case MOD:
{
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
if (xmode == SFmode)
{
*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
case UDIV:
case UMOD:
{
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
if (xmode == DImode)
*total = COSTS_N_INSNS (50);
else if (TARGET_DIV32)
/* Worker function for TARGET_LIBCALL_VALUE. */
static rtx
-xtensa_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+xtensa_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
return gen_rtx_REG ((GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD)
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
-xtensa_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+xtensa_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
return !xtensa_tls_referenced_p (x);
}
tree
get_gcov_type (void)
{
- enum machine_mode mode = smallest_mode_for_size (GCOV_TYPE_SIZE, MODE_INT);
+ machine_mode mode = smallest_mode_for_size (GCOV_TYPE_SIZE, MODE_INT);
return lang_hooks.types.type_for_mode (mode, false);
}
static tree
get_gcov_unsigned_t (void)
{
- enum machine_mode mode = smallest_mode_for_size (32, MODE_INT);
+ machine_mode mode = smallest_mode_for_size (32, MODE_INT);
return lang_hooks.types.type_for_mode (mode, true);
}
\f
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * constexpr.c: Remove redundant enum from machine_mode.
+
2014-10-28 Jason Merrill <jason@redhat.com>
* constexpr.c (cxx_eval_outermost_constant_expr): Tweak.
static int
check_automatic_or_tls (tree ref)
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int volatilep = 0, unsignedp = 0;
static bool
implicit_set_cond_p (const_rtx cond)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx cst;
/* COND must be either an EQ or NE comparison. */
the mode in which the constant should be interpreted. */
static rtx this_insn_cc0, prev_insn_cc0;
-static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
+static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
#endif
/* Insn being scanned. */
static int notreg_cost (rtx, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
-static void make_new_qty (unsigned int, enum machine_mode);
+static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
-static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
-static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
+static struct table_elt *lookup (rtx, unsigned, machine_mode);
+static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
- enum machine_mode, int, int);
+ machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
- enum machine_mode);
+ machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
-static void invalidate (rtx, enum machine_mode);
+static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
- enum machine_mode);
+ machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);
-static inline unsigned canon_hash (rtx, enum machine_mode);
-static inline unsigned safe_hash (rtx, enum machine_mode);
+static inline unsigned canon_hash (rtx, machine_mode);
+static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);
static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
- enum machine_mode *,
- enum machine_mode *);
+ machine_mode *,
+ machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
-static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
+static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
-static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
+static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
bool);
\f
register before and initialize that quantity. */
static void
-make_new_qty (unsigned int reg, enum machine_mode mode)
+make_new_qty (unsigned int reg, machine_mode mode)
{
int q;
struct qty_table_elem *ent;
static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
- enum machine_mode mode)
+ machine_mode mode)
{
struct table_elt *elt;
unsigned hash;
register-offset expressions using REG. */
static void
-insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
+insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
otherwise. */
static rtx
-try_const_anchors (rtx src_const, enum machine_mode mode)
+try_const_anchors (rtx src_const, machine_mode mode)
{
struct table_elt *lower_elt, *upper_elt;
HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
looks like X. */
static struct table_elt *
-lookup (rtx x, unsigned int hash, enum machine_mode mode)
+lookup (rtx x, unsigned int hash, machine_mode mode)
{
struct table_elt *p;
Also ignore discrepancies in the machine mode of a register. */
static struct table_elt *
-lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
+lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
{
struct table_elt *p;
static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
- enum machine_mode mode, int cost, int reg_cost)
+ machine_mode mode, int cost, int reg_cost)
{
struct table_elt *elt;
static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
- enum machine_mode mode)
+ machine_mode mode)
{
return
insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
{
unsigned int hash;
rtx exp = elt->exp;
- enum machine_mode mode = elt->mode;
+ machine_mode mode = elt->mode;
next = elt->next_same_value;
ADDR are as for canon_anti_dependence. */
static bool
-check_dependence (const_rtx x, rtx exp, enum machine_mode mode, rtx addr)
+check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
{
subrtx_iterator::array_type array;
FOR_EACH_SUBRTX (iter, array, x, NONCONST)
or it may be either of those plus a numeric offset. */
static void
-invalidate (rtx x, enum machine_mode full_mode)
+invalidate (rtx x, machine_mode full_mode)
{
int i;
struct table_elt *p;
and mode MODE. */
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
- enum machine_mode mode)
+ machine_mode mode)
{
unsigned int i;
struct table_elt *p, *next;
When the callback returns true, we continue with the new rtx. */
unsigned
-hash_rtx_cb (const_rtx x, enum machine_mode mode,
+hash_rtx_cb (const_rtx x, machine_mode mode,
int *do_not_record_p, int *hash_arg_in_memory_p,
bool have_reg_qty, hash_rtx_callback_function cb)
{
unsigned hash = 0;
enum rtx_code code;
const char *fmt;
- enum machine_mode newmode;
+ machine_mode newmode;
rtx newx;
/* Used to turn recursion into iteration. We can't rely on GCC's
is just (int) MEM plus the hash code of the address. */
unsigned
-hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
+hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
int *hash_arg_in_memory_p, bool have_reg_qty)
{
return hash_rtx_cb (x, mode, do_not_record_p,
does not have the MEM_READONLY_P flag set. */
static inline unsigned
-canon_hash (rtx x, enum machine_mode mode)
+canon_hash (rtx x, machine_mode mode)
{
return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}
and hash_arg_in_memory are not changed. */
static inline unsigned
-safe_hash (rtx x, enum machine_mode mode)
+safe_hash (rtx x, machine_mode mode)
{
int dummy_do_not_record;
return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
- enum machine_mode *pmode1, enum machine_mode *pmode2)
+ machine_mode *pmode1, machine_mode *pmode2)
{
rtx arg1, arg2;
hash_set<rtx> *visited = NULL;
for (; p; p = p->next_same_value)
{
- enum machine_mode inner_mode = GET_MODE (p->exp);
+ machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
REAL_VALUE_TYPE fsfv;
#endif
fold_rtx (rtx x, rtx_insn *insn)
{
enum rtx_code code;
- enum machine_mode mode;
+ machine_mode mode;
const char *fmt;
int i;
rtx new_rtx = 0;
/* The mode of the first operand of X. We need this for sign and zero
extends. */
- enum machine_mode mode_arg0;
+ machine_mode mode_arg0;
if (x == 0)
return x;
if (fmt[i] == 'e')
{
rtx folded_arg = XEXP (x, i), const_arg;
- enum machine_mode mode_arg = GET_MODE (folded_arg);
+ machine_mode mode_arg = GET_MODE (folded_arg);
switch (GET_CODE (folded_arg))
{
{
struct table_elt *p0, *p1;
rtx true_rtx, false_rtx;
- enum machine_mode mode_arg1;
+ machine_mode mode_arg1;
if (SCALAR_FLOAT_MODE_P (mode))
{
if (GET_CODE (x) == SUBREG)
{
- enum machine_mode mode = GET_MODE (x);
- enum machine_mode imode = GET_MODE (SUBREG_REG (x));
+ machine_mode mode = GET_MODE (x);
+ machine_mode imode = GET_MODE (SUBREG_REG (x));
rtx new_rtx;
/* See if we previously assigned a constant value to this SUBREG. */
int cond_known_true;
rtx op0, op1;
rtx set;
- enum machine_mode mode, mode0, mode1;
+ machine_mode mode, mode0, mode1;
int reversed_nonequality = 0;
enum rtx_code code;
MODE, and we should assume OP has MODE iff it is naturally modeless. */
static rtx
-record_jump_cond_subreg (enum machine_mode mode, rtx op)
+record_jump_cond_subreg (machine_mode mode, rtx op)
{
- enum machine_mode op_mode = GET_MODE (op);
+ machine_mode op_mode = GET_MODE (op);
if (op_mode == mode || op_mode == VOIDmode)
return op;
return lowpart_subreg (mode, op, op_mode);
above function and called recursively. */
static void
-record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
+record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
rtx op1, int reversed_nonequality)
{
unsigned op0_hash, op1_hash;
/* Note that GET_MODE (op0) may not equal MODE. */
if (code == EQ && paradoxical_subreg_p (op0))
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
rtx tem = record_jump_cond_subreg (inner_mode, op1);
if (tem)
record_jump_cond (code, mode, SUBREG_REG (op0), tem,
if (code == EQ && paradoxical_subreg_p (op1))
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
rtx tem = record_jump_cond_subreg (inner_mode, op0);
if (tem)
record_jump_cond (code, mode, SUBREG_REG (op1), tem,
&& (GET_MODE_SIZE (GET_MODE (op0))
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
rtx tem = record_jump_cond_subreg (inner_mode, op1);
if (tem)
record_jump_cond (code, mode, SUBREG_REG (op0), tem,
&& (GET_MODE_SIZE (GET_MODE (op1))
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
rtx tem = record_jump_cond_subreg (inner_mode, op0);
if (tem)
record_jump_cond (code, mode, SUBREG_REG (op1), tem,
rtx src, dest;
rtx src_folded;
struct table_elt *elt = 0, *p;
- enum machine_mode mode;
+ machine_mode mode;
rtx src_eqv_here;
rtx src_const = 0;
rtx src_related = 0;
if (src_eqv)
{
- enum machine_mode eqvmode = mode;
+ machine_mode eqvmode = mode;
if (GET_CODE (dest) == STRICT_LOW_PART)
eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
do_not_record = 0;
&& GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_PRECISION (mode) < BITS_PER_WORD)
{
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
for (wider_mode = GET_MODE_WIDER_MODE (mode);
wider_mode != VOIDmode
&& GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD)
{
- enum machine_mode tmode;
+ machine_mode tmode;
rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
for (tmode = GET_MODE_WIDER_MODE (mode);
{
struct rtx_def memory_extend_buf;
rtx memory_extend_rtx = &memory_extend_buf;
- enum machine_mode tmode;
+ machine_mode tmode;
/* Set what we are trying to extend and the operation it might
have been extended with. */
struct table_elt *elt;
struct table_elt *classp = sets[0].src_elt;
rtx dest = SET_DEST (sets[0].rtl);
- enum machine_mode eqvmode = GET_MODE (dest);
+ machine_mode eqvmode = GET_MODE (dest);
if (GET_CODE (dest) == STRICT_LOW_PART)
{
struct table_elt *classp = src_eqv_elt;
rtx src = sets[i].src;
rtx dest = SET_DEST (sets[i].rtl);
- enum machine_mode mode
+ machine_mode mode
= GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
/* It's possible that we have a source value known to be
{
rtx x = sets[i].inner_dest;
struct table_elt *elt;
- enum machine_mode mode;
+ machine_mode mode;
unsigned hash;
if (MEM_P (x))
>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
&& sets[i].src_elt != 0)
{
- enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
+ machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
struct table_elt *elt, *classp = 0;
for (elt = sets[i].src_elt->first_same_value; elt;
We may have more than one duplicate which we can eliminate, and we
try to find a mode which will work for multiple duplicates. */
-static enum machine_mode
+static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
bool can_change_mode)
{
bool found_equiv;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int insn_count;
edge e;
rtx_insn *insns[2];
- enum machine_mode modes[2];
+ machine_mode modes[2];
rtx_insn *last_insns[2];
unsigned int i;
rtx newreg;
&& REGNO (SET_DEST (set)) == REGNO (cc_reg))
{
bool found;
- enum machine_mode set_mode;
- enum machine_mode comp_mode;
+ machine_mode set_mode;
+ machine_mode comp_mode;
found = false;
set_mode = GET_MODE (SET_SRC (set));
further blocks and this block. */
if (insn == end)
{
- enum machine_mode submode;
+ machine_mode submode;
submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
if (submode != VOIDmode)
rtx_insn *insn;
rtx_insn *cc_src_insn;
rtx cc_src;
- enum machine_mode mode;
- enum machine_mode orig_mode;
+ machine_mode mode;
+ machine_mode orig_mode;
/* Look for blocks which end with a conditional jump based on a
condition code register. Then look for the instruction which
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
-static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
-static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
-static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
+static int rtx_equal_for_cselib_1 (rtx, rtx, machine_mode);
+static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
+static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
-static void cselib_invalidate_regno (unsigned int, enum machine_mode);
+static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
struct compare_type {
/* The rtx value and its mode (needed separately for constant
integers). */
- enum machine_mode mode;
+ machine_mode mode;
rtx x;
/* The mode of the contaning MEM, if any, otherwise VOIDmode. */
- enum machine_mode memmode;
+ machine_mode memmode;
};
static inline hashval_t hash (const value_type *);
static inline bool equal (const value_type *, const compare_type *);
{
struct elt_loc_list *l;
rtx x = x_arg->x;
- enum machine_mode mode = x_arg->mode;
- enum machine_mode memmode = x_arg->memmode;
+ machine_mode mode = x_arg->mode;
+ machine_mode memmode = x_arg->memmode;
if (mode != GET_MODE (v->val_rtx))
return false;
MEMMODE should specify the mode of the MEM. */
static cselib_val **
-cselib_find_slot (enum machine_mode mode, rtx x, hashval_t hash,
- enum insert_option insert, enum machine_mode memmode)
+cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
+ enum insert_option insert, machine_mode memmode)
{
cselib_val **slot = NULL;
cselib_hasher::compare_type lookup = { mode, x, memmode };
set is not known, or the value was already clobbered, return
VOIDmode. */
-enum machine_mode
+machine_mode
cselib_reg_set_mode (const_rtx x)
{
if (!REG_P (x))
storing the offset, if any, in *OFF. */
static rtx
-autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
+autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
switch (GET_CODE (x))
{
addresses, MEMMODE should be VOIDmode. */
static int
-rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
+rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode)
{
enum rtx_code code;
const char *fmt;
in a comparison anyway, since relying on hash differences is unsafe. */
static unsigned int
-cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
+cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
cselib_val *e;
int i, j;
value is MODE. */
static inline cselib_val *
-new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
+new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
- enum machine_mode mode = GET_MODE (x);
- enum machine_mode addr_mode;
+ machine_mode mode = GET_MODE (x);
+ machine_mode addr_mode;
cselib_val **slot;
cselib_val *addr;
cselib_val *mem_elt;
int i, j;
RTX_CODE code;
const char *format_ptr;
- enum machine_mode mode;
+ machine_mode mode;
code = GET_CODE (orig);
If X is within a MEM, MEMMODE must be the mode of the MEM. */
rtx
-cselib_subst_to_values (rtx x, enum machine_mode memmode)
+cselib_subst_to_values (rtx x, machine_mode memmode)
{
enum rtx_code code = GET_CODE (x);
const char *fmt = GET_RTX_FORMAT (code);
/* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
rtx
-cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx_insn *insn)
+cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
rtx ret;
gcc_assert (!cselib_current_insn);
we're tracking autoinc expressions. */
static cselib_val *
-cselib_lookup_1 (rtx x, enum machine_mode mode,
- int create, enum machine_mode memmode)
+cselib_lookup_1 (rtx x, machine_mode mode,
+ int create, machine_mode memmode)
{
cselib_val **slot;
cselib_val *e;
/* Wrapper for cselib_lookup, that indicates X is in INSN. */
cselib_val *
-cselib_lookup_from_insn (rtx x, enum machine_mode mode,
- int create, enum machine_mode memmode, rtx_insn *insn)
+cselib_lookup_from_insn (rtx x, machine_mode mode,
+ int create, machine_mode memmode, rtx_insn *insn)
{
cselib_val *ret;
maintains invariants related with debug insns. */
cselib_val *
-cselib_lookup (rtx x, enum machine_mode mode,
- int create, enum machine_mode memmode)
+cselib_lookup (rtx x, machine_mode mode,
+ int create, machine_mode memmode)
{
cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
invalidating call clobbered registers across a call. */
static void
-cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
+cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
unsigned int endregno;
unsigned int i;
sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
if (MEM_P (dest))
{
- enum machine_mode address_mode = get_address_mode (dest);
+ machine_mode address_mode = get_address_mode (dest);
sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
address_mode, 1,
extern void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
int n_sets);
-extern cselib_val *cselib_lookup (rtx, enum machine_mode,
- int, enum machine_mode);
-extern cselib_val *cselib_lookup_from_insn (rtx, enum machine_mode,
- int, enum machine_mode, rtx_insn *);
+extern cselib_val *cselib_lookup (rtx, machine_mode,
+ int, machine_mode);
+extern cselib_val *cselib_lookup_from_insn (rtx, machine_mode,
+ int, machine_mode, rtx_insn *);
extern void cselib_init (int);
extern void cselib_clear_table (void);
extern void cselib_finish (void);
extern void cselib_process_insn (rtx_insn *);
extern bool fp_setter_insn (rtx);
-extern enum machine_mode cselib_reg_set_mode (const_rtx);
+extern machine_mode cselib_reg_set_mode (const_rtx);
extern int rtx_equal_for_cselib_p (rtx, rtx);
extern int references_value_p (const_rtx, int);
extern rtx cselib_expand_value_rtx (rtx, bitmap, int);
cselib_expand_callback, void *);
extern bool cselib_dummy_expand_value_rtx_cb (rtx, bitmap, int,
cselib_expand_callback, void *);
-extern rtx cselib_subst_to_values (rtx, enum machine_mode);
-extern rtx cselib_subst_to_values_from_insn (rtx, enum machine_mode, rtx_insn *);
+extern rtx cselib_subst_to_values (rtx, machine_mode);
+extern rtx cselib_subst_to_values_from_insn (rtx, machine_mode, rtx_insn *);
extern void cselib_invalidate_rtx (rtx);
extern void cselib_reset_table (unsigned int);
case ARRAY_RANGE_REF:
case BIT_FIELD_REF:
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
tree offset, tem;
int volatilep = 0, unsignedp = 0;
FOR_EACH_INSN_DEF (def, insn)
{
- enum machine_mode mode = GET_MODE (DF_REF_REG (def));
+ machine_mode mode = GET_MODE (DF_REF_REG (def));
if (GET_MODE_CLASS (mode) == MODE_CC)
return true;
{
rtx orig_reg = DF_REF_REG (ref);
rtx reg = orig_reg;
- enum machine_mode reg_mode;
+ machine_mode reg_mode;
unsigned regno;
/* Left at -1 for whole accesses. */
int which_subword = -1;
static void
decimal_to_binary (REAL_VALUE_TYPE *to, const REAL_VALUE_TYPE *from,
- enum machine_mode mode)
+ machine_mode mode)
{
char string[256];
const decimal128 *const d128 = (const decimal128 *) from->sig;
binary and decimal types. */
void
-decimal_real_convert (REAL_VALUE_TYPE *r, enum machine_mode mode,
+decimal_real_convert (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *a)
{
const struct real_format *fmt = REAL_MODE_FORMAT (mode);
If SIGN is nonzero, R is set to the most negative finite value. */
void
-decimal_real_maxval (REAL_VALUE_TYPE *r, int sign, enum machine_mode mode)
+decimal_real_maxval (REAL_VALUE_TYPE *r, int sign, machine_mode mode)
{
const char *max;
int decimal_do_compare (const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, int);
void decimal_real_from_string (REAL_VALUE_TYPE *, const char *);
void decimal_round_for_format (const struct real_format *, REAL_VALUE_TYPE *);
-void decimal_real_convert (REAL_VALUE_TYPE *, enum machine_mode, const REAL_VALUE_TYPE *);
+void decimal_real_convert (REAL_VALUE_TYPE *, machine_mode, const REAL_VALUE_TYPE *);
void decimal_real_to_decimal (char *, const REAL_VALUE_TYPE *, size_t, size_t, int);
void decimal_do_fix_trunc (REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *);
-void decimal_real_maxval (REAL_VALUE_TYPE *, int, enum machine_mode);
+void decimal_real_maxval (REAL_VALUE_TYPE *, int, machine_mode);
wide_int decimal_real_to_integer (const REAL_VALUE_TYPE *, bool *, int);
HOST_WIDE_INT decimal_real_to_integer (const REAL_VALUE_TYPE *);
int
commutative_integer_operator (x, mode)
rtx x;
- enum machine_mode mode;
+ machine_mode mode;
@{
enum rtx_code code = GET_CODE (x);
if (GET_MODE (x) != mode)
@section Machine Modes
@cindex machine modes
-@findex enum machine_mode
+@findex machine_mode
A machine mode describes a size of data object and the representation used
for it. In the C code, machine modes are represented by an enumeration
-type, @code{enum machine_mode}, defined in @file{machmode.def}. Each RTL
+type, @code{machine_mode}, defined in @file{machmode.def}. Each RTL
expression has room for a machine mode and so do certain kinds of tree
expressions (declarations and types, to be precise).
Do not define this macro if it would never modify @var{m}.
@end defmac
-@deftypefn {Target Hook} {enum machine_mode} TARGET_PROMOTE_FUNCTION_MODE (const_tree @var{type}, enum machine_mode @var{mode}, int *@var{punsignedp}, const_tree @var{funtype}, int @var{for_return})
+@deftypefn {Target Hook} machine_mode TARGET_PROMOTE_FUNCTION_MODE (const_tree @var{type}, machine_mode @var{mode}, int *@var{punsignedp}, const_tree @var{funtype}, int @var{for_return})
Like @code{PROMOTE_MODE}, but it is applied to outgoing function arguments or
function return values. The target hook should return the new mode
and possibly change @code{*@var{punsignedp}} if the promotion should
The default is @code{false}.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_MEMBER_TYPE_FORCES_BLK (const_tree @var{field}, enum machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_MEMBER_TYPE_FORCES_BLK (const_tree @var{field}, machine_mode @var{mode})
Return true if a structure, union or array containing @var{field} should
be accessed using @code{BLKMODE}.
@end defmac
@defmac STACK_SAVEAREA_MODE (@var{save_level})
-If defined, an expression of type @code{enum machine_mode} that
+If defined, an expression of type @code{machine_mode} that
specifies the mode of the save area operand of a
@code{save_stack_@var{level}} named pattern (@pxref{Standard Names}).
@var{save_level} is one of @code{SAVE_BLOCK}, @code{SAVE_FUNCTION}, or
@end defmac
@defmac STACK_SIZE_MODE
-If defined, an expression of type @code{enum machine_mode} that
+If defined, an expression of type @code{machine_mode} that
specifies the mode of the size increment operand of an
@code{allocate_stack} named pattern (@pxref{Standard Names}).
pattern needs to support both a 32- and a 64-bit mode.
@end defmac
-@deftypefn {Target Hook} {enum machine_mode} TARGET_LIBGCC_CMP_RETURN_MODE (void)
+@deftypefn {Target Hook} machine_mode TARGET_LIBGCC_CMP_RETURN_MODE (void)
This target hook should return the mode to be used for the return value
of compare instructions expanded to libgcc calls. If not defined
@code{word_mode} is returned which is the right choice for a majority of
targets.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_LIBGCC_SHIFT_COUNT_MODE (void)
+@deftypefn {Target Hook} machine_mode TARGET_LIBGCC_SHIFT_COUNT_MODE (void)
This target hook should return the mode to be used for the shift count operand
of shift instructions expanded to libgcc calls. If not defined
@code{word_mode} is returned which is the right choice for a majority of
targets.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_UNWIND_WORD_MODE (void)
+@deftypefn {Target Hook} machine_mode TARGET_UNWIND_WORD_MODE (void)
Return machine mode to be used for @code{_Unwind_Word} type.
The default is to use @code{word_mode}.
@end deftypefn
require the macro to do something nontrivial.
@end defmac
-@deftypefn {Target Hook} reg_class_t TARGET_SECONDARY_RELOAD (bool @var{in_p}, rtx @var{x}, reg_class_t @var{reload_class}, enum machine_mode @var{reload_mode}, secondary_reload_info *@var{sri})
+@deftypefn {Target Hook} reg_class_t TARGET_SECONDARY_RELOAD (bool @var{in_p}, rtx @var{x}, reg_class_t @var{reload_class}, machine_mode @var{reload_mode}, secondary_reload_info *@var{sri})
Many machines have some registers that cannot be copied directly to or
from memory or even from other types of registers. An example is the
@samp{MQ} register, which on most machines, can only be copied to or
pressure.
@end deftypefn
-@deftypefn {Target Hook} {unsigned char} TARGET_CLASS_MAX_NREGS (reg_class_t @var{rclass}, enum machine_mode @var{mode})
+@deftypefn {Target Hook} {unsigned char} TARGET_CLASS_MAX_NREGS (reg_class_t @var{rclass}, machine_mode @var{mode})
A target hook returns the maximum number of consecutive registers
of class @var{rclass} needed to hold a value of mode @var{mode}.
A target hook which returns true if an address with the same structure can have different maximal legitimate displacement. For example, the displacement can depend on memory mode or on operand combinations in the insn. The default version of this target hook returns always false.
@end deftypefn
-@deftypefn {Target Hook} reg_class_t TARGET_SPILL_CLASS (reg_class_t, enum @var{machine_mode})
+@deftypefn {Target Hook} reg_class_t TARGET_SPILL_CLASS (reg_class_t, @var{machine_mode})
This hook defines a class of registers which could be used for spilling pseudos of the given mode and class, or @code{NO_REGS} if only memory should be used. Not defining this hook is equivalent to returning @code{NO_REGS} for all inputs.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_CSTORE_MODE (enum insn_code @var{icode})
+@deftypefn {Target Hook} machine_mode TARGET_CSTORE_MODE (enum insn_code @var{icode})
This hook defines the machine mode to use for the boolean result of conditional store patterns. The ICODE argument is the instruction code for the cstore being performed. Not defining this hook is the same as accepting the mode encoded into operand 0 of the cstore expander patterns.
@end deftypefn
types of arguments are passed in registers or how they are arranged in
the stack.
-@deftypefn {Target Hook} rtx TARGET_FUNCTION_ARG (cumulative_args_t @var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} rtx TARGET_FUNCTION_ARG (cumulative_args_t @var{ca}, machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
Return an RTX indicating whether a function argument is passed in a
register and if so, which register.
a register.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_MUST_PASS_IN_STACK (enum machine_mode @var{mode}, const_tree @var{type})
+@deftypefn {Target Hook} bool TARGET_MUST_PASS_IN_STACK (machine_mode @var{mode}, const_tree @var{type})
This target hook should return @code{true} if we should not pass @var{type}
solely in registers. The file @file{expr.h} defines a
definition that is usually appropriate, refer to @file{expr.h} for additional
documentation.
@end deftypefn
-@deftypefn {Target Hook} rtx TARGET_FUNCTION_INCOMING_ARG (cumulative_args_t @var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} rtx TARGET_FUNCTION_INCOMING_ARG (cumulative_args_t @var{ca}, machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
Define this hook if the target machine has ``register windows'', so
that the register in which a function sees an argument is not
necessarily the same as the one in which the caller passed the
This hook is called at the start of register allocation.
@end deftypefn
-@deftypefn {Target Hook} int TARGET_ARG_PARTIAL_BYTES (cumulative_args_t @var{cum}, enum machine_mode @var{mode}, tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} int TARGET_ARG_PARTIAL_BYTES (cumulative_args_t @var{cum}, machine_mode @var{mode}, tree @var{type}, bool @var{named})
This target hook returns the number of bytes at the beginning of an
argument that must be put in registers. The value must be zero for
arguments that are passed entirely in registers or that are entirely
@code{TARGET_FUNCTION_INCOMING_ARG}, for the called function.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_PASS_BY_REFERENCE (cumulative_args_t @var{cum}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} bool TARGET_PASS_BY_REFERENCE (cumulative_args_t @var{cum}, machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
This target hook should return @code{true} if an argument at the
position indicated by @var{cum} should be passed by reference. This
predicate is queried after target independent reasons for being
to that type.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_CALLEE_COPIES (cumulative_args_t @var{cum}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} bool TARGET_CALLEE_COPIES (cumulative_args_t @var{cum}, machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
The function argument described by the parameters to this hook is
known to be passed by reference. The hook should return true if the
function argument should be copied by the callee instead of copied
@c --mew 5feb93 i switched the order of the sentences. --mew 10feb93
@end defmac
-@deftypefn {Target Hook} void TARGET_FUNCTION_ARG_ADVANCE (cumulative_args_t @var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} void TARGET_FUNCTION_ARG_ADVANCE (cumulative_args_t @var{ca}, machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
This hook updates the summarizer variable pointed to by @var{ca} to
advance past an argument in the argument list. The values @var{mode},
@var{type} and @var{named} describe that argument. Once this is done,
required.
@end defmac
-@deftypefn {Target Hook} {unsigned int} TARGET_FUNCTION_ARG_BOUNDARY (enum machine_mode @var{mode}, const_tree @var{type})
+@deftypefn {Target Hook} {unsigned int} TARGET_FUNCTION_ARG_BOUNDARY (machine_mode @var{mode}, const_tree @var{type})
This hook returns the alignment boundary, in bits, of an argument
with the specified mode and type. The default hook returns
@code{PARM_BOUNDARY} for all arguments.
@end deftypefn
-@deftypefn {Target Hook} {unsigned int} TARGET_FUNCTION_ARG_ROUND_BOUNDARY (enum machine_mode @var{mode}, const_tree @var{type})
+@deftypefn {Target Hook} {unsigned int} TARGET_FUNCTION_ARG_ROUND_BOUNDARY (machine_mode @var{mode}, const_tree @var{type})
Normally, the size of an argument is rounded up to @code{PARM_BOUNDARY},
which is the default value for this hook. You can define this hook to
return a different value if an argument size must be rounded to a larger
@code{gimplify.c:gimplify_expr}.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_VALID_POINTER_MODE (enum machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_VALID_POINTER_MODE (machine_mode @var{mode})
Define this to return nonzero if the port can handle pointers
with machine mode @var{mode}. The default version of this
hook returns true for both @code{ptr_mode} and @code{Pmode}.
Define this to return nonzero if the memory reference @var{ref} may alias with the system C library errno location. The default version of this hook assumes the system C library errno location is either a declaration of type int or accessed by dereferencing a pointer to int.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_SCALAR_MODE_SUPPORTED_P (enum machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_SCALAR_MODE_SUPPORTED_P (machine_mode @var{mode})
Define this to return nonzero if the port is prepared to handle
insns involving scalar mode @var{mode}. For a scalar mode to be
considered supported, all the basic arithmetic and comparisons
code in @file{optabs.c}.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_VECTOR_MODE_SUPPORTED_P (enum machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_VECTOR_MODE_SUPPORTED_P (machine_mode @var{mode})
Define this to return nonzero if the port is prepared to handle
insns involving vector mode @var{mode}. At the very least, it
must have move patterns for this mode.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_ARRAY_MODE_SUPPORTED_P (enum machine_mode @var{mode}, unsigned HOST_WIDE_INT @var{nelems})
+@deftypefn {Target Hook} bool TARGET_ARRAY_MODE_SUPPORTED_P (machine_mode @var{mode}, unsigned HOST_WIDE_INT @var{nelems})
Return true if GCC should try to use a scalar mode to store an array
of @var{nelems} elements, given that each element has mode @var{mode}.
Returning true here overrides the usual @code{MAX_FIXED_MODE} limit
@code{int8x8x3_t}s in registers rather than forcing them onto the stack.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_LIBGCC_FLOATING_MODE_SUPPORTED_P (enum machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_LIBGCC_FLOATING_MODE_SUPPORTED_P (machine_mode @var{mode})
Define this to return nonzero if libgcc provides support for the
floating-point mode @var{mode}, which is known to pass
@code{TARGET_SCALAR_MODE_SUPPORTED_P}. The default version of this
@code{XFmode} and @code{TFmode}, if such modes exist.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_SMALL_REGISTER_CLASSES_FOR_MODE_P (enum machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_SMALL_REGISTER_CLASSES_FOR_MODE_P (machine_mode @var{mode})
Define this to return nonzero for machine modes for which the port has
small register classes. If this target hook returns nonzero for a given
@var{mode}, the compiler will try to minimize the lifetime of registers
compiled.
@end defmac
-@deftypefn {Target Hook} rtx TARGET_LIBCALL_VALUE (enum machine_mode @var{mode}, const_rtx @var{fun})
+@deftypefn {Target Hook} rtx TARGET_LIBCALL_VALUE (machine_mode @var{mode}, const_rtx @var{fun})
Define this hook if the back-end needs to know the name of the libcall
function in order to determine where the result should be returned.
nothing when you use @option{-freg-struct-return} mode.
@end defmac
-@deftypefn {Target Hook} {enum machine_mode} TARGET_GET_RAW_RESULT_MODE (int @var{regno})
+@deftypefn {Target Hook} machine_mode TARGET_GET_RAW_RESULT_MODE (int @var{regno})
This target hook returns the mode to be used when accessing raw return registers in @code{__builtin_return}. Define this macro if the value in @var{reg_raw_mode} is not correct.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_GET_RAW_ARG_MODE (int @var{regno})
+@deftypefn {Target Hook} machine_mode TARGET_GET_RAW_ARG_MODE (int @var{regno})
This target hook returns the mode to be used when accessing raw argument registers in @code{__builtin_apply_args}. Define this macro if the value in @var{reg_raw_mode} is not correct.
@end deftypefn
to use as the return of @code{__builtin_saveregs}.
@end deftypefn
-@deftypefn {Target Hook} void TARGET_SETUP_INCOMING_VARARGS (cumulative_args_t @var{args_so_far}, enum machine_mode @var{mode}, tree @var{type}, int *@var{pretend_args_size}, int @var{second_time})
+@deftypefn {Target Hook} void TARGET_SETUP_INCOMING_VARARGS (cumulative_args_t @var{args_so_far}, machine_mode @var{mode}, tree @var{type}, int *@var{pretend_args_size}, int @var{second_time})
This target hook offers an alternative to using
@code{__builtin_saveregs} and defining the hook
@code{TARGET_EXPAND_BUILTIN_SAVEREGS}. Use it to store the anonymous
accept.
@end defmac
-@deftypefn {Target Hook} bool TARGET_LEGITIMATE_ADDRESS_P (enum machine_mode @var{mode}, rtx @var{x}, bool @var{strict})
+@deftypefn {Target Hook} bool TARGET_LEGITIMATE_ADDRESS_P (machine_mode @var{mode}, rtx @var{x}, bool @var{strict})
A function that returns whether @var{x} (an RTX) is a legitimate memory
address on the target machine for a memory operand of mode @var{mode}.
a label_ref or symbol_ref within an UNSPEC@.
@end defmac
-@deftypefn {Target Hook} rtx TARGET_LEGITIMIZE_ADDRESS (rtx @var{x}, rtx @var{oldx}, enum machine_mode @var{mode})
+@deftypefn {Target Hook} rtx TARGET_LEGITIMIZE_ADDRESS (rtx @var{x}, rtx @var{oldx}, machine_mode @var{mode})
This hook is given an invalid memory address @var{x} for an
operand of mode @var{mode} and should try to return a valid memory
address.
The default version of this hook returns @code{false}.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_LEGITIMATE_CONSTANT_P (enum machine_mode @var{mode}, rtx @var{x})
+@deftypefn {Target Hook} bool TARGET_LEGITIMATE_CONSTANT_P (machine_mode @var{mode}, rtx @var{x})
This hook returns true if @var{x} is a legitimate constant for a
@var{mode}-mode immediate operand on the target machine. You can assume that
@var{x} satisfies @code{CONSTANT_P}, so you need not check this.
debug sections.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_CANNOT_FORCE_CONST_MEM (enum machine_mode @var{mode}, rtx @var{x})
+@deftypefn {Target Hook} bool TARGET_CANNOT_FORCE_CONST_MEM (machine_mode @var{mode}, rtx @var{x})
This hook should return true if @var{x} is of a form that cannot (or
should not) be spilled to the constant pool. @var{mode} is the mode
of @var{x}.
of TLS symbols for various targets.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_USE_BLOCKS_FOR_CONSTANT_P (enum machine_mode @var{mode}, const_rtx @var{x})
+@deftypefn {Target Hook} bool TARGET_USE_BLOCKS_FOR_CONSTANT_P (machine_mode @var{mode}, const_rtx @var{x})
This hook should return true if pool entries for constant @var{x} can
be placed in an @code{object_block} structure. @var{mode} is the mode
of @var{x}.
Return true if vector alignment is reachable (by peeling N iterations) for the given type.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_VECTORIZE_VEC_PERM_CONST_OK (enum @var{machine_mode}, const unsigned char *@var{sel})
+@deftypefn {Target Hook} bool TARGET_VECTORIZE_VEC_PERM_CONST_OK (machine_mode, const unsigned char *@var{sel})
Return true if a vector created for @code{vec_perm_const} is valid.
@end deftypefn
@var{vec_type_out} and the argument types should be @var{vec_type_in}.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT (enum machine_mode @var{mode}, const_tree @var{type}, int @var{misalignment}, bool @var{is_packed})
+@deftypefn {Target Hook} bool TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT (machine_mode @var{mode}, const_tree @var{type}, int @var{misalignment}, bool @var{is_packed})
This hook should return true if the target supports misaligned vector
store/load of a specific factor denoted in the @var{misalignment}
parameter. The vector store/load should be of machine mode @var{mode} and
parameter is true if the memory access is defined in a packed struct.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_VECTORIZE_PREFERRED_SIMD_MODE (enum machine_mode @var{mode})
+@deftypefn {Target Hook} machine_mode TARGET_VECTORIZE_PREFERRED_SIMD_MODE (machine_mode @var{mode})
This hook should return the preferred mode for vectorizing scalar
mode @var{mode}. The default is
equal to @code{word_mode}, because the vectorizer can do some
The default version of this hook returns false.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_CC_MODES_COMPATIBLE (enum machine_mode @var{m1}, enum machine_mode @var{m2})
+@deftypefn {Target Hook} machine_mode TARGET_CC_MODES_COMPATIBLE (machine_mode @var{m1}, machine_mode @var{m2})
On targets which use multiple condition code modes in class
@code{MODE_CC}, it is sometimes the case that a comparison can be
validly done in more than one mode. On such a system, define this
@code{TARGET_REGISTER_MOVE_COST} instead.
@end defmac
-@deftypefn {Target Hook} int TARGET_REGISTER_MOVE_COST (enum machine_mode @var{mode}, reg_class_t @var{from}, reg_class_t @var{to})
+@deftypefn {Target Hook} int TARGET_REGISTER_MOVE_COST (machine_mode @var{mode}, reg_class_t @var{from}, reg_class_t @var{to})
This target hook should return the cost of moving data of mode @var{mode}
from a register in class @var{from} to one in class @var{to}. The classes
are expressed using the enumeration values such as @code{GENERAL_REGS}.
@code{TARGET_MEMORY_MOVE_COST} instead.
@end defmac
-@deftypefn {Target Hook} int TARGET_MEMORY_MOVE_COST (enum machine_mode @var{mode}, reg_class_t @var{rclass}, bool @var{in})
+@deftypefn {Target Hook} int TARGET_MEMORY_MOVE_COST (machine_mode @var{mode}, reg_class_t @var{rclass}, bool @var{in})
This target hook should return the cost of moving data of mode @var{mode}
between a register of class @var{rclass} and memory; @var{in} is @code{false}
if the value is to be written to memory, @code{true} if it is to be read in.
processed, and false when @code{rtx_cost} should recurse.
@end deftypefn
-@deftypefn {Target Hook} int TARGET_ADDRESS_COST (rtx @var{address}, enum machine_mode @var{mode}, addr_space_t @var{as}, bool @var{speed})
+@deftypefn {Target Hook} int TARGET_ADDRESS_COST (rtx @var{address}, machine_mode @var{mode}, addr_space_t @var{as}, bool @var{speed})
This hook computes the cost of an addressing mode that contains
@var{address}. If not defined, the cost is computed from
the @var{address} expression and the @code{TARGET_RTX_COST} hook.
also the latencies of operations.
@end deftypevr
-@deftypefn {Target Hook} int TARGET_SCHED_REASSOCIATION_WIDTH (unsigned int @var{opc}, enum machine_mode @var{mode})
+@deftypefn {Target Hook} int TARGET_SCHED_REASSOCIATION_WIDTH (unsigned int @var{opc}, machine_mode @var{mode})
This hook is called by tree reassociator to determine a level of
parallelism required in output calculations chain.
@end deftypefn
Return the section that should be used for transactional memory clone tables.
@end deftypefn
-@deftypefn {Target Hook} {section *} TARGET_ASM_SELECT_RTX_SECTION (enum machine_mode @var{mode}, rtx @var{x}, unsigned HOST_WIDE_INT @var{align})
+@deftypefn {Target Hook} {section *} TARGET_ASM_SELECT_RTX_SECTION (machine_mode @var{mode}, rtx @var{x}, unsigned HOST_WIDE_INT @var{align})
Return the section into which a constant @var{x}, of mode @var{mode},
should be placed. You can assume that @var{x} is some kind of
constant in RTL@. The argument @var{mode} is redundant except in the
If not defined, the default is to return @code{NULL_RTX}.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_DWARF_FRAME_REG_MODE (int @var{regno})
+@deftypefn {Target Hook} machine_mode TARGET_DWARF_FRAME_REG_MODE (int @var{regno})
Given a register, this hook should return the mode which the
corresponding Dwarf frame register should have. This is normally
used to return a smaller mode than the raw mode to prevent call
@var{x} is negative, returns zero.
@end deftypefn
-@deftypefn Macro REAL_VALUE_TYPE REAL_VALUE_ATOF (const char *@var{string}, enum machine_mode @var{mode})
+@deftypefn Macro REAL_VALUE_TYPE REAL_VALUE_ATOF (const char *@var{string}, machine_mode @var{mode})
Converts @var{string} into a floating point number in the target machine's
representation for mode @var{mode}. This routine can handle both
decimal and hexadecimal floating point constants, using the syntax
c_register_addr_space ("__ea", ADDR_SPACE_EA);
@end smallexample
-@deftypefn {Target Hook} {enum machine_mode} TARGET_ADDR_SPACE_POINTER_MODE (addr_space_t @var{address_space})
+@deftypefn {Target Hook} machine_mode TARGET_ADDR_SPACE_POINTER_MODE (addr_space_t @var{address_space})
Define this to return the machine mode to use for pointers to
@var{address_space} if the target supports named address spaces.
The default version of this hook returns @code{ptr_mode} for the
generic address space only.
@end deftypefn
-@deftypefn {Target Hook} {enum machine_mode} TARGET_ADDR_SPACE_ADDRESS_MODE (addr_space_t @var{address_space})
+@deftypefn {Target Hook} machine_mode TARGET_ADDR_SPACE_ADDRESS_MODE (addr_space_t @var{address_space})
Define this to return the machine mode to use for addresses in
@var{address_space} if the target supports named address spaces.
The default version of this hook returns @code{Pmode} for the
generic address space only.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_ADDR_SPACE_VALID_POINTER_MODE (enum machine_mode @var{mode}, addr_space_t @var{as})
+@deftypefn {Target Hook} bool TARGET_ADDR_SPACE_VALID_POINTER_MODE (machine_mode @var{mode}, addr_space_t @var{as})
Define this to return nonzero if the port can handle pointers
with machine mode @var{mode} to address space @var{as}. This target
hook is the same as the @code{TARGET_VALID_POINTER_MODE} target hook,
target hooks for the given address space.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P (enum machine_mode @var{mode}, rtx @var{exp}, bool @var{strict}, addr_space_t @var{as})
+@deftypefn {Target Hook} bool TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P (machine_mode @var{mode}, rtx @var{exp}, bool @var{strict}, addr_space_t @var{as})
Define this to return true if @var{exp} is a valid address for mode
@var{mode} in the named address space @var{as}. The @var{strict}
parameter says whether strict addressing is in effect after reload has
explicit named address space support.
@end deftypefn
-@deftypefn {Target Hook} rtx TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS (rtx @var{x}, rtx @var{oldx}, enum machine_mode @var{mode}, addr_space_t @var{as})
+@deftypefn {Target Hook} rtx TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS (rtx @var{x}, rtx @var{oldx}, machine_mode @var{mode}, addr_space_t @var{as})
Define this to modify an invalid address @var{x} to be a valid address
with mode @var{mode} in the named address space @var{as}. This target
hook is the same as the @code{TARGET_LEGITIMIZE_ADDRESS} target hook,
extends.
@end defmac
-@deftypefn {Target Hook} {unsigned int} TARGET_MIN_DIVISIONS_FOR_RECIP_MUL (enum machine_mode @var{mode})
+@deftypefn {Target Hook} {unsigned int} TARGET_MIN_DIVISIONS_FOR_RECIP_MUL (machine_mode @var{mode})
When @option{-ffast-math} is in effect, GCC tries to optimize
divisions by the same divisor, by turning them into multiplications by
the reciprocal. This target hook specifies the minimum number of divisions
@end defmac
@anchor{TARGET_SHIFT_TRUNCATION_MASK}
-@deftypefn {Target Hook} {unsigned HOST_WIDE_INT} TARGET_SHIFT_TRUNCATION_MASK (enum machine_mode @var{mode})
+@deftypefn {Target Hook} {unsigned HOST_WIDE_INT} TARGET_SHIFT_TRUNCATION_MASK (machine_mode @var{mode})
This function describes how the standard shift patterns for @var{mode}
deal with shifts by negative amounts or by more than the width of the mode.
@xref{shift patterns}.
such cases may improve things.
@end defmac
-@deftypefn {Target Hook} int TARGET_MODE_REP_EXTENDED (enum machine_mode @var{mode}, enum machine_mode @var{rep_mode})
+@deftypefn {Target Hook} int TARGET_MODE_REP_EXTENDED (machine_mode @var{mode}, machine_mode @var{rep_mode})
The representation of an integral mode can be such that the values
are always extended to a wider integral mode. Return
@code{SIGN_EXTEND} if values of @var{mode} are represented in
@code{error_mark_node}.
@end deftypefn
-@deftypefn {Target Hook} rtx TARGET_EXPAND_BUILTIN (tree @var{exp}, rtx @var{target}, rtx @var{subtarget}, enum machine_mode @var{mode}, int @var{ignore})
+@deftypefn {Target Hook} rtx TARGET_EXPAND_BUILTIN (tree @var{exp}, rtx @var{target}, rtx @var{subtarget}, machine_mode @var{mode}, int @var{ignore})
Expand a call to a machine specific built-in function that was set up by
@samp{TARGET_INIT_BUILTINS}. @var{exp} is the expression for the
The default value of this hook is based on target's libc.
@end deftypefn
-@deftypefn {Target Hook} {unsigned int} TARGET_ATOMIC_ALIGN_FOR_MODE (enum machine_mode @var{mode})
+@deftypefn {Target Hook} {unsigned int} TARGET_ATOMIC_ALIGN_FOR_MODE (machine_mode @var{mode})
If defined, this function returns an appropriate alignment in bits for an atomic object of machine_mode @var{mode}. If 0 is returned then the default alignment for the specified mode is used.
@end deftypefn
@end defmac
@defmac STACK_SAVEAREA_MODE (@var{save_level})
-If defined, an expression of type @code{enum machine_mode} that
+If defined, an expression of type @code{machine_mode} that
specifies the mode of the save area operand of a
@code{save_stack_@var{level}} named pattern (@pxref{Standard Names}).
@var{save_level} is one of @code{SAVE_BLOCK}, @code{SAVE_FUNCTION}, or
@end defmac
@defmac STACK_SIZE_MODE
-If defined, an expression of type @code{enum machine_mode} that
+If defined, an expression of type @code{machine_mode} that
specifies the mode of the size increment operand of an
@code{allocate_stack} named pattern (@pxref{Standard Names}).
@var{x} is negative, returns zero.
@end deftypefn
-@deftypefn Macro REAL_VALUE_TYPE REAL_VALUE_ATOF (const char *@var{string}, enum machine_mode @var{mode})
+@deftypefn Macro REAL_VALUE_TYPE REAL_VALUE_ATOF (const char *@var{string}, machine_mode @var{mode})
Converts @var{string} into a floating point number in the target machine's
representation for mode @var{mode}. This routine can handle both
decimal and hexadecimal floating point constants, using the syntax
#include "basic-block.h"
#include "tm_p.h"
-static bool prefer_and_bit_test (enum machine_mode, int);
+static bool prefer_and_bit_test (machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
is preferred. */
static bool
-prefer_and_bit_test (enum machine_mode mode, int bitnum)
+prefer_and_bit_test (machine_mode mode, int bitnum)
{
bool speed_p;
wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));
do_jump_1 (enum tree_code code, tree op0, tree op1,
rtx if_false_label, rtx if_true_label, int prob)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx_code_label *drop_through_label = 0;
switch (code)
rtx temp;
int i;
tree type;
- enum machine_mode mode;
+ machine_mode mode;
rtx_code_label *drop_through_label = 0;
switch (code)
Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
static void
-do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
+do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
rtx op1, rtx if_false_label, rtx if_true_label,
int prob)
{
{
rtx op0 = expand_normal (swap ? treeop1 : treeop0);
rtx op1 = expand_normal (swap ? treeop0 : treeop1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
to indicate drop through. */
static void
-do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
+do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
rtx if_false_label, rtx if_true_label, int prob)
{
int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
to indicate drop through. */
static void
-do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
+do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
rtx if_false_label, rtx if_true_label, int prob)
{
int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
{
rtx op0 = expand_normal (treeop0);
rtx op1 = expand_normal (treeop1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
if_true_label, prob);
}
the conditions must be ANDed, false if they must be ORed. */
bool
-split_comparison (enum rtx_code code, enum machine_mode mode,
+split_comparison (enum rtx_code code, machine_mode mode,
enum rtx_code *code1, enum rtx_code *code2)
{
switch (code)
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
- enum machine_mode mode, rtx size, rtx if_false_label,
+ machine_mode mode, rtx size, rtx if_false_label,
rtx if_true_label, int prob)
{
rtx tem;
{
rtx op0, op1;
tree type;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp;
enum rtx_code code;
struct clear_alias_mode_holder
{
alias_set_type alias_set;
- enum machine_mode mode;
+ machine_mode mode;
};
/* This is true except if cfun->stdarg -- i.e. we cannot do
HOST_WIDE_INT *offset,
cselib_val **base)
{
- enum machine_mode address_mode = get_address_mode (mem);
+ machine_mode address_mode = get_address_mode (mem);
rtx mem_address = XEXP (mem, 0);
rtx expanded_address, address;
int expanded;
}
-static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT,
+static rtx get_stored_val (store_info_t, machine_mode, HOST_WIDE_INT,
HOST_WIDE_INT, basic_block, bool);
static rtx
find_shift_sequence (int access_size,
store_info_t store_info,
- enum machine_mode read_mode,
+ machine_mode read_mode,
int shift, bool speed, bool require_cst)
{
- enum machine_mode store_mode = GET_MODE (store_info->mem);
- enum machine_mode new_mode;
+ machine_mode store_mode = GET_MODE (store_info->mem);
+ machine_mode new_mode;
rtx read_reg = NULL;
/* Some machines like the x86 have shift insns for each size of
if not successful. If REQUIRE_CST is true, return always constant. */
static rtx
-get_stored_val (store_info_t store_info, enum machine_mode read_mode,
+get_stored_val (store_info_t store_info, machine_mode read_mode,
HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
basic_block bb, bool require_cst)
{
- enum machine_mode store_mode = GET_MODE (store_info->mem);
+ machine_mode store_mode = GET_MODE (store_info->mem);
int shift;
int access_size; /* In bytes. */
rtx read_reg;
read_info_t read_info, insn_info_t read_insn, rtx *loc,
bitmap regs_live)
{
- enum machine_mode store_mode = GET_MODE (store_info->mem);
- enum machine_mode read_mode = GET_MODE (read_info->mem);
+ machine_mode store_mode = GET_MODE (store_info->mem);
+ machine_mode read_mode = GET_MODE (read_info->mem);
rtx_insn *insns, *this_insn;
rtx read_reg;
basic_block bb;
arg != void_list_node && idx < nargs;
arg = TREE_CHAIN (arg), idx++)
{
- enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
+ machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
rtx reg, link, tmp;
reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
which has mode MODE. Initialize column C as a return address column. */
static void
-init_return_column_size (enum machine_mode mode, rtx mem, unsigned int c)
+init_return_column_size (machine_mode mode, rtx mem, unsigned int c)
{
HOST_WIDE_INT offset = c * GET_MODE_SIZE (mode);
HOST_WIDE_INT size = GET_MODE_SIZE (Pmode);
expand_builtin_init_dwarf_reg_sizes (tree address)
{
unsigned int i;
- enum machine_mode mode = TYPE_MODE (char_type_node);
+ machine_mode mode = TYPE_MODE (char_type_node);
rtx addr = expand_normal (address);
rtx mem = gen_rtx_MEM (BLKmode, addr);
bool wrote_return_column = false;
{
HOST_WIDE_INT offset = rnum * GET_MODE_SIZE (mode);
HOST_WIDE_INT size;
- enum machine_mode save_mode = targetm.dwarf_frame_reg_mode (i);
+ machine_mode save_mode = targetm.dwarf_frame_reg_mode (i);
if (dnum == DWARF_FRAME_RETURN_COLUMN)
{
static bool resolve_one_addr (rtx *);
static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
enum var_init_status);
-static dw_loc_descr_ref loc_descriptor (rtx, enum machine_mode mode,
+static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
enum var_init_status);
static dw_loc_list_ref loc_list_from_tree (tree, int);
static dw_loc_descr_ref loc_descriptor_from_tree (tree, int);
if possible, NULL otherwise. */
static dw_die_ref
-base_type_for_mode (enum machine_mode mode, bool unsignedp)
+base_type_for_mode (machine_mode mode, bool unsignedp)
{
dw_die_ref type_die;
tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
possible. */
static dw_loc_descr_ref
-convert_descriptor_to_mode (enum machine_mode mode, dw_loc_descr_ref op)
+convert_descriptor_to_mode (machine_mode mode, dw_loc_descr_ref op)
{
- enum machine_mode outer_mode = mode;
+ machine_mode outer_mode = mode;
dw_die_ref type_die;
dw_loc_descr_ref cvt;
static dw_loc_descr_ref
scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
- enum machine_mode mem_mode)
+ machine_mode mem_mode)
{
- enum machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
+ machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
dw_loc_descr_ref op0, op1;
int shift;
static dw_loc_descr_ref
ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
- enum machine_mode mem_mode)
+ machine_mode mem_mode)
{
- enum machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
+ machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
dw_loc_descr_ref op0, op1;
if (op_mode == VOIDmode)
/* Return location descriptor for {U,S}{MIN,MAX}. */
static dw_loc_descr_ref
-minmax_loc_descriptor (rtx rtl, enum machine_mode mode,
- enum machine_mode mem_mode)
+minmax_loc_descriptor (rtx rtl, machine_mode mode,
+ machine_mode mem_mode)
{
enum dwarf_location_atom op;
dw_loc_descr_ref op0, op1, ret;
static dw_loc_descr_ref
typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
- enum machine_mode mode, enum machine_mode mem_mode)
+ machine_mode mode, machine_mode mem_mode)
{
dw_loc_descr_ref cvt, op0, op1;
L4: DW_OP_nop */
static dw_loc_descr_ref
-clz_loc_descriptor (rtx rtl, enum machine_mode mode,
- enum machine_mode mem_mode)
+clz_loc_descriptor (rtx rtl, machine_mode mode,
+ machine_mode mem_mode)
{
dw_loc_descr_ref op0, ret, tmp;
HOST_WIDE_INT valv;
L2: DW_OP_drop */
static dw_loc_descr_ref
-popcount_loc_descriptor (rtx rtl, enum machine_mode mode,
- enum machine_mode mem_mode)
+popcount_loc_descriptor (rtx rtl, machine_mode mode,
+ machine_mode mem_mode)
{
dw_loc_descr_ref op0, ret, tmp;
dw_loc_descr_ref l1jump, l1label;
L2: DW_OP_drop DW_OP_swap DW_OP_drop */
static dw_loc_descr_ref
-bswap_loc_descriptor (rtx rtl, enum machine_mode mode,
- enum machine_mode mem_mode)
+bswap_loc_descriptor (rtx rtl, machine_mode mode,
+ machine_mode mem_mode)
{
dw_loc_descr_ref op0, ret, tmp;
dw_loc_descr_ref l1jump, l1label;
[ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
static dw_loc_descr_ref
-rotate_loc_descriptor (rtx rtl, enum machine_mode mode,
- enum machine_mode mem_mode)
+rotate_loc_descriptor (rtx rtl, machine_mode mode,
+ machine_mode mem_mode)
{
rtx rtlop1 = XEXP (rtl, 1);
dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
Return 0 if we can't represent the location. */
dw_loc_descr_ref
-mem_loc_descriptor (rtx rtl, enum machine_mode mode,
- enum machine_mode mem_mode,
+mem_loc_descriptor (rtx rtl, machine_mode mode,
+ machine_mode mem_mode,
enum var_init_status initialized)
{
dw_loc_descr_ref mem_loc_result = NULL;
masking. */
&& GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
{
- enum machine_mode imode = GET_MODE (XEXP (rtl, 0));
+ machine_mode imode = GET_MODE (XEXP (rtl, 0));
mem_loc_result = op0;
add_loc_descr (&mem_loc_result,
int_loc_descriptor (GET_MODE_MASK (imode)));
|| GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT))
{
dw_die_ref type_die = base_type_for_mode (mode, 1);
- enum machine_mode amode;
+ machine_mode amode;
if (type_die == NULL)
return NULL;
amode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
If we don't know how to describe it, return 0. */
static dw_loc_descr_ref
-loc_descriptor (rtx rtl, enum machine_mode mode,
+loc_descriptor (rtx rtl, machine_mode mode,
enum var_init_status initialized)
{
dw_loc_descr_ref loc_result = NULL;
{
rtvec par_elems = XVEC (rtl, 0);
int num_elem = GET_NUM_ELEM (par_elems);
- enum machine_mode mode;
+ machine_mode mode;
int i;
/* Create the first one, so we have something to add to. */
= ggc_vec_alloc<unsigned char> (length * elt_size);
unsigned int i;
unsigned char *p;
- enum machine_mode imode = GET_MODE_INNER (mode);
+ machine_mode imode = GET_MODE_INNER (mode);
gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
switch (GET_MODE_CLASS (mode))
{
int have_address = 0;
dw_loc_descr_ref descr;
- enum machine_mode mode;
+ machine_mode mode;
if (want_address != 2)
{
{
tree obj, offset;
HOST_WIDE_INT bitsize, bitpos, bytepos;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep = 0;
dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
ret = new_addr_loc_descr (rtl, dtprel_false);
else
{
- enum machine_mode mode, mem_mode;
+ machine_mode mode, mem_mode;
/* Certain constructs can only be represented at top-level. */
if (want_address == 2)
{
tree obj, offset;
HOST_WIDE_INT bitsize, bitpos, bytepos;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep = 0;
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
constant requires more than one word in order to be adequately
represented. */
{
- enum machine_mode mode = GET_MODE (rtl);
+ machine_mode mode = GET_MODE (rtl);
if (TARGET_SUPPORTS_WIDE_INT == 0 && !SCALAR_FLOAT_MODE_P (mode))
add_AT_double (die, DW_AT_const_value,
case CONST_VECTOR:
{
- enum machine_mode mode = GET_MODE (rtl);
+ machine_mode mode = GET_MODE (rtl);
unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
unsigned int length = CONST_VECTOR_NUNITS (rtl);
unsigned char *array
= ggc_vec_alloc<unsigned char> (length * elt_size);
unsigned int i;
unsigned char *p;
- enum machine_mode imode = GET_MODE_INNER (mode);
+ machine_mode imode = GET_MODE_INNER (mode);
switch (GET_MODE_CLASS (mode))
{
{
tree enttype = TREE_TYPE (type);
tree domain = TYPE_DOMAIN (type);
- enum machine_mode mode = TYPE_MODE (enttype);
+ machine_mode mode = TYPE_MODE (enttype);
if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1
&& domain
{
tree declared_type = TREE_TYPE (decl);
tree passed_type = DECL_ARG_TYPE (decl);
- enum machine_mode dmode = TYPE_MODE (declared_type);
- enum machine_mode pmode = TYPE_MODE (passed_type);
+ machine_mode dmode = TYPE_MODE (declared_type);
+ machine_mode pmode = TYPE_MODE (passed_type);
/* This decl represents a formal parameter which was optimized out.
Note that DECL_INCOMING_RTL may be NULL in here, but we handle
&& (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
< UNITS_PER_WORD))
{
- enum machine_mode addr_mode = get_address_mode (rtl);
+ machine_mode addr_mode = get_address_mode (rtl);
int offset = (UNITS_PER_WORD
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
&& GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl))
&& BYTES_BIG_ENDIAN)
{
- enum machine_mode addr_mode = get_address_mode (rtl);
+ machine_mode addr_mode = get_address_mode (rtl);
int rsize = GET_MODE_SIZE (GET_MODE (rtl));
int dsize = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)));
fortran_common (tree decl, HOST_WIDE_INT *value)
{
tree val_expr, cvar;
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int unsignedp, volatilep = 0;
if (TREE_CODE (type) == ARRAY_TYPE)
{
tree enttype = TREE_TYPE (type);
- enum machine_mode mode = TYPE_MODE (enttype);
+ machine_mode mode = TYPE_MODE (enttype);
if (GET_MODE_CLASS (mode) != MODE_INT || GET_MODE_SIZE (mode) != 1)
return false;
arg; arg = next_arg)
{
dw_loc_descr_ref reg, val;
- enum machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
+ machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
dw_die_ref cdie, tdie = NULL;
next_arg = XEXP (arg, 1);
extern struct dw_loc_descr_node *build_cfa_aligned_loc
(dw_cfa_location *, HOST_WIDE_INT offset, HOST_WIDE_INT alignment);
extern struct dw_loc_descr_node *mem_loc_descriptor
- (rtx, enum machine_mode mode, enum machine_mode mem_mode,
+ (rtx, machine_mode mode, machine_mode mem_mode,
enum var_init_status);
extern bool loc_descr_equal_p (dw_loc_descr_ref, dw_loc_descr_ref);
extern dw_fde_ref dwarf2out_alloc_current_fde (void);
/* Commonly used modes. */
-enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
-enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
-enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
-enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
+machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
+machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
+machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
+machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
/* Datastructures maintained for currently processed function in RTL form. */
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
-static rtx gen_const_vector (enum machine_mode, int);
+static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently proceeded by try_split.
frame_pointer_rtx). */
rtx
-gen_raw_REG (enum machine_mode mode, int regno)
+gen_raw_REG (machine_mode mode, int regno)
{
rtx x = gen_rtx_raw_REG (mode, regno);
ORIGINAL_REGNO (x) = regno;
special_rtx in gengenrtl.c as well. */
rtx_expr_list *
-gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
+gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
expr_list));
}
rtx_insn_list *
-gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
+gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
insn_list));
}
rtx_insn *
-gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
+gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
basic_block bb, rtx pattern, int location, int code,
rtx reg_notes)
{
}
rtx
-gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
+gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
void **slot;
}
rtx
-gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
+gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
return GEN_INT (trunc_int_for_mode (c, mode));
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
VALUE in mode MODE. */
rtx
-const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
+const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
rtx real = rtx_alloc (CONST_DOUBLE);
PUT_MODE (real, mode);
VALUE in mode MODE. */
rtx
-const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
+const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
rtx fixed = rtx_alloc (CONST_FIXED);
PUT_MODE (fixed, mode);
(if TARGET_SUPPORTS_WIDE_INT). */
rtx
-immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
+immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
unsigned int len = v.get_len ();
unsigned int prec = GET_MODE_PRECISION (mode);
CONST_DOUBLE_FROM_REAL_VALUE. */
rtx
-immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
+immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
rtx value;
unsigned int i;
#endif
rtx
-gen_rtx_REG (enum machine_mode mode, unsigned int regno)
+gen_rtx_REG (machine_mode mode, unsigned int regno)
{
/* In case the MD file explicitly references the frame pointer, have
all such references point to the same frame pointer. This is
}
rtx
-gen_rtx_MEM (enum machine_mode mode, rtx addr)
+gen_rtx_MEM (machine_mode mode, rtx addr)
{
rtx rt = gen_rtx_raw_MEM (mode, addr);
/* Generate a memory referring to non-trapping constant memory. */
rtx
-gen_const_mem (enum machine_mode mode, rtx addr)
+gen_const_mem (machine_mode mode, rtx addr)
{
rtx mem = gen_rtx_MEM (mode, addr);
MEM_READONLY_P (mem) = 1;
save areas. */
rtx
-gen_frame_mem (enum machine_mode mode, rtx addr)
+gen_frame_mem (machine_mode mode, rtx addr)
{
rtx mem = gen_rtx_MEM (mode, addr);
MEM_NOTRAP_P (mem) = 1;
of the fixed stack frame. For example, something which is pushed
by a target splitter. */
rtx
-gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
+gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
rtx mem = gen_rtx_MEM (mode, addr);
MEM_NOTRAP_P (mem) = 1;
this construct would be valid, and false otherwise. */
bool
-validate_subreg (enum machine_mode omode, enum machine_mode imode,
+validate_subreg (machine_mode omode, machine_mode imode,
const_rtx reg, unsigned int offset)
{
unsigned int isize = GET_MODE_SIZE (imode);
if (osize < UNITS_PER_WORD
&& ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
{
- enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
+ machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
unsigned int low_off = subreg_lowpart_offset (omode, wmode);
if (offset % UNITS_PER_WORD != low_off)
return false;
}
rtx
-gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
+gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
return gen_rtx_raw_SUBREG (mode, reg, offset);
is smaller than mode of REG, otherwise paradoxical SUBREG. */
rtx
-gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
+gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
- enum machine_mode inmode;
+ machine_mode inmode;
inmode = GET_MODE (reg);
if (inmode == VOIDmode)
}
rtx
-gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
+gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
enum var_init_status status)
{
rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
on big-endian targets. */
int
-byte_lowpart_offset (enum machine_mode outer_mode,
- enum machine_mode inner_mode)
+byte_lowpart_offset (machine_mode outer_mode,
+ machine_mode inner_mode)
{
if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
return subreg_lowpart_offset (outer_mode, inner_mode);
This pseudo is assigned the next sequential register number. */
rtx
-gen_reg_rtx (enum machine_mode mode)
+gen_reg_rtx (machine_mode mode)
{
rtx val;
unsigned int align = GET_MODE_ALIGNMENT (mode);
which makes much better code. Besides, allocating DCmode
pseudos overstrains reload on some machines like the 386. */
rtx realpart, imagpart;
- enum machine_mode partmode = GET_MODE_INNER (mode);
+ machine_mode partmode = GET_MODE_INNER (mode);
realpart = gen_reg_rtx (partmode);
imagpart = gen_reg_rtx (partmode);
added to the REG_OFFSET. */
rtx
-gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
+gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
int offset)
{
rtx new_rtx = gen_rtx_REG (mode, regno);
with OFFSET added to the REG_OFFSET. */
rtx
-gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
+gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
rtx new_rtx = gen_reg_rtx (mode);
new register is a (possibly paradoxical) lowpart of the old one. */
void
-adjust_reg_mode (rtx reg, enum machine_mode mode)
+adjust_reg_mode (rtx reg, machine_mode mode)
{
update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
PUT_MODE (reg, mode);
If this is not a case we can handle, return 0. */
rtx
-gen_lowpart_common (enum machine_mode mode, rtx x)
+gen_lowpart_common (machine_mode mode, rtx x)
{
int msize = GET_MODE_SIZE (mode);
int xsize;
int offset = 0;
- enum machine_mode innermode;
+ machine_mode innermode;
/* Unfortunately, this routine doesn't take a parameter for the mode of X,
so we have to make one up. Yuk. */
}
\f
rtx
-gen_highpart (enum machine_mode mode, rtx x)
+gen_highpart (machine_mode mode, rtx x)
{
unsigned int msize = GET_MODE_SIZE (mode);
rtx result;
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
be VOIDmode constant. */
rtx
-gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
+gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
if (GET_MODE (exp) != VOIDmode)
{
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
unsigned int
-subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
+subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
unsigned int offset = 0;
int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
/* Return offset in bytes to get OUTERMODE high part
of the value in mode INNERMODE stored in memory in target format. */
unsigned int
-subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
+subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
unsigned int offset = 0;
int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
*/
rtx
-operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
+operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
{
if (mode == VOIDmode)
mode = GET_MODE (op);
MODE is the mode of OP, in case it is CONST_INT. */
rtx
-operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
+operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
rtx result = operand_subword (op, offset, 1, mode);
The memory attributes are not changed. */
static rtx
-change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
+change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
bool inplace)
{
addr_space_t as;
way we are changing MEMREF, so we only preserve the alias set. */
rtx
-change_address (rtx memref, enum machine_mode mode, rtx addr)
+change_address (rtx memref, machine_mode mode, rtx addr)
{
rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
- enum machine_mode mmode = GET_MODE (new_rtx);
+ machine_mode mmode = GET_MODE (new_rtx);
struct mem_attrs attrs, *defattrs;
attrs = *get_mem_attrs (memref);
has no inherent size. */
rtx
-adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
+adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
int validate, int adjust_address, int adjust_object,
HOST_WIDE_INT size)
{
rtx addr = XEXP (memref, 0);
rtx new_rtx;
- enum machine_mode address_mode;
+ machine_mode address_mode;
int pbits;
struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
- enum machine_mode pointer_mode
+ machine_mode pointer_mode
= targetm.addr_space.pointer_mode (attrs.addrspace);
#endif
nonzero, the memory address is forced to be valid. */
rtx
-adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
+adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
HOST_WIDE_INT offset, int validate)
{
memref = change_address_1 (memref, VOIDmode, addr, validate, false);
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
rtx new_rtx, addr = XEXP (memref, 0);
- enum machine_mode address_mode;
+ machine_mode address_mode;
struct mem_attrs attrs, *defattrs;
attrs = *get_mem_attrs (memref);
operations plus masking logic. */
rtx
-widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
+widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
struct mem_attrs attrs;
/* Generate a vector constant for mode MODE and constant value CONSTANT. */
static rtx
-gen_const_vector (enum machine_mode mode, int constant)
+gen_const_vector (machine_mode mode, int constant)
{
rtx tem;
rtvec v;
int units, i;
- enum machine_mode inner;
+ machine_mode inner;
units = GET_MODE_NUNITS (mode);
inner = GET_MODE_INNER (mode);
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
all elements are zero, and the one vector when all elements are one. */
rtx
-gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
+gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
- enum machine_mode inner = GET_MODE_INNER (mode);
+ machine_mode inner = GET_MODE_INNER (mode);
int nunits = GET_MODE_NUNITS (mode);
rtx x;
int i;
init_emit_regs (void)
{
int i;
- enum machine_mode mode;
+ machine_mode mode;
mem_attrs *attrs;
/* Reset register attributes */
for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
{
- mode = (enum machine_mode) i;
+ mode = (machine_mode) i;
attrs = ggc_cleared_alloc<mem_attrs> ();
attrs->align = BITS_PER_UNIT;
attrs->addrspace = ADDR_SPACE_GENERIC;
byte_mode = VOIDmode;
word_mode = VOIDmode;
- for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
init_emit_once (void)
{
int i;
- enum machine_mode mode;
- enum machine_mode double_mode;
+ machine_mode mode;
+ machine_mode double_mode;
/* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
CONST_FIXED, and memory attribute hash tables. */
for (mode = MIN_MODE_PARTIAL_INT;
mode <= MAX_MODE_PARTIAL_INT;
- mode = (enum machine_mode)((int)(mode) + 1))
+ mode = (machine_mode)((int)(mode) + 1))
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
}
for (mode = MIN_MODE_PARTIAL_INT;
mode <= MAX_MODE_PARTIAL_INT;
- mode = (enum machine_mode)((int)(mode) + 1))
+ mode = (machine_mode)((int)(mode) + 1))
const_tiny_rtx[3][(int) mode] = constm1_rtx;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
}
for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
- if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
+ if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
const_tiny_rtx[0][i] = const0_rtx;
const_tiny_rtx[0][(int) BImode] = const0_rtx;
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
-gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
+gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
if (hard_reg_clobbers[mode][regno])
return hard_reg_clobbers[mode][regno];
extern rtx copy_insn_1 (rtx);
extern rtx copy_insn (rtx);
extern rtx_insn *copy_delay_slot_insn (rtx_insn *);
-extern rtx gen_int_mode (HOST_WIDE_INT, enum machine_mode);
+extern rtx gen_int_mode (HOST_WIDE_INT, machine_mode);
extern rtx_insn *emit_copy_of_insn_after (rtx_insn *, rtx_insn *);
extern void set_reg_attrs_from_value (rtx, rtx);
extern void set_reg_attrs_for_parm (rtx, rtx);
extern void set_reg_attrs_for_decl_rtl (tree t, rtx x);
-extern void adjust_reg_mode (rtx, enum machine_mode);
+extern void adjust_reg_mode (rtx, machine_mode);
extern int mem_expr_equal_p (const_tree, const_tree);
extern bool need_atomic_barrier_p (enum memmodel, bool);
static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
- enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
- enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
+ machine_mode unwind_word_mode = targetm.unwind_word_mode ();
+ machine_mode filter_mode = targetm.eh_return_filter_mode ();
eh_landing_pad lp;
rtx mem, fc, before, exc_ptr_reg, filter_reg;
rtx_insn *seq;
= expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
eh_region src
= expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
- enum machine_mode fmode = targetm.eh_return_filter_mode ();
+ machine_mode fmode = targetm.eh_return_filter_mode ();
if (dst->exc_ptr_reg == NULL)
dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
/* Truncate and perhaps sign-extend C as appropriate for MODE. */
HOST_WIDE_INT
-trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
+trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
int width = GET_MODE_PRECISION (mode);
if it must be treated as immutable. */
rtx
-plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c,
+plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
bool inplace)
{
RTX_CODE code;
a CONST. */
static rtx
-convert_memory_address_addr_space_1 (enum machine_mode to_mode ATTRIBUTE_UNUSED,
+convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
rtx x, addr_space_t as ATTRIBUTE_UNUSED,
bool in_const ATTRIBUTE_UNUSED)
{
gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
- enum machine_mode pointer_mode, address_mode, from_mode;
+ machine_mode pointer_mode, address_mode, from_mode;
rtx temp;
enum rtx_code code;
arithmetic insns can be used. */
rtx
-convert_memory_address_addr_space (enum machine_mode to_mode, rtx x, addr_space_t as)
+convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
{
return convert_memory_address_addr_space_1 (to_mode, x, as, false);
}
this works by copying X or subexpressions of it into registers. */
rtx
-memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
+memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
rtx oldx = x;
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
x = convert_memory_address_addr_space (address_mode, x, as);
{
rtx base;
HOST_WIDE_INT offset;
- enum machine_mode mode;
+ machine_mode mode;
if (!flag_section_anchors)
return x;
in case X is a constant. */
rtx
-copy_to_mode_reg (enum machine_mode mode, rtx x)
+copy_to_mode_reg (machine_mode mode, rtx x)
{
rtx temp = gen_reg_rtx (mode);
since we mark it as a "constant" register. */
rtx
-force_reg (enum machine_mode mode, rtx x)
+force_reg (machine_mode mode, rtx x)
{
rtx temp, set;
rtx_insn *insn;
MODE is the mode to use for X in case it is a constant. */
rtx
-copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
+copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
rtx temp;
FOR_RETURN is nonzero if the caller is promoting the return value
of FNDECL, else it is for promoting args. */
-enum machine_mode
-promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
+machine_mode
+promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
const_tree funtype, int for_return)
{
/* Called without a type node for a libcall. */
PUNSIGNEDP points to the signedness of the type and may be adjusted
to show what signedness to use on extension operations. */
-enum machine_mode
-promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
+machine_mode
+promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
of DECL after promotion. */
-enum machine_mode
+machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
tree type = TREE_TYPE (decl);
int unsignedp = TYPE_UNSIGNED (type);
- enum machine_mode mode = DECL_MODE (decl);
- enum machine_mode pmode;
+ machine_mode mode = DECL_MODE (decl);
+ machine_mode pmode;
if (TREE_CODE (decl) == RESULT_DECL
|| TREE_CODE (decl) == PARM_DECL)
rtx sa = *psave;
/* The default is that we use a move insn and save in a Pmode object. */
rtx (*fcn) (rtx, rtx) = gen_move_insn;
- enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
+ machine_mode mode = STACK_SAVEAREA_MODE (save_level);
/* See if this machine has anything special to do for this kind of save. */
switch (save_level)
&& GET_MODE (val) == BLKmode)
{
unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
- enum machine_mode tmpmode;
+ machine_mode tmpmode;
/* int_size_in_bytes can return -1. We don't need a check here
since the value of bytes will then be large enough that no
in which a scalar value of mode MODE was returned by a library call. */
rtx
-hard_libcall_value (enum machine_mode mode, rtx fun)
+hard_libcall_value (machine_mode mode, rtx fun)
{
return targetm.calls.libcall_value (mode, fun);
}
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
rtx);
-static rtx extract_fixed_bit_field (enum machine_mode, rtx,
+static rtx extract_fixed_bit_field (machine_mode, rtx,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, rtx, int);
-static rtx extract_fixed_bit_field_1 (enum machine_mode, rtx,
+static rtx extract_fixed_bit_field_1 (machine_mode, rtx,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, rtx, int);
-static rtx lshift_value (enum machine_mode, unsigned HOST_WIDE_INT, int);
+static rtx lshift_value (machine_mode, unsigned HOST_WIDE_INT, int);
static rtx extract_split_bit_field (rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, int);
-static void do_cmp_and_jump (rtx, rtx, enum rtx_code, enum machine_mode, rtx_code_label *);
-static rtx expand_smod_pow2 (enum machine_mode, rtx, HOST_WIDE_INT);
-static rtx expand_sdiv_pow2 (enum machine_mode, rtx, HOST_WIDE_INT);
+static void do_cmp_and_jump (rtx, rtx, enum rtx_code, machine_mode, rtx_code_label *);
+static rtx expand_smod_pow2 (machine_mode, rtx, HOST_WIDE_INT);
+static rtx expand_sdiv_pow2 (machine_mode, rtx, HOST_WIDE_INT);
/* Return a constant integer mask value of mode MODE with BITSIZE ones
followed by BITPOS zeros, or the complement of that if COMPLEMENT.
mask is zero-extended if BITSIZE+BITPOS is too small for MODE. */
static inline rtx
-mask_rtx (enum machine_mode mode, int bitpos, int bitsize, bool complement)
+mask_rtx (machine_mode mode, int bitpos, int bitsize, bool complement)
{
return immed_wide_int_const
(wi::shifted_mask (bitpos, bitsize, complement,
};
static void
-init_expmed_one_conv (struct init_expmed_rtl *all, enum machine_mode to_mode,
- enum machine_mode from_mode, bool speed)
+init_expmed_one_conv (struct init_expmed_rtl *all, machine_mode to_mode,
+ machine_mode from_mode, bool speed)
{
int to_size, from_size;
rtx which;
static void
init_expmed_one_mode (struct init_expmed_rtl *all,
- enum machine_mode mode, int speed)
+ machine_mode mode, int speed)
{
int m, n, mode_bitsize;
- enum machine_mode mode_from;
+ machine_mode mode_from;
mode_bitsize = GET_MODE_UNIT_BITSIZE (mode);
if (SCALAR_INT_MODE_P (mode))
{
for (mode_from = MIN_MODE_INT; mode_from <= MAX_MODE_INT;
- mode_from = (enum machine_mode)(mode_from + 1))
+ mode_from = (machine_mode)(mode_from + 1))
init_expmed_one_conv (all, mode, mode_from, speed);
}
if (GET_MODE_CLASS (mode) == MODE_INT)
{
- enum machine_mode wider_mode = GET_MODE_WIDER_MODE (mode);
+ machine_mode wider_mode = GET_MODE_WIDER_MODE (mode);
if (wider_mode != VOIDmode)
{
PUT_MODE (all->zext, wider_mode);
init_expmed (void)
{
struct init_expmed_rtl all;
- enum machine_mode mode = QImode;
+ machine_mode mode = QImode;
int m, speed;
memset (&all, 0, sizeof all);
set_zero_cost (speed, set_src_cost (const0_rtx, speed));
for (mode = MIN_MODE_INT; mode <= MAX_MODE_INT;
- mode = (enum machine_mode)(mode + 1))
+ mode = (machine_mode)(mode + 1))
init_expmed_one_mode (&all, mode, speed);
if (MIN_MODE_PARTIAL_INT != VOIDmode)
for (mode = MIN_MODE_PARTIAL_INT; mode <= MAX_MODE_PARTIAL_INT;
- mode = (enum machine_mode)(mode + 1))
+ mode = (machine_mode)(mode + 1))
init_expmed_one_mode (&all, mode, speed);
if (MIN_MODE_VECTOR_INT != VOIDmode)
for (mode = MIN_MODE_VECTOR_INT; mode <= MAX_MODE_VECTOR_INT;
- mode = (enum machine_mode)(mode + 1))
+ mode = (machine_mode)(mode + 1))
init_expmed_one_mode (&all, mode, speed);
}
useful if X is a CONST_INT. */
rtx
-negate_rtx (enum machine_mode mode, rtx x)
+negate_rtx (machine_mode mode, rtx x)
{
rtx result = simplify_unary_operation (NEG, mode, x, mode);
Set *NEW_BITNUM to the bit position of the field within the new memory. */
static rtx
-narrow_bit_field_mem (rtx mem, enum machine_mode mode,
+narrow_bit_field_mem (rtx mem, machine_mode mode,
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum,
unsigned HOST_WIDE_INT *new_bitnum)
HOST_WIDE_INT bitnum,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
- enum machine_mode fieldmode,
+ machine_mode fieldmode,
unsigned HOST_WIDE_INT *new_bitnum)
{
bit_field_mode_iterator iter (bitsize, bitnum, bitregion_start,
bitregion_end, MEM_ALIGN (op0),
MEM_VOLATILE_P (op0));
- enum machine_mode best_mode;
+ machine_mode best_mode;
if (iter.next_mode (&best_mode))
{
/* We can use a memory in BEST_MODE. See whether this is true for
{
/* Limit the search to the mode required by the corresponding
register insertion or extraction instruction, if any. */
- enum machine_mode limit_mode = word_mode;
+ machine_mode limit_mode = word_mode;
extraction_insn insn;
if (get_best_reg_extraction_insn (&insn, pattern,
GET_MODE_BITSIZE (best_mode),
fieldmode))
limit_mode = insn.field_mode;
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
while (iter.next_mode (&wider_mode)
&& GET_MODE_SIZE (wider_mode) <= GET_MODE_SIZE (limit_mode))
best_mode = wider_mode;
static bool
lowpart_bit_field_p (unsigned HOST_WIDE_INT bitnum,
unsigned HOST_WIDE_INT bitsize,
- enum machine_mode struct_mode)
+ machine_mode struct_mode)
{
if (BYTES_BIG_ENDIAN)
return (bitnum % BITS_PER_UNIT == 0
static bool
strict_volatile_bitfield_p (rtx op0, unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum,
- enum machine_mode fieldmode,
+ machine_mode fieldmode,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end)
{
static bool
simple_mem_bitfield_p (rtx op0, unsigned HOST_WIDE_INT bitsize,
- unsigned HOST_WIDE_INT bitnum, enum machine_mode mode)
+ unsigned HOST_WIDE_INT bitnum, machine_mode mode)
{
return (MEM_P (op0)
&& bitnum % BITS_PER_UNIT == 0
rtx_insn *last = get_last_insn ();
bool copy_back = false;
- enum machine_mode op_mode = insv->field_mode;
+ machine_mode op_mode = insv->field_mode;
unsigned int unit = GET_MODE_BITSIZE (op_mode);
if (bitsize == 0 || bitsize > unit)
return false;
unsigned HOST_WIDE_INT bitnum,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
- enum machine_mode fieldmode,
+ machine_mode fieldmode,
rtx value, bool fallback_p)
{
rtx op0 = str_rtx;
&& !(bitnum % GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))))
{
struct expand_operand ops[3];
- enum machine_mode outermode = GET_MODE (op0);
- enum machine_mode innermode = GET_MODE_INNER (outermode);
+ machine_mode outermode = GET_MODE (op0);
+ machine_mode innermode = GET_MODE_INNER (outermode);
enum insn_code icode = optab_handler (vec_set_optab, outermode);
int pos = bitnum / GET_MODE_BITSIZE (innermode);
since that case is valid for any mode. The following cases are only
valid for integral modes. */
{
- enum machine_mode imode = int_mode_for_mode (GET_MODE (op0));
+ machine_mode imode = int_mode_for_mode (GET_MODE (op0));
if (imode != GET_MODE (op0))
{
if (MEM_P (op0))
unsigned HOST_WIDE_INT bitnum,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
- enum machine_mode fieldmode,
+ machine_mode fieldmode,
rtx value)
{
/* Handle -fstrict-volatile-bitfields in the cases where it applies. */
bit region. */
if (MEM_P (str_rtx) && bitregion_start > 0)
{
- enum machine_mode bestmode;
+ machine_mode bestmode;
HOST_WIDE_INT offset, size;
gcc_assert ((bitregion_start % BITS_PER_UNIT) == 0);
if (MEM_P (op0))
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
if (GET_MODE_BITSIZE (mode) == 0
|| GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (word_mode))
mode = word_mode;
unsigned HOST_WIDE_INT bitnum,
rtx value)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx temp;
int all_zero = 0;
int all_one = 0;
{
int word_offset = (SUBREG_BYTE (op0) / UNITS_PER_WORD)
+ (offset * unit / BITS_PER_WORD);
- enum machine_mode sub_mode = GET_MODE (SUBREG_REG (op0));
+ machine_mode sub_mode = GET_MODE (SUBREG_REG (op0));
if (sub_mode != BLKmode && GET_MODE_SIZE (sub_mode) < UNITS_PER_WORD)
word = word_offset ? const0_rtx : op0;
else
}
else if (REG_P (op0))
{
- enum machine_mode op0_mode = GET_MODE (op0);
+ machine_mode op0_mode = GET_MODE (op0);
if (op0_mode != BLKmode && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD)
word = offset ? const0_rtx : op0;
else
to extract_bit_field. */
static rtx
-convert_extracted_bit_field (rtx x, enum machine_mode mode,
- enum machine_mode tmode, bool unsignedp)
+convert_extracted_bit_field (rtx x, machine_mode mode,
+ machine_mode tmode, bool unsignedp)
{
if (GET_MODE (x) == tmode || GET_MODE (x) == mode)
return x;
value via a SUBREG. */
if (!SCALAR_INT_MODE_P (tmode))
{
- enum machine_mode smode;
+ machine_mode smode;
smode = mode_for_size (GET_MODE_BITSIZE (tmode), MODE_INT, 0);
x = convert_to_mode (smode, x, unsignedp);
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum,
int unsignedp, rtx target,
- enum machine_mode mode, enum machine_mode tmode)
+ machine_mode mode, machine_mode tmode)
{
struct expand_operand ops[4];
rtx spec_target = target;
rtx spec_target_subreg = 0;
- enum machine_mode ext_mode = extv->field_mode;
+ machine_mode ext_mode = extv->field_mode;
unsigned unit = GET_MODE_BITSIZE (ext_mode);
if (bitsize == 0 || unit < bitsize)
static rtx
extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum, int unsignedp, rtx target,
- enum machine_mode mode, enum machine_mode tmode,
+ machine_mode mode, machine_mode tmode,
bool fallback_p)
{
rtx op0 = str_rtx;
- enum machine_mode int_mode;
- enum machine_mode mode1;
+ machine_mode int_mode;
+ machine_mode mode1;
if (tmode == VOIDmode)
tmode = mode;
&& !MEM_P (op0)
&& GET_MODE_INNER (GET_MODE (op0)) != tmode)
{
- enum machine_mode new_mode;
+ machine_mode new_mode;
if (GET_MODE_CLASS (tmode) == MODE_FLOAT)
new_mode = MIN_MODE_VECTOR_FLOAT;
== bitnum / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))))
{
struct expand_operand ops[3];
- enum machine_mode outermode = GET_MODE (op0);
- enum machine_mode innermode = GET_MODE_INNER (outermode);
+ machine_mode outermode = GET_MODE (op0);
+ machine_mode innermode = GET_MODE_INNER (outermode);
enum insn_code icode = optab_handler (vec_extract_optab, outermode);
unsigned HOST_WIDE_INT pos = bitnum / GET_MODE_BITSIZE (innermode);
/* Make sure we are playing with integral modes. Pun with subregs
if we aren't. */
{
- enum machine_mode imode = int_mode_for_mode (GET_MODE (op0));
+ machine_mode imode = int_mode_for_mode (GET_MODE (op0));
if (imode != GET_MODE (op0))
{
if (MEM_P (op0))
mode1 = mode;
if (SCALAR_INT_MODE_P (tmode))
{
- enum machine_mode try_mode = mode_for_size (bitsize,
+ machine_mode try_mode = mode_for_size (bitsize,
GET_MODE_CLASS (tmode), 0);
if (try_mode != BLKmode)
mode1 = try_mode;
rtx
extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum, int unsignedp, rtx target,
- enum machine_mode mode, enum machine_mode tmode)
+ machine_mode mode, machine_mode tmode)
{
- enum machine_mode mode1;
+ machine_mode mode1;
/* Handle -fstrict-volatile-bitfields in the cases where it applies. */
if (GET_MODE_BITSIZE (GET_MODE (str_rtx)) > 0)
If TARGET is not used, create a pseudo-reg of mode TMODE for the value. */
static rtx
-extract_fixed_bit_field (enum machine_mode tmode, rtx op0,
+extract_fixed_bit_field (machine_mode tmode, rtx op0,
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum, rtx target,
int unsignedp)
{
if (MEM_P (op0))
{
- enum machine_mode mode
+ machine_mode mode
= get_best_mode (bitsize, bitnum, 0, 0, MEM_ALIGN (op0), word_mode,
MEM_VOLATILE_P (op0));
the bit field always using the MODE of OP0. */
static rtx
-extract_fixed_bit_field_1 (enum machine_mode tmode, rtx op0,
+extract_fixed_bit_field_1 (machine_mode tmode, rtx op0,
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum, rtx target,
int unsignedp)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
gcc_assert (SCALAR_INT_MODE_P (mode));
/* Note that bitsize + bitnum can be greater than GET_MODE_BITSIZE (mode)
VALUE << BITPOS. */
static rtx
-lshift_value (enum machine_mode mode, unsigned HOST_WIDE_INT value,
+lshift_value (machine_mode mode, unsigned HOST_WIDE_INT value,
int bitpos)
{
return immed_wide_int_const (wi::lshift (value, bitpos), mode);
operations. */
rtx
-extract_low_bits (enum machine_mode mode, enum machine_mode src_mode, rtx src)
+extract_low_bits (machine_mode mode, machine_mode src_mode, rtx src)
{
- enum machine_mode int_mode, src_int_mode;
+ machine_mode int_mode, src_int_mode;
if (mode == src_mode)
return src;
Return the rtx for where the value is. */
static rtx
-expand_shift_1 (enum tree_code code, enum machine_mode mode, rtx shifted,
+expand_shift_1 (enum tree_code code, machine_mode mode, rtx shifted,
rtx amount, rtx target, int unsignedp)
{
rtx op1, temp = 0;
optab rshift_uns_optab = lshr_optab;
optab lrotate_optab = rotl_optab;
optab rrotate_optab = rotr_optab;
- enum machine_mode op1_mode;
- enum machine_mode scalar_mode = mode;
+ machine_mode op1_mode;
+ machine_mode scalar_mode = mode;
int attempt;
bool speed = optimize_insn_for_speed_p ();
Return the rtx for where the value is. */
rtx
-expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
+expand_shift (enum tree_code code, machine_mode mode, rtx shifted,
int amount, rtx target, int unsignedp)
{
return expand_shift_1 (code, mode,
Return the rtx for where the value is. */
rtx
-expand_variable_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
+expand_variable_shift (enum tree_code code, machine_mode mode, rtx shifted,
tree amount, rtx target, int unsignedp)
{
return expand_shift_1 (code, mode,
enum mult_variant {basic_variant, negate_variant, add_variant};
static void synth_mult (struct algorithm *, unsigned HOST_WIDE_INT,
- const struct mult_cost *, enum machine_mode mode);
-static bool choose_mult_variant (enum machine_mode, HOST_WIDE_INT,
+ const struct mult_cost *, machine_mode mode);
+static bool choose_mult_variant (machine_mode, HOST_WIDE_INT,
struct algorithm *, enum mult_variant *, int);
-static rtx expand_mult_const (enum machine_mode, rtx, HOST_WIDE_INT, rtx,
+static rtx expand_mult_const (machine_mode, rtx, HOST_WIDE_INT, rtx,
const struct algorithm *, enum mult_variant);
static unsigned HOST_WIDE_INT invert_mod2n (unsigned HOST_WIDE_INT, int);
-static rtx extract_high_half (enum machine_mode, rtx);
-static rtx expmed_mult_highpart (enum machine_mode, rtx, rtx, rtx, int, int);
-static rtx expmed_mult_highpart_optab (enum machine_mode, rtx, rtx, rtx,
+static rtx extract_high_half (machine_mode, rtx);
+static rtx expmed_mult_highpart (machine_mode, rtx, rtx, rtx, int, int);
+static rtx expmed_mult_highpart_optab (machine_mode, rtx, rtx, rtx,
int, int);
/* Compute and return the best algorithm for multiplying by T.
The algorithm must cost less than cost_limit
static void
synth_mult (struct algorithm *alg_out, unsigned HOST_WIDE_INT t,
- const struct mult_cost *cost_limit, enum machine_mode mode)
+ const struct mult_cost *cost_limit, machine_mode mode)
{
int m;
struct algorithm *alg_in, *best_alg;
bool cache_hit = false;
enum alg_code cache_alg = alg_zero;
bool speed = optimize_insn_for_speed_p ();
- enum machine_mode imode;
+ machine_mode imode;
struct alg_hash_entry *entry_ptr;
/* Indicate that no algorithm is yet found. If no algorithm
describing the algorithm in *ALG and final fixup in *VARIANT. */
static bool
-choose_mult_variant (enum machine_mode mode, HOST_WIDE_INT val,
+choose_mult_variant (machine_mode mode, HOST_WIDE_INT val,
struct algorithm *alg, enum mult_variant *variant,
int mult_cost)
{
the final fixup specified by VARIANT. */
static rtx
-expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
+expand_mult_const (machine_mode mode, rtx op0, HOST_WIDE_INT val,
rtx target, const struct algorithm *alg,
enum mult_variant variant)
{
rtx_insn *insn;
rtx accum, tem;
int opno;
- enum machine_mode nmode;
+ machine_mode nmode;
/* Avoid referencing memory over and over and invalid sharing
on SUBREGs. */
you should swap the two operands if OP0 would be constant. */
rtx
-expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
+expand_mult (machine_mode mode, rtx op0, rtx op1, rtx target,
int unsignedp)
{
enum mult_variant variant;
COEFFicient in the given MODE and SPEED. */
int
-mult_by_coeff_cost (HOST_WIDE_INT coeff, enum machine_mode mode, bool speed)
+mult_by_coeff_cost (HOST_WIDE_INT coeff, machine_mode mode, bool speed)
{
int max_cost;
struct algorithm algorithm;
and adds. */
rtx
-expand_widening_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
+expand_widening_mult (machine_mode mode, rtx op0, rtx op1, rtx target,
int unsignedp, optab this_optab)
{
bool speed = optimize_insn_for_speed_p ();
MODE is the mode of operation. */
rtx
-expand_mult_highpart_adjust (enum machine_mode mode, rtx adj_operand, rtx op0,
+expand_mult_highpart_adjust (machine_mode mode, rtx adj_operand, rtx op0,
rtx op1, rtx target, int unsignedp)
{
rtx tem;
/* Subroutine of expmed_mult_highpart. Return the MODE high part of OP. */
static rtx
-extract_high_half (enum machine_mode mode, rtx op)
+extract_high_half (machine_mode mode, rtx op)
{
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
if (mode == word_mode)
return gen_highpart (mode, op);
optab. OP1 is an rtx for the constant operand. */
static rtx
-expmed_mult_highpart_optab (enum machine_mode mode, rtx op0, rtx op1,
+expmed_mult_highpart_optab (machine_mode mode, rtx op0, rtx op1,
rtx target, int unsignedp, int max_cost)
{
rtx narrow_op1 = gen_int_mode (INTVAL (op1), mode);
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
optab moptab;
rtx tem;
int size;
MAX_COST is the total allowed cost for the expanded RTL. */
static rtx
-expmed_mult_highpart (enum machine_mode mode, rtx op0, rtx op1,
+expmed_mult_highpart (machine_mode mode, rtx op0, rtx op1,
rtx target, int unsignedp, int max_cost)
{
- enum machine_mode wider_mode = GET_MODE_WIDER_MODE (mode);
+ machine_mode wider_mode = GET_MODE_WIDER_MODE (mode);
unsigned HOST_WIDE_INT cnst1;
int extra_cost;
bool sign_adjust = false;
/* Expand signed modulus of OP0 by a power of two D in mode MODE. */
static rtx
-expand_smod_pow2 (enum machine_mode mode, rtx op0, HOST_WIDE_INT d)
+expand_smod_pow2 (machine_mode mode, rtx op0, HOST_WIDE_INT d)
{
rtx result, temp, shift;
rtx_code_label *label;
This routine is only called for positive values of D. */
static rtx
-expand_sdiv_pow2 (enum machine_mode mode, rtx op0, HOST_WIDE_INT d)
+expand_sdiv_pow2 (machine_mode mode, rtx op0, HOST_WIDE_INT d)
{
rtx temp;
rtx_code_label *label;
*/
rtx
-expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
+expand_divmod (int rem_flag, enum tree_code code, machine_mode mode,
rtx op0, rtx op1, rtx target, int unsignedp)
{
- enum machine_mode compute_mode;
+ machine_mode compute_mode;
rtx tquotient;
rtx quotient = 0, remainder = 0;
rtx_insn *last;
If TARGET is 0, a pseudo-register or constant is returned. */
rtx
-expand_and (enum machine_mode mode, rtx op0, rtx op1, rtx target)
+expand_and (machine_mode mode, rtx op0, rtx op1, rtx target)
{
rtx tem = 0;
/* Helper function for emit_store_flag. */
static rtx
emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
- enum machine_mode mode, enum machine_mode compare_mode,
+ machine_mode mode, machine_mode compare_mode,
int unsignedp, rtx x, rtx y, int normalizep,
- enum machine_mode target_mode)
+ machine_mode target_mode)
{
struct expand_operand ops[4];
rtx op0, comparison, subtarget;
rtx_insn *last;
- enum machine_mode result_mode = targetm.cstore_mode (icode);
+ machine_mode result_mode = targetm.cstore_mode (icode);
last = get_last_insn ();
x = prepare_operand (icode, x, 2, mode, compare_mode, unsignedp);
static rtx
emit_store_flag_1 (rtx target, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode mode, int unsignedp, int normalizep,
- enum machine_mode target_mode)
+ machine_mode mode, int unsignedp, int normalizep,
+ machine_mode target_mode)
{
rtx subtarget;
enum insn_code icode;
- enum machine_mode compare_mode;
+ machine_mode compare_mode;
enum mode_class mclass;
enum rtx_code scode;
rtx tem;
for (compare_mode = mode; compare_mode != VOIDmode;
compare_mode = GET_MODE_WIDER_MODE (compare_mode))
{
- enum machine_mode optab_mode = mclass == MODE_CC ? CCmode : compare_mode;
+ machine_mode optab_mode = mclass == MODE_CC ? CCmode : compare_mode;
icode = optab_handler (cstore_optab, optab_mode);
if (icode != CODE_FOR_nothing)
{
rtx
emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode mode, int unsignedp, int normalizep)
+ machine_mode mode, int unsignedp, int normalizep)
{
- enum machine_mode target_mode = target ? GET_MODE (target) : VOIDmode;
+ machine_mode target_mode = target ? GET_MODE (target) : VOIDmode;
enum rtx_code rcode;
rtx subtarget;
rtx tem, trueval;
rtx
emit_store_flag_force (rtx target, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode mode, int unsignedp, int normalizep)
+ machine_mode mode, int unsignedp, int normalizep)
{
rtx tem;
rtx_code_label *label;
now a thin wrapper around do_compare_rtx_and_jump. */
static void
-do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, enum machine_mode mode,
+do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, machine_mode mode,
rtx_code_label *label)
{
int unsignedp = (op == LTU || op == LEU || op == GTU || op == GEU);
unsigned HOST_WIDE_INT t;
/* The mode in which we are multiplying something by T. */
- enum machine_mode mode;
+ machine_mode mode;
/* The best multiplication algorithm for t. */
enum alg_code alg;
/* Compute an index into the cost arrays by mode class. */
static inline int
-expmed_mode_index (enum machine_mode mode)
+expmed_mode_index (machine_mode mode)
{
switch (GET_MODE_CLASS (mode))
{
static inline bool *
expmed_op_cheap_ptr (struct expmed_op_cheap *eoc, bool speed,
- enum machine_mode mode)
+ machine_mode mode)
{
int idx = expmed_mode_index (mode);
return &eoc->cheap[speed][idx];
static inline int *
expmed_op_cost_ptr (struct expmed_op_costs *costs, bool speed,
- enum machine_mode mode)
+ machine_mode mode)
{
int idx = expmed_mode_index (mode);
return &costs->cost[speed][idx];
/* Subroutine of {set_,}sdiv_pow2_cheap. Not to be used otherwise. */
static inline bool *
-sdiv_pow2_cheap_ptr (bool speed, enum machine_mode mode)
+sdiv_pow2_cheap_ptr (bool speed, machine_mode mode)
{
return expmed_op_cheap_ptr (&this_target_expmed->x_sdiv_pow2_cheap,
speed, mode);
when optimizing for SPEED. */
static inline void
-set_sdiv_pow2_cheap (bool speed, enum machine_mode mode, bool cheap_p)
+set_sdiv_pow2_cheap (bool speed, machine_mode mode, bool cheap_p)
{
*sdiv_pow2_cheap_ptr (speed, mode) = cheap_p;
}
when optimizing for SPEED. */
static inline bool
-sdiv_pow2_cheap (bool speed, enum machine_mode mode)
+sdiv_pow2_cheap (bool speed, machine_mode mode)
{
return *sdiv_pow2_cheap_ptr (speed, mode);
}
/* Subroutine of {set_,}smod_pow2_cheap. Not to be used otherwise. */
static inline bool *
-smod_pow2_cheap_ptr (bool speed, enum machine_mode mode)
+smod_pow2_cheap_ptr (bool speed, machine_mode mode)
{
return expmed_op_cheap_ptr (&this_target_expmed->x_smod_pow2_cheap,
speed, mode);
optimizing for SPEED. */
static inline void
-set_smod_pow2_cheap (bool speed, enum machine_mode mode, bool cheap)
+set_smod_pow2_cheap (bool speed, machine_mode mode, bool cheap)
{
*smod_pow2_cheap_ptr (speed, mode) = cheap;
}
when optimizing for SPEED. */
static inline bool
-smod_pow2_cheap (bool speed, enum machine_mode mode)
+smod_pow2_cheap (bool speed, machine_mode mode)
{
return *smod_pow2_cheap_ptr (speed, mode);
}
/* Subroutine of {set_,}add_cost. Not to be used otherwise. */
static inline int *
-add_cost_ptr (bool speed, enum machine_mode mode)
+add_cost_ptr (bool speed, machine_mode mode)
{
return expmed_op_cost_ptr (&this_target_expmed->x_add_cost, speed, mode);
}
/* Set the COST of computing an add in MODE when optimizing for SPEED. */
static inline void
-set_add_cost (bool speed, enum machine_mode mode, int cost)
+set_add_cost (bool speed, machine_mode mode, int cost)
{
*add_cost_ptr (speed, mode) = cost;
}
/* Return the cost of computing an add in MODE when optimizing for SPEED. */
static inline int
-add_cost (bool speed, enum machine_mode mode)
+add_cost (bool speed, machine_mode mode)
{
return *add_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}neg_cost. Not to be used otherwise. */
static inline int *
-neg_cost_ptr (bool speed, enum machine_mode mode)
+neg_cost_ptr (bool speed, machine_mode mode)
{
return expmed_op_cost_ptr (&this_target_expmed->x_neg_cost, speed, mode);
}
/* Set the COST of computing a negation in MODE when optimizing for SPEED. */
static inline void
-set_neg_cost (bool speed, enum machine_mode mode, int cost)
+set_neg_cost (bool speed, machine_mode mode, int cost)
{
*neg_cost_ptr (speed, mode) = cost;
}
SPEED. */
static inline int
-neg_cost (bool speed, enum machine_mode mode)
+neg_cost (bool speed, machine_mode mode)
{
return *neg_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}shift_cost. Not to be used otherwise. */
static inline int *
-shift_cost_ptr (bool speed, enum machine_mode mode, int bits)
+shift_cost_ptr (bool speed, machine_mode mode, int bits)
{
return expmed_op_cost_ptr (&this_target_expmed->x_shift_cost[bits],
speed, mode);
/* Set the COST of doing a shift in MODE by BITS when optimizing for SPEED. */
static inline void
-set_shift_cost (bool speed, enum machine_mode mode, int bits, int cost)
+set_shift_cost (bool speed, machine_mode mode, int bits, int cost)
{
*shift_cost_ptr (speed, mode, bits) = cost;
}
SPEED. */
static inline int
-shift_cost (bool speed, enum machine_mode mode, int bits)
+shift_cost (bool speed, machine_mode mode, int bits)
{
return *shift_cost_ptr (speed, mode, bits);
}
/* Subroutine of {set_,}shiftadd_cost. Not to be used otherwise. */
static inline int *
-shiftadd_cost_ptr (bool speed, enum machine_mode mode, int bits)
+shiftadd_cost_ptr (bool speed, machine_mode mode, int bits)
{
return expmed_op_cost_ptr (&this_target_expmed->x_shiftadd_cost[bits],
speed, mode);
optimizing for SPEED. */
static inline void
-set_shiftadd_cost (bool speed, enum machine_mode mode, int bits, int cost)
+set_shiftadd_cost (bool speed, machine_mode mode, int bits, int cost)
{
*shiftadd_cost_ptr (speed, mode, bits) = cost;
}
when optimizing for SPEED. */
static inline int
-shiftadd_cost (bool speed, enum machine_mode mode, int bits)
+shiftadd_cost (bool speed, machine_mode mode, int bits)
{
return *shiftadd_cost_ptr (speed, mode, bits);
}
/* Subroutine of {set_,}shiftsub0_cost. Not to be used otherwise. */
static inline int *
-shiftsub0_cost_ptr (bool speed, enum machine_mode mode, int bits)
+shiftsub0_cost_ptr (bool speed, machine_mode mode, int bits)
{
return expmed_op_cost_ptr (&this_target_expmed->x_shiftsub0_cost[bits],
speed, mode);
value when optimizing for SPEED. */
static inline void
-set_shiftsub0_cost (bool speed, enum machine_mode mode, int bits, int cost)
+set_shiftsub0_cost (bool speed, machine_mode mode, int bits, int cost)
{
*shiftsub0_cost_ptr (speed, mode, bits) = cost;
}
a value when optimizing for SPEED. */
static inline int
-shiftsub0_cost (bool speed, enum machine_mode mode, int bits)
+shiftsub0_cost (bool speed, machine_mode mode, int bits)
{
return *shiftsub0_cost_ptr (speed, mode, bits);
}
/* Subroutine of {set_,}shiftsub1_cost. Not to be used otherwise. */
static inline int *
-shiftsub1_cost_ptr (bool speed, enum machine_mode mode, int bits)
+shiftsub1_cost_ptr (bool speed, machine_mode mode, int bits)
{
return expmed_op_cost_ptr (&this_target_expmed->x_shiftsub1_cost[bits],
speed, mode);
optimizing for SPEED. */
static inline void
-set_shiftsub1_cost (bool speed, enum machine_mode mode, int bits, int cost)
+set_shiftsub1_cost (bool speed, machine_mode mode, int bits, int cost)
{
*shiftsub1_cost_ptr (speed, mode, bits) = cost;
}
when optimizing for SPEED. */
static inline int
-shiftsub1_cost (bool speed, enum machine_mode mode, int bits)
+shiftsub1_cost (bool speed, machine_mode mode, int bits)
{
return *shiftsub1_cost_ptr (speed, mode, bits);
}
/* Subroutine of {set_,}mul_cost. Not to be used otherwise. */
static inline int *
-mul_cost_ptr (bool speed, enum machine_mode mode)
+mul_cost_ptr (bool speed, machine_mode mode)
{
return expmed_op_cost_ptr (&this_target_expmed->x_mul_cost, speed, mode);
}
SPEED. */
static inline void
-set_mul_cost (bool speed, enum machine_mode mode, int cost)
+set_mul_cost (bool speed, machine_mode mode, int cost)
{
*mul_cost_ptr (speed, mode) = cost;
}
for SPEED. */
static inline int
-mul_cost (bool speed, enum machine_mode mode)
+mul_cost (bool speed, machine_mode mode)
{
return *mul_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}sdiv_cost. Not to be used otherwise. */
static inline int *
-sdiv_cost_ptr (bool speed, enum machine_mode mode)
+sdiv_cost_ptr (bool speed, machine_mode mode)
{
return expmed_op_cost_ptr (&this_target_expmed->x_sdiv_cost, speed, mode);
}
for SPEED. */
static inline void
-set_sdiv_cost (bool speed, enum machine_mode mode, int cost)
+set_sdiv_cost (bool speed, machine_mode mode, int cost)
{
*sdiv_cost_ptr (speed, mode) = cost;
}
for SPEED. */
static inline int
-sdiv_cost (bool speed, enum machine_mode mode)
+sdiv_cost (bool speed, machine_mode mode)
{
return *sdiv_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}udiv_cost. Not to be used otherwise. */
static inline int *
-udiv_cost_ptr (bool speed, enum machine_mode mode)
+udiv_cost_ptr (bool speed, machine_mode mode)
{
return expmed_op_cost_ptr (&this_target_expmed->x_udiv_cost, speed, mode);
}
for SPEED. */
static inline void
-set_udiv_cost (bool speed, enum machine_mode mode, int cost)
+set_udiv_cost (bool speed, machine_mode mode, int cost)
{
*udiv_cost_ptr (speed, mode) = cost;
}
optimizing for SPEED. */
static inline int
-udiv_cost (bool speed, enum machine_mode mode)
+udiv_cost (bool speed, machine_mode mode)
{
return *udiv_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}mul_widen_cost. Not to be used otherwise. */
static inline int *
-mul_widen_cost_ptr (bool speed, enum machine_mode mode)
+mul_widen_cost_ptr (bool speed, machine_mode mode)
{
gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
optimizing for SPEED. */
static inline void
-set_mul_widen_cost (bool speed, enum machine_mode mode, int cost)
+set_mul_widen_cost (bool speed, machine_mode mode, int cost)
{
*mul_widen_cost_ptr (speed, mode) = cost;
}
optimizing for SPEED. */
static inline int
-mul_widen_cost (bool speed, enum machine_mode mode)
+mul_widen_cost (bool speed, machine_mode mode)
{
return *mul_widen_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}mul_highpart_cost. Not to be used otherwise. */
static inline int *
-mul_highpart_cost_ptr (bool speed, enum machine_mode mode)
+mul_highpart_cost_ptr (bool speed, machine_mode mode)
{
gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
when optimizing for SPEED. */
static inline void
-set_mul_highpart_cost (bool speed, enum machine_mode mode, int cost)
+set_mul_highpart_cost (bool speed, machine_mode mode, int cost)
{
*mul_highpart_cost_ptr (speed, mode) = cost;
}
when optimizing for SPEED. */
static inline int
-mul_highpart_cost (bool speed, enum machine_mode mode)
+mul_highpart_cost (bool speed, machine_mode mode)
{
return *mul_highpart_cost_ptr (speed, mode);
}
/* Subroutine of {set_,}convert_cost. Not to be used otherwise. */
static inline int *
-convert_cost_ptr (enum machine_mode to_mode, enum machine_mode from_mode,
+convert_cost_ptr (machine_mode to_mode, machine_mode from_mode,
bool speed)
{
int to_idx = expmed_mode_index (to_mode);
for SPEED. */
static inline void
-set_convert_cost (enum machine_mode to_mode, enum machine_mode from_mode,
+set_convert_cost (machine_mode to_mode, machine_mode from_mode,
bool speed, int cost)
{
*convert_cost_ptr (to_mode, from_mode, speed) = cost;
for SPEED. */
static inline int
-convert_cost (enum machine_mode to_mode, enum machine_mode from_mode,
+convert_cost (machine_mode to_mode, machine_mode from_mode,
bool speed)
{
return *convert_cost_ptr (to_mode, from_mode, speed);
}
-extern int mult_by_coeff_cost (HOST_WIDE_INT, enum machine_mode, bool);
+extern int mult_by_coeff_cost (HOST_WIDE_INT, machine_mode, bool);
#endif
int explicit_inc_to;
unsigned HOST_WIDE_INT len;
HOST_WIDE_INT offset;
- rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
+ rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
void *constfundata;
int reverse;
};
unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
-static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
+static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
- HOST_WIDE_INT, enum machine_mode,
+ HOST_WIDE_INT, machine_mode,
tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
- enum machine_mode, tree, alias_set_type, bool);
+ machine_mode, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
-static rtx do_store_flag (sepops, rtx, enum machine_mode);
+static rtx do_store_flag (sepops, rtx, machine_mode);
#ifdef PUSH_ROUNDING
-static void emit_single_push_insn (enum machine_mode, rtx, tree);
+static void emit_single_push_insn (machine_mode, rtx, tree);
#endif
-static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
+static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
init_expr_target (void)
{
rtx insn, pat;
- enum machine_mode mode;
+ machine_mode mode;
int num_clobbers;
rtx mem, mem1;
rtx reg;
PATTERN (insn) = pat;
for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
- mode = (enum machine_mode) ((int) mode + 1))
+ mode = (machine_mode) ((int) mode + 1))
{
int regno;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
- enum machine_mode srcmode;
+ machine_mode srcmode;
for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
srcmode = GET_MODE_WIDER_MODE (srcmode))
{
void
convert_move (rtx to, rtx from, int unsignedp)
{
- enum machine_mode to_mode = GET_MODE (to);
- enum machine_mode from_mode = GET_MODE (from);
+ machine_mode to_mode = GET_MODE (to);
+ machine_mode from_mode = GET_MODE (from);
int to_real = SCALAR_FLOAT_MODE_P (to_mode);
int from_real = SCALAR_FLOAT_MODE_P (from_mode);
enum insn_code code;
xImode for all MODE_PARTIAL_INT modes they use, but no others. */
if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
{
- enum machine_mode full_mode
+ machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
{
rtx new_from;
- enum machine_mode full_mode
+ machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
convert_optab ctab = unsignedp ? zext_optab : sext_optab;
enum insn_code icode;
rtx fill_value;
rtx lowfrom;
int i;
- enum machine_mode lowpart_mode;
+ machine_mode lowpart_mode;
int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
/* Try converting directly if the insn is supported. */
}
else
{
- enum machine_mode intermediate;
+ machine_mode intermediate;
rtx tmp;
int shift_amount;
or by copying to a new temporary with conversion. */
rtx
-convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
+convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
return convert_modes (mode, VOIDmode, x, unsignedp);
}
You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
rtx
-convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
+convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
rtx temp;
static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
- enum machine_mode tmode;
+ machine_mode tmode;
tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
if (align >= GET_MODE_ALIGNMENT (tmode))
align = GET_MODE_ALIGNMENT (tmode);
else
{
- enum machine_mode tmode, xmode;
+ machine_mode tmode, xmode;
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
tmode != VOIDmode;
/* Return the widest integer mode no wider than SIZE. If no such mode
can be found, return VOIDmode. */
-static enum machine_mode
+static machine_mode
widest_int_mode_for_size (unsigned int size)
{
- enum machine_mode tmode, mode = VOIDmode;
+ machine_mode tmode, mode = VOIDmode;
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
unsigned int align, int endp)
{
struct move_by_pieces_d data;
- enum machine_mode to_addr_mode;
- enum machine_mode from_addr_mode = get_address_mode (from);
+ machine_mode to_addr_mode;
+ machine_mode from_addr_mode = get_address_mode (from);
rtx to_addr, from_addr = XEXP (from, 0);
unsigned int max_size = MOVE_MAX_PIECES + 1;
enum insn_code icode;
/* Find the mode of the largest move...
MODE might not be used depending on the definitions of the
USE_* macros below. */
- enum machine_mode mode ATTRIBUTE_UNUSED
+ machine_mode mode ATTRIBUTE_UNUSED
= widest_int_mode_for_size (max_size);
if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
while (max_size > 1 && data.len > 0)
{
- enum machine_mode mode = widest_int_mode_for_size (max_size);
+ machine_mode mode = widest_int_mode_for_size (max_size);
if (mode == VOIDmode)
break;
while (max_size > 1 && l > 0)
{
- enum machine_mode mode;
+ machine_mode mode;
enum insn_code icode;
mode = widest_int_mode_for_size (max_size);
arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
{
- enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
+ machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
rtx tmp = targetm.calls.function_arg (args_so_far, mode,
NULL_TREE, true);
if (!tmp || !REG_P (tmp))
unsigned HOST_WIDE_INT probable_max_size)
{
int save_volatile_ok = volatile_ok;
- enum machine_mode mode;
+ machine_mode mode;
if (expected_align < align)
expected_align = align;
{
rtx dst_addr, src_addr;
tree call_expr, fn, src_tree, dst_tree, size_tree;
- enum machine_mode size_mode;
+ machine_mode size_mode;
rtx retval;
/* Emit code to copy the addresses of DST and SRC and SIZE into new
{
rtx_code_label *cmp_label, *top_label;
rtx iter, x_addr, y_addr, tmp;
- enum machine_mode x_addr_mode = get_address_mode (x);
- enum machine_mode y_addr_mode = get_address_mode (y);
- enum machine_mode iter_mode;
+ machine_mode x_addr_mode = get_address_mode (x);
+ machine_mode y_addr_mode = get_address_mode (y);
+ machine_mode iter_mode;
iter_mode = GET_MODE (size);
if (iter_mode == VOIDmode)
The number of registers to be filled is NREGS. */
void
-move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
+move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
int i;
#ifdef HAVE_load_multiple
for (; i < length; i++)
{
- enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
+ machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
{
rtx src;
int start, i;
- enum machine_mode m = GET_MODE (orig_src);
+ machine_mode m = GET_MODE (orig_src);
gcc_assert (GET_CODE (dst) == PARALLEL);
&& !MEM_P (orig_src)
&& GET_CODE (orig_src) != CONCAT)
{
- enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
+ machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
if (imode == BLKmode)
src = assign_stack_temp (GET_MODE (orig_src), ssize);
else
/* Process the pieces. */
for (i = start; i < XVECLEN (dst, 0); i++)
{
- enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
+ machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
unsigned int bytelen = GET_MODE_SIZE (mode);
int shift = 0;
{
rtx *tmps, dst;
int start, finish, i;
- enum machine_mode m = GET_MODE (orig_dst);
+ machine_mode m = GET_MODE (orig_dst);
gcc_assert (GET_CODE (src) == PARALLEL);
if (!SCALAR_INT_MODE_P (m)
&& !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
{
- enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
+ machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
if (imode == BLKmode)
dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
else
}
else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
{
- enum machine_mode outer = GET_MODE (dst);
- enum machine_mode inner;
+ machine_mode outer = GET_MODE (dst);
+ machine_mode inner;
HOST_WIDE_INT bytepos;
bool done = false;
rtx temp;
for (i = start; i < finish; i++)
{
HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
- enum machine_mode mode = GET_MODE (tmps[i]);
+ machine_mode mode = GET_MODE (tmps[i]);
unsigned int bytelen = GET_MODE_SIZE (mode);
unsigned int adj_bytelen;
rtx dest = dst;
}
else
{
- enum machine_mode dest_mode = GET_MODE (dest);
- enum machine_mode tmp_mode = GET_MODE (tmps[i]);
+ machine_mode dest_mode = GET_MODE (dest);
+ machine_mode tmp_mode = GET_MODE (tmps[i]);
gcc_assert (bytepos == 0 && XVECLEN (src, 0));
rtx
maybe_emit_group_store (rtx x, tree type)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
if (GET_CODE (x) == PARALLEL)
{
rtx src = NULL, dst = NULL;
unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
- enum machine_mode mode = GET_MODE (srcreg);
- enum machine_mode tmode = GET_MODE (target);
- enum machine_mode copy_mode;
+ machine_mode mode = GET_MODE (srcreg);
+ machine_mode tmode = GET_MODE (target);
+ machine_mode copy_mode;
/* BLKmode registers created in the back-end shouldn't have survived. */
gcc_assert (mode != BLKmode);
copy_mode = word_mode;
if (MEM_P (target))
{
- enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
+ machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
if (mem_mode != BLKmode)
copy_mode = mem_mode;
}
This is used on targets that return BLKmode values in registers. */
rtx
-copy_blkmode_to_reg (enum machine_mode mode, tree src)
+copy_blkmode_to_reg (machine_mode mode, tree src)
{
int i, n_regs;
unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
unsigned int bitsize;
rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
- enum machine_mode dst_mode;
+ machine_mode dst_mode;
gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
to by CALL_FUSAGE. REG must denote a hard register. */
void
-use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
+use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
to by CALL_FUSAGE. REG must denote a hard register. */
void
-clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
+clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
- rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
+ rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
void *constfundata, unsigned int align, bool memsetp)
{
unsigned HOST_WIDE_INT l;
unsigned int max_size;
HOST_WIDE_INT offset = 0;
- enum machine_mode mode;
+ machine_mode mode;
enum insn_code icode;
int reverse;
/* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
- rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
+ rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
void *constfundata, unsigned int align, bool memsetp, int endp)
{
- enum machine_mode to_addr_mode = get_address_mode (to);
+ machine_mode to_addr_mode = get_address_mode (to);
struct store_by_pieces_d data;
if (len == 0)
static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
return const0_rtx;
}
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
unsigned int align ATTRIBUTE_UNUSED)
{
- enum machine_mode to_addr_mode = get_address_mode (data->to);
+ machine_mode to_addr_mode = get_address_mode (data->to);
rtx to_addr = XEXP (data->to, 0);
unsigned int max_size = STORE_MAX_PIECES + 1;
enum insn_code icode;
/* Determine the main mode we'll be using.
MODE might not be used depending on the definitions of the
USE_* macros below. */
- enum machine_mode mode ATTRIBUTE_UNUSED
+ machine_mode mode ATTRIBUTE_UNUSED
= widest_int_mode_for_size (max_size);
if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
while (max_size > 1 && data->len > 0)
{
- enum machine_mode mode = widest_int_mode_for_size (max_size);
+ machine_mode mode = widest_int_mode_for_size (max_size);
if (mode == VOIDmode)
break;
unsigned HOST_WIDE_INT max_size,
unsigned HOST_WIDE_INT probable_max_size)
{
- enum machine_mode mode = GET_MODE (object);
+ machine_mode mode = GET_MODE (object);
unsigned int align;
gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
tree call_expr, fn, object_tree, size_tree, val_tree;
- enum machine_mode size_mode;
+ machine_mode size_mode;
rtx retval;
/* Emit code to copy OBJECT and SIZE into new pseudos. We can then
including more than one in the machine description unless
the more limited one has some advantage. */
- enum machine_mode mode;
+ machine_mode mode;
if (expected_align < align)
expected_align = align;
static void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
- enum machine_mode cmode;
- enum machine_mode imode;
+ machine_mode cmode;
+ machine_mode imode;
unsigned ibitsize;
if (GET_CODE (cplx) == CONCAT)
static rtx
read_complex_part (rtx cplx, bool imag_p)
{
- enum machine_mode cmode, imode;
+ machine_mode cmode, imode;
unsigned ibitsize;
if (GET_CODE (cplx) == CONCAT)
we'll force-create a SUBREG if needed. */
static rtx
-emit_move_change_mode (enum machine_mode new_mode,
- enum machine_mode old_mode, rtx x, bool force)
+emit_move_change_mode (machine_mode new_mode,
+ machine_mode old_mode, rtx x, bool force)
{
rtx ret;
emitted, or NULL if such a move could not be generated. */
static rtx_insn *
-emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
+emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
- enum machine_mode imode;
+ machine_mode imode;
enum insn_code code;
/* There must exist a mode of the exact size we require. */
Return an equivalent MEM that does not use an auto-increment. */
rtx
-emit_move_resolve_push (enum machine_mode mode, rtx x)
+emit_move_resolve_push (machine_mode mode, rtx x)
{
enum rtx_code code = GET_CODE (XEXP (x, 0));
HOST_WIDE_INT adjust;
Returns the last instruction emitted. */
rtx_insn *
-emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
+emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
- enum machine_mode submode = GET_MODE_INNER (mode);
+ machine_mode submode = GET_MODE_INNER (mode);
bool imag_first;
#ifdef PUSH_ROUNDING
MODE is known to be complex. Returns the last instruction emitted. */
static rtx_insn *
-emit_move_complex (enum machine_mode mode, rtx x, rtx y)
+emit_move_complex (machine_mode mode, rtx x, rtx y)
{
bool try_int;
MODE is known to be MODE_CC. Returns the last instruction emitted. */
static rtx_insn *
-emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
+emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
rtx_insn *ret;
static bool
undefined_operand_subword_p (const_rtx op, int i)
{
- enum machine_mode innermode, innermostmode;
+ machine_mode innermode, innermostmode;
int offset;
if (GET_CODE (op) != SUBREG)
return false;
patterns, even if they must turn into multiple assembler instructions. */
static rtx_insn *
-emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
+emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
rtx_insn *last_insn = 0;
rtx_insn *seq;
rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
enum insn_code code;
gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
rtx_insn *
emit_move_insn (rtx x, rtx y)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
rtx y_cst = NULL_RTX;
rtx_insn *last_insn;
rtx set;
static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
- enum machine_mode dstmode = GET_MODE (x);
- enum machine_mode orig_srcmode = GET_MODE (y);
- enum machine_mode srcmode;
+ machine_mode dstmode = GET_MODE (x);
+ machine_mode orig_srcmode = GET_MODE (y);
+ machine_mode srcmode;
REAL_VALUE_TYPE r;
int oldcost, newcost;
bool speed = optimize_insn_for_speed_p ();
/* Emit single push insn. */
static void
-emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
+emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
rtx dest_addr;
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* Emit and annotate a single push insn. */
static void
-emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
+emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
int delta, old_delta = stack_pointer_delta;
rtx_insn *prev = get_last_insn ();
of bytes required. */
void
-emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
+emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
unsigned int align, int partial, rtx reg, int extra,
rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
rtx alignment_pad)
unsigned HOST_WIDE_INT bitpos,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
- enum machine_mode mode1, rtx str_rtx,
+ machine_mode mode1, rtx str_rtx,
tree to, tree src)
{
- enum machine_mode str_mode = GET_MODE (str_rtx);
+ machine_mode str_mode = GET_MODE (str_rtx);
unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
tree op0, op1;
rtx value, result;
useful purpose. This can occur in Ada. */
if (handled_component_p (TREE_OPERAND (exp, 0)))
{
- enum machine_mode rmode;
+ machine_mode rmode;
HOST_WIDE_INT rbitsize, rbitpos;
tree roffset;
int unsignedp;
{
rtx to_rtx = 0;
rtx result;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int align;
enum insn_code icode;
&& mem_ref_refers_to_non_mem_p (to))
|| TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
{
- enum machine_mode mode1;
+ machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
unsigned HOST_WIDE_INT bitregion_start = 0;
unsigned HOST_WIDE_INT bitregion_end = 0;
if (offset != 0)
{
- enum machine_mode address_mode;
+ machine_mode address_mode;
rtx offset_rtx;
if (!MEM_P (to_rtx))
emit_storent_insn (rtx to, rtx from)
{
struct expand_operand ops[2];
- enum machine_mode mode = GET_MODE (to);
+ machine_mode mode = GET_MODE (to);
enum insn_code code = optab_handler (storent_optab, mode);
if (code == CODE_FOR_nothing)
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
- enum machine_mode pointer_mode
+ machine_mode pointer_mode
= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
- enum machine_mode address_mode = get_address_mode (target);
+ machine_mode address_mode = get_address_mode (target);
/* Compute the size of the data to copy from the string. */
tree copy_size
static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
- HOST_WIDE_INT bitpos, enum machine_mode mode,
+ HOST_WIDE_INT bitpos, machine_mode mode,
tree exp, int cleared, alias_set_type alias_set)
{
if (TREE_CODE (exp) == CONSTRUCTOR
corresponding field of TARGET. */
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos = 0;
tree offset;
if (offset)
{
- enum machine_mode address_mode;
+ machine_mode address_mode;
rtx offset_rtx;
offset
elements. */
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
rtx xtarget = target;
int icode = CODE_FOR_nothing;
tree elttype = TREE_TYPE (type);
int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
- enum machine_mode eltmode = TYPE_MODE (elttype);
+ machine_mode eltmode = TYPE_MODE (elttype);
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
rtvec vector = NULL;
n_elts = TYPE_VECTOR_SUBPARTS (type);
if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
{
- enum machine_mode mode = GET_MODE (target);
+ machine_mode mode = GET_MODE (target);
icode = (int) optab_handler (vec_init_optab, mode);
/* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
}
else
{
- enum machine_mode value_mode =
+ machine_mode value_mode =
TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
? TYPE_MODE (TREE_TYPE (value))
: eltmode;
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
- enum machine_mode mode, tree exp,
+ machine_mode mode, tree exp,
alias_set_type alias_set, bool nontemporal)
{
if (TREE_CODE (exp) == ERROR_MARK)
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, tree *poffset,
- enum machine_mode *pmode, int *punsignedp,
+ machine_mode *pmode, int *punsignedp,
int *pvolatilep, bool keep_aligning)
{
tree size_tree = 0;
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
bool blkmode_bitfield = false;
tree offset = size_zero_node;
offset_int bit_offset = 0;
The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
-expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
+expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
enum expand_modifier modifier, addr_space_t as)
{
rtx result, subtarget;
tree inner, offset;
HOST_WIDE_INT bitsize, bitpos;
int volatilep, unsignedp;
- enum machine_mode mode1;
+ machine_mode mode1;
/* If we are taking the address of a constant and are at the top level,
we have to use output_constant_def since we can't call force_const_mem
The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
-expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
+expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
enum expand_modifier modifier)
{
addr_space_t as = ADDR_SPACE_GENERIC;
- enum machine_mode address_mode = Pmode;
- enum machine_mode pointer_mode = ptr_mode;
- enum machine_mode rmode;
+ machine_mode address_mode = Pmode;
+ machine_mode pointer_mode = ptr_mode;
+ machine_mode rmode;
rtx result;
/* Target mode of VOIDmode says "whatever's natural". */
bool avoid_temp_mem)
{
tree type = TREE_TYPE (exp);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
/* Try to avoid creating a temporary at all. This is possible
if all of the initializer is zero.
The normal operating mode is to pass FALSE for this parameter. */
rtx
-expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
+expand_expr_real (tree exp, rtx target, machine_mode tmode,
enum expand_modifier modifier, rtx *alt_rtl,
bool inner_reference_p)
{
rtx insn;
rtx op00, op01, op1, op2;
enum rtx_code comparison_code;
- enum machine_mode comparison_mode;
+ machine_mode comparison_mode;
gimple srcstmt;
rtx temp;
tree type = TREE_TYPE (treeop1);
int unsignedp = TYPE_UNSIGNED (type);
- enum machine_mode mode = TYPE_MODE (type);
- enum machine_mode orig_mode = mode;
+ machine_mode mode = TYPE_MODE (type);
+ machine_mode orig_mode = mode;
/* If we cannot do a conditional move on the mode, try doing it
with the promoted mode. */
}
rtx
-expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
+expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
enum expand_modifier modifier)
{
rtx op0, op1, op2, temp;
tree type;
int unsignedp;
- enum machine_mode mode;
+ machine_mode mode;
enum tree_code code = ops->code;
optab this_optab;
rtx subtarget, original_target;
else if (CONSTANT_P (op0))
{
tree inner_type = TREE_TYPE (treeop0);
- enum machine_mode inner_mode = GET_MODE (op0);
+ machine_mode inner_mode = GET_MODE (op0);
if (inner_mode == VOIDmode)
inner_mode = TYPE_MODE (inner_type);
{
rtx constant_part;
HOST_WIDE_INT wc;
- enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
+ machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
op1 = expand_expr (treeop1, subtarget, VOIDmode,
EXPAND_SUM);
{
rtx constant_part;
HOST_WIDE_INT wc;
- enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
+ machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
op0 = expand_expr (treeop0, subtarget, VOIDmode,
(modifier == EXPAND_INITIALIZER
&& (TYPE_UNSIGNED (TREE_TYPE (treeop0))
!= TYPE_UNSIGNED (TREE_TYPE (treeop1))))
{
- enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
+ machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
this_optab = usmul_widen_optab;
if (find_widening_optab_handler (this_optab, mode, innermode, 0)
!= CODE_FOR_nothing)
== TYPE_UNSIGNED (TREE_TYPE (treeop0))))
{
tree op0type = TREE_TYPE (treeop0);
- enum machine_mode innermode = TYPE_MODE (op0type);
+ machine_mode innermode = TYPE_MODE (op0type);
bool zextend_p = TYPE_UNSIGNED (op0type);
optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
GET_MODE_INNER (GET_MODE (target)), 0);
if (reg_overlap_mentioned_p (temp, op1))
{
- enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
+ machine_mode imode = GET_MODE_INNER (GET_MODE (target));
temp = adjust_address_nv (target, imode,
GET_MODE_SIZE (imode));
if (reg_overlap_mentioned_p (temp, op0))
{
op0 = expand_normal (treeop0);
this_optab = optab_for_tree_code (code, type, optab_default);
- enum machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
+ machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
{
if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
{
tree sel_type = TREE_TYPE (treeop2);
- enum machine_mode vmode
+ machine_mode vmode
= mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
TYPE_VECTOR_SUBPARTS (sel_type));
gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
}
rtx
-expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
+expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
enum expand_modifier modifier, rtx *alt_rtl,
bool inner_reference_p)
{
rtx op0, op1, temp, decl_rtl;
tree type;
int unsignedp;
- enum machine_mode mode;
+ machine_mode mode;
enum tree_code code = TREE_CODE (exp);
rtx subtarget, original_target;
int ignore;
&& DECL_MODE (exp) != BLKmode
&& GET_MODE (decl_rtl) != DECL_MODE (exp))
{
- enum machine_mode pmode;
+ machine_mode pmode;
/* Get the signedness to be used for this variable. Ensure we get
the same mode we got when the variable was declared. */
/* Handle evaluating a complex constant in a CONCAT target. */
if (original_target && GET_CODE (original_target) == CONCAT)
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtx rtarg, itarg;
rtarg = XEXP (original_target, 0);
{
addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
- enum machine_mode address_mode;
+ machine_mode address_mode;
tree base = TREE_OPERAND (exp, 0);
gimple def_stmt;
enum insn_code icode;
if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
{
tree type = TREE_TYPE (TREE_TYPE (init));
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) == 1)
if (DECL_BIT_FIELD (field))
{
HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
- enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
+ machine_mode imode = TYPE_MODE (TREE_TYPE (field));
if (TYPE_UNSIGNED (TREE_TYPE (field)))
{
case ARRAY_RANGE_REF:
normal_inner_ref:
{
- enum machine_mode mode1, mode2;
+ machine_mode mode1, mode2;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int volatilep = 0, must_force_mem;
if (offset)
{
- enum machine_mode address_mode;
+ machine_mode address_mode;
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
EXPAND_SUM);
&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
bitsize)))
{
- enum machine_mode ext_mode = mode;
+ machine_mode ext_mode = mode;
if (ext_mode == BLKmode
&& ! (target != 0 && MEM_P (op0)
&& TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
&& handled_component_p (treeop0))
{
- enum machine_mode mode1;
+ machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int unsignedp;
}
else if (TYPE_UNSIGNED (type))
{
- enum machine_mode mode = GET_MODE (exp);
+ machine_mode mode = GET_MODE (exp);
rtx mask = immed_wide_int_const
(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
return expand_and (mode, exp, mask, target);
set/jump/set sequence. */
static rtx
-do_store_flag (sepops ops, rtx target, enum machine_mode mode)
+do_store_flag (sepops ops, rtx target, machine_mode mode)
{
enum rtx_code code;
tree arg0, arg1, type;
tree tem;
- enum machine_mode operand_mode;
+ machine_mode operand_mode;
int unsignedp;
rtx op0, op1;
rtx subtarget = target;
int default_probability)
{
struct expand_operand ops[5];
- enum machine_mode index_mode = SImode;
+ machine_mode index_mode = SImode;
rtx op1, op2, index;
if (! HAVE_casesi)
/* Convert the index to SImode. */
if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
{
- enum machine_mode omode = TYPE_MODE (index_type);
+ machine_mode omode = TYPE_MODE (index_type);
rtx rangertx = expand_normal (range);
/* We must handle the endpoints in the original mode. */
the default label. */
static void
-do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
+do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
rtx default_label, int default_probability)
{
rtx temp, vector;
unsigned i;
int units;
tree elt;
- enum machine_mode inner, mode;
+ machine_mode inner, mode;
mode = TYPE_MODE (TREE_TYPE (exp));
/* Generate code for a simple binary or unary operation. "Simple" in
this case means "can be unambiguously described by a (mode, code)
pair and mapped to a single optab." */
-extern rtx expand_simple_binop (enum machine_mode, enum rtx_code, rtx,
+extern rtx expand_simple_binop (machine_mode, enum rtx_code, rtx,
rtx, rtx, int, enum optab_methods);
-extern rtx expand_simple_unop (enum machine_mode, enum rtx_code, rtx, rtx,
+extern rtx expand_simple_unop (machine_mode, enum rtx_code, rtx, rtx,
int);
/* Report whether the machine description contains an insn which can
perform the operation described by CODE and MODE. */
-extern int have_insn_for (enum rtx_code, enum machine_mode);
+extern int have_insn_for (enum rtx_code, machine_mode);
/* Emit code to make a call to a constant function or a library call. */
extern void emit_libcall_block (rtx, rtx, rtx, rtx);
/* Emit a pair of rtl insns to compare two rtx's and to jump
to a label if the comparison is true. */
extern void emit_cmp_and_jump_insns (rtx, rtx, enum rtx_code, rtx,
- enum machine_mode, int, rtx, int prob=-1);
+ machine_mode, int, rtx, int prob=-1);
/* Generate code to indirectly jump to a location given in the rtx LOC. */
extern void emit_indirect_jump (rtx);
#ifdef HAVE_conditional_move
/* Emit a conditional move operation. */
-rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
- rtx, rtx, enum machine_mode, int);
+rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, machine_mode,
+ rtx, rtx, machine_mode, int);
/* Return nonzero if the conditional move is supported. */
-int can_conditionally_move_p (enum machine_mode mode);
+int can_conditionally_move_p (machine_mode mode);
#endif
-rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
- rtx, rtx, enum machine_mode, int);
+rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, machine_mode,
+ rtx, rtx, machine_mode, int);
rtx expand_sync_operation (rtx, rtx, enum rtx_code);
rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx);
/* Arguments MODE, RTX: return an rtx for the negation of that value.
May emit insns. */
-extern rtx negate_rtx (enum machine_mode, rtx);
+extern rtx negate_rtx (machine_mode, rtx);
/* Expand a logical AND operation. */
-extern rtx expand_and (enum machine_mode, rtx, rtx, rtx);
+extern rtx expand_and (machine_mode, rtx, rtx, rtx);
/* Emit a store-flag operation. */
-extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
+extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, machine_mode,
int, int);
/* Like emit_store_flag, but always succeeds. */
extern rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx,
- enum machine_mode, int, int);
+ machine_mode, int, int);
/* Choose a minimal N + 1 bit approximation to 1/D that can be used to
replace division by D, and put the least significant N bits of the result
extern void convert_move (rtx, rtx, int);
/* Convert an rtx to specified machine mode and return the result. */
-extern rtx convert_to_mode (enum machine_mode, rtx, int);
+extern rtx convert_to_mode (machine_mode, rtx, int);
/* Convert an rtx to MODE from OLDMODE and return the result. */
-extern rtx convert_modes (enum machine_mode, enum machine_mode, rtx, int);
+extern rtx convert_modes (machine_mode, machine_mode, rtx, int);
/* Emit code to move a block Y to a block X. */
/* Copy all or part of a value X into registers starting at REGNO.
The number of registers to be filled is NREGS. */
-extern void move_block_to_reg (int, rtx, int, enum machine_mode);
+extern void move_block_to_reg (int, rtx, int, machine_mode);
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
The number of registers to be filled is NREGS. */
/* Mark REG as holding a parameter for the next CALL_INSN.
Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode. */
-extern void use_reg_mode (rtx *, rtx, enum machine_mode);
-extern void clobber_reg_mode (rtx *, rtx, enum machine_mode);
+extern void use_reg_mode (rtx *, rtx, machine_mode);
+extern void clobber_reg_mode (rtx *, rtx, machine_mode);
-extern rtx copy_blkmode_to_reg (enum machine_mode, tree);
+extern rtx copy_blkmode_to_reg (machine_mode, tree);
/* Mark REG as holding a parameter for the next CALL_INSN. */
static inline void
of a const string. */
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
rtx (*) (void *, HOST_WIDE_INT,
- enum machine_mode),
+ machine_mode),
void *, unsigned int, bool);
/* Generate several move instructions to store LEN bytes generated by
MEMSETP is true if this is a real memset/bzero, not a copy.
Returns TO + LEN. */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
- rtx (*) (void *, HOST_WIDE_INT, enum machine_mode),
+ rtx (*) (void *, HOST_WIDE_INT, machine_mode),
void *, unsigned int, bool, int);
/* Emit insns to set X from Y. */
/* Emit insns to set X from Y, with no frills. */
extern rtx_insn *emit_move_insn_1 (rtx, rtx);
-extern rtx_insn *emit_move_complex_push (enum machine_mode, rtx, rtx);
+extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx);
extern rtx_insn *emit_move_complex_parts (rtx, rtx);
-extern rtx emit_move_resolve_push (enum machine_mode, rtx);
+extern rtx emit_move_resolve_push (machine_mode, rtx);
/* Push a block of length SIZE (perhaps variable)
and return an rtx to address the beginning of the block. */
extern rtx push_block (rtx, int, int);
/* Generate code to push something onto the stack, given its mode and type. */
-extern void emit_push_insn (rtx, enum machine_mode, tree, rtx, unsigned int,
+extern void emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
int, rtx, int, rtx, rtx, int, rtx);
/* Expand an assignment that stores the value of FROM into TO. */
extern rtx force_operand (rtx, rtx);
/* Work horses for expand_expr. */
-extern rtx expand_expr_real (tree, rtx, enum machine_mode,
+extern rtx expand_expr_real (tree, rtx, machine_mode,
enum expand_modifier, rtx *, bool);
-extern rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
+extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
enum expand_modifier, rtx *, bool);
-extern rtx expand_expr_real_2 (sepops, rtx, enum machine_mode,
+extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
enum expand_modifier);
/* Generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
In the case of a void EXP, const0_rtx is returned. */
static inline rtx
-expand_expr (tree exp, rtx target, enum machine_mode mode,
+expand_expr (tree exp, rtx target, machine_mode mode,
enum expand_modifier modifier)
{
return expand_expr_real (exp, target, mode, modifier, NULL, false);
extern void do_jump_1 (enum tree_code, tree, tree, rtx, rtx, int);
extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
- enum machine_mode, rtx, rtx, rtx, int);
+ machine_mode, rtx, rtx, rtx, int);
/* Two different ways of generating switch statements. */
extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, int);
extern rtx prepare_call_address (tree, rtx, rtx, rtx *, int, int);
-extern bool shift_return_value (enum machine_mode, bool, rtx);
+extern bool shift_return_value (machine_mode, bool, rtx);
extern rtx expand_call (tree, rtx, int);
extern void fixup_tail_calls (void);
#ifdef TREE_CODE
-extern rtx expand_variable_shift (enum tree_code, enum machine_mode,
+extern rtx expand_variable_shift (enum tree_code, machine_mode,
rtx, tree, rtx, int);
-extern rtx expand_shift (enum tree_code, enum machine_mode, rtx, int, rtx,
+extern rtx expand_shift (enum tree_code, machine_mode, rtx, int, rtx,
int);
-extern rtx expand_divmod (int, enum tree_code, enum machine_mode, rtx, rtx,
+extern rtx expand_divmod (int, enum tree_code, machine_mode, rtx, rtx,
rtx, int);
#endif
/* Convert arg to a valid memory address for specified machine mode that points
to a specific named address space, by emitting insns to perform arithmetic
if necessary. */
-extern rtx memory_address_addr_space (enum machine_mode, rtx, addr_space_t);
+extern rtx memory_address_addr_space (machine_mode, rtx, addr_space_t);
/* Like memory_address_addr_space, except assume the memory address points to
the generic named address space. */
to MODE and its address changed to ADDR.
(VOIDmode means don't change the mode.
NULL for ADDR means don't change the address.) */
-extern rtx change_address (rtx, enum machine_mode, rtx);
+extern rtx change_address (rtx, machine_mode, rtx);
/* Return a memory reference like MEMREF, but with its mode changed
to MODE and its address offset by OFFSET bytes. */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)
-extern rtx adjust_address_1 (rtx, enum machine_mode, HOST_WIDE_INT, int, int,
+extern rtx adjust_address_1 (rtx, machine_mode, HOST_WIDE_INT, int, int,
int, HOST_WIDE_INT);
-extern rtx adjust_automodify_address_1 (rtx, enum machine_mode, rtx,
+extern rtx adjust_automodify_address_1 (rtx, machine_mode, rtx,
HOST_WIDE_INT, int);
/* Return a memory reference like MEMREF, but whose address is changed by
/* Return a memory reference like MEMREF, but with its mode widened to
MODE and adjusted by OFFSET. */
-extern rtx widen_memory_access (rtx, enum machine_mode, HOST_WIDE_INT);
+extern rtx widen_memory_access (rtx, machine_mode, HOST_WIDE_INT);
/* Return a memory reference like MEMREF, but which is known to have a
valid address. */
extern rtx copy_addr_to_reg (rtx);
/* Like copy_to_reg but always make the reg the specified mode MODE. */
-extern rtx copy_to_mode_reg (enum machine_mode, rtx);
+extern rtx copy_to_mode_reg (machine_mode, rtx);
/* Copy given rtx to given temp reg and return that. */
-extern rtx copy_to_suggested_reg (rtx, rtx, enum machine_mode);
+extern rtx copy_to_suggested_reg (rtx, rtx, machine_mode);
/* Copy a value to a register if it isn't already a register.
Args are mode (in case value is a constant) and the value. */
-extern rtx force_reg (enum machine_mode, rtx);
+extern rtx force_reg (machine_mode, rtx);
/* Return given rtx, copied into a new temp reg if it was in memory. */
extern rtx force_not_mem (rtx);
/* Return mode and signedness to use when an argument or result in the
given mode is promoted. */
-extern enum machine_mode promote_function_mode (const_tree, enum machine_mode, int *,
+extern machine_mode promote_function_mode (const_tree, machine_mode, int *,
const_tree, int);
/* Return mode and signedness to use when an object in the given mode
is promoted. */
-extern enum machine_mode promote_mode (const_tree, enum machine_mode, int *);
+extern machine_mode promote_mode (const_tree, machine_mode, int *);
/* Return mode and signedness to use when object is promoted. */
-enum machine_mode promote_decl_mode (const_tree, int *);
+machine_mode promote_decl_mode (const_tree, int *);
/* Remove some bytes from the stack. An rtx says how many. */
extern void adjust_stack (rtx);
/* Return an rtx that refers to the value returned by a library call
in its original home. This becomes invalid if any more code is emitted. */
-extern rtx hard_libcall_value (enum machine_mode, rtx);
+extern rtx hard_libcall_value (machine_mode, rtx);
extern void store_bit_field (rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
- enum machine_mode, rtx);
+ machine_mode, rtx);
extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, int, rtx,
- enum machine_mode, enum machine_mode);
-extern rtx extract_low_bits (enum machine_mode, enum machine_mode, rtx);
-extern rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int);
-extern rtx expand_mult_highpart_adjust (enum machine_mode, rtx, rtx, rtx, rtx, int);
+ machine_mode, machine_mode);
+extern rtx extract_low_bits (machine_mode, machine_mode, rtx);
+extern rtx expand_mult (machine_mode, rtx, rtx, rtx, int);
+extern rtx expand_mult_highpart_adjust (machine_mode, rtx, rtx, rtx, rtx, int);
extern rtx assemble_static_space (unsigned HOST_WIDE_INT);
extern int safe_from_p (const_rtx, tree, int);
-extern bool split_comparison (enum rtx_code, enum machine_mode,
+extern bool split_comparison (enum rtx_code, machine_mode,
enum rtx_code *, enum rtx_code *);
/* Call this once to initialize the contents of the optabs
rtx_insn *prev;
int rel_align = 0;
addr_diff_vec_flags flags;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
/* Avoid automatic aggregate initialization. */
flags = ADDR_DIFF_VEC_FLAGS (body);
FIXED_MAX_EPS, if it is equal to the maximum plus the epsilon. */
static enum fixed_value_range_code
-check_real_for_fixed_mode (REAL_VALUE_TYPE *real_value, enum machine_mode mode)
+check_real_for_fixed_mode (REAL_VALUE_TYPE *real_value, machine_mode mode)
{
REAL_VALUE_TYPE max_value, min_value, epsilon_value;
The bits in PAYLOAD are sign-extended/zero-extended according to MODE. */
FIXED_VALUE_TYPE
-fixed_from_double_int (double_int payload, enum machine_mode mode)
+fixed_from_double_int (double_int payload, machine_mode mode)
{
FIXED_VALUE_TYPE value;
/* Initialize from a decimal or hexadecimal string. */
void
-fixed_from_string (FIXED_VALUE_TYPE *f, const char *str, enum machine_mode mode)
+fixed_from_string (FIXED_VALUE_TYPE *f, const char *str, machine_mode mode)
{
REAL_VALUE_TYPE real_value, fixed_value, base_value;
unsigned int fbit;
Return true, if !SAT_P and overflow. */
static bool
-fixed_saturate1 (enum machine_mode mode, double_int a, double_int *f,
+fixed_saturate1 (machine_mode mode, double_int a, double_int *f,
bool sat_p)
{
bool overflow_p = false;
Return true, if !SAT_P and overflow. */
static bool
-fixed_saturate2 (enum machine_mode mode, double_int a_high, double_int a_low,
+fixed_saturate2 (machine_mode mode, double_int a_high, double_int a_low,
double_int *f, bool sat_p)
{
bool overflow_p = false;
Return true, if !SAT_P and overflow. */
bool
-fixed_convert (FIXED_VALUE_TYPE *f, enum machine_mode mode,
+fixed_convert (FIXED_VALUE_TYPE *f, machine_mode mode,
const FIXED_VALUE_TYPE *a, bool sat_p)
{
bool overflow_p = false;
Return true, if !SAT_P and overflow. */
bool
-fixed_convert_from_int (FIXED_VALUE_TYPE *f, enum machine_mode mode,
+fixed_convert_from_int (FIXED_VALUE_TYPE *f, machine_mode mode,
double_int a, bool unsigned_p, bool sat_p)
{
bool overflow_p = false;
Return true, if !SAT_P and overflow. */
bool
-fixed_convert_from_real (FIXED_VALUE_TYPE *f, enum machine_mode mode,
+fixed_convert_from_real (FIXED_VALUE_TYPE *f, machine_mode mode,
const REAL_VALUE_TYPE *a, bool sat_p)
{
bool overflow_p = false;
/* Convert to a new real mode from a fixed-point. */
void
-real_convert_from_fixed (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_convert_from_fixed (REAL_VALUE_TYPE *r, machine_mode mode,
const FIXED_VALUE_TYPE *f)
{
REAL_VALUE_TYPE base_value, fixed_value, real_value;
struct GTY(()) fixed_value
{
double_int data; /* Store data up to 2 wide integers. */
- enum machine_mode mode; /* Use machine mode to know IBIT and FBIT. */
+ machine_mode mode; /* Use machine mode to know IBIT and FBIT. */
};
#define FIXED_VALUE_TYPE struct fixed_value
/* Return a CONST_FIXED with value R and mode M. */
#define CONST_FIXED_FROM_FIXED_VALUE(r, m) \
const_fixed_from_fixed_value (r, m)
-extern rtx const_fixed_from_fixed_value (FIXED_VALUE_TYPE, enum machine_mode);
+extern rtx const_fixed_from_fixed_value (FIXED_VALUE_TYPE, machine_mode);
/* Construct a FIXED_VALUE from a bit payload and machine mode MODE.
The bits in PAYLOAD are sign-extended/zero-extended according to MODE. */
extern FIXED_VALUE_TYPE fixed_from_double_int (double_int,
- enum machine_mode);
+ machine_mode);
/* Return a CONST_FIXED from a bit payload and machine mode MODE.
The bits in PAYLOAD are sign-extended/zero-extended according to MODE. */
static inline rtx
const_fixed_from_double_int (double_int payload,
- enum machine_mode mode)
+ machine_mode mode)
{
return
const_fixed_from_fixed_value (fixed_from_double_int (payload, mode),
/* Initialize from a decimal or hexadecimal string. */
extern void fixed_from_string (FIXED_VALUE_TYPE *, const char *,
- enum machine_mode);
+ machine_mode);
/* In tree.c: wrap up a FIXED_VALUE_TYPE in a tree node. */
extern tree build_fixed (tree, FIXED_VALUE_TYPE);
/* Extend or truncate to a new mode. */
-extern bool fixed_convert (FIXED_VALUE_TYPE *, enum machine_mode,
+extern bool fixed_convert (FIXED_VALUE_TYPE *, machine_mode,
const FIXED_VALUE_TYPE *, bool);
/* Convert to a fixed-point mode from an integer. */
-extern bool fixed_convert_from_int (FIXED_VALUE_TYPE *, enum machine_mode,
+extern bool fixed_convert_from_int (FIXED_VALUE_TYPE *, machine_mode,
double_int, bool, bool);
/* Convert to a fixed-point mode from a real. */
-extern bool fixed_convert_from_real (FIXED_VALUE_TYPE *, enum machine_mode,
+extern bool fixed_convert_from_real (FIXED_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *, bool);
/* Convert to a real mode from a fixed-point. */
-extern void real_convert_from_fixed (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_convert_from_fixed (REAL_VALUE_TYPE *, machine_mode,
const FIXED_VALUE_TYPE *);
/* Compare two fixed-point objects for bitwise identity. */
tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
HOST_WIDE_INT *,
- enum machine_mode *, int *, int *,
+ machine_mode *, int *, int *,
tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
if (TREE_CODE (arg1) == REAL_CST)
{
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
tree type = TREE_TYPE (lhs);
tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
- enum machine_mode lmode, rmode, nmode;
+ machine_mode lmode, rmode, nmode;
int lunsignedp, runsignedp;
int lvolatilep = 0, rvolatilep = 0;
tree linner, rinner = NULL_TREE;
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
- HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
+ HOST_WIDE_INT *pbitpos, machine_mode *pmode,
int *punsignedp, int *pvolatilep,
tree *pmask, tree *pand_mask)
{
HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
- enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
- enum machine_mode lnmode, rnmode;
+ machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
+ machine_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
if (BUILTIN_SQRT_P (fcode))
{
tree arg = CALL_EXPR_ARG (arg0, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
c = TREE_REAL_CST (arg1);
if (REAL_VALUE_NEGATIVE (c))
fold_inf_compare (location_t loc, enum tree_code code, tree type,
tree arg0, tree arg1)
{
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE max;
tree temp;
bool neg;
tree inner = TREE_OPERAND (arg0, 0);
tree type = TREE_TYPE (arg0);
int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- enum machine_mode operand_mode = TYPE_MODE (type);
+ machine_mode operand_mode = TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
tree tem, one;
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
FIXED_VALUE_TYPE value;
tree i_value, i_type;
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
int byte, offset, word, words, bitpos;
unsigned char value;
{
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
tree base = TREE_OPERAND (op0, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
{
tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep, unsignedp;
bool indirect_base0 = false, indirect_base1 = false;
{
REAL_VALUE_TYPE r;
tree unit_type, *elts;
- enum machine_mode mode;
+ machine_mode mode;
unsigned vec_nelts, i;
switch (TREE_CODE (cst))
HOST_WIDE_INT *pbitpos, tree *poffset)
{
tree core;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
HOST_WIDE_INT bitsize;
location_t loc = EXPR_LOCATION (exp);
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * trans-types.c, trans-types.h: Remove redundant enum from
+ machine_mode.
+
2014-10-28 Manuel López-Ibáñez <manu@gcc.gnu.org>
PR fortran/44054
{
int kind, bitsize;
- if (!targetm.scalar_mode_supported_p ((enum machine_mode) mode))
+ if (!targetm.scalar_mode_supported_p ((machine_mode) mode))
continue;
/* The middle end doesn't support constants larger than 2*HWI.
Perhaps the target hook shouldn't have accepted these either,
but just to be safe... */
- bitsize = GET_MODE_BITSIZE ((enum machine_mode) mode);
+ bitsize = GET_MODE_BITSIZE ((machine_mode) mode);
if (bitsize > 2*HOST_BITS_PER_WIDE_INT)
continue;
for (r_index = 0, mode = MIN_MODE_FLOAT; mode <= MAX_MODE_FLOAT; mode++)
{
const struct real_format *fmt =
- REAL_MODE_FORMAT ((enum machine_mode) mode);
+ REAL_MODE_FORMAT ((machine_mode) mode);
int kind;
if (fmt == NULL)
continue;
- if (!targetm.scalar_mode_supported_p ((enum machine_mode) mode))
+ if (!targetm.scalar_mode_supported_p ((machine_mode) mode))
continue;
/* Only let float, double, long double and __float128 go through.
Runtime support for others is not provided, so they would be
useless. */
- if (!targetm.libgcc_floating_mode_supported_p ((enum machine_mode)
+ if (!targetm.libgcc_floating_mode_supported_p ((machine_mode)
mode))
continue;
if (mode != TYPE_MODE (float_type_node)
integer, then UNSIGNEDP selects between signed and unsigned types. */
tree
-gfc_type_for_mode (enum machine_mode mode, int unsignedp)
+gfc_type_for_mode (machine_mode mode, int unsignedp)
{
int i;
tree *base;
}
else if (VECTOR_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
tree inner_type = gfc_type_for_mode (inner_mode, unsignedp);
if (inner_type != NULL_TREE)
return build_vector_type_for_mode (inner_type, mode);
tree gfc_get_function_type (gfc_symbol *);
tree gfc_type_for_size (unsigned, int);
-tree gfc_type_for_mode (enum machine_mode, int);
+tree gfc_type_for_mode (machine_mode, int);
tree gfc_build_uint_type (int);
tree gfc_get_element_type (tree);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
-static void pad_below (struct args_size *, enum machine_mode, tree);
+static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
/* Return stack slot alignment in bits for TYPE and MODE. */
static unsigned int
-get_stack_local_alignment (tree type, enum machine_mode mode)
+get_stack_local_alignment (tree type, machine_mode mode)
{
unsigned int alignment;
We do not round to stack_boundary here. */
rtx
-assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
+assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
int align, int kind)
{
rtx x, addr;
/* Wrap up assign_stack_local_1 with last parameter as false. */
rtx
-assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
+assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
TYPE is the type that will be used for the stack slot. */
rtx
-assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
+assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
tree type)
{
unsigned int align;
reuse. First two arguments are same as in preceding function. */
rtx
-assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
+assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
int dont_promote ATTRIBUTE_UNUSED)
{
tree type, decl;
- enum machine_mode mode;
+ machine_mode mode;
#ifdef PROMOTE_MODE
int unsignedp;
#endif
initial value of hard register REGNO. Return an rtx for such a pseudo. */
rtx
-get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
+get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
struct initial_value_struct *ivs;
rtx rv;
the associated pseudo if so, otherwise return NULL. */
rtx
-has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
+has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
struct initial_value_struct *ivs;
int i;
/* Return true if TYPE should be passed by invisible reference. */
bool
-pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
tree type, bool named_arg)
{
if (type)
copied instead of caller copied. */
bool
-reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
tree type, bool named_arg)
{
if (type && TREE_ADDRESSABLE (type))
tree passed_type;
rtx entry_parm;
rtx stack_parm;
- enum machine_mode nominal_mode;
- enum machine_mode passed_mode;
- enum machine_mode promoted_mode;
+ machine_mode nominal_mode;
+ machine_mode passed_mode;
+ machine_mode promoted_mode;
struct locate_and_pad_arg_data locate;
int partial;
BOOL_BITFIELD named_arg : 1;
struct assign_parm_data_one *data)
{
tree nominal_type, passed_type;
- enum machine_mode nominal_mode, passed_mode, promoted_mode;
+ machine_mode nominal_mode, passed_mode, promoted_mode;
int unsignedp;
memset (data, 0, sizeof (*data));
that mode's store operation. */
else if (size <= UNITS_PER_WORD)
{
- enum machine_mode mode
+ machine_mode mode
= mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
if (mode != BLKmode
{
rtx parmreg, validated_mem;
rtx equiv_stack_parm;
- enum machine_mode promoted_nominal_mode;
+ machine_mode promoted_nominal_mode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
bool did_conversion = false;
bool need_conversion, moved;
/* Mark complex types separately. */
if (GET_CODE (parmreg) == CONCAT)
{
- enum machine_mode submode
+ machine_mode submode
= GET_MODE_INNER (GET_MODE (parmreg));
int regnor = REGNO (XEXP (parmreg, 0));
int regnoi = REGNO (XEXP (parmreg, 1));
&& targetm.calls.split_complex_arg (TREE_TYPE (parm)))
{
rtx tmp, real, imag;
- enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
+ machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
real = DECL_RTL (fnargs[i]);
imag = DECL_RTL (fnargs[i + 1]);
if (DECL_RESULT (fndecl))
{
tree type = TREE_TYPE (DECL_RESULT (fndecl));
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (mode != BLKmode
&& mode != VOIDmode
INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
void
-locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
+locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
int reg_parm_stack_space, int partial,
tree fndecl ATTRIBUTE_UNUSED,
struct args_size *initial_offset_ptr,
}
static void
-pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
+pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
{
if (passed_mode != BLKmode)
{
return FALSE. */
extern bool frame_offset_overflow (HOST_WIDE_INT, tree);
-extern rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int, int);
-extern rtx assign_stack_local (enum machine_mode, HOST_WIDE_INT, int);
-extern rtx assign_stack_temp_for_type (enum machine_mode, HOST_WIDE_INT, tree);
-extern rtx assign_stack_temp (enum machine_mode, HOST_WIDE_INT);
+extern rtx assign_stack_local_1 (machine_mode, HOST_WIDE_INT, int, int);
+extern rtx assign_stack_local (machine_mode, HOST_WIDE_INT, int);
+extern rtx assign_stack_temp_for_type (machine_mode, HOST_WIDE_INT, tree);
+extern rtx assign_stack_temp (machine_mode, HOST_WIDE_INT);
extern rtx assign_temp (tree, int, int);
extern void update_temp_slot_address (rtx, rtx);
extern void preserve_temp_slots (rtx);
extern void pop_temp_slots (void);
extern void init_temp_slots (void);
extern rtx get_hard_reg_initial_reg (rtx);
-extern rtx get_hard_reg_initial_val (enum machine_mode, unsigned int);
-extern rtx has_hard_reg_initial_val (enum machine_mode, unsigned int);
+extern rtx get_hard_reg_initial_val (machine_mode, unsigned int);
+extern rtx has_hard_reg_initial_val (machine_mode, unsigned int);
/* Called from gimple_expand_cfg. */
extern unsigned int emit_initial_value_sets (void);
extern void instantiate_decl_rtl (rtx x);
extern int aggregate_value_p (const_tree, const_tree);
extern bool use_register_for_decl (const_tree);
-extern bool pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+extern bool pass_by_reference (CUMULATIVE_ARGS *, machine_mode,
tree, bool);
-extern bool reference_callee_copied (CUMULATIVE_ARGS *, enum machine_mode,
+extern bool reference_callee_copied (CUMULATIVE_ARGS *, machine_mode,
tree, bool);
extern gimple_seq gimplify_parameters (void);
-extern void locate_and_pad_parm (enum machine_mode, tree, int, int, int,
+extern void locate_and_pad_parm (machine_mode, tree, int, int, int,
tree, struct args_size *,
struct locate_and_pad_arg_data *);
extern void generate_setjmp_warnings (void);
for a memory access in the given MODE. */
static bool
-should_replace_address (rtx old_rtx, rtx new_rtx, enum machine_mode mode,
+should_replace_address (rtx old_rtx, rtx new_rtx, machine_mode mode,
addr_space_t as, bool speed)
{
int gain;
{
rtx x = *px, tem = NULL_RTX, op0, op1, op2;
enum rtx_code code = GET_CODE (x);
- enum machine_mode mode = GET_MODE (x);
- enum machine_mode op_mode;
+ machine_mode mode = GET_MODE (x);
+ machine_mode op_mode;
bool can_appear = (flags & PR_CAN_APPEAR) != 0;
bool valid_ops = true;
Otherwise, we accept simplifications that have a lower or equal cost. */
static rtx
-propagate_rtx (rtx x, enum machine_mode mode, rtx old_rtx, rtx new_rtx,
+propagate_rtx (rtx x, machine_mode mode, rtx old_rtx, rtx new_rtx,
bool speed)
{
rtx tem;
rtx src;
/* Only consider subregs... */
- enum machine_mode use_mode = GET_MODE (use_reg);
+ machine_mode use_mode = GET_MODE (use_reg);
if (GET_CODE (use_reg) != SUBREG
|| !REG_P (SET_DEST (def_set)))
return false;
rtx use_set = single_set (use_insn);
rtx src, reg, new_rtx, *loc;
bool set_reg_equal;
- enum machine_mode mode;
+ machine_mode mode;
int asm_use = -1;
if (INSN_CODE (use_insn) < 0)
static int oprs_unchanged_p (const_rtx, const rtx_insn *, int);
static int oprs_anticipatable_p (const_rtx, const rtx_insn *);
static int oprs_available_p (const_rtx, const rtx_insn *);
-static void insert_expr_in_table (rtx, enum machine_mode, rtx_insn *, int, int,
+static void insert_expr_in_table (rtx, machine_mode, rtx_insn *, int, int,
int, struct gcse_hash_table_d *);
-static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
+static unsigned int hash_expr (const_rtx, machine_mode, int *, int);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
#ifdef AVOID_CCMODE_COPIES
can_copy[i] = 0;
#else
- reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
+ reg = gen_rtx_REG ((machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
if (recog (PATTERN (insn), insn, NULL) >= 0)
can_copy[i] = 1;
/* Returns whether the mode supports reg/reg copy operations. */
bool
-can_copy_p (enum machine_mode mode)
+can_copy_p (machine_mode mode)
{
if (! can_copy_init_p)
{
the current size of the hash table to be probed. */
static unsigned int
-hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
+hash_expr (const_rtx x, machine_mode mode, int *do_not_record_p,
int hash_table_size)
{
unsigned int hash;
be moved. */
static void
-insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
+insert_expr_in_table (rtx x, machine_mode mode, rtx_insn *insn,
int antic_p,
int avail_p, int max_distance, struct gcse_hash_table_d *table)
{
/* Start by writing the definition of the function name and the types
of the arguments. */
- printf ("static inline rtx\ngen_rtx_fmt_%s_stat (RTX_CODE code, enum machine_mode mode", format);
+ printf ("static inline rtx\ngen_rtx_fmt_%s_stat (RTX_CODE code, machine_mode mode", format);
for (p = format, i = 0; *p != 0; p++)
if (*p != '0')
printf (",\n\t%sarg%d", type_from_format (*p), i++);
POS_HERE (do_scalar_typedef ("jword", &pos));
POS_HERE (do_scalar_typedef ("JCF_u2", &pos));
POS_HERE (do_scalar_typedef ("void", &pos));
+ POS_HERE (do_scalar_typedef ("machine_mode", &pos));
POS_HERE (do_typedef ("PTR",
create_pointer (resolve_typedef ("void", &pos)),
&pos));
extern __inline__ __attribute__((__always_inline__, __gnu_inline__))\n\
#endif\n\
unsigned char\n\
-mode_size_inline (enum machine_mode mode)\n\
+mode_size_inline (machine_mode mode)\n\
{\n\
extern %sunsigned char mode_size[NUM_MACHINE_MODES];\n\
switch (mode)\n\
extern __inline__ __attribute__((__always_inline__, __gnu_inline__))\n\
#endif\n\
unsigned char\n\
-mode_nunits_inline (enum machine_mode mode)\n\
+mode_nunits_inline (machine_mode mode)\n\
{\n\
extern const unsigned char mode_nunits[NUM_MACHINE_MODES];\n\
switch (mode)\n\
extern __inline__ __attribute__((__always_inline__, __gnu_inline__))\n\
#endif\n\
unsigned char\n\
-mode_inner_inline (enum machine_mode mode)\n\
+mode_inner_inline (machine_mode mode)\n\
{\n\
extern const unsigned char mode_inner[NUM_MACHINE_MODES];\n\
switch (mode)\n\
printf ("#define NUM_INT_N_ENTS %d\n", n_int_n_ents);
- puts ("\n#if GCC_VERSION >= 4001\n");
+ puts ("\n#if !defined (USED_FOR_TARGET) && GCC_VERSION >= 4001\n");
emit_mode_size_inline ();
emit_mode_nunits_inline ();
emit_mode_inner_inline ();
fprintf (s_file,
"bool\n"
- "swap_optab_enable (optab op, enum machine_mode m, bool set)\n"
+ "swap_optab_enable (optab op, machine_mode m, bool set)\n"
"{\n"
" unsigned scode = (op << 16) | m;\n"
" int i = lookup_handler (scode);\n"
int index;
const char *predicate;
const char *constraint;
- enum machine_mode mode;
+ machine_mode mode;
unsigned char n_alternatives;
char address_p;
char strict_low;
becomes
- static inline int basereg_operand_1(rtx op, enum machine_mode mode)
+ static inline int basereg_operand_1(rtx op, machine_mode mode)
{
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
p->exp = and_exp;
printf ("static inline int\n"
- "%s_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)\n",
+ "%s_1 (rtx op, machine_mode mode ATTRIBUTE_UNUSED)\n",
p->name);
print_md_ptr_loc (p->c_block);
if (p->c_block[0] == '{')
write_predicate_subfunction (p);
add_mode_tests (p);
- /* A normal predicate can legitimately not look at enum machine_mode
+ /* A normal predicate can legitimately not look at machine_mode
if it accepts only CONST_INTs and/or CONST_WIDE_INT and/or CONST_DOUBLEs. */
- printf ("int\n%s (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)\n{\n",
+ printf ("int\n%s (rtx op, machine_mode mode ATTRIBUTE_UNUSED)\n{\n",
p->name);
write_predicate_stmts (p->exp);
fputs ("}\n\n", stdout);
"{\n", c->c_name,
needs_op ? "op" : "ARG_UNUSED (op)");
if (needs_mode)
- puts (" enum machine_mode mode = GET_MODE (op);");
+ puts (" machine_mode mode = GET_MODE (op);");
if (needs_ival)
puts (" HOST_WIDE_INT ival = 0;");
if (needs_hval)
#ifdef HAVE_MACHINE_MODES");
FOR_ALL_PREDICATES (p)
- printf ("extern int %s (rtx, enum machine_mode);\n", p->name);
+ printf ("extern int %s (rtx, machine_mode);\n", p->name);
puts ("#endif /* HAVE_MACHINE_MODES */\n");
union
{
int num_insns; /* Number if insn in a define_peephole2. */
- enum machine_mode mode; /* Machine mode of node. */
+ machine_mode mode; /* Machine mode of node. */
RTX_CODE code; /* Code to test. */
struct
const char *name; /* Predicate to call. */
const struct pred_data *data;
/* Optimization hints for this predicate. */
- enum machine_mode mode; /* Machine mode for node. */
+ machine_mode mode; /* Machine mode for node. */
} pred;
const char *c_test; /* Additional test to perform. */
case SET:
{
- enum machine_mode dmode, smode;
+ machine_mode dmode, smode;
rtx dest, src;
dest = SET_DEST (pattern);
size_t i;
const char *fmt;
int len;
- enum machine_mode mode;
+ machine_mode mode;
enum position_type pos_type;
if (pos->depth > max_depth)
the gensupport programs. */
rtx
-gen_rtx_CONST_INT (enum machine_mode ARG_UNUSED (mode),
+gen_rtx_CONST_INT (machine_mode ARG_UNUSED (mode),
HOST_WIDE_INT arg)
{
rtx rt = rtx_alloc (CONST_INT);
stmt_cost (gimple gs, bool speed)
{
tree lhs, rhs1, rhs2;
- enum machine_mode lhs_mode;
+ machine_mode lhs_mode;
gcc_assert (is_gimple_assign (gs));
lhs = gimple_assign_lhs (gs);
{
tree ref_expr, base, offset, type;
HOST_WIDE_INT bitsize, bitpos;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
slsr_cand_t c;
up sometime. */
static void
-analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
+analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
{
unsigned i;
less expensive to calculate than the replaced statements. */
else
{
- enum machine_mode mode;
+ machine_mode mode;
bool speed;
/* Determine whether we'll be generating pointer arithmetic
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * go-lang.c: Remove redundant enum from machine_mode.
+
2014-10-28 Andrew MacLeod <amacleod@redhat.com>
* go-gcc.cc: Adjust include files.
}
static tree
-go_langhook_type_for_mode (enum machine_mode mode, int unsignedp)
+go_langhook_type_for_mode (machine_mode mode, int unsignedp)
{
tree type;
/* Go has no vector types. Build them here. FIXME: It does not
static tree
max_precision_type (tree type1, tree type2)
{
- enum machine_mode mode;
+ machine_mode mode;
int p1, p2, precision;
tree type;
{
bool unsigned_p;
tree type;
- enum machine_mode mode;
+ machine_mode mode;
int wider_precision;
int precision = MAX (mpz_sizeinbase (bound_one, 2),
mpz_sizeinbase (bound_two, 2));
{
int i, change, before, after, hard_regno;
int excess_cost_change;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class cl;
struct reg_pressure_data *pressure_info;
int *max_reg_pressure;
return true;
}
-/* Generic hook that takes (enum machine_mode) and returns false. */
+/* Generic hook that takes (machine_mode) and returns false. */
bool
-hook_bool_mode_false (enum machine_mode mode ATTRIBUTE_UNUSED)
+hook_bool_mode_false (machine_mode mode ATTRIBUTE_UNUSED)
{
return false;
}
-/* Generic hook that takes (enum machine_mode) and returns true. */
+/* Generic hook that takes (machine_mode) and returns true. */
bool
-hook_bool_mode_true (enum machine_mode mode ATTRIBUTE_UNUSED)
+hook_bool_mode_true (machine_mode mode ATTRIBUTE_UNUSED)
{
return true;
}
-/* Generic hook that takes (enum machine_mode, const_rtx) and returns false. */
+/* Generic hook that takes (machine_mode, const_rtx) and returns false. */
bool
-hook_bool_mode_const_rtx_false (enum machine_mode mode ATTRIBUTE_UNUSED,
+hook_bool_mode_const_rtx_false (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx value ATTRIBUTE_UNUSED)
{
return false;
}
-/* Generic hook that takes (enum machine_mode, const_rtx) and returns true. */
+/* Generic hook that takes (machine_mode, const_rtx) and returns true. */
bool
-hook_bool_mode_const_rtx_true (enum machine_mode mode ATTRIBUTE_UNUSED,
+hook_bool_mode_const_rtx_true (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx value ATTRIBUTE_UNUSED)
{
return true;
}
-/* Generic hook that takes (enum machine_mode, rtx) and returns false. */
+/* Generic hook that takes (machine_mode, rtx) and returns false. */
bool
-hook_bool_mode_rtx_false (enum machine_mode mode ATTRIBUTE_UNUSED,
+hook_bool_mode_rtx_false (machine_mode mode ATTRIBUTE_UNUSED,
rtx value ATTRIBUTE_UNUSED)
{
return false;
}
-/* Generic hook that takes (enum machine_mode, rtx) and returns true. */
+/* Generic hook that takes (machine_mode, rtx) and returns true. */
bool
-hook_bool_mode_rtx_true (enum machine_mode mode ATTRIBUTE_UNUSED,
+hook_bool_mode_rtx_true (machine_mode mode ATTRIBUTE_UNUSED,
rtx value ATTRIBUTE_UNUSED)
{
return true;
return true;
}
-/* Generic hook that takes (enum machine_mode, unsigned HOST_WIDE_INT)
+/* Generic hook that takes (machine_mode, unsigned HOST_WIDE_INT)
and returns false. */
bool
-hook_bool_mode_uhwi_false (enum machine_mode mode ATTRIBUTE_UNUSED,
+hook_bool_mode_uhwi_false (machine_mode mode ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT value ATTRIBUTE_UNUSED)
{
return false;
int
hook_int_uint_mode_1 (unsigned int a ATTRIBUTE_UNUSED,
- enum machine_mode b ATTRIBUTE_UNUSED)
+ machine_mode b ATTRIBUTE_UNUSED)
{
return 1;
}
}
int
-hook_int_rtx_mode_as_bool_0 (rtx, enum machine_mode, addr_space_t, bool)
+hook_int_rtx_mode_as_bool_0 (rtx, machine_mode, addr_space_t, bool)
{
return 0;
}
/* Generic hook that takes a machine mode and returns an unsigned int 0. */
unsigned int
-hook_uint_mode_0 (enum machine_mode m ATTRIBUTE_UNUSED)
+hook_uint_mode_0 (machine_mode m ATTRIBUTE_UNUSED)
{
return 0;
}
extern bool hook_bool_bool_false (bool);
extern bool hook_bool_bool_gcc_optionsp_false (bool, struct gcc_options *);
extern bool hook_bool_const_int_const_int_true (const int, const int);
-extern bool hook_bool_mode_false (enum machine_mode);
-extern bool hook_bool_mode_true (enum machine_mode);
-extern bool hook_bool_mode_const_rtx_false (enum machine_mode, const_rtx);
-extern bool hook_bool_mode_const_rtx_true (enum machine_mode, const_rtx);
-extern bool hook_bool_mode_rtx_false (enum machine_mode, rtx);
-extern bool hook_bool_mode_rtx_true (enum machine_mode, rtx);
+extern bool hook_bool_mode_false (machine_mode);
+extern bool hook_bool_mode_true (machine_mode);
+extern bool hook_bool_mode_const_rtx_false (machine_mode, const_rtx);
+extern bool hook_bool_mode_const_rtx_true (machine_mode, const_rtx);
+extern bool hook_bool_mode_rtx_false (machine_mode, rtx);
+extern bool hook_bool_mode_rtx_true (machine_mode, rtx);
extern bool hook_bool_const_rtx_insn_const_rtx_insn_true (const rtx_insn *,
const rtx_insn *);
-extern bool hook_bool_mode_uhwi_false (enum machine_mode,
+extern bool hook_bool_mode_uhwi_false (machine_mode,
unsigned HOST_WIDE_INT);
extern bool hook_bool_tree_false (tree);
extern bool hook_bool_const_tree_false (const_tree);
extern void hook_void_int_int (int, int);
extern void hook_void_gcc_optionsp (struct gcc_options *);
-extern int hook_int_uint_mode_1 (unsigned int, enum machine_mode);
+extern int hook_int_uint_mode_1 (unsigned int, machine_mode);
extern int hook_int_const_tree_0 (const_tree);
extern int hook_int_const_tree_const_tree_1 (const_tree, const_tree);
extern int hook_int_rtx_0 (rtx);
extern int hook_int_rtx_1 (rtx);
extern int hook_int_rtx_insn_unreachable (rtx_insn *);
extern int hook_int_rtx_bool_0 (rtx, bool);
-extern int hook_int_rtx_mode_as_bool_0 (rtx, enum machine_mode, addr_space_t,
+extern int hook_int_rtx_mode_as_bool_0 (rtx, machine_mode, addr_space_t,
bool);
extern tree hook_tree_const_tree_null (const_tree);
extern tree hook_tree_tree_int_treep_bool_null (tree, int, tree *, bool);
extern unsigned hook_uint_void_0 (void);
-extern unsigned int hook_uint_mode_0 (enum machine_mode);
+extern unsigned int hook_uint_mode_0 (machine_mode);
extern bool default_can_output_mi_thunk_no_vcall (const_tree, HOST_WIDE_INT,
HOST_WIDE_INT, const_tree);
if (CALL_P (insn) && prob_val >= 0)
validate_change (insn, &REG_NOTES (insn),
- gen_rtx_INT_LIST ((enum machine_mode) REG_BR_PROB,
+ gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
prob_val, REG_NOTES (insn)), 1);
insn_done:
static void
noce_emit_move_insn (rtx x, rtx y)
{
- enum machine_mode outmode;
+ machine_mode outmode;
rtx outer, inner;
int bitpos;
int reversep;
HOST_WIDE_INT itrue, ifalse, diff, tmp;
int normalize, can_reverse;
- enum machine_mode mode;
+ machine_mode mode;
if (CONST_INT_P (if_info->a)
&& CONST_INT_P (if_info->b))
&& MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
&& if_info->branch_cost >= 5)
{
- enum machine_mode address_mode = get_address_mode (a);
+ machine_mode address_mode = get_address_mode (a);
a = XEXP (a, 0);
b = XEXP (b, 0);
{
rtx cond, t, m, c;
rtx_insn *seq;
- enum machine_mode mode;
+ machine_mode mode;
enum rtx_code code;
bool t_unconditional;
{
rtx cond, x, a, result;
rtx_insn *seq;
- enum machine_mode mode;
+ machine_mode mode;
enum rtx_code code;
int bitnum;
get_multi_vector_move (tree array_type, convert_optab optab)
{
enum insn_code icode;
- enum machine_mode imode;
- enum machine_mode vmode;
+ machine_mode imode;
+ machine_mode vmode;
gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
imode = TYPE_MODE (array_type);
op0 = expand_normal (arg0);
op1 = expand_normal (arg1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
if (lhs)
target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
do_pending_stack_adjust ();
op1 = expand_normal (arg1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
if (lhs)
target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
op0 = expand_normal (arg0);
op1 = expand_normal (arg1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
if (lhs)
target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
if (icode == CODE_FOR_nothing)
{
struct separate_ops ops;
- enum machine_mode hmode
+ machine_mode hmode
= mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
ops.op0 = arg0;
ops.op1 = arg1;
if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
&& targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
{
- enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
+ machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
ops.code = WIDEN_MULT_EXPR;
ops.type
= build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);
void
ira_create_allocno_objects (ira_allocno_t a)
{
- enum machine_mode mode = ALLOCNO_MODE (a);
+ machine_mode mode = ALLOCNO_MODE (a);
enum reg_class aclass = ALLOCNO_CLASS (a);
int n = ira_reg_class_max_nregs[aclass][mode];
int i;
a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
if (outer != NULL && GET_CODE (outer) == SUBREG)
{
- enum machine_mode wmode = GET_MODE (outer);
+ machine_mode wmode = GET_MODE (outer);
if (GET_MODE_SIZE (wmode) > GET_MODE_SIZE (ALLOCNO_WMODE (a)))
ALLOCNO_WMODE (a) = wmode;
}
ira_allocno_t a;
bitmap_iterator bi;
enum reg_class aclass;
- enum machine_mode mode;
+ machine_mode mode;
allocno_color_data_t data;
/* Initial set up from allocno classes and explicitly conflicting
int divisor, bool decr_p, bool record_p)
{
int cost, update_cost;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class rclass, aclass;
ira_allocno_t another_allocno, from = NULL;
ira_copy_t cp, next_cp;
{
int j, nwords, nregs;
enum reg_class aclass;
- enum machine_mode mode;
+ machine_mode mode;
aclass = ALLOCNO_CLASS (a);
mode = ALLOCNO_MODE (a);
function prologue/epilogue if we allocate HARD_REGNO to hold value
of MODE. */
static int
-calculate_saved_nregs (int hard_regno, enum machine_mode mode)
+calculate_saved_nregs (int hard_regno, machine_mode mode)
{
int i;
int nregs = 0;
int cost, mem_cost, min_cost, full_cost, min_full_cost, nwords, word;
int *a_costs;
enum reg_class aclass;
- enum machine_mode mode;
+ machine_mode mode;
static int costs[FIRST_PSEUDO_REGISTER], full_costs[FIRST_PSEUDO_REGISTER];
int saved_nregs;
enum reg_class rclass;
calculate_allocno_spill_cost (ira_allocno_t a)
{
int regno, cost;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class rclass;
ira_allocno_t parent_allocno;
ira_loop_tree_node_t parent_node, loop_node;
int check, spill_cost, min_cost, nregs, conflict_nregs, r, best;
bool try_p;
enum reg_class aclass;
- enum machine_mode mode;
+ machine_mode mode;
int *allocno_costs;
int costs[FIRST_PSEUDO_REGISTER];
HARD_REG_SET conflicting_regs[2], profitable_hard_regs;
int cost, exit_freq, enter_freq;
unsigned int j;
bitmap_iterator bi;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class rclass, aclass, pclass;
ira_allocno_t a, subloop_allocno;
ira_loop_tree_node_t subloop_node;
int cost, regno, hard_regno, hard_regno2, index;
bool changed_p;
int enter_freq, exit_freq;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class rclass;
ira_allocno_t a, parent_allocno, subloop_allocno;
ira_loop_tree_node_t parent, loop_node, subloop_node;
update_curr_costs (ira_allocno_t a)
{
int i, hard_regno, cost;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class aclass, rclass;
ira_allocno_t another_a;
ira_copy_t cp, next_cp;
bool no_stack_reg_p;
#endif
enum reg_class aclass;
- enum machine_mode mode;
+ machine_mode mode;
ira_allocno_t a;
ira_allocno_iterator ai;
live_range_t r;
bool only_regs_p;
ira_allocno_t a;
reg_class_t rclass, aclass;
- enum machine_mode mode;
+ machine_mode mode;
ira_copy_t cp;
gcc_assert (REG_SUBREG_P (reg1) && REG_SUBREG_P (reg2));
/* Now we deal with paradoxical subreg cases where certain registers
cannot be accessed in the widest mode. */
- enum machine_mode outer_mode = ALLOCNO_WMODE (a);
- enum machine_mode inner_mode = ALLOCNO_MODE (a);
+ machine_mode outer_mode = ALLOCNO_WMODE (a);
+ machine_mode inner_mode = ALLOCNO_MODE (a);
if (GET_MODE_SIZE (outer_mode) > GET_MODE_SIZE (inner_mode))
{
enum reg_class aclass = ALLOCNO_CLASS (a);
valid for mode MODE. Both FULL and the returned class are globally
allocated. */
static cost_classes_t
-restrict_cost_classes (cost_classes_t full, enum machine_mode mode,
+restrict_cost_classes (cost_classes_t full, machine_mode mode,
			       const HARD_REG_SET &regs)
{
static struct cost_classes narrow;
calculation for such important classes is only wasting CPU
time. */
static void
-setup_regno_cost_classes_by_mode (int regno, enum machine_mode mode)
+setup_regno_cost_classes_by_mode (int regno, machine_mode mode)
{
if (const HARD_REG_SET *valid_regs = valid_mode_changes_for_regno (regno))
regno_cost_classes[regno] = restrict_cost_classes (&all_cost_classes,
TO_P is FALSE) a register of class RCLASS in mode MODE. X must not
be a pseudo register. */
static int
-copy_cost (rtx x, enum machine_mode mode, reg_class_t rclass, bool to_p,
+copy_cost (rtx x, machine_mode mode, reg_class_t rclass, bool to_p,
secondary_reload_info *prev_sri)
{
secondary_reload_info sri;
the alternatives. */
static void
record_reg_classes (int n_alts, int n_ops, rtx *ops,
- enum machine_mode *modes, const char **constraints,
+ machine_mode *modes, const char **constraints,
rtx_insn *insn, enum reg_class *pref)
{
int alt;
unsigned char c;
const char *p = constraints[i];
rtx op = ops[i];
- enum machine_mode mode = modes[i];
+ machine_mode mode = modes[i];
int allows_addr = 0;
int win = 0;
pseudo-registers should count as OK. Arguments as for
regno_ok_for_base_p. */
static inline bool
-ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode, addr_space_t as,
+ok_for_base_p_nonstrict (rtx reg, machine_mode mode, addr_space_t as,
enum rtx_code outer_code, enum rtx_code index_code)
{
unsigned regno = REGNO (reg);
SCALE is twice the amount to multiply the cost by (it is twice so
we can represent half-cost adjustments). */
static void
-record_address_regs (enum machine_mode mode, addr_space_t as, rtx x,
+record_address_regs (machine_mode mode, addr_space_t as, rtx x,
int context, enum rtx_code outer_code,
enum rtx_code index_code, int scale)
{
record_operand_costs (rtx_insn *insn, enum reg_class *pref)
{
const char *constraints[MAX_RECOG_OPERANDS];
- enum machine_mode modes[MAX_RECOG_OPERANDS];
+ machine_mode modes[MAX_RECOG_OPERANDS];
rtx ops[MAX_RECOG_OPERANDS];
rtx set;
int i;
|| ((regno = REGNO (dest)) >= FIRST_PSEUDO_REGISTER
&& (other_regno = REGNO (src)) < FIRST_PSEUDO_REGISTER)))
{
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
cost_classes_t cost_classes_ptr = regno_cost_classes[regno];
enum reg_class *cost_classes = cost_classes_ptr->classes;
reg_class_t rclass;
{
int cost;
enum reg_class hard_reg_class;
- enum machine_mode mode;
+ machine_mode mode;
mode = ALLOCNO_MODE (a);
hard_reg_class = REGNO_REG_CLASS (hard_regno);
int j, n, regno;
int cost, min_cost, *reg_costs;
enum reg_class aclass, rclass;
- enum machine_mode mode;
+ machine_mode mode;
ira_allocno_t a;
ira_allocno_iterator ai;
ira_allocno_object_iterator oi;
int to_regno, from_regno, cost, regno;
rtx_insn *result, *insn;
rtx set;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class aclass;
grow_reg_equivs ();
extern void ira_print_disposition (FILE *);
extern void ira_debug_disposition (void);
extern void ira_debug_allocno_classes (void);
-extern void ira_init_register_move_cost (enum machine_mode);
+extern void ira_init_register_move_cost (machine_mode);
extern void ira_setup_alts (rtx_insn *insn, HARD_REG_SET &alts);
extern int ira_get_dup_out_num (int op_num, HARD_REG_SET &alts);
/* Initialize register costs for MODE if necessary. */
static inline void
-ira_init_register_move_cost_if_necessary (enum machine_mode mode)
+ira_init_register_move_cost_if_necessary (machine_mode mode)
{
if (ira_register_move_cost[mode] == NULL)
ira_init_register_move_cost (mode);
starting with HARD_REGNO and containing value of MODE are in set
HARD_REGSET. */
static inline bool
-ira_hard_reg_set_intersection_p (int hard_regno, enum machine_mode mode,
+ira_hard_reg_set_intersection_p (int hard_regno, machine_mode mode,
HARD_REG_SET hard_regset)
{
int i;
HARD_REGNO and containing value of MODE are fully in set
HARD_REGSET. */
static inline bool
-ira_hard_reg_in_set_p (int hard_regno, enum machine_mode mode,
+ira_hard_reg_in_set_p (int hard_regno, machine_mode mode,
HARD_REG_SET hard_regset)
{
int i;
int i, c, regno = 0;
enum reg_class cl;
rtx op;
- enum machine_mode mode;
+ machine_mode mode;
CLEAR_HARD_REG_SET (*set);
for (i = 0; i < recog_data.n_operands; i++)
a simplification of:
(subreg:YMODE (reg:XMODE XREGNO) OFFSET). */
- enum machine_mode ymode, xmode;
+ machine_mode ymode, xmode;
int xregno, yregno;
HOST_WIDE_INT offset;
{
ira_max_memory_move_cost[mode][cl][0]
= ira_memory_move_cost[mode][cl][0]
- = memory_move_cost ((enum machine_mode) mode,
+ = memory_move_cost ((machine_mode) mode,
(reg_class_t) cl, false);
ira_max_memory_move_cost[mode][cl][1]
= ira_memory_move_cost[mode][cl][1]
- = memory_move_cost ((enum machine_mode) mode,
+ = memory_move_cost ((machine_mode) mode,
(reg_class_t) cl, true);
/* Costs for NO_REGS are used in cost calculation on the
1st pass when the preferred register classes are not
ira_prohibited_class_mode_regs[cl][m]);
if (hard_reg_set_empty_p (temp_hard_regset))
continue;
- ira_init_register_move_cost_if_necessary ((enum machine_mode) m);
+ ira_init_register_move_cost_if_necessary ((machine_mode) m);
cost = ira_register_move_cost[m][cl][cl];
if (cost <= ira_max_memory_move_cost[m][cl][1]
|| cost <= ira_max_memory_move_cost[m][cl][0])
for (m = 0; m < NUM_MACHINE_MODES; m++)
if (contains_reg_of_mode[cl][m] && contains_reg_of_mode[cl2][m])
{
- ira_init_register_move_cost_if_necessary ((enum machine_mode) m);
+ ira_init_register_move_cost_if_necessary ((machine_mode) m);
if (ira_register_move_cost[m][cl][cl]
!= ira_register_move_cost[m][cl2][cl2])
break;
for (cl = 0; cl < N_REG_CLASSES; cl++)
ira_reg_class_max_nregs[cl][m]
= ira_reg_class_min_nregs[cl][m]
- = targetm.class_max_nregs ((reg_class_t) cl, (enum machine_mode) m);
+ = targetm.class_max_nregs ((reg_class_t) cl, (machine_mode) m);
for (cl = 0; cl < N_REG_CLASSES; cl++)
for (i = 0;
(cl2 = alloc_reg_class_subclasses[cl][i]) != LIM_REG_CLASSES;
for (k = ira_class_hard_regs_num[cl] - 1; k >= 0; k--)
{
hard_regno = ira_class_hard_regs[cl][k];
- if (! HARD_REGNO_MODE_OK (hard_regno, (enum machine_mode) j))
+ if (! HARD_REGNO_MODE_OK (hard_regno, (machine_mode) j))
SET_HARD_REG_BIT (ira_prohibited_class_mode_regs[cl][j],
hard_regno);
else if (in_hard_reg_set_p (temp_hard_regset,
- (enum machine_mode) j, hard_regno))
+ (machine_mode) j, hard_regno))
{
last_hard_regno = hard_regno;
count++;
if (!TEST_HARD_REG_BIT (ira_prohibited_class_mode_regs[cl][j],
hard_regno))
add_to_hard_reg_set (&ira_useful_class_mode_regs[cl][j],
- (enum machine_mode) j, hard_regno);
+ (machine_mode) j, hard_regno);
}
}
}
/* Allocate and initialize IRA_REGISTER_MOVE_COST, IRA_MAY_MOVE_IN_COST
and IRA_MAY_MOVE_OUT_COST for MODE. */
void
-ira_init_register_move_cost (enum machine_mode mode)
+ira_init_register_move_cost (machine_mode mode)
{
static unsigned short last_move_cost[N_REG_CLASSES][N_REG_CLASSES];
bool all_match = true;
SET_HARD_REG_SET (ira_prohibited_mode_move_regs[i]);
for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
{
- if (! HARD_REGNO_MODE_OK (j, (enum machine_mode) i))
+ if (! HARD_REGNO_MODE_OK (j, (machine_mode) i))
continue;
SET_REGNO_RAW (test_reg1, j);
- PUT_MODE (test_reg1, (enum machine_mode) i);
+ PUT_MODE (test_reg1, (machine_mode) i);
SET_REGNO_RAW (test_reg2, j);
- PUT_MODE (test_reg2, (enum machine_mode) i);
+ PUT_MODE (test_reg2, (machine_mode) i);
INSN_CODE (move_insn) = -1;
recog_memoized (move_insn);
if (INSN_CODE (move_insn) < 0)
}
else if (function_invariant_p (x))
{
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (SET_DEST (set));
if (GET_CODE (x) == PLUS
df_ref def, use;
unsigned regno;
bool all_dominated, all_local;
- enum machine_mode mode;
+ machine_mode mode;
def = df_single_def (insn_info);
/* There must be exactly one def in this insn. */
extern void ira_init_once (void);
extern void ira_init (void);
extern void ira_setup_eliminable_regset (void);
-extern rtx ira_eliminate_regs (rtx, enum machine_mode);
+extern rtx ira_eliminate_regs (rtx, machine_mode);
extern void ira_set_pseudo_classes (bool, FILE *);
extern void ira_implicitly_set_insn_hard_regs (HARD_REG_SET *);
extern void ira_expand_reg_equiv (void);
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * builtins.c, java-tree.h, typeck.c: Remove redundant enum from
+ machine_mode.
+
2014-10-28 Andrew MacLeod <amacleod@redhat.com>
* class.c: Adjust include files.
compareAndSwapInt_builtin (tree method_return_type ATTRIBUTE_UNUSED,
tree orig_call)
{
- enum machine_mode mode = TYPE_MODE (int_type_node);
+ machine_mode mode = TYPE_MODE (int_type_node);
if (can_compare_and_swap_p (mode, flag_use_atomic_builtins))
{
tree addr, stmt;
compareAndSwapLong_builtin (tree method_return_type ATTRIBUTE_UNUSED,
tree orig_call)
{
- enum machine_mode mode = TYPE_MODE (long_type_node);
+ machine_mode mode = TYPE_MODE (long_type_node);
/* We don't trust flag_use_atomic_builtins for multi-word compareAndSwap.
Some machines such as ARM have atomic libfuncs but not the multi-word
versions. */
compareAndSwapObject_builtin (tree method_return_type ATTRIBUTE_UNUSED,
tree orig_call)
{
- enum machine_mode mode = TYPE_MODE (ptr_type_node);
+ machine_mode mode = TYPE_MODE (ptr_type_node);
if (can_compare_and_swap_p (mode, flag_use_atomic_builtins))
{
tree addr, stmt;
VMSupportsCS8_builtin (tree method_return_type,
tree orig_call ATTRIBUTE_UNUSED)
{
- enum machine_mode mode = TYPE_MODE (long_type_node);
+ machine_mode mode = TYPE_MODE (long_type_node);
gcc_assert (method_return_type == boolean_type_node);
if (can_compare_and_swap_p (mode, false))
return boolean_true_node;
struct eh_range;
extern void java_parse_file (void);
-extern tree java_type_for_mode (enum machine_mode, int);
+extern tree java_type_for_mode (machine_mode, int);
extern tree java_type_for_size (unsigned int, int);
extern tree java_truthvalue_conversion (tree);
extern void add_assume_compiled (const char *, int);
then UNSIGNEDP selects between signed and unsigned types. */
tree
-java_type_for_mode (enum machine_mode mode, int unsignedp)
+java_type_for_mode (machine_mode mode, int unsignedp)
{
if (mode == TYPE_MODE (int_type_node))
return unsignedp ? unsigned_int_type_node : int_type_node;
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
const_rtx arg1, const_rtx insn)
{
- enum machine_mode mode;
+ machine_mode mode;
/* If this is not actually a comparison, we can't reverse it. */
if (GET_RTX_CLASS (code) != RTX_COMPARE
/* Return comparison with reversed code of EXP.
Return NULL_RTX in case we fail to do the reversal. */
rtx
-reversed_comparison (const_rtx exp, enum machine_mode mode)
+reversed_comparison (const_rtx exp, machine_mode mode)
{
enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
if (reversed_code == UNKNOWN)
/* Given MODE and UNSIGNEDP, return a suitable type-tree with that
mode. */
- tree (*type_for_mode) (enum machine_mode, int);
+ tree (*type_for_mode) (machine_mode, int);
/* Given PRECISION and UNSIGNEDP, return a suitable type-tree for an
integer type with at least that precision. */
};
/* Information about an optab-related libfunc. The op field is logically
- an enum optab_d, and the mode fields are logically enum machine_mode.
+ an enum optab_d, and the mode fields are logically machine_mode.
However, in the absence of forward-declared enums, there's no practical
benefit of pulling in the defining headers.
PUT_REG_NOTE_KIND (r, kind);
}
else
- r = gen_rtx_EXPR_LIST ((enum machine_mode) kind, val, next);
+ r = gen_rtx_EXPR_LIST ((machine_mode) kind, val, next);
return r;
}
{
rtx_insn *seq, *jump;
rtx label;
- enum machine_mode mode;
+ machine_mode mode;
rtx op0 = XEXP (cond, 0), op1 = XEXP (cond, 1);
enum rtx_code code = GET_CODE (cond);
basic_block bb;
int nonneg = 0;
bool increment_count;
basic_block loop_end = desc->out_edge->src;
- enum machine_mode mode;
+ machine_mode mode;
rtx true_prob_val;
widest_int iterations;
static bool
doloop_optimize (struct loop *loop)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx doloop_seq, doloop_pat, doloop_reg;
rtx count;
widest_int iterations, iterations_max;
rtx expr;
/* Its mode. */
- enum machine_mode mode;
+ machine_mode mode;
/* Its hash. */
hashval_t hash;
insert INV to the table for this expression and return INV. */
static struct invariant *
-find_or_insert_inv (invariant_htab_type *eq, rtx expr, enum machine_mode mode,
+find_or_insert_inv (invariant_htab_type *eq, rtx expr, machine_mode mode,
struct invariant *inv)
{
hashval_t hash = hash_invariant_expr_1 (inv->insn, expr);
bitmap_iterator bi;
struct invariant *dep;
rtx expr, set;
- enum machine_mode mode;
+ machine_mode mode;
struct invariant *tmp;
if (inv->eqto != ~0u)
INNER_MODE) to OUTER_MODE. */
rtx
-lowpart_subreg (enum machine_mode outer_mode, rtx expr,
- enum machine_mode inner_mode)
+lowpart_subreg (machine_mode outer_mode, rtx expr,
+ machine_mode inner_mode)
{
return simplify_gen_subreg (outer_mode, expr, inner_mode,
subreg_lowpart_offset (outer_mode, inner_mode));
consistency with other iv manipulation functions that may fail). */
static bool
-iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
+iv_constant (struct rtx_iv *iv, rtx cst, machine_mode mode)
{
if (mode == VOIDmode)
mode = GET_MODE (cst);
/* Evaluates application of subreg to MODE on IV. */
static bool
-iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
+iv_subreg (struct rtx_iv *iv, machine_mode mode)
{
/* If iv is invariant, just calculate the new value. */
if (iv->step == const0_rtx
/* Evaluates application of EXTEND to MODE on IV. */
static bool
-iv_extend (struct rtx_iv *iv, enum iv_extend_code extend, enum machine_mode mode)
+iv_extend (struct rtx_iv *iv, enum iv_extend_code extend, machine_mode mode)
{
/* If iv is invariant, just calculate the new value. */
if (iv->step == const0_rtx
static bool
iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx arg;
/* Extend the constant to extend_mode of the other operand if necessary. */
static bool
iv_mult (struct rtx_iv *iv, rtx mby)
{
- enum machine_mode mode = iv->extend_mode;
+ machine_mode mode = iv->extend_mode;
if (GET_MODE (mby) != VOIDmode
&& GET_MODE (mby) != mode)
static bool
iv_shift (struct rtx_iv *iv, rtx mby)
{
- enum machine_mode mode = iv->extend_mode;
+ machine_mode mode = iv->extend_mode;
if (GET_MODE (mby) != VOIDmode
&& GET_MODE (mby) != mode)
static bool
get_biv_step_1 (df_ref def, rtx reg,
- rtx *inner_step, enum machine_mode *inner_mode,
- enum iv_extend_code *extend, enum machine_mode outer_mode,
+ rtx *inner_step, machine_mode *inner_mode,
+ enum iv_extend_code *extend, machine_mode outer_mode,
rtx *outer_step)
{
rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
if (GET_CODE (next) == SUBREG)
{
- enum machine_mode amode = GET_MODE (next);
+ machine_mode amode = GET_MODE (next);
if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
return false;
static bool
get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
- enum machine_mode *inner_mode, enum iv_extend_code *extend,
- enum machine_mode *outer_mode, rtx *outer_step)
+ machine_mode *inner_mode, enum iv_extend_code *extend,
+ machine_mode *outer_mode, rtx *outer_step)
{
*outer_mode = GET_MODE (reg);
iv_analyze_biv (rtx def, struct rtx_iv *iv)
{
rtx inner_step, outer_step;
- enum machine_mode inner_mode, outer_mode;
+ machine_mode inner_mode, outer_mode;
enum iv_extend_code extend;
df_ref last_def;
The mode of the induction variable is MODE. */
bool
-iv_analyze_expr (rtx_insn *insn, rtx rhs, enum machine_mode mode,
+iv_analyze_expr (rtx_insn *insn, rtx rhs, machine_mode mode,
struct rtx_iv *iv)
{
rtx mby = NULL_RTX, tmp;
rtx op0 = NULL_RTX, op1 = NULL_RTX;
struct rtx_iv iv0, iv1;
enum rtx_code code = GET_CODE (rhs);
- enum machine_mode omode = mode;
+ machine_mode omode = mode;
iv->mode = VOIDmode;
iv->base = NULL_RTX;
implies_p (rtx a, rtx b)
{
rtx op0, op1, opb0, opb1, r;
- enum machine_mode mode;
+ machine_mode mode;
if (rtx_equal_p (a, b))
return true;
rtx tem;
rtx op0, op1;
enum rtx_code code;
- enum machine_mode mode;
+ machine_mode mode;
code = GET_CODE (cond);
op0 = XEXP (cond, 0);
is SIGNED_P to DESC. */
static void
-shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
+shorten_into_mode (struct rtx_iv *iv, machine_mode mode,
enum rtx_code cond, bool signed_p, struct niter_desc *desc)
{
rtx mmin, mmax, cond_over, cond_under;
canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
enum rtx_code cond, struct niter_desc *desc)
{
- enum machine_mode comp_mode;
+ machine_mode comp_mode;
bool signed_p;
/* If the ivs behave specially in the first iteration, or are
struct rtx_iv iv0, iv1, tmp_iv;
rtx assumption, may_not_xform;
enum rtx_code cond;
- enum machine_mode mode, comp_mode;
+ machine_mode mode, comp_mode;
rtx mmin, mmax, mode_mmin, mode_mmax;
uint64_t s, size, d, inv, max;
int64_t up, down, inc, step_val;
{
rtx_insn *seq, *jump;
rtx cond;
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (op0);
if (mode == VOIDmode)
{
rtx expr, *loc, incr, var;
rtx_insn *seq;
- enum machine_mode mode = GET_MODE (ivts->base_var);
+ machine_mode mode = GET_MODE (ivts->base_var);
rtx src, dest, set;
/* Construct base + DELTA * step. */
rtx_insn *seq;
rtx var, zero_init;
unsigned i;
- enum machine_mode mode = GET_MODE (ve->reg);
+ machine_mode mode = GET_MODE (ve->reg);
bool honor_signed_zero_p = HONOR_SIGNED_ZEROS (mode);
if (ve->var_expansions.length () == 0)
static int
shift_cost (bool speed_p, struct cost_rtxes *rtxes, enum rtx_code code,
- enum machine_mode mode, int op1)
+ machine_mode mode, int op1)
{
PUT_CODE (rtxes->shift, code);
PUT_MODE (rtxes->shift, mode);
for (i = 0; i < MAX_MACHINE_MODE; i++)
{
- enum machine_mode mode = (enum machine_mode) i;
+ machine_mode mode = (machine_mode) i;
int factor = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
if (factor > 1)
{
{
rtx x;
rtx set;
- enum machine_mode mode;
+ machine_mode mode;
if (recog_data.n_operands != 2)
return NULL_RTX;
/* Get a SUBREG of a CONCATN. */
static rtx
-simplify_subreg_concatn (enum machine_mode outermode, rtx op,
+simplify_subreg_concatn (machine_mode outermode, rtx op,
unsigned int byte)
{
unsigned int inner_size;
- enum machine_mode innermode, partmode;
+ machine_mode innermode, partmode;
rtx part;
unsigned int final_offset;
/* Wrapper around simplify_gen_subreg which handles CONCATN. */
static rtx
-simplify_gen_subreg_concatn (enum machine_mode outermode, rtx op,
- enum machine_mode innermode, unsigned int byte)
+simplify_gen_subreg_concatn (machine_mode outermode, rtx op,
+ machine_mode innermode, unsigned int byte)
{
rtx ret;
{
rtx src, dest, real_dest;
rtx_insn *insns;
- enum machine_mode orig_mode, dest_mode;
+ machine_mode orig_mode, dest_mode;
unsigned int words;
bool pushing;
resolve_clobber (rtx pat, rtx_insn *insn)
{
rtx reg;
- enum machine_mode orig_mode;
+ machine_mode orig_mode;
unsigned int words, i;
int ret;
fprintf (dump_file, "Choices when optimizing for %s:\n", description);
for (i = 0; i < MAX_MACHINE_MODE; i++)
- if (GET_MODE_SIZE ((enum machine_mode) i) > UNITS_PER_WORD)
+ if (GET_MODE_SIZE ((machine_mode) i) > UNITS_PER_WORD)
fprintf (dump_file, " %s mode %s for copy lowering.\n",
choices[speed_p].move_modes_to_split[i]
? "Splitting"
: "Skipping",
- GET_MODE_NAME ((enum machine_mode) i));
+ GET_MODE_NAME ((machine_mode) i));
fprintf (dump_file, " %s mode %s for zero_extend lowering.\n",
choices[speed_p].splitting_zext ? "Splitting" : "Skipping",
for (i = FIRST_PSEUDO_REGISTER; i < max; ++i)
if (regno_reg_rtx[i] != NULL)
{
- enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
+ machine_mode mode = GET_MODE (regno_reg_rtx[i]);
if (choices[false].move_modes_to_split[(int) mode]
|| choices[true].move_modes_to_split[(int) mode])
{
/* Target-specific information for the subreg lowering pass. */
struct target_lower_subreg {
/* An integer mode that is twice as wide as word_mode. */
- enum machine_mode x_twice_word_mode;
+ machine_mode x_twice_word_mode;
/* What we have decided to do when optimizing for size (index 0)
and speed (index 1). */
lra_live_range_t r;
int p, i, j, rclass_size, best_hard_regno, priority, hard_regno;
int hr, conflict_hr, nregs;
- enum machine_mode biggest_mode;
+ machine_mode biggest_mode;
unsigned int k, conflict_regno;
int offset, val, biggest_nregs, nregs_diff;
enum reg_class rclass;
setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
{
int i, hard_regno;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int spill_regno;
bitmap_iterator bi;
{
int i, j, n, p, hard_regno, best_hard_regno, cost, best_cost, rclass_size;
int reload_hard_regno, reload_cost;
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class rclass;
unsigned int spill_regno, reload_regno, uid;
int insn_pseudos_num, best_insn_pseudos_num;
unsigned int k, conflict_regno;
int val, offset;
HARD_REG_SET conflict_set;
- enum machine_mode mode;
+ machine_mode mode;
lra_live_range_t r;
bitmap_iterator bi;
int max_regno = max_reg_num ();
static basic_block curr_bb;
static lra_insn_recog_data_t curr_id;
static struct lra_static_insn_data *curr_static_id;
-static enum machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
+static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
\f
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
{
enum reg_class rclass, common_class;
- enum machine_mode reg_mode;
+ machine_mode reg_mode;
int class_size, hard_regno, nregs, i, j;
int regno = REGNO (reg);
space AS, and check that each pseudo has the proper kind of hard
reg. */
static int
-valid_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
int nop = curr_static_id->n_operands;
for (int i = 0; i < nop; i++)
{
- enum machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
+ machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
if (mode == VOIDmode)
{
/* The .md mode for address operands is the mode of the
reused the already created input reload pseudo. Use TITLE to
describe new registers for debug purposes. */
static bool
-get_reload_reg (enum op_type type, enum machine_mode mode, rtx original,
+get_reload_reg (enum op_type type, machine_mode mode, rtx original,
enum reg_class rclass, bool in_subreg_p,
const char *title, rtx *result_reg)
{
/* A version of regno_ok_for_base_p for use here, when all pseudos
should count as OK. Arguments as for regno_ok_for_base_p. */
static inline bool
-ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode, addr_space_t as,
+ok_for_base_p_nonstrict (rtx reg, machine_mode mode, addr_space_t as,
enum rtx_code outer_code, enum rtx_code index_code)
{
unsigned regno = REGNO (reg);
REGNO1 + lra_constraint_offset (REGNO1, MODE1)
== REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
int
-lra_constraint_offset (int regno, enum machine_mode mode)
+lra_constraint_offset (int regno, machine_mode mode)
{
lra_assert (regno < FIRST_PSEUDO_REGISTER);
if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (mode) > UNITS_PER_WORD
{
int i, in;
rtx new_in_reg, new_out_reg, reg, clobber;
- enum machine_mode inmode, outmode;
+ machine_mode inmode, outmode;
rtx in_rtx = *curr_id->operand_loc[ins[0]];
rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];
rtx dest, src, dreg, sreg, old_sreg, new_reg, scratch_reg;
rtx_insn *before;
enum reg_class dclass, sclass, secondary_class;
- enum machine_mode sreg_mode;
+ machine_mode sreg_mode;
secondary_reload_info sri;
lra_assert (curr_insn_set != NULL_RTX);
enum reg_class rclass, new_class;
rtx reg;
rtx new_reg;
- enum machine_mode mode;
+ machine_mode mode;
bool subreg_p, before_p = false;
subreg_p = GET_CODE (*loc) == SUBREG;
}
}
-static int valid_address_p (enum machine_mode mode, rtx addr, addr_space_t as);
+static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
/* Make reloads for subreg in operand NOP with internal subreg mode
REG_MODE, add new reloads for further processing. Return true if
any reload was generated. */
static bool
-simplify_operand_subreg (int nop, enum machine_mode reg_mode)
+simplify_operand_subreg (int nop, machine_mode reg_mode)
{
int hard_regno;
rtx_insn *before, *after;
- enum machine_mode mode;
+ machine_mode mode;
rtx reg, new_reg;
rtx operand = *curr_id->operand_loc[nop];
enum reg_class regclass;
uses_hard_regs_p (rtx x, HARD_REG_SET set)
{
int i, j, x_hard_regno;
- enum machine_mode mode;
+ machine_mode mode;
const char *fmt;
enum rtx_code code;
otherwise NULL. */
rtx operand_reg[MAX_RECOG_OPERANDS];
int hard_regno[MAX_RECOG_OPERANDS];
- enum machine_mode biggest_mode[MAX_RECOG_OPERANDS];
+ machine_mode biggest_mode[MAX_RECOG_OPERANDS];
int reload_nregs, reload_sum;
bool costly_p;
enum reg_class cl;
bool this_alternative_match_win, this_alternative_win;
bool this_alternative_offmemok;
bool scratch_p;
- enum machine_mode mode;
+ machine_mode mode;
enum constraint_num cn;
opalt_num = nalt * n_operands + nop;
static inline void
swap_operands (int nop)
{
- enum machine_mode mode = curr_operand_mode[nop];
+ machine_mode mode = curr_operand_mode[nop];
curr_operand_mode[nop] = curr_operand_mode[nop + 1];
curr_operand_mode[nop + 1] = mode;
rtx x = *curr_id->operand_loc[nop];
if (use_sec_mem_p)
{
rtx new_reg, src, dest, rld;
- enum machine_mode sec_mode, rld_mode;
+ machine_mode sec_mode, rld_mode;
lra_assert (sec_mem_p);
lra_assert (curr_static_id->operand[0].type == OP_OUT
char c;
rtx op = *curr_id->operand_loc[i];
rtx subreg = NULL_RTX;
- enum machine_mode mode = curr_operand_mode[i];
+ machine_mode mode = curr_operand_mode[i];
if (GET_CODE (op) == SUBREG)
{
}
else if (goal_alt_matched[i][0] == -1)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx reg, *loc;
int hard_regno, byte;
enum op_type type = curr_static_id->operand[i].type;
code = GET_CODE (x);
if (code == REG && (int) REGNO (x) == old_regno)
{
- enum machine_mode mode = GET_MODE (*loc);
- enum machine_mode inner_mode = GET_MODE (new_reg);
+ machine_mode mode = GET_MODE (*loc);
+ machine_mode inner_mode = GET_MODE (new_reg);
if (mode != inner_mode)
{
static enum reg_class
choose_split_class (enum reg_class allocno_class,
int hard_regno ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
#ifndef SECONDARY_MEMORY_NEEDED
return allocno_class;
" ((((((((((((((((((((((((((((((((((((((((((((((((\n");
if (call_save_p)
{
- enum machine_mode mode = GET_MODE (original_reg);
+ machine_mode mode = GET_MODE (original_reg);
mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
hard_regno_nregs[hard_regno][mode],
uid before starting INSN processing. Return true if we succeed in
such transformation. */
static bool
-split_if_necessary (int regno, enum machine_mode mode,
+split_if_necessary (int regno, machine_mode mode,
HARD_REG_SET potential_reload_hard_regs,
bool before_p, rtx_insn *insn, int max_uid)
{
form_sum (rtx x, rtx y)
{
rtx tem;
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
if (mode == VOIDmode)
mode = GET_MODE (y);
If we make full substitution to SP for non-null INSN, add the insn
sp offset. */
rtx
-lra_eliminate_regs_1 (rtx_insn *insn, rtx x, enum machine_mode mem_mode,
+lra_eliminate_regs_1 (rtx_insn *insn, rtx x, machine_mode mem_mode,
bool subst_p, bool update_p, bool full_p)
{
enum rtx_code code = GET_CODE (x);
/* This function is used externally in subsequent passes of GCC. It
always does a full elimination of X. */
rtx
-lra_eliminate_regs (rtx x, enum machine_mode mem_mode,
+lra_eliminate_regs (rtx x, machine_mode mem_mode,
rtx insn ATTRIBUTE_UNUSED)
{
return lra_eliminate_regs_1 (NULL, x, mem_mode, true, false, true);
MEM_MODE is the mode of an enclosing MEM rtx, or VOIDmode if not
within a MEM. */
static void
-mark_not_eliminable (rtx x, enum machine_mode mem_mode)
+mark_not_eliminable (rtx x, machine_mode mem_mode)
{
enum rtx_code code = GET_CODE (x);
struct lra_elim_table *ep;
lra-coalesce.c. */
/* The biggest size mode in which each pseudo reg is referred in
whole function (possibly via subreg). */
- enum machine_mode biggest_mode;
+ machine_mode biggest_mode;
/* Live ranges of the pseudo. */
lra_live_range_t live_ranges;
/* This member is set up in lra-lives.c for subsequent
extern rtx_insn *lra_pop_insn (void);
extern unsigned int lra_insn_stack_length (void);
-extern rtx lra_create_new_reg_with_unique_value (enum machine_mode, rtx,
+extern rtx lra_create_new_reg_with_unique_value (machine_mode, rtx,
enum reg_class, const char *);
extern void lra_set_regno_unique_value (int);
extern void lra_invalidate_insn_data (rtx_insn *);
/* lra-constraints.c: */
extern void lra_init_equiv (void);
-extern int lra_constraint_offset (int, enum machine_mode);
+extern int lra_constraint_offset (int, machine_mode);
extern int lra_constraint_iter;
extern bool lra_risky_transformations_p;
extern void lra_debug_elim_table (void);
extern int lra_get_elimination_hard_regno (int);
-extern rtx lra_eliminate_regs_1 (rtx_insn *, rtx, enum machine_mode, bool,
+extern rtx lra_eliminate_regs_1 (rtx_insn *, rtx, machine_mode, bool,
bool, bool);
extern void lra_eliminate (bool, bool);
Return TRUE if the liveness tracking sets were modified,
or FALSE if nothing changed. */
static bool
-mark_regno_live (int regno, enum machine_mode mode, int point)
+mark_regno_live (int regno, machine_mode mode, int point)
{
int last;
bool changed = false;
Return TRUE if the liveness tracking sets were modified,
or FALSE if nothing changed. */
static bool
-mark_regno_dead (int regno, enum machine_mode mode, int point)
+mark_regno_dead (int regno, machine_mode mode, int point)
{
int last;
bool changed = false;
assign_mem_slot (int i)
{
rtx x = NULL_RTX;
- enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
+ machine_mode mode = GET_MODE (regno_reg_rtx[i]);
unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
unsigned int max_ref_width = GET_MODE_SIZE (lra_reg_info[i].biggest_mode);
{
int i, k, p, regno, res, spill_class_size, hard_regno, nr;
enum reg_class rclass, spill_class;
- enum machine_mode mode;
+ machine_mode mode;
lra_live_range_t r;
rtx_insn *insn;
rtx set;
attributes of ORIGINAL if it is a register. The created register
will have unique held value. */
rtx
-lra_create_new_reg_with_unique_value (enum machine_mode md_mode, rtx original,
+lra_create_new_reg_with_unique_value (machine_mode md_mode, rtx original,
enum reg_class rclass, const char *title)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx new_reg;
if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
/* Analogous to the previous function but also inherits value of
ORIGINAL. */
rtx
-lra_create_new_reg (enum machine_mode md_mode, rtx original,
+lra_create_new_reg (machine_mode md_mode, rtx original,
enum reg_class rclass, const char *title)
{
rtx new_reg;
info (NEXT). */
static struct lra_insn_reg *
new_insn_reg (rtx_insn *insn, int regno, enum op_type type,
- enum machine_mode mode,
+ machine_mode mode,
bool subreg_p, bool early_clobber, struct lra_insn_reg *next)
{
struct lra_insn_reg *ir;
{
int i, j, regno, last;
bool subreg_p;
- enum machine_mode mode;
+ machine_mode mode;
struct lra_insn_reg *curr;
rtx op = *x;
enum rtx_code code = GET_CODE (op);
if (icode < 0)
{
int nop, nalt;
- enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
+ machine_mode operand_mode[MAX_RECOG_OPERANDS];
const char *constraints[MAX_RECOG_OPERANDS];
nop = asm_noperands (PATTERN (insn));
if (data->icode < 0)
{
int nop;
- enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
+ machine_mode operand_mode[MAX_RECOG_OPERANDS];
const char *constraints[MAX_RECOG_OPERANDS];
nop = asm_noperands (PATTERN (insn));
{
int i, j, regno;
bool subreg_p;
- enum machine_mode mode;
+ machine_mode mode;
const char *fmt;
enum rtx_code code;
struct lra_insn_reg *curr;
for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
if (targetm.spill_class ((enum reg_class) cl,
- (enum machine_mode) mode) != NO_REGS)
+ (machine_mode) mode) != NO_REGS)
{
lra_reg_spill_p = true;
return;
return reg_allocno_class (regno);
}
-extern rtx lra_create_new_reg (enum machine_mode, rtx, enum reg_class,
+extern rtx lra_create_new_reg (machine_mode, rtx, enum reg_class,
const char *);
-extern rtx lra_eliminate_regs (rtx, enum machine_mode, rtx);
+extern rtx lra_eliminate_regs (rtx, machine_mode, rtx);
extern void lra (FILE *);
extern void lra_init_once (void);
extern void lra_finish_once (void);
+2014-10-29 Richard Sandiford <richard.sandiford@arm.com>
+
+ * lto-lang.c: Remove redundant enum from machine_mode.
+
2014-10-28 Andrew MacLeod <amacleod@redhat.com>
* lto.c: Adjust include files.
then UNSIGNEDP selects between saturating and nonsaturating types. */
static tree
-lto_type_for_mode (enum machine_mode mode, int unsigned_p)
+lto_type_for_mode (machine_mode mode, int unsigned_p)
{
tree t;
if (COMPLEX_MODE_P (mode))
{
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
tree inner_type;
if (mode == TYPE_MODE (complex_float_type_node))
}
else if (VECTOR_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE_INNER (mode);
+ machine_mode inner_mode = GET_MODE_INNER (mode);
tree inner_type = lto_type_for_mode (inner_mode, unsigned_p);
if (inner_type != NULL_TREE)
return build_vector_type_for_mode (inner_type, mode);
extern const unsigned char mode_inner[NUM_MACHINE_MODES];
#if GCC_VERSION >= 4001
#define GET_MODE_INNER(MODE) \
- ((enum machine_mode) (__builtin_constant_p (MODE) \
+ ((machine_mode) (__builtin_constant_p (MODE) \
? mode_inner_inline (MODE) : mode_inner[MODE]))
#else
-#define GET_MODE_INNER(MODE) ((enum machine_mode) mode_inner[MODE])
+#define GET_MODE_INNER(MODE) ((machine_mode) mode_inner[MODE])
#endif
/* Get the size in bytes or bites of the basic parts of an
/* Get the next wider natural mode (eg, QI -> HI -> SI -> DI -> TI). */
extern const unsigned char mode_wider[NUM_MACHINE_MODES];
-#define GET_MODE_WIDER_MODE(MODE) ((enum machine_mode) mode_wider[MODE])
+#define GET_MODE_WIDER_MODE(MODE) ((machine_mode) mode_wider[MODE])
/* For scalars, this is a mode with twice the precision. For vectors,
this is a mode with the same inner mode but with twice the elements. */
extern const unsigned char mode_2xwider[NUM_MACHINE_MODES];
-#define GET_MODE_2XWIDER_MODE(MODE) ((enum machine_mode) mode_2xwider[MODE])
+#define GET_MODE_2XWIDER_MODE(MODE) ((machine_mode) mode_2xwider[MODE])
/* Return the mode for data of a given size SIZE and mode class CLASS.
If LIMIT is nonzero, then don't use modes bigger than MAX_FIXED_MODE_SIZE.
The value is BLKmode if no other mode is found. */
-extern enum machine_mode mode_for_size (unsigned int, enum mode_class, int);
+extern machine_mode mode_for_size (unsigned int, enum mode_class, int);
/* Similar, but find the smallest mode for a given width. */
-extern enum machine_mode smallest_mode_for_size (unsigned int,
+extern machine_mode smallest_mode_for_size (unsigned int,
enum mode_class);
/* Return an integer mode of the exact same size as the input mode,
or BLKmode on failure. */
-extern enum machine_mode int_mode_for_mode (enum machine_mode);
+extern machine_mode int_mode_for_mode (machine_mode);
-extern enum machine_mode bitwise_mode_for_mode (enum machine_mode);
+extern machine_mode bitwise_mode_for_mode (machine_mode);
/* Return a mode that is suitable for representing a vector,
or BLKmode on failure. */
-extern enum machine_mode mode_for_vector (enum machine_mode, unsigned);
+extern machine_mode mode_for_vector (machine_mode, unsigned);
/* A class for iterating through possible bitfield modes. */
class bit_field_mode_iterator
bit_field_mode_iterator (HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, HOST_WIDE_INT,
unsigned int, bool);
- bool next_mode (enum machine_mode *);
+ bool next_mode (machine_mode *);
bool prefer_smaller_modes ();
private:
- enum machine_mode m_mode;
+ machine_mode m_mode;
/* We use signed values here because the bit position can be negative
for invalid input such as gcc.dg/pr48335-8.c. */
HOST_WIDE_INT m_bitsize;
/* Find the best mode to use to access a bit field. */
-extern enum machine_mode get_best_mode (int, int,
+extern machine_mode get_best_mode (int, int,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
unsigned int,
- enum machine_mode, bool);
+ machine_mode, bool);
/* Determine alignment, 1<=result<=BIGGEST_ALIGNMENT. */
extern CONST_MODE_BASE_ALIGN unsigned char mode_base_align[NUM_MACHINE_MODES];
-extern unsigned get_mode_alignment (enum machine_mode);
+extern unsigned get_mode_alignment (machine_mode);
#define GET_MODE_ALIGNMENT(MODE) get_mode_alignment (MODE)
/* Get the precision of the mode or its inner mode if it has one. */
-extern unsigned int element_precision (enum machine_mode);
+extern unsigned int element_precision (machine_mode);
/* For each class, get the narrowest mode in that class. */
extern const unsigned char class_narrowest_mode[MAX_MODE_CLASS];
#define GET_CLASS_NARROWEST_MODE(CLASS) \
- ((enum machine_mode) class_narrowest_mode[CLASS])
+ ((machine_mode) class_narrowest_mode[CLASS])
/* Define the integer modes whose sizes are BITS_PER_UNIT and BITS_PER_WORD
and the mode whose class is Pmode and whose size is POINTER_SIZE. */
-extern enum machine_mode byte_mode;
-extern enum machine_mode word_mode;
-extern enum machine_mode ptr_mode;
+extern machine_mode byte_mode;
+extern machine_mode word_mode;
+extern machine_mode ptr_mode;
/* Target-dependent machine mode initialization - in insn-modes.c. */
extern void init_adjust_machine_modes (void);
typedef struct {
/* These parts are initailized by genmodes output */
unsigned int bitsize;
- enum machine_mode m;
+ machine_mode m;
/* RID_* is RID_INTN_BASE + index into this array */
} int_n_data_t;
/* Otherwise return implementation defined alignment. */
unsigned int al = 1;
- enum machine_mode mode, vmode;
+ machine_mode mode, vmode;
int vs = targetm.vectorize.autovectorize_vector_sizes ();
if (vs)
vs = 1 << floor_log2 (vs);
vs = 1 << floor_log2 (vs);
return vs;
}
- enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
+ machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
return GET_MODE_NUNITS (vqimode);
return 1;
location_t loc;
gimple stmt;
tree decl, call, type, itype;
- enum machine_mode imode;
+ machine_mode imode;
bool exchange;
gsi = gsi_last_bb (load_bb);
location_t loc;
enum tree_code code;
bool need_old, need_new;
- enum machine_mode imode;
+ machine_mode imode;
bool seq_cst;
/* We expect to find the following sequences:
(this_target_libfuncs->x_libfunc_hash)
static void prepare_float_lib_cmp (rtx, rtx, enum rtx_code, rtx *,
- enum machine_mode *);
-static rtx expand_unop_direct (enum machine_mode, optab, rtx, rtx, int);
+ machine_mode *);
+static rtx expand_unop_direct (machine_mode, optab, rtx, rtx, int);
static void emit_libcall_block_1 (rtx_insn *, rtx, rtx, rtx, bool);
/* Debug facility for use in GDB. */
from MODE2 to MODE1. Trigger lazy initialization if needed, return NULL
if no libfunc is available. */
rtx
-convert_optab_libfunc (convert_optab optab, enum machine_mode mode1,
- enum machine_mode mode2)
+convert_optab_libfunc (convert_optab optab, machine_mode mode1,
+ machine_mode mode2)
{
struct libfunc_entry e;
struct libfunc_entry **slot;
Trigger lazy initialization if needed, return NULL if no libfunc is
available. */
rtx
-optab_libfunc (optab optab, enum machine_mode mode)
+optab_libfunc (optab optab, machine_mode mode)
{
struct libfunc_entry e;
struct libfunc_entry **slot;
for a widening operation would be. In most cases this would be OP0, but if
that's a constant it'll be VOIDmode, which isn't useful. */
-static enum machine_mode
-widened_mode (enum machine_mode to_mode, rtx op0, rtx op1)
+static machine_mode
+widened_mode (machine_mode to_mode, rtx op0, rtx op1)
{
- enum machine_mode m0 = GET_MODE (op0);
- enum machine_mode m1 = GET_MODE (op1);
- enum machine_mode result;
+ machine_mode m0 = GET_MODE (op0);
+ machine_mode m1 = GET_MODE (op1);
+ machine_mode result;
if (m0 == VOIDmode && m1 == VOIDmode)
return to_mode;
TO_MODE and a FROM_MODE. */
enum insn_code
-widening_optab_handler (optab op, enum machine_mode to_mode,
- enum machine_mode from_mode)
+widening_optab_handler (optab op, machine_mode to_mode,
+ machine_mode from_mode)
{
unsigned scode = (op << 16) | to_mode;
if (to_mode != from_mode && from_mode != VOIDmode)
non-widening optabs also. */
enum insn_code
-find_widening_optab_handler_and_mode (optab op, enum machine_mode to_mode,
- enum machine_mode from_mode,
+find_widening_optab_handler_and_mode (optab op, machine_mode to_mode,
+ machine_mode from_mode,
int permit_non_widening,
- enum machine_mode *found_mode)
+ machine_mode *found_mode)
{
for (; (permit_non_widening || from_mode != to_mode)
&& GET_MODE_SIZE (from_mode) <= GET_MODE_SIZE (to_mode)
of logical operations, but not right shifts. */
static rtx
-widen_operand (rtx op, enum machine_mode mode, enum machine_mode oldmode,
+widen_operand (rtx op, machine_mode mode, machine_mode oldmode,
int unsignedp, int no_extend)
{
rtx result;
{
struct expand_operand eops[4];
tree oprnd0, oprnd1, oprnd2;
- enum machine_mode wmode = VOIDmode, tmode0, tmode1 = VOIDmode;
+ machine_mode wmode = VOIDmode, tmode0, tmode1 = VOIDmode;
optab widen_pattern_optab;
enum insn_code icode;
int nops = TREE_CODE_LENGTH (ops->code);
this may or may not be TARGET. */
rtx
-expand_ternary_op (enum machine_mode mode, optab ternary_optab, rtx op0,
+expand_ternary_op (machine_mode mode, optab ternary_optab, rtx op0,
rtx op1, rtx op2, rtx target, int unsignedp)
{
struct expand_operand ops[4];
otherwise the same as for expand_binop. */
rtx
-simplify_expand_binop (enum machine_mode mode, optab binoptab,
+simplify_expand_binop (machine_mode mode, optab binoptab,
rtx op0, rtx op1, rtx target, int unsignedp,
enum optab_methods methods)
{
Return true if the expansion succeeded. */
bool
-force_expand_binop (enum machine_mode mode, optab binoptab,
+force_expand_binop (machine_mode mode, optab binoptab,
rtx op0, rtx op1, rtx target, int unsignedp,
enum optab_methods methods)
{
struct expand_operand eops[3];
enum insn_code icode;
rtx rtx_op1, rtx_op2;
- enum machine_mode mode = TYPE_MODE (ops->type);
+ machine_mode mode = TYPE_MODE (ops->type);
tree vec_oprnd = ops->op0;
tree shift_oprnd = ops->op1;
then the return value will be a constant. */
static rtx
-expand_vector_broadcast (enum machine_mode vmode, rtx op)
+expand_vector_broadcast (machine_mode vmode, rtx op)
{
enum insn_code icode;
rtvec vec;
value are the same as for the parent routine. */
static bool
-expand_subword_shift (enum machine_mode op1_mode, optab binoptab,
+expand_subword_shift (machine_mode op1_mode, optab binoptab,
rtx outof_input, rtx into_input, rtx op1,
rtx outof_target, rtx into_target,
int unsignedp, enum optab_methods methods,
arguments are the same as the parent routine. */
static bool
-expand_doubleword_shift_condmove (enum machine_mode op1_mode, optab binoptab,
+expand_doubleword_shift_condmove (machine_mode op1_mode, optab binoptab,
enum rtx_code cmp_code, rtx cmp1, rtx cmp2,
rtx outof_input, rtx into_input,
rtx subword_op1, rtx superword_op1,
Return true if the shift could be successfully synthesized. */
static bool
-expand_doubleword_shift (enum machine_mode op1_mode, optab binoptab,
+expand_doubleword_shift (machine_mode op1_mode, optab binoptab,
rtx outof_input, rtx into_input, rtx op1,
rtx outof_target, rtx into_target,
int unsignedp, enum optab_methods methods,
the 0 or -1. */
static rtx
-expand_doubleword_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
+expand_doubleword_mult (machine_mode mode, rtx op0, rtx op1, rtx target,
bool umulp, enum optab_methods methods)
{
int low = (WORDS_BIG_ENDIAN ? 1 : 0);
the operation to perform, not an optab pointer. All other
arguments are the same. */
rtx
-expand_simple_binop (enum machine_mode mode, enum rtx_code code, rtx op0,
+expand_simple_binop (machine_mode mode, enum rtx_code code, rtx op0,
rtx op1, rtx target, int unsignedp,
enum optab_methods methods)
{
register. Return X otherwise. UNSIGNEDP says whether X is unsigned. */
static rtx
-avoid_expensive_constant (enum machine_mode mode, optab binoptab,
+avoid_expensive_constant (machine_mode mode, optab binoptab,
int opn, rtx x, bool unsignedp)
{
bool speed = optimize_insn_for_speed_p ();
is an insn that directly implements the indicated operation.
Returns null if this is not possible. */
static rtx
-expand_binop_directly (enum machine_mode mode, optab binoptab,
+expand_binop_directly (machine_mode mode, optab binoptab,
rtx op0, rtx op1,
rtx target, int unsignedp, enum optab_methods methods,
rtx_insn *last)
{
- enum machine_mode from_mode = widened_mode (mode, op0, op1);
+ machine_mode from_mode = widened_mode (mode, op0, op1);
enum insn_code icode = find_widening_optab_handler (binoptab, mode,
from_mode, 1);
- enum machine_mode xmode0 = insn_data[(int) icode].operand[1].mode;
- enum machine_mode xmode1 = insn_data[(int) icode].operand[2].mode;
- enum machine_mode mode0, mode1, tmp_mode;
+ machine_mode xmode0 = insn_data[(int) icode].operand[1].mode;
+ machine_mode xmode1 = insn_data[(int) icode].operand[2].mode;
+ machine_mode mode0, mode1, tmp_mode;
struct expand_operand ops[3];
bool commutative_p;
rtx pat;
this may or may not be TARGET. */
rtx
-expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
+expand_binop (machine_mode mode, optab binoptab, rtx op0, rtx op1,
rtx target, int unsignedp, enum optab_methods methods)
{
enum optab_methods next_methods
= (methods == OPTAB_LIB || methods == OPTAB_LIB_WIDEN
? OPTAB_WIDEN : methods);
enum mode_class mclass;
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
rtx libfunc;
rtx temp;
rtx_insn *entry_last = get_last_insn ();
&& optab_handler (lshr_optab, word_mode) != CODE_FOR_nothing)
{
unsigned HOST_WIDE_INT shift_mask, double_shift_mask;
- enum machine_mode op1_mode;
+ machine_mode op1_mode;
double_shift_mask = targetm.shift_truncation_mask (mode);
shift_mask = targetm.shift_truncation_mask (word_mode);
{
rtx_insn *insns;
rtx op1x = op1;
- enum machine_mode op1_mode = mode;
+ machine_mode op1_mode = mode;
rtx value;
start_sequence ();
of an unsigned wider operation, since the result would be the same. */
rtx
-sign_expand_binop (enum machine_mode mode, optab uoptab, optab soptab,
+sign_expand_binop (machine_mode mode, optab uoptab, optab soptab,
rtx op0, rtx op1, rtx target, int unsignedp,
enum optab_methods methods)
{
expand_twoval_unop (optab unoptab, rtx op0, rtx targ0, rtx targ1,
int unsignedp)
{
- enum machine_mode mode = GET_MODE (targ0 ? targ0 : targ1);
+ machine_mode mode = GET_MODE (targ0 ? targ0 : targ1);
enum mode_class mclass;
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
rtx_insn *entry_last = get_last_insn ();
rtx_insn *last;
expand_twoval_binop (optab binoptab, rtx op0, rtx op1, rtx targ0, rtx targ1,
int unsignedp)
{
- enum machine_mode mode = GET_MODE (targ0 ? targ0 : targ1);
+ machine_mode mode = GET_MODE (targ0 ? targ0 : targ1);
enum mode_class mclass;
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
rtx_insn *entry_last = get_last_insn ();
rtx_insn *last;
{
struct expand_operand ops[4];
enum insn_code icode = optab_handler (binoptab, mode);
- enum machine_mode mode0 = insn_data[icode].operand[1].mode;
- enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ machine_mode mode0 = insn_data[icode].operand[1].mode;
+ machine_mode mode1 = insn_data[icode].operand[2].mode;
rtx xop0 = op0, xop1 = op1;
/* If we are optimizing, force expensive constants into a register. */
expand_twoval_binop_libfunc (optab binoptab, rtx op0, rtx op1,
rtx targ0, rtx targ1, enum rtx_code code)
{
- enum machine_mode mode;
- enum machine_mode libval_mode;
+ machine_mode mode;
+ machine_mode libval_mode;
rtx libval;
rtx_insn *insns;
rtx libfunc;
the operation to perform, not an optab pointer. All other
arguments are the same. */
rtx
-expand_simple_unop (enum machine_mode mode, enum rtx_code code, rtx op0,
+expand_simple_unop (machine_mode mode, enum rtx_code code, rtx op0,
rtx target, int unsignedp)
{
optab unop = code_to_optab (code);
A similar operation can be used for clrsb. UNOPTAB says which operation
we are trying to expand. */
static rtx
-widen_leading (enum machine_mode mode, rtx op0, rtx target, optab unoptab)
+widen_leading (machine_mode mode, rtx op0, rtx target, optab unoptab)
{
enum mode_class mclass = GET_MODE_CLASS (mode);
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
for (wider_mode = GET_MODE_WIDER_MODE (mode);
wider_mode != VOIDmode;
wider_mode = GET_MODE_WIDER_MODE (wider_mode))
/* Try calculating clz of a double-word quantity as two clz's of word-sized
quantities, choosing which based on whether the high word is nonzero. */
static rtx
-expand_doubleword_clz (enum machine_mode mode, rtx op0, rtx target)
+expand_doubleword_clz (machine_mode mode, rtx op0, rtx target)
{
rtx xop0 = force_reg (mode, op0);
rtx subhi = gen_highpart (word_mode, xop0);
as
(lshiftrt:wide (bswap:wide x) ((width wide) - (width narrow))). */
static rtx
-widen_bswap (enum machine_mode mode, rtx op0, rtx target)
+widen_bswap (machine_mode mode, rtx op0, rtx target)
{
enum mode_class mclass = GET_MODE_CLASS (mode);
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
rtx x;
rtx_insn *last;
/* Try calculating bswap as two bswaps of two word-sized operands. */
static rtx
-expand_doubleword_bswap (enum machine_mode mode, rtx op, rtx target)
+expand_doubleword_bswap (machine_mode mode, rtx op, rtx target)
{
rtx t0, t1;
/* Try calculating (parity x) as (and (popcount x) 1), where
popcount can also be done in a wider mode. */
static rtx
-expand_parity (enum machine_mode mode, rtx op0, rtx target)
+expand_parity (machine_mode mode, rtx op0, rtx target)
{
enum mode_class mclass = GET_MODE_CLASS (mode);
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
for (wider_mode = mode; wider_mode != VOIDmode;
wider_mode = GET_MODE_WIDER_MODE (wider_mode))
{
less convenient for expand_ffs anyway. */
static rtx
-expand_ctz (enum machine_mode mode, rtx op0, rtx target)
+expand_ctz (machine_mode mode, rtx op0, rtx target)
{
rtx_insn *seq;
rtx temp;
may have an undefined value in that case. If they do not give us a
convenient value, we have to generate a test and branch. */
static rtx
-expand_ffs (enum machine_mode mode, rtx op0, rtx target)
+expand_ffs (machine_mode mode, rtx op0, rtx target)
{
HOST_WIDE_INT val = 0;
bool defined_at_zero = false;
register will work around the situation. */
static rtx
-lowpart_subreg_maybe_copy (enum machine_mode omode, rtx val,
- enum machine_mode imode)
+lowpart_subreg_maybe_copy (machine_mode omode, rtx val,
+ machine_mode imode)
{
rtx ret;
ret = lowpart_subreg (omode, val, imode);
logical operation on the sign bit. */
static rtx
-expand_absneg_bit (enum rtx_code code, enum machine_mode mode,
+expand_absneg_bit (enum rtx_code code, machine_mode mode,
rtx op0, rtx target)
{
const struct real_format *fmt;
int bitpos, word, nwords, i;
- enum machine_mode imode;
+ machine_mode imode;
rtx temp;
rtx_insn *insns;
/* As expand_unop, but will fail rather than attempt the operation in a
different mode or with a libcall. */
static rtx
-expand_unop_direct (enum machine_mode mode, optab unoptab, rtx op0, rtx target,
+expand_unop_direct (machine_mode mode, optab unoptab, rtx op0, rtx target,
int unsignedp)
{
if (optab_handler (unoptab, mode) != CODE_FOR_nothing)
this may or may not be TARGET. */
rtx
-expand_unop (enum machine_mode mode, optab unoptab, rtx op0, rtx target,
+expand_unop (machine_mode mode, optab unoptab, rtx op0, rtx target,
int unsignedp)
{
enum mode_class mclass = GET_MODE_CLASS (mode);
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
rtx temp;
rtx libfunc;
rtx_insn *insns;
rtx value;
rtx eq_value;
- enum machine_mode outmode = mode;
+ machine_mode outmode = mode;
/* All of these functions return small values. Thus we choose to
have them return something that isn't a double-word. */
*/
rtx
-expand_abs_nojump (enum machine_mode mode, rtx op0, rtx target,
+expand_abs_nojump (machine_mode mode, rtx op0, rtx target,
int result_unsignedp)
{
rtx temp;
}
rtx
-expand_abs (enum machine_mode mode, rtx op0, rtx target,
+expand_abs (machine_mode mode, rtx op0, rtx target,
int result_unsignedp, int safe)
{
rtx temp;
different but can be deduced from MODE. */
rtx
-expand_one_cmpl_abs_nojump (enum machine_mode mode, rtx op0, rtx target)
+expand_one_cmpl_abs_nojump (machine_mode mode, rtx op0, rtx target)
{
rtx temp;
and not playing with subregs so much, will help the register allocator. */
static rtx
-expand_copysign_absneg (enum machine_mode mode, rtx op0, rtx op1, rtx target,
+expand_copysign_absneg (machine_mode mode, rtx op0, rtx op1, rtx target,
int bitpos, bool op0_is_abs)
{
- enum machine_mode imode;
+ machine_mode imode;
enum insn_code icode;
rtx sign;
rtx_code_label *label;
is true if op0 is known to have its sign bit clear. */
static rtx
-expand_copysign_bit (enum machine_mode mode, rtx op0, rtx op1, rtx target,
+expand_copysign_bit (machine_mode mode, rtx op0, rtx op1, rtx target,
int bitpos, bool op0_is_abs)
{
- enum machine_mode imode;
+ machine_mode imode;
int word, nwords, i;
rtx temp;
rtx_insn *insns;
rtx
expand_copysign (rtx op0, rtx op1, rtx target)
{
- enum machine_mode mode = GET_MODE (op0);
+ machine_mode mode = GET_MODE (op0);
const struct real_format *fmt;
bool op0_is_abs;
rtx temp;
required to implement all (or any) of the unordered bcc operations. */
int
-can_compare_p (enum rtx_code code, enum machine_mode mode,
+can_compare_p (enum rtx_code code, machine_mode mode,
enum can_compare_purpose purpose)
{
rtx test;
static void
prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
int unsignedp, enum optab_methods methods,
- rtx *ptest, enum machine_mode *pmode)
+ rtx *ptest, machine_mode *pmode)
{
- enum machine_mode mode = *pmode;
+ machine_mode mode = *pmode;
rtx libfunc, test;
- enum machine_mode cmp_mode;
+ machine_mode cmp_mode;
enum mode_class mclass;
/* The other methods are not needed. */
if (mode == BLKmode)
{
- enum machine_mode result_mode;
+ machine_mode result_mode;
enum insn_code cmp_code;
tree length_type;
rtx libfunc;
if (!SCALAR_FLOAT_MODE_P (mode))
{
rtx result;
- enum machine_mode ret_mode;
+ machine_mode ret_mode;
/* Handle a libcall just for the mode we are using. */
libfunc = optab_libfunc (cmp_optab, mode);
that it is accepted by the operand predicate. Return the new value. */
rtx
-prepare_operand (enum insn_code icode, rtx x, int opnum, enum machine_mode mode,
- enum machine_mode wider_mode, int unsignedp)
+prepare_operand (enum insn_code icode, rtx x, int opnum, machine_mode mode,
+ machine_mode wider_mode, int unsignedp)
{
if (mode != wider_mode)
x = convert_modes (wider_mode, mode, x, unsignedp);
we can do the branch. */
static void
-emit_cmp_and_jump_insn_1 (rtx test, enum machine_mode mode, rtx label, int prob)
+emit_cmp_and_jump_insn_1 (rtx test, machine_mode mode, rtx label, int prob)
{
- enum machine_mode optab_mode;
+ machine_mode optab_mode;
enum mode_class mclass;
enum insn_code icode;
rtx_insn *insn;
void
emit_cmp_and_jump_insns (rtx x, rtx y, enum rtx_code comparison, rtx size,
- enum machine_mode mode, int unsignedp, rtx label,
+ machine_mode mode, int unsignedp, rtx label,
int prob)
{
rtx op0 = x, op1 = y;
static void
prepare_float_lib_cmp (rtx x, rtx y, enum rtx_code comparison,
- rtx *ptest, enum machine_mode *pmode)
+ rtx *ptest, machine_mode *pmode)
{
enum rtx_code swapped = swap_condition (comparison);
enum rtx_code reversed = reverse_condition_maybe_unordered (comparison);
- enum machine_mode orig_mode = GET_MODE (x);
- enum machine_mode mode, cmp_mode;
+ machine_mode orig_mode = GET_MODE (x);
+ machine_mode mode, cmp_mode;
rtx true_rtx, false_rtx;
rtx value, target, equiv;
rtx_insn *insns;
rtx
emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode cmode, rtx op2, rtx op3,
- enum machine_mode mode, int unsignedp)
+ machine_mode cmode, rtx op2, rtx op3,
+ machine_mode mode, int unsignedp)
{
rtx tem, comparison;
rtx_insn *last;
comparisons, and vice versa. How do we handle them? */
int
-can_conditionally_move_p (enum machine_mode mode)
+can_conditionally_move_p (machine_mode mode)
{
if (direct_optab_handler (movcc_optab, mode) != CODE_FOR_nothing)
return 1;
rtx
emit_conditional_add (rtx target, enum rtx_code code, rtx op0, rtx op1,
- enum machine_mode cmode, rtx op2, rtx op3,
- enum machine_mode mode, int unsignedp)
+ machine_mode cmode, rtx op2, rtx op3,
+ machine_mode mode, int unsignedp)
{
rtx tem, comparison;
rtx_insn *last;
no such operation exists, CODE_FOR_nothing will be returned. */
enum insn_code
-can_extend_p (enum machine_mode to_mode, enum machine_mode from_mode,
+can_extend_p (machine_mode to_mode, machine_mode from_mode,
int unsignedp)
{
convert_optab tab;
into X (with mode MTO). Do zero-extension if UNSIGNEDP is nonzero. */
rtx
-gen_extend_insn (rtx x, rtx y, enum machine_mode mto,
- enum machine_mode mfrom, int unsignedp)
+gen_extend_insn (rtx x, rtx y, machine_mode mto,
+ machine_mode mfrom, int unsignedp)
{
enum insn_code icode = can_extend_p (mto, mfrom, unsignedp);
return GEN_FCN (icode) (x, y);
an explicit FTRUNC insn before the fix insn; otherwise 0. */
static enum insn_code
-can_fix_p (enum machine_mode fixmode, enum machine_mode fltmode,
+can_fix_p (machine_mode fixmode, machine_mode fltmode,
int unsignedp, int *truncp_ptr)
{
convert_optab tab;
}
enum insn_code
-can_float_p (enum machine_mode fltmode, enum machine_mode fixmode,
+can_float_p (machine_mode fltmode, machine_mode fixmode,
int unsignedp)
{
convert_optab tab;
tree vectype_out, tree vectype_in,
tree *decl, enum tree_code *code1)
{
- enum machine_mode m1,m2;
+ machine_mode m1,m2;
int truncp;
m1 = TYPE_MODE (vectype_out);
{
enum insn_code icode;
rtx target = to;
- enum machine_mode fmode, imode;
+ machine_mode fmode, imode;
bool can_do_signed = false;
/* Crash now, because we won't be able to decide which mode to use. */
{
enum insn_code icode;
rtx target = to;
- enum machine_mode fmode, imode;
+ machine_mode fmode, imode;
int must_trunc = 0;
/* We first try to find a pair of modes, one real and one integer, at
void
expand_fixed_convert (rtx to, rtx from, int uintp, int satp)
{
- enum machine_mode to_mode = GET_MODE (to);
- enum machine_mode from_mode = GET_MODE (from);
+ machine_mode to_mode = GET_MODE (to);
+ machine_mode from_mode = GET_MODE (from);
convert_optab tab;
enum rtx_code this_code;
enum insn_code code;
{
enum insn_code icode;
rtx target = to;
- enum machine_mode fmode, imode;
+ machine_mode fmode, imode;
/* We first try to find a pair of modes, one real and one integer, at
least as wide as FROM and TO, respectively, in which we can open-code
/* Report whether we have an instruction to perform the operation
specified by CODE on operands of mode MODE. */
int
-have_insn_for (enum rtx_code code, enum machine_mode mode)
+have_insn_for (enum rtx_code code, machine_mode mode)
{
return (code_to_optab (code)
&& (optab_handler (code_to_optab (code), mode)
static void
gen_libfunc (optab optable, const char *opname, int suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
unsigned opname_len = strlen (opname);
const char *mname = GET_MODE_NAME (mode);
void
gen_int_libfunc (optab optable, const char *opname, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
int maxsize = 2 * BITS_PER_WORD;
int minsize = BITS_PER_WORD;
void
gen_fp_libfunc (optab optable, const char *opname, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
char *dec_opname;
void
gen_fixed_libfunc (optab optable, const char *opname, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (!ALL_FIXED_POINT_MODE_P (mode))
return;
void
gen_signed_fixed_libfunc (optab optable, const char *opname, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (!SIGNED_FIXED_POINT_MODE_P (mode))
return;
void
gen_unsigned_fixed_libfunc (optab optable, const char *opname, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (!UNSIGNED_FIXED_POINT_MODE_P (mode))
return;
void
gen_int_fp_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode) || GET_MODE_CLASS (mode) == MODE_FLOAT)
gen_fp_libfunc (optable, name, suffix, mode);
void
gen_intv_fp_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode) || GET_MODE_CLASS (mode) == MODE_FLOAT)
gen_fp_libfunc (optable, name, suffix, mode);
void
gen_int_fp_fixed_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode) || GET_MODE_CLASS (mode) == MODE_FLOAT)
gen_fp_libfunc (optable, name, suffix, mode);
void
gen_int_fp_signed_fixed_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode) || GET_MODE_CLASS (mode) == MODE_FLOAT)
gen_fp_libfunc (optable, name, suffix, mode);
void
gen_int_fixed_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (INTEGRAL_MODE_P (mode))
gen_int_libfunc (optable, name, suffix, mode);
void
gen_int_signed_fixed_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (INTEGRAL_MODE_P (mode))
gen_int_libfunc (optable, name, suffix, mode);
void
gen_int_unsigned_fixed_libfunc (optab optable, const char *name, char suffix,
- enum machine_mode mode)
+ machine_mode mode)
{
if (INTEGRAL_MODE_P (mode))
gen_int_libfunc (optable, name, suffix, mode);
void
gen_interclass_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
size_t opname_len = strlen (opname);
size_t mname_len = 0;
void
gen_int_to_fp_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (GET_MODE_CLASS (fmode) != MODE_INT)
return;
void
gen_ufloat_conv_libfunc (convert_optab tab,
const char *opname ATTRIBUTE_UNUSED,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (DECIMAL_FLOAT_MODE_P (tmode))
gen_int_to_fp_conv_libfunc (tab, "floatuns", tmode, fmode);
void
gen_int_to_fp_nondecimal_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (GET_MODE_CLASS (fmode) != MODE_INT)
return;
void
gen_fp_to_int_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (GET_MODE_CLASS (fmode) != MODE_FLOAT && !DECIMAL_FLOAT_MODE_P (fmode))
return;
void
gen_intraclass_conv_libfunc (convert_optab tab, const char *opname,
- enum machine_mode tmode, enum machine_mode fmode)
+ machine_mode tmode, machine_mode fmode)
{
size_t opname_len = strlen (opname);
size_t mname_len = 0;
void
gen_trunc_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (GET_MODE_CLASS (tmode) != MODE_FLOAT && !DECIMAL_FLOAT_MODE_P (tmode))
return;
void
gen_extend_conv_libfunc (convert_optab tab,
const char *opname ATTRIBUTE_UNUSED,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (GET_MODE_CLASS (tmode) != MODE_FLOAT && !DECIMAL_FLOAT_MODE_P (tmode))
return;
void
gen_fract_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (tmode == fmode)
return;
void
gen_fractuns_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (tmode == fmode)
return;
void
gen_satfract_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (tmode == fmode)
return;
void
gen_satfractuns_conv_libfunc (convert_optab tab,
const char *opname,
- enum machine_mode tmode,
- enum machine_mode fmode)
+ machine_mode tmode,
+ machine_mode fmode)
{
if (tmode == fmode)
return;
/* Call this to reset the function entry for one optab (OPTABLE) in mode
MODE to NAME, which should be either 0 or a string constant. */
void
-set_optab_libfunc (optab op, enum machine_mode mode, const char *name)
+set_optab_libfunc (optab op, machine_mode mode, const char *name)
{
rtx val;
struct libfunc_entry e;
(OPTABLE) from mode FMODE to mode TMODE to NAME, which should be
either 0 or a string constant. */
void
-set_conv_libfunc (convert_optab optab, enum machine_mode tmode,
- enum machine_mode fmode, const char *name)
+set_conv_libfunc (convert_optab optab, machine_mode tmode,
+ machine_mode fmode, const char *name)
{
rtx val;
struct libfunc_entry e;
static void
init_sync_libfuncs_1 (optab tab, const char *base, int max)
{
- enum machine_mode mode;
+ machine_mode mode;
char buf[64];
size_t len = strlen (base);
int i;
for (i = FIRST_NORM_OPTAB; i <= LAST_NORMLIB_OPTAB; ++i)
for (j = 0; j < NUM_MACHINE_MODES; ++j)
{
- rtx l = optab_libfunc ((optab) i, (enum machine_mode) j);
+ rtx l = optab_libfunc ((optab) i, (machine_mode) j);
if (l)
{
gcc_assert (GET_CODE (l) == SYMBOL_REF);
for (j = 0; j < NUM_MACHINE_MODES; ++j)
for (k = 0; k < NUM_MACHINE_MODES; ++k)
{
- rtx l = convert_optab_libfunc ((optab) i, (enum machine_mode) j,
- (enum machine_mode) k);
+ rtx l = convert_optab_libfunc ((optab) i, (machine_mode) j,
+ (machine_mode) k);
if (l)
{
gcc_assert (GET_CODE (l) == SYMBOL_REF);
rtx
gen_cond_trap (enum rtx_code code, rtx op1, rtx op2, rtx tcode)
{
- enum machine_mode mode = GET_MODE (op1);
+ machine_mode mode = GET_MODE (op1);
enum insn_code icode;
rtx insn;
rtx trap_rtx;
of the CPU. SEL may be NULL, which stands for an unknown constant. */
bool
-can_vec_perm_p (enum machine_mode mode, bool variable,
+can_vec_perm_p (machine_mode mode, bool variable,
const unsigned char *sel)
{
- enum machine_mode qimode;
+ machine_mode qimode;
/* If the target doesn't implement a vector mode for the vector type,
then no operations are supported. */
expand_vec_perm_1 (enum insn_code icode, rtx target,
rtx v0, rtx v1, rtx sel)
{
- enum machine_mode tmode = GET_MODE (target);
- enum machine_mode smode = GET_MODE (sel);
+ machine_mode tmode = GET_MODE (target);
+ machine_mode smode = GET_MODE (sel);
struct expand_operand ops[4];
create_output_operand (&ops[0], target, tmode);
and three operands. */
rtx
-expand_vec_perm (enum machine_mode mode, rtx v0, rtx v1, rtx sel, rtx target)
+expand_vec_perm (machine_mode mode, rtx v0, rtx v1, rtx sel, rtx target)
{
enum insn_code icode;
- enum machine_mode qimode;
+ machine_mode qimode;
unsigned int i, w, e, u;
rtx tmp, sel_qi = NULL;
rtvec vec;
if (sel_qi == NULL)
{
/* Multiply each element by its byte size. */
- enum machine_mode selmode = GET_MODE (sel);
+ machine_mode selmode = GET_MODE (sel);
if (u == 2)
sel = expand_simple_binop (selmode, PLUS, sel, sel,
sel, 0, OPTAB_DIRECT);
mode CMODE, unsigned if UNS is true, resulting in a value of mode VMODE. */
static inline enum insn_code
-get_vcond_icode (enum machine_mode vmode, enum machine_mode cmode, bool uns)
+get_vcond_icode (machine_mode vmode, machine_mode cmode, bool uns)
{
enum insn_code icode = CODE_FOR_nothing;
if (uns)
bool
expand_vec_cond_expr_p (tree value_type, tree cmp_op_type)
{
- enum machine_mode value_mode = TYPE_MODE (value_type);
- enum machine_mode cmp_op_mode = TYPE_MODE (cmp_op_type);
+ machine_mode value_mode = TYPE_MODE (value_type);
+ machine_mode cmp_op_mode = TYPE_MODE (cmp_op_type);
if (GET_MODE_SIZE (value_mode) != GET_MODE_SIZE (cmp_op_mode)
|| GET_MODE_NUNITS (value_mode) != GET_MODE_NUNITS (cmp_op_mode)
|| get_vcond_icode (TYPE_MODE (value_type), TYPE_MODE (cmp_op_type),
struct expand_operand ops[6];
enum insn_code icode;
rtx comparison, rtx_op1, rtx_op2;
- enum machine_mode mode = TYPE_MODE (vec_cond_type);
- enum machine_mode cmp_op_mode;
+ machine_mode mode = TYPE_MODE (vec_cond_type);
+ machine_mode cmp_op_mode;
bool unsignedp;
tree op0a, op0b;
enum tree_code tcode;
2 for even/odd widening, and 3 for hi/lo widening. */
int
-can_mult_highpart_p (enum machine_mode mode, bool uns_p)
+can_mult_highpart_p (machine_mode mode, bool uns_p)
{
optab op;
unsigned char *sel;
/* Expand a highpart multiply. */
rtx
-expand_mult_highpart (enum machine_mode mode, rtx op0, rtx op1,
+expand_mult_highpart (machine_mode mode, rtx op0, rtx op1,
rtx target, bool uns_p)
{
struct expand_operand eops[3];
enum insn_code icode;
int method, i, nunits;
- enum machine_mode wmode;
+ machine_mode wmode;
rtx m1, m2, perm;
optab tab1, tab2;
rtvec v;
/* Return true if target supports vector masked load/store for mode. */
bool
-can_vec_mask_load_store_p (enum machine_mode mode, bool is_load)
+can_vec_mask_load_store_p (machine_mode mode, bool is_load)
{
optab op = is_load ? maskload_optab : maskstore_optab;
- enum machine_mode vmode;
+ machine_mode vmode;
unsigned int vector_sizes;
/* If mode is vector mode, check it directly. */
/* Return true if there is a compare_and_swap pattern. */
bool
-can_compare_and_swap_p (enum machine_mode mode, bool allow_libcall)
+can_compare_and_swap_p (machine_mode mode, bool allow_libcall)
{
enum insn_code icode;
/* Return true if an atomic exchange can be performed. */
bool
-can_atomic_exchange_p (enum machine_mode mode, bool allow_libcall)
+can_atomic_exchange_p (machine_mode mode, bool allow_libcall)
{
enum insn_code icode;
static bool
expand_compare_and_swap_loop (rtx mem, rtx old_reg, rtx new_reg, rtx seq)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx_code_label *label;
rtx cmp_reg, success, oldval;
static rtx
maybe_emit_atomic_exchange (rtx target, rtx mem, rtx val, enum memmodel model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
enum insn_code icode;
/* If the target supports the exchange directly, great. */
maybe_emit_sync_lock_test_and_set (rtx target, rtx mem, rtx val,
enum memmodel model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
enum insn_code icode;
rtx_insn *last_insn = get_last_insn ();
static rtx
maybe_emit_compare_and_swap_exchange_loop (rtx target, rtx mem, rtx val)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
if (can_compare_and_swap_p (mode, true))
{
static rtx
maybe_emit_atomic_test_and_set (rtx target, rtx mem, enum memmodel model)
{
- enum machine_mode pat_bool_mode;
+ machine_mode pat_bool_mode;
struct expand_operand ops[3];
if (!HAVE_atomic_test_and_set)
rtx
expand_atomic_test_and_set (rtx target, rtx mem, enum memmodel model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx ret, trueval, subtarget;
ret = maybe_emit_atomic_test_and_set (target, mem, model);
bool is_weak, enum memmodel succ_model,
enum memmodel fail_model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
struct expand_operand ops[8];
enum insn_code icode;
rtx target_oval, target_bool = NULL_RTX;
icode = direct_optab_handler (atomic_compare_and_swap_optab, mode);
if (icode != CODE_FOR_nothing)
{
- enum machine_mode bool_mode = insn_data[icode].operand[0].mode;
+ machine_mode bool_mode = insn_data[icode].operand[0].mode;
/* Make sure we always have a place for the bool operand. */
if (ptarget_bool == NULL
rtx
expand_atomic_load (rtx target, rtx mem, enum memmodel model)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
enum insn_code icode;
/* If the target supports the load directly, great. */
rtx
expand_atomic_store (rtx mem, rtx val, enum memmodel model, bool use_release)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
enum insn_code icode;
struct expand_operand ops[3];
maybe_emit_op (const struct atomic_op_functions *optab, rtx target, rtx mem,
rtx val, bool use_memmodel, enum memmodel model, bool after)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
struct expand_operand ops[4];
enum insn_code icode;
int op_counter = 0;
enum rtx_code code, enum memmodel model,
bool after)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
struct atomic_op_functions optab;
rtx result;
bool unused_result = (target == const0_rtx);
expand_atomic_fetch_op (rtx target, rtx mem, rtx val, enum rtx_code code,
enum memmodel model, bool after)
{
- enum machine_mode mode = GET_MODE (mem);
+ machine_mode mode = GET_MODE (mem);
rtx result;
bool unused_result = (target == const0_rtx);
bool
valid_multiword_target_p (rtx target)
{
- enum machine_mode mode;
+ machine_mode mode;
int i;
mode = GET_MODE (target);
&& !side_effects_p (addr))
{
rtx_insn *last;
- enum machine_mode mode;
+ machine_mode mode;
last = get_last_insn ();
mode = get_address_mode (mem);
maybe_legitimize_operand (enum insn_code icode, unsigned int opno,
struct expand_operand *op)
{
- enum machine_mode mode, imode;
+ machine_mode mode, imode;
bool old_volatile_ok, result;
mode = op->mode;
static bool
get_traditional_extraction_insn (extraction_insn *insn,
enum extraction_type type,
- enum machine_mode mode,
+ machine_mode mode,
enum insn_code icode,
int struct_op, int field_op)
{
const struct insn_data_d *data = &insn_data[icode];
- enum machine_mode struct_mode = data->operand[struct_op].mode;
+ machine_mode struct_mode = data->operand[struct_op].mode;
if (struct_mode == VOIDmode)
struct_mode = word_mode;
if (mode != struct_mode)
return false;
- enum machine_mode field_mode = data->operand[field_op].mode;
+ machine_mode field_mode = data->operand[field_op].mode;
if (field_mode == VOIDmode)
field_mode = word_mode;
- enum machine_mode pos_mode = data->operand[struct_op + 2].mode;
+ machine_mode pos_mode = data->operand[struct_op + 2].mode;
if (pos_mode == VOIDmode)
pos_mode = word_mode;
static bool
get_optab_extraction_insn (struct extraction_insn *insn,
enum extraction_type type,
- enum machine_mode mode, direct_optab reg_optab,
+ machine_mode mode, direct_optab reg_optab,
direct_optab misalign_optab, int pos_op)
{
direct_optab optab = (type == ET_unaligned_mem ? misalign_optab : reg_optab);
get_extraction_insn (extraction_insn *insn,
enum extraction_pattern pattern,
enum extraction_type type,
- enum machine_mode mode)
+ machine_mode mode)
{
switch (pattern)
{
enum extraction_pattern pattern,
enum extraction_type type,
unsigned HOST_WIDE_INT struct_bits,
- enum machine_mode field_mode)
+ machine_mode field_mode)
{
- enum machine_mode mode = smallest_mode_for_size (struct_bits, MODE_INT);
+ machine_mode mode = smallest_mode_for_size (struct_bits, MODE_INT);
while (mode != VOIDmode)
{
if (get_extraction_insn (insn, pattern, type, mode))
get_best_reg_extraction_insn (extraction_insn *insn,
enum extraction_pattern pattern,
unsigned HOST_WIDE_INT struct_bits,
- enum machine_mode field_mode)
+ machine_mode field_mode)
{
return get_best_extraction_insn (insn, pattern, ET_reg, struct_bits,
field_mode);
get_best_mem_extraction_insn (extraction_insn *insn,
enum extraction_pattern pattern,
HOST_WIDE_INT bitsize, HOST_WIDE_INT bitnum,
- enum machine_mode field_mode)
+ machine_mode field_mode)
{
unsigned HOST_WIDE_INT struct_bits = (bitnum % BITS_PER_UNIT
+ bitsize
char libcall_suffix;
const char *libcall_basename;
void (*libcall_gen) (optab, const char *name,
- char suffix, enum machine_mode);
+ char suffix, machine_mode);
};
struct convert_optab_libcall_d
{
const char *libcall_basename;
void (*libcall_gen) (convert_optab, const char *name,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
};
/* Given an enum insn_code, access the function to construct
/* Returns the active icode for the given (encoded) optab. */
extern enum insn_code raw_optab_handler (unsigned);
-extern bool swap_optab_enable (optab, enum machine_mode, bool);
+extern bool swap_optab_enable (optab, machine_mode, bool);
/* Target-dependent globals. */
struct target_optabs {
extern rtx expand_widen_pattern_expr (sepops ops, rtx op0, rtx op1, rtx wide_op,
rtx target, int unsignedp);
-extern rtx expand_ternary_op (enum machine_mode mode, optab ternary_optab,
+extern rtx expand_ternary_op (machine_mode mode, optab ternary_optab,
rtx op0, rtx op1, rtx op2, rtx target,
int unsignedp);
/* Expand a binary operation given optab and rtx operands. */
-extern rtx expand_binop (enum machine_mode, optab, rtx, rtx, rtx, int,
+extern rtx expand_binop (machine_mode, optab, rtx, rtx, rtx, int,
enum optab_methods);
-extern rtx simplify_expand_binop (enum machine_mode mode, optab binoptab,
+extern rtx simplify_expand_binop (machine_mode mode, optab binoptab,
rtx op0, rtx op1, rtx target, int unsignedp,
enum optab_methods methods);
-extern bool force_expand_binop (enum machine_mode, optab, rtx, rtx, rtx, int,
+extern bool force_expand_binop (machine_mode, optab, rtx, rtx, rtx, int,
enum optab_methods);
/* Expand a binary operation with both signed and unsigned forms. */
-extern rtx sign_expand_binop (enum machine_mode, optab, optab, rtx, rtx,
+extern rtx sign_expand_binop (machine_mode, optab, optab, rtx, rtx,
rtx, int, enum optab_methods);
/* Generate code to perform an operation on one operand with two results. */
enum rtx_code);
/* Expand a unary arithmetic operation given optab rtx operand. */
-extern rtx expand_unop (enum machine_mode, optab, rtx, rtx, int);
+extern rtx expand_unop (machine_mode, optab, rtx, rtx, int);
/* Expand the absolute value operation. */
-extern rtx expand_abs_nojump (enum machine_mode, rtx, rtx, int);
-extern rtx expand_abs (enum machine_mode, rtx, rtx, int, int);
+extern rtx expand_abs_nojump (machine_mode, rtx, rtx, int);
+extern rtx expand_abs (machine_mode, rtx, rtx, int, int);
/* Expand the one's complement absolute value operation. */
-extern rtx expand_one_cmpl_abs_nojump (enum machine_mode, rtx, rtx);
+extern rtx expand_one_cmpl_abs_nojump (machine_mode, rtx, rtx);
/* Expand the copysign operation. */
extern rtx expand_copysign (rtx, rtx, rtx);
#define find_widening_optab_handler(A,B,C,D) \
find_widening_optab_handler_and_mode (A, B, C, D, NULL)
extern enum insn_code find_widening_optab_handler_and_mode (optab,
- enum machine_mode,
- enum machine_mode,
+ machine_mode,
+ machine_mode,
int,
- enum machine_mode *);
-extern enum insn_code widening_optab_handler (optab, enum machine_mode,
- enum machine_mode);
+ machine_mode *);
+extern enum insn_code widening_optab_handler (optab, machine_mode,
+ machine_mode);
/* An extra flag to control optab_for_tree_code's behavior. This is needed to
distinguish between machines with a vector shift that takes a scalar for the
/* Nonzero if a compare of mode MODE can be done straightforwardly
(without splitting it into pieces). */
-extern int can_compare_p (enum rtx_code, enum machine_mode,
+extern int can_compare_p (enum rtx_code, machine_mode,
enum can_compare_purpose);
/* Return the INSN_CODE to use for an extend operation. */
-extern enum insn_code can_extend_p (enum machine_mode, enum machine_mode, int);
+extern enum insn_code can_extend_p (machine_mode, machine_mode, int);
/* Generate the body of an insn to extend Y (with mode MFROM)
into X (with mode MTO). Do zero-extension if UNSIGNEDP is nonzero. */
-extern rtx gen_extend_insn (rtx, rtx, enum machine_mode,
- enum machine_mode, int);
+extern rtx gen_extend_insn (rtx, rtx, machine_mode,
+ machine_mode, int);
/* Call this to reset the function entry for one optab. */
-extern void set_optab_libfunc (optab, enum machine_mode, const char *);
-extern void set_conv_libfunc (convert_optab, enum machine_mode,
- enum machine_mode, const char *);
+extern void set_optab_libfunc (optab, machine_mode, const char *);
+extern void set_conv_libfunc (convert_optab, machine_mode,
+ machine_mode, const char *);
/* Call this to install all of the __sync libcalls up to size MAX. */
extern void init_sync_libfuncs (int max);
extern void expand_float (rtx, rtx, int);
/* Return the insn_code for a FLOAT_EXPR. */
-enum insn_code can_float_p (enum machine_mode, enum machine_mode, int);
+enum insn_code can_float_p (machine_mode, machine_mode, int);
/* Return true if there is an inline compare and swap pattern. */
-extern bool can_compare_and_swap_p (enum machine_mode, bool);
+extern bool can_compare_and_swap_p (machine_mode, bool);
/* Return true if there is an inline atomic exchange pattern. */
-extern bool can_atomic_exchange_p (enum machine_mode, bool);
+extern bool can_atomic_exchange_p (machine_mode, bool);
/* Generate code for a compare and swap. */
extern bool expand_atomic_compare_and_swap (rtx *, rtx *, rtx, rtx, rtx, bool,
extern bool expand_sfix_optab (rtx, rtx, convert_optab);
/* Generate code for a widening multiply. */
-extern rtx expand_widening_mult (enum machine_mode, rtx, rtx, rtx, int, optab);
+extern rtx expand_widening_mult (machine_mode, rtx, rtx, rtx, int, optab);
/* Return tree if target supports vector operations for COND_EXPR. */
bool expand_vec_cond_expr_p (tree, tree);
extern rtx expand_vec_shift_expr (sepops, rtx);
/* Return true if target supports vector operations for VEC_PERM_EXPR. */
-extern bool can_vec_perm_p (enum machine_mode, bool, const unsigned char *);
+extern bool can_vec_perm_p (machine_mode, bool, const unsigned char *);
/* Generate code for VEC_PERM_EXPR. */
-extern rtx expand_vec_perm (enum machine_mode, rtx, rtx, rtx, rtx);
+extern rtx expand_vec_perm (machine_mode, rtx, rtx, rtx, rtx);
/* Return non-zero if target supports a given highpart multiplication. */
-extern int can_mult_highpart_p (enum machine_mode, bool);
+extern int can_mult_highpart_p (machine_mode, bool);
/* Generate code for MULT_HIGHPART_EXPR. */
-extern rtx expand_mult_highpart (enum machine_mode, rtx, rtx, rtx, bool);
+extern rtx expand_mult_highpart (machine_mode, rtx, rtx, rtx, bool);
/* Return true if target supports vector masked load/store for mode. */
-extern bool can_vec_mask_load_store_p (enum machine_mode, bool);
+extern bool can_vec_mask_load_store_p (machine_mode, bool);
/* Return the insn used to implement mode MODE of OP, or CODE_FOR_nothing
if the target does not have such an insn. */
static inline enum insn_code
-optab_handler (optab op, enum machine_mode mode)
+optab_handler (optab op, machine_mode mode)
{
unsigned scode = (op << 16) | mode;
gcc_assert (op > LAST_CONV_OPTAB);
such an insn. */
static inline enum insn_code
-convert_optab_handler (convert_optab op, enum machine_mode to_mode,
- enum machine_mode from_mode)
+convert_optab_handler (convert_optab op, machine_mode to_mode,
+ machine_mode from_mode)
{
unsigned scode = (op << 16) | (from_mode << 8) | to_mode;
gcc_assert (op > unknown_optab && op <= LAST_CONV_OPTAB);
if the target does not have such an insn. */
static inline enum insn_code
-direct_optab_handler (direct_optab op, enum machine_mode mode)
+direct_optab_handler (direct_optab op, machine_mode mode)
{
return optab_handler (op, mode);
}
|| binoptab == smulv_optab);
}
-extern rtx optab_libfunc (optab optab, enum machine_mode mode);
-extern rtx convert_optab_libfunc (convert_optab optab, enum machine_mode mode1,
- enum machine_mode mode2);
+extern rtx optab_libfunc (optab optab, machine_mode mode);
+extern rtx convert_optab_libfunc (convert_optab optab, machine_mode mode1,
+ machine_mode mode2);
/* Describes an instruction that inserts or extracts a bitfield. */
struct extraction_insn
/* The mode that the structure operand should have. This is byte_mode
when using the legacy insv, extv and extzv patterns to access memory. */
- enum machine_mode struct_mode;
+ machine_mode struct_mode;
/* The mode of the field to be inserted or extracted, and by extension
the mode of the insertion or extraction itself. */
- enum machine_mode field_mode;
+ machine_mode field_mode;
/* The mode of the field's bit position. This is only important
when the position is variable rather than constant. */
- enum machine_mode pos_mode;
+ machine_mode pos_mode;
};
/* Enumerates the possible extraction_insn operations. */
extern bool get_best_reg_extraction_insn (extraction_insn *,
enum extraction_pattern,
unsigned HOST_WIDE_INT,
- enum machine_mode);
+ machine_mode);
extern bool get_best_mem_extraction_insn (extraction_insn *,
enum extraction_pattern,
HOST_WIDE_INT, HOST_WIDE_INT,
- enum machine_mode);
+ machine_mode);
extern bool insn_operand_matches (enum insn_code icode, unsigned int opno,
rtx operand);
static inline void
create_expand_operand (struct expand_operand *op,
enum expand_operand_type type,
- rtx value, enum machine_mode mode,
+ rtx value, machine_mode mode,
bool unsigned_p)
{
op->type = type;
static inline void
create_output_operand (struct expand_operand *op, rtx x,
- enum machine_mode mode)
+ machine_mode mode)
{
create_expand_operand (op, EXPAND_OUTPUT, x, mode, false);
}
static inline void
create_input_operand (struct expand_operand *op, rtx value,
- enum machine_mode mode)
+ machine_mode mode)
{
create_expand_operand (op, EXPAND_INPUT, value, mode, false);
}
static inline void
create_convert_operand_to (struct expand_operand *op, rtx value,
- enum machine_mode mode, bool unsigned_p)
+ machine_mode mode, bool unsigned_p)
{
create_expand_operand (op, EXPAND_CONVERT_TO, value, mode, unsigned_p);
}
static inline void
create_convert_operand_from (struct expand_operand *op, rtx value,
- enum machine_mode mode, bool unsigned_p)
+ machine_mode mode, bool unsigned_p)
{
create_expand_operand (op, EXPAND_CONVERT_FROM, value, mode, unsigned_p);
}
extern void expand_jump_insn (enum insn_code icode, unsigned int nops,
struct expand_operand *ops);
-extern rtx prepare_operand (enum insn_code, rtx, int, enum machine_mode,
- enum machine_mode, int);
+extern rtx prepare_operand (enum insn_code, rtx, int, machine_mode,
+ machine_mode, int);
-extern void gen_int_libfunc (optab, const char *, char, enum machine_mode);
-extern void gen_fp_libfunc (optab, const char *, char, enum machine_mode);
-extern void gen_fixed_libfunc (optab, const char *, char, enum machine_mode);
+extern void gen_int_libfunc (optab, const char *, char, machine_mode);
+extern void gen_fp_libfunc (optab, const char *, char, machine_mode);
+extern void gen_fixed_libfunc (optab, const char *, char, machine_mode);
extern void gen_signed_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
+ machine_mode);
extern void gen_unsigned_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
-extern void gen_int_fp_libfunc (optab, const char *, char, enum machine_mode);
-extern void gen_intv_fp_libfunc (optab, const char *, char, enum machine_mode);
+ machine_mode);
+extern void gen_int_fp_libfunc (optab, const char *, char, machine_mode);
+extern void gen_intv_fp_libfunc (optab, const char *, char, machine_mode);
extern void gen_int_fp_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
+ machine_mode);
extern void gen_int_fp_signed_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
+ machine_mode);
extern void gen_int_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
+ machine_mode);
extern void gen_int_signed_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
+ machine_mode);
extern void gen_int_unsigned_fixed_libfunc (optab, const char *, char,
- enum machine_mode);
+ machine_mode);
extern void gen_interclass_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_int_to_fp_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_ufloat_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_int_to_fp_nondecimal_conv_libfunc (convert_optab,
const char *,
- enum machine_mode,
- enum machine_mode);
+ machine_mode,
+ machine_mode);
extern void gen_fp_to_int_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_intraclass_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_trunc_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_extend_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_fract_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_fractuns_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_satfract_conv_libfunc (convert_optab, const char *,
- enum machine_mode, enum machine_mode);
+ machine_mode, machine_mode);
extern void gen_satfractuns_conv_libfunc (convert_optab, const char *,
- enum machine_mode,
- enum machine_mode);
+ machine_mode,
+ machine_mode);
extern void init_tree_optimization_optabs (tree);
extern bool lshift_cheap_p (bool);
#ifdef REAL_VALUE_TYPE_SIZE
/* Assemble the floating-point constant D into an object of size MODE. */
-extern void assemble_real (REAL_VALUE_TYPE, enum machine_mode, unsigned);
+extern void assemble_real (REAL_VALUE_TYPE, machine_mode, unsigned);
#endif
/* Write the address of the entity given by SYMBOL to SEC. */
extern void place_block_symbol (rtx);
extern rtx get_section_anchor (struct object_block *, HOST_WIDE_INT,
enum tls_model);
-extern section *mergeable_constant_section (enum machine_mode,
+extern section *mergeable_constant_section (machine_mode,
unsigned HOST_WIDE_INT,
unsigned int);
extern section *function_section (tree);
extern section *default_function_rodata_section (tree);
extern section *default_no_function_rodata_section (tree);
extern section *default_clone_table_section (void);
-extern section *default_select_rtx_section (enum machine_mode, rtx,
+extern section *default_select_rtx_section (machine_mode, rtx,
unsigned HOST_WIDE_INT);
-extern section *default_elf_select_rtx_section (enum machine_mode, rtx,
+extern section *default_elf_select_rtx_section (machine_mode, rtx,
unsigned HOST_WIDE_INT);
extern void default_encode_section_info (tree, rtx, int);
extern const char *default_strip_name_encoding (const char *);
extern void default_elf_fini_array_asm_out_destructor (rtx, int);
extern int maybe_assemble_visibility (tree);
-extern int default_address_cost (rtx, enum machine_mode, addr_space_t, bool);
+extern int default_address_cost (rtx, machine_mode, addr_space_t, bool);
/* Output stack usage information. */
extern void output_stack_usage (void);
alternative_mask preferred = get_preferred_alternatives (insn);
for (i = 0; i < recog_data.n_operands; i++)
{
- enum machine_mode mode;
+ machine_mode mode;
int regno;
const char *p;
for (i = 0; i < recog_data.n_operands; i++)
{
- enum machine_mode mode = recog_data.operand_mode[i];
+ machine_mode mode = recog_data.operand_mode[i];
if (op_alt_regno[i][j] == -1)
continue;
for (i = recog_data.n_dups - 1; i >= 0; i--)
{
int op = recog_data.dup_num[i];
- enum machine_mode mode = recog_data.operand_mode[op];
+ machine_mode mode = recog_data.operand_mode[op];
if (op_alt_regno[op][j] == -1)
continue;
{
int regno = 0;
int i;
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
if (GET_CODE (dst) == SUBREG)
{
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
-static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
+static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
/* move2add_luid is linearly increased while scanning the instructions
from first to last. It is used to set reg_set_luid in
move2add_record_mode (rtx reg)
{
int regno, nregs;
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
if (GET_CODE (reg) == SUBREG)
{
/* Check if REGNO contains a valid value in MODE. */
static bool
-move2add_valid_value_p (int regno, enum machine_mode mode)
+move2add_valid_value_p (int regno, machine_mode mode)
{
if (reg_set_luid[regno] <= move2add_last_label_luid)
return false;
changed = validate_change (insn, &SET_SRC (pat), tem, 0);
else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
{
- enum machine_mode narrow_mode;
+ machine_mode narrow_mode;
for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
narrow_mode != VOIDmode
&& narrow_mode != GET_MODE (reg);
{
rtx_insn *insn = (rtx_insn *) data;
unsigned int regno = 0;
- enum machine_mode mode = GET_MODE (dst);
+ machine_mode mode = GET_MODE (dst);
/* Some targets do argument pushes without adding REG_INC notes. */
{
int hash;
struct bucket *b;
- enum machine_mode mode;
+ machine_mode mode;
enum tree_code_class tclass;
int len;
int i;
static void
apply_mode_iterator (void *loc, int mode)
{
- PUT_MODE ((rtx) loc, (enum machine_mode) mode);
+ PUT_MODE ((rtx) loc, (machine_mode) mode);
}
/* Implementations of the iterator_group callbacks for codes. */
mode MODE. Return true if successful. */
bool
-exact_real_inverse (enum machine_mode mode, REAL_VALUE_TYPE *r)
+exact_real_inverse (machine_mode mode, REAL_VALUE_TYPE *r)
{
const REAL_VALUE_TYPE *one = real_digit (1);
REAL_VALUE_TYPE u;
in TMODE. */
bool
-real_can_shorten_arithmetic (enum machine_mode imode, enum machine_mode tmode)
+real_can_shorten_arithmetic (machine_mode imode, machine_mode tmode)
{
const struct real_format *tfmt, *ifmt;
tfmt = REAL_MODE_FORMAT (tmode);
void
real_to_decimal_for_mode (char *str, const REAL_VALUE_TYPE *r_orig,
size_t buf_size, size_t digits,
- int crop_trailing_zeros, enum machine_mode mode)
+ int crop_trailing_zeros, machine_mode mode)
{
const struct real_format *fmt = NULL;
const REAL_VALUE_TYPE *one, *ten;
/* Legacy. Similar, but return the result directly. */
REAL_VALUE_TYPE
-real_from_string2 (const char *s, enum machine_mode mode)
+real_from_string2 (const char *s, machine_mode mode)
{
REAL_VALUE_TYPE r;
/* Initialize R from string S and desired MODE. */
void
-real_from_string3 (REAL_VALUE_TYPE *r, const char *s, enum machine_mode mode)
+real_from_string3 (REAL_VALUE_TYPE *r, const char *s, machine_mode mode)
{
if (DECIMAL_FLOAT_MODE_P (mode))
decimal_real_from_string (r, s);
/* Initialize R from the wide_int VAL_IN. The MODE is not VOIDmode,*/
void
-real_from_integer (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_from_integer (REAL_VALUE_TYPE *r, machine_mode mode,
const wide_int_ref &val_in, signop sgn)
{
if (val_in == 0)
bool
real_nan (REAL_VALUE_TYPE *r, const char *str, int quiet,
- enum machine_mode mode)
+ machine_mode mode)
{
const struct real_format *fmt;
If SIGN is nonzero, R is set to the most negative finite value. */
void
-real_maxval (REAL_VALUE_TYPE *r, int sign, enum machine_mode mode)
+real_maxval (REAL_VALUE_TYPE *r, int sign, machine_mode mode)
{
const struct real_format *fmt;
int np2;
/* Fills R with 2**N. */
void
-real_2expN (REAL_VALUE_TYPE *r, int n, enum machine_mode fmode)
+real_2expN (REAL_VALUE_TYPE *r, int n, machine_mode fmode)
{
memset (r, 0, sizeof (*r));
/* Extend or truncate to a new mode. */
void
-real_convert (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_convert (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *a)
{
const struct real_format *fmt;
/* Legacy. Likewise, except return the struct directly. */
REAL_VALUE_TYPE
-real_value_truncate (enum machine_mode mode, REAL_VALUE_TYPE a)
+real_value_truncate (machine_mode mode, REAL_VALUE_TYPE a)
{
REAL_VALUE_TYPE r;
real_convert (&r, mode, &a);
/* Return true if truncating to MODE is exact. */
bool
-exact_real_truncate (enum machine_mode mode, const REAL_VALUE_TYPE *a)
+exact_real_truncate (machine_mode mode, const REAL_VALUE_TYPE *a)
{
const struct real_format *fmt;
REAL_VALUE_TYPE t;
/* Similar, but look up the format from MODE. */
long
-real_to_target (long *buf, const REAL_VALUE_TYPE *r, enum machine_mode mode)
+real_to_target (long *buf, const REAL_VALUE_TYPE *r, machine_mode mode)
{
const struct real_format *fmt;
/* Similar, but look up the format from MODE. */
void
-real_from_target (REAL_VALUE_TYPE *r, const long *buf, enum machine_mode mode)
+real_from_target (REAL_VALUE_TYPE *r, const long *buf, machine_mode mode)
{
const struct real_format *fmt;
/* ??? Legacy. Should get access to real_format directly. */
int
-significand_size (enum machine_mode mode)
+significand_size (machine_mode mode)
{
const struct real_format *fmt;
Algorithms", "The Art of Computer Programming", Volume 2. */
bool
-real_powi (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_powi (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *x, HOST_WIDE_INT n)
{
unsigned HOST_WIDE_INT bit;
towards zero, placing the result in R in mode MODE. */
void
-real_trunc (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_trunc (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *x)
{
do_fix_trunc (r, x);
down, placing the result in R in mode MODE. */
void
-real_floor (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_floor (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *x)
{
REAL_VALUE_TYPE t;
up, placing the result in R in mode MODE. */
void
-real_ceil (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_ceil (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *x)
{
REAL_VALUE_TYPE t;
zero. */
void
-real_round (REAL_VALUE_TYPE *r, enum machine_mode mode,
+real_round (REAL_VALUE_TYPE *r, machine_mode mode,
const REAL_VALUE_TYPE *x)
{
do_add (r, x, &dconsthalf, x->sign);
/* Check whether the real constant value given is an integer. */
bool
-real_isinteger (const REAL_VALUE_TYPE *c, enum machine_mode mode)
+real_isinteger (const REAL_VALUE_TYPE *c, machine_mode mode)
{
REAL_VALUE_TYPE cint;
extern bool real_identical (const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *);
/* Extend or truncate to a new mode. */
-extern void real_convert (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_convert (REAL_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *);
/* Return true if truncating to NEW is exact. */
-extern bool exact_real_truncate (enum machine_mode, const REAL_VALUE_TYPE *);
+extern bool exact_real_truncate (machine_mode, const REAL_VALUE_TYPE *);
/* Render R as a decimal floating point constant. */
extern void real_to_decimal (char *, const REAL_VALUE_TYPE *, size_t,
/* Render R as a decimal floating point constant, rounded so as to be
parsed back to the same value when interpreted in mode MODE. */
extern void real_to_decimal_for_mode (char *, const REAL_VALUE_TYPE *, size_t,
- size_t, int, enum machine_mode);
+ size_t, int, machine_mode);
/* Render R as a hexadecimal floating point constant. */
extern void real_to_hexadecimal (char *, const REAL_VALUE_TYPE *,
the value underflows, +1 if overflows, and 0 otherwise. */
extern int real_from_string (REAL_VALUE_TYPE *, const char *);
/* Wrapper to allow different internal representation for decimal floats. */
-extern void real_from_string3 (REAL_VALUE_TYPE *, const char *, enum machine_mode);
+extern void real_from_string3 (REAL_VALUE_TYPE *, const char *, machine_mode);
extern long real_to_target_fmt (long *, const REAL_VALUE_TYPE *,
const struct real_format *);
-extern long real_to_target (long *, const REAL_VALUE_TYPE *, enum machine_mode);
+extern long real_to_target (long *, const REAL_VALUE_TYPE *, machine_mode);
extern void real_from_target_fmt (REAL_VALUE_TYPE *, const long *,
const struct real_format *);
extern void real_from_target (REAL_VALUE_TYPE *, const long *,
- enum machine_mode);
+ machine_mode);
extern void real_inf (REAL_VALUE_TYPE *);
-extern bool real_nan (REAL_VALUE_TYPE *, const char *, int, enum machine_mode);
+extern bool real_nan (REAL_VALUE_TYPE *, const char *, int, machine_mode);
-extern void real_maxval (REAL_VALUE_TYPE *, int, enum machine_mode);
+extern void real_maxval (REAL_VALUE_TYPE *, int, machine_mode);
-extern void real_2expN (REAL_VALUE_TYPE *, int, enum machine_mode);
+extern void real_2expN (REAL_VALUE_TYPE *, int, machine_mode);
extern unsigned int real_hash (const REAL_VALUE_TYPE *);
#define REAL_VALUE_TO_TARGET_DECIMAL32(IN, OUT) \
((OUT) = real_to_target (NULL, &(IN), mode_for_size (32, MODE_DECIMAL_FLOAT, 0)))
-extern REAL_VALUE_TYPE real_value_truncate (enum machine_mode,
+extern REAL_VALUE_TYPE real_value_truncate (machine_mode,
REAL_VALUE_TYPE);
extern REAL_VALUE_TYPE real_value_negate (const REAL_VALUE_TYPE *);
extern REAL_VALUE_TYPE real_value_abs (const REAL_VALUE_TYPE *);
-extern int significand_size (enum machine_mode);
+extern int significand_size (machine_mode);
-extern REAL_VALUE_TYPE real_from_string2 (const char *, enum machine_mode);
+extern REAL_VALUE_TYPE real_from_string2 (const char *, machine_mode);
#define REAL_VALUE_ATOF(s, m) \
real_from_string2 (s, m)
/* Return a CONST_DOUBLE with value R and mode M. */
#define CONST_DOUBLE_FROM_REAL_VALUE(r, m) \
const_double_from_real_value (r, m)
-extern rtx const_double_from_real_value (REAL_VALUE_TYPE, enum machine_mode);
+extern rtx const_double_from_real_value (REAL_VALUE_TYPE, machine_mode);
/* Replace R by 1/R in the given machine mode, if the result is exact. */
-extern bool exact_real_inverse (enum machine_mode, REAL_VALUE_TYPE *);
+extern bool exact_real_inverse (machine_mode, REAL_VALUE_TYPE *);
/* Return true if arithmetic on values in IMODE that were promoted
from values in TMODE is equivalent to direct arithmetic on values
in TMODE. */
-bool real_can_shorten_arithmetic (enum machine_mode, enum machine_mode);
+bool real_can_shorten_arithmetic (machine_mode, machine_mode);
/* In tree.c: wrap up a REAL_VALUE_TYPE in a tree node. */
extern tree build_real (tree, REAL_VALUE_TYPE);
/* Calculate R as X raised to the integer exponent N in mode MODE. */
-extern bool real_powi (REAL_VALUE_TYPE *, enum machine_mode,
+extern bool real_powi (REAL_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *, HOST_WIDE_INT);
/* Standard round to integer value functions. */
-extern void real_trunc (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_trunc (REAL_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *);
-extern void real_floor (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_floor (REAL_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *);
-extern void real_ceil (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_ceil (REAL_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *);
-extern void real_round (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_round (REAL_VALUE_TYPE *, machine_mode,
const REAL_VALUE_TYPE *);
/* Set the sign of R to the sign of X. */
extern void real_copysign (REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *);
/* Check whether the real constant value given is an integer. */
-extern bool real_isinteger (const REAL_VALUE_TYPE *c, enum machine_mode mode);
+extern bool real_isinteger (const REAL_VALUE_TYPE *c, machine_mode mode);
/* Write into BUF the maximum representable finite floating-point
number, (1 - b**-p) * b**emax for a given FP format FMT as a hex
#ifndef GENERATOR_FILE
/* real related routines. */
extern wide_int real_to_integer (const REAL_VALUE_TYPE *, bool *, int);
-extern void real_from_integer (REAL_VALUE_TYPE *, enum machine_mode,
+extern void real_from_integer (REAL_VALUE_TYPE *, machine_mode,
const wide_int_ref &, signop);
#endif
static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
- enum machine_mode op0_mode)
+ machine_mode op0_mode)
{
rtx x = *loc;
enum rtx_code code = GET_CODE (x);
MEM_ADDR_SPACE (XEXP (x, 0)))
&& !MEM_VOLATILE_P (XEXP (x, 0)))
{
- enum machine_mode wanted_mode = VOIDmode;
- enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
+ machine_mode wanted_mode = VOIDmode;
+ machine_mode is_mode = GET_MODE (XEXP (x, 0));
int pos = INTVAL (XEXP (x, 2));
if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
const char *fmt;
rtx x = *loc;
enum rtx_code code;
- enum machine_mode op0_mode = VOIDmode;
+ machine_mode op0_mode = VOIDmode;
int prev_changes = num_changes;
if (!x)
expressions in the machine description. */
int
-general_operand (rtx op, enum machine_mode mode)
+general_operand (rtx op, machine_mode mode)
{
enum rtx_code code = GET_CODE (op);
expressions in the machine description. */
int
-address_operand (rtx op, enum machine_mode mode)
+address_operand (rtx op, machine_mode mode)
{
return memory_address_p (mode, op);
}
expressions in the machine description. */
int
-register_operand (rtx op, enum machine_mode mode)
+register_operand (rtx op, machine_mode mode)
{
if (GET_CODE (op) == SUBREG)
{
/* Return 1 for a register in Pmode; ignore the tested mode. */
int
-pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
return register_operand (op, Pmode);
}
or a hard register. */
int
-scratch_operand (rtx op, enum machine_mode mode)
+scratch_operand (rtx op, machine_mode mode)
{
if (GET_MODE (op) != mode && mode != VOIDmode)
return 0;
expressions in the machine description. */
int
-immediate_operand (rtx op, enum machine_mode mode)
+immediate_operand (rtx op, machine_mode mode)
{
/* Don't accept CONST_INT or anything similar
if the caller wants something floating. */
/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
int
-const_int_operand (rtx op, enum machine_mode mode)
+const_int_operand (rtx op, machine_mode mode)
{
if (!CONST_INT_P (op))
return 0;
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
of mode MODE. */
int
-const_scalar_int_operand (rtx op, enum machine_mode mode)
+const_scalar_int_operand (rtx op, machine_mode mode)
{
if (!CONST_SCALAR_INT_P (op))
return 0;
floating-point number of MODE. */
int
-const_double_operand (rtx op, enum machine_mode mode)
+const_double_operand (rtx op, machine_mode mode)
{
return (GET_CODE (op) == CONST_DOUBLE)
&& (GET_MODE (op) == mode || mode == VOIDmode);
floating-point number of MODE. */
int
-const_double_operand (rtx op, enum machine_mode mode)
+const_double_operand (rtx op, machine_mode mode)
{
/* Don't accept CONST_INT or anything similar
if the caller wants something floating. */
operand of mode MODE. */
int
-nonimmediate_operand (rtx op, enum machine_mode mode)
+nonimmediate_operand (rtx op, machine_mode mode)
{
return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE. */
int
-nonmemory_operand (rtx op, enum machine_mode mode)
+nonmemory_operand (rtx op, machine_mode mode)
{
if (CONSTANT_P (op))
return immediate_operand (op, mode);
expressions in the machine description. */
int
-push_operand (rtx op, enum machine_mode mode)
+push_operand (rtx op, machine_mode mode)
{
unsigned int rounded_size = GET_MODE_SIZE (mode);
expressions in the machine description. */
int
-pop_operand (rtx op, enum machine_mode mode)
+pop_operand (rtx op, machine_mode mode)
{
if (!MEM_P (op))
return 0;
for mode MODE in address space AS. */
int
-memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
expressions in the machine description. */
int
-memory_operand (rtx op, enum machine_mode mode)
+memory_operand (rtx op, machine_mode mode)
{
rtx inner;
that is, a memory reference whose address is a general_operand. */
int
-indirect_operand (rtx op, enum machine_mode mode)
+indirect_operand (rtx op, machine_mode mode)
{
/* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
if (! reload_completed
ORDERED and UNORDERED). */
int
-ordered_comparison_operator (rtx op, enum machine_mode mode)
+ordered_comparison_operator (rtx op, machine_mode mode)
{
if (mode != VOIDmode && GET_MODE (op) != mode)
return false;
MATCH_OPERATOR to recognize all the branch insns. */
int
-comparison_operator (rtx op, enum machine_mode mode)
+comparison_operator (rtx op, machine_mode mode)
{
return ((mode == VOIDmode || GET_MODE (op) == mode)
&& COMPARISON_P (op));
const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
- const char **constraints, enum machine_mode *modes,
+ const char **constraints, machine_mode *modes,
location_t *loc)
{
int nbase = 0, n, i;
for the sake of use in reload.c. */
int
-offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
+offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
addr_space_t as)
{
enum rtx_code ycode = GET_CODE (y);
rtx z;
rtx y1 = y;
rtx *y2;
- int (*addressp) (enum machine_mode, rtx, addr_space_t) =
+ int (*addressp) (machine_mode, rtx, addr_space_t) =
(strictp ? strict_memory_address_addr_space_p
: memory_address_addr_space_p);
unsigned int mode_sz = GET_MODE_SIZE (mode);
if (mode_dependent_address_p (y, as))
return 0;
- enum machine_mode address_mode = GET_MODE (y);
+ machine_mode address_mode = GET_MODE (y);
if (address_mode == VOIDmode)
address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
- enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+ machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif
/* ??? How much offset does an offsettable BLKmode reference need?
for (opno = 0; opno < recog_data.n_operands; opno++)
{
rtx op = recog_data.operand[opno];
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
const char *p = constraints[opno];
int offset = 0;
int win = 0;
bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
- enum machine_mode mode)
+ machine_mode mode)
{
unsigned int regno = REGNO (operand);
rtx
peep2_find_free_register (int from, int to, const char *class_str,
- enum machine_mode mode, HARD_REG_SET *reg_set)
+ machine_mode mode, HARD_REG_SET *reg_set)
{
enum reg_class cl;
HARD_REG_SET live;
extern void cancel_changes (int);
extern int constrain_operands (int, alternative_mask);
extern int constrain_operands_cached (rtx_insn *, int);
-extern int memory_address_addr_space_p (enum machine_mode, rtx, addr_space_t);
+extern int memory_address_addr_space_p (machine_mode, rtx, addr_space_t);
#define memory_address_p(mode,addr) \
memory_address_addr_space_p ((mode), (addr), ADDR_SPACE_GENERIC)
-extern int strict_memory_address_addr_space_p (enum machine_mode, rtx,
+extern int strict_memory_address_addr_space_p (machine_mode, rtx,
addr_space_t);
#define strict_memory_address_p(mode,addr) \
strict_memory_address_addr_space_p ((mode), (addr), ADDR_SPACE_GENERIC)
#ifdef HAVE_cc0
extern int next_insn_tests_no_inequality (rtx);
#endif
-extern bool reg_fits_class_p (const_rtx, reg_class_t, int, enum machine_mode);
+extern bool reg_fits_class_p (const_rtx, reg_class_t, int, machine_mode);
extern int offsettable_memref_p (rtx);
extern int offsettable_nonstrict_memref_p (rtx);
-extern int offsettable_address_addr_space_p (int, enum machine_mode, rtx,
+extern int offsettable_address_addr_space_p (int, machine_mode, rtx,
addr_space_t);
#define offsettable_address_p(strict,mode,addr) \
offsettable_address_addr_space_p ((strict), (mode), (addr), \
extern int peep2_reg_dead_p (int, rtx);
#ifdef CLEAR_HARD_REG_SET
extern rtx peep2_find_free_register (int, int, const char *,
- enum machine_mode, HARD_REG_SET *);
+ machine_mode, HARD_REG_SET *);
#endif
extern rtx peephole2_insns (rtx, rtx, int *);
char is_operator[MAX_RECOG_OPERANDS];
/* Gives the mode of operand N. */
- enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
+ machine_mode operand_mode[MAX_RECOG_OPERANDS];
/* Gives the type (in, out, inout) for operand N. */
enum op_type operand_type[MAX_RECOG_OPERANDS];
/* A table defined in insn-output.c that give information about
each insn-code value. */
-typedef int (*insn_operand_predicate_fn) (rtx, enum machine_mode);
+typedef int (*insn_operand_predicate_fn) (rtx, machine_mode);
typedef const char * (*insn_output_fn) (rtx *, rtx_insn *);
struct insn_gen_fn
enum rtx_code code;
/* The destination mode. */
- enum machine_mode mode;
+ machine_mode mode;
/* The instruction where it lives. */
rtx_insn *insn;
{
/* Zero-extend the negative constant by masking out the bits outside
the source mode. */
- enum machine_mode src_mode = GET_MODE (SET_DEST (*orig_set));
+ machine_mode src_mode = GET_MODE (SET_DEST (*orig_set));
rtx new_const_int
= gen_int_mode (INTVAL (orig_src) & GET_MODE_MASK (src_mode),
GET_MODE (new_reg));
static bool
merge_def_and_ext (ext_cand *cand, rtx_insn *def_insn, ext_state *state)
{
- enum machine_mode ext_src_mode;
+ machine_mode ext_src_mode;
rtx *sub_rtx;
ext_src_mode = GET_MODE (XEXP (SET_SRC (cand->expr), 0));
if (!SCALAR_INT_MODE_P (GET_MODE (SET_DEST (PATTERN (cand->insn)))))
return false;
- enum machine_mode dst_mode = GET_MODE (SET_DEST (PATTERN (cand->insn)));
+ machine_mode dst_mode = GET_MODE (SET_DEST (PATTERN (cand->insn)));
rtx src_reg = get_extended_src_reg (SET_SRC (PATTERN (cand->insn)));
/* Ensure the number of hard registers of the copy match. */
mode if possible, or punt. */
if (state->modified[INSN_UID (cand->insn)].kind != EXT_MODIFIED_NONE)
{
- enum machine_mode mode;
+ machine_mode mode;
rtx set;
if (state->modified[INSN_UID (cand->insn)].kind
unsigned *def_map)
{
enum rtx_code code;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int idx;
rtx src, dest;
/* Create the replacement registers up front. */
for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
{
- enum machine_mode mode;
+ machine_mode mode;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
struct value_data_entry
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned int oldest_regno;
unsigned int next_regno;
struct queued_debug_insn_change *debug_insn_changes;
static void kill_value_one_regno (unsigned, struct value_data *);
static void kill_value_regno (unsigned, unsigned, struct value_data *);
static void kill_value (const_rtx, struct value_data *);
-static void set_value_regno (unsigned, enum machine_mode, struct value_data *);
+static void set_value_regno (unsigned, machine_mode, struct value_data *);
static void init_value_data (struct value_data *);
static void kill_clobbered_value (rtx, const_rtx, void *);
static void kill_set_value (rtx, const_rtx, void *);
static void copy_value (rtx, rtx, struct value_data *);
-static bool mode_change_ok (enum machine_mode, enum machine_mode,
+static bool mode_change_ok (machine_mode, machine_mode,
unsigned int);
-static rtx maybe_mode_change (enum machine_mode, enum machine_mode,
- enum machine_mode, unsigned int, unsigned int);
+static rtx maybe_mode_change (machine_mode, machine_mode,
+ machine_mode, unsigned int, unsigned int);
static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
struct value_data *);
static bool replace_oldest_value_addr (rtx *, enum reg_class,
- enum machine_mode, addr_space_t,
+ machine_mode, addr_space_t,
rtx_insn *, struct value_data *);
static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
/* Remember that REGNO is valid in MODE. */
static void
-set_value_regno (unsigned int regno, enum machine_mode mode,
+set_value_regno (unsigned int regno, machine_mode mode,
struct value_data *vd)
{
unsigned int nregs;
/* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
static bool
-mode_change_ok (enum machine_mode orig_mode, enum machine_mode new_mode,
+mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
unsigned int regno ATTRIBUTE_UNUSED)
{
if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
static rtx
-maybe_mode_change (enum machine_mode orig_mode, enum machine_mode copy_mode,
- enum machine_mode new_mode, unsigned int regno,
+maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
+ machine_mode new_mode, unsigned int regno,
unsigned int copy_regno ATTRIBUTE_UNUSED)
{
if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
unsigned int regno = REGNO (reg);
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
unsigned int i;
/* If we are accessing REG in some mode other that what we set it in,
for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
{
- enum machine_mode oldmode = vd->e[i].mode;
+ machine_mode oldmode = vd->e[i].mode;
rtx new_rtx;
if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
- enum machine_mode mode, addr_space_t as,
+ machine_mode mode, addr_space_t as,
rtx_insn *insn, struct value_data *vd)
{
rtx x = *loc;
{
rtx src = SET_SRC (set);
unsigned int regno = REGNO (src);
- enum machine_mode mode = GET_MODE (src);
+ machine_mode mode = GET_MODE (src);
unsigned int i;
rtx new_rtx;
init_reg_sets_1 (void)
{
unsigned int i, j;
- unsigned int /* enum machine_mode */ m;
+ unsigned int /* machine_mode */ m;
restore_register_info ();
HARD_REG_SET ok_regs;
CLEAR_HARD_REG_SET (ok_regs);
for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
- if (!fixed_regs [j] && HARD_REGNO_MODE_OK (j, (enum machine_mode) m))
+ if (!fixed_regs [j] && HARD_REGNO_MODE_OK (j, (machine_mode) m))
SET_HARD_REG_BIT (ok_regs, j);
for (i = 0; i < N_REG_CLASSES; i++)
- if ((targetm.class_max_nregs ((reg_class_t) i, (enum machine_mode) m)
+ if ((targetm.class_max_nregs ((reg_class_t) i, (machine_mode) m)
<= reg_class_size[i])
&& hard_reg_set_intersect_p (ok_regs, reg_class_contents[i]))
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
for (j = 0; j < MAX_MACHINE_MODE; j++)
- hard_regno_nregs[i][j] = HARD_REGNO_NREGS (i, (enum machine_mode)j);
+ hard_regno_nregs[i][j] = HARD_REGNO_NREGS (i, (machine_mode)j);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{
int i;
for (i = 0; i < MAX_MACHINE_MODE; i++)
- top_of_stack[i] = gen_rtx_MEM ((enum machine_mode) i, stack_pointer_rtx);
+ top_of_stack[i] = gen_rtx_MEM ((machine_mode) i, stack_pointer_rtx);
}
TO, using MODE. */
int
-register_move_cost (enum machine_mode mode, reg_class_t from, reg_class_t to)
+register_move_cost (machine_mode mode, reg_class_t from, reg_class_t to)
{
return targetm.register_move_cost (mode, from, to);
}
/* Compute cost of moving registers to/from memory. */
int
-memory_move_cost (enum machine_mode mode, reg_class_t rclass, bool in)
+memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
{
return targetm.memory_move_cost (mode, rclass, in);
}
/* Compute extra cost of moving registers to/from memory due to reloads.
Only needed if secondary reloads are required for memory moves. */
int
-memory_move_secondary_cost (enum machine_mode mode, reg_class_t rclass,
+memory_move_secondary_cost (machine_mode mode, reg_class_t rclass,
bool in)
{
reg_class_t altclass;
/* Return a machine mode that is legitimate for hard reg REGNO and large
enough to save nregs. If we can't find one, return VOIDmode.
If CALL_SAVED is true, only consider modes that are call saved. */
-enum machine_mode
+machine_mode
choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
unsigned int nregs, bool call_saved)
{
- unsigned int /* enum machine_mode */ m;
- enum machine_mode found_mode = VOIDmode, mode;
+ unsigned int /* machine_mode */ m;
+ machine_mode found_mode = VOIDmode, mode;
/* We first look for the largest integer mode that can be validly
held in REGNO. If none, we look for the largest floating-point mode.
/* Iterate over all of the CCmodes. */
for (m = (unsigned int) CCmode; m < (unsigned int) NUM_MACHINE_MODES; ++m)
{
- mode = (enum machine_mode) m;
+ mode = (machine_mode) m;
if ((unsigned) hard_regno_nregs[regno][mode] == nregs
&& HARD_REGNO_MODE_OK (regno, mode)
&& (! call_saved || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
check_new_reg_p (int reg ATTRIBUTE_UNUSED, int new_reg,
struct du_head *this_head, HARD_REG_SET this_unavailable)
{
- enum machine_mode mode = GET_MODE (*this_head->first->loc);
+ machine_mode mode = GET_MODE (*this_head->first->loc);
int nregs = hard_regno_nregs[new_reg][mode];
int i;
struct du_chain *tmp;
{
struct du_chain *chain;
unsigned int base_regno = head->regno;
- enum machine_mode mode;
+ machine_mode mode;
for (chain = head->first; chain; chain = chain->next_use)
{
{
struct du_head **p;
rtx x = *loc;
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
unsigned this_regno = REGNO (x);
int this_nregs = hard_regno_nregs[this_regno][mode];
static void
scan_rtx_address (rtx_insn *insn, rtx *loc, enum reg_class cl,
- enum scan_actions action, enum machine_mode mode,
+ enum scan_actions action, machine_mode mode,
addr_space_t as)
{
rtx x = *loc;
&& REG_P (op)
&& !verify_reg_tracked (op))
{
- enum machine_mode mode = GET_MODE (op);
+ machine_mode mode = GET_MODE (op);
unsigned this_regno = REGNO (op);
unsigned this_nregs = hard_regno_nregs[this_regno][mode];
create_new_chain (this_regno, this_nregs, NULL, NULL,
This will be a MODE_INT mode if the register can hold integers. Otherwise
it will be a MODE_FLOAT or a MODE_CC mode, whichever is valid for the
register. */
- enum machine_mode x_reg_raw_mode[FIRST_PSEUDO_REGISTER];
+ machine_mode x_reg_raw_mode[FIRST_PSEUDO_REGISTER];
/* Vector indexed by machine mode saying whether there are regs of
that mode. */
register (reg:MODE REGNO). */
static inline unsigned int
-end_hard_regno (enum machine_mode mode, unsigned int regno)
+end_hard_regno (machine_mode mode, unsigned int regno)
{
return regno + hard_regno_nregs[regno][(int) mode];
}
in register REGNO. */
static inline void
-add_to_hard_reg_set (HARD_REG_SET *regs, enum machine_mode mode,
+add_to_hard_reg_set (HARD_REG_SET *regs, machine_mode mode,
unsigned int regno)
{
unsigned int end_regno;
/* Likewise, but remove the registers. */
static inline void
-remove_from_hard_reg_set (HARD_REG_SET *regs, enum machine_mode mode,
+remove_from_hard_reg_set (HARD_REG_SET *regs, machine_mode mode,
unsigned int regno)
{
unsigned int end_regno;
/* Return true if REGS contains the whole of (reg:MODE REGNO). */
static inline bool
-in_hard_reg_set_p (const HARD_REG_SET regs, enum machine_mode mode,
+in_hard_reg_set_p (const HARD_REG_SET regs, machine_mode mode,
unsigned int regno)
{
unsigned int end_regno;
/* Return true if (reg:MODE REGNO) includes an element of REGS. */
static inline bool
-overlaps_hard_reg_set_p (const HARD_REG_SET regs, enum machine_mode mode,
+overlaps_hard_reg_set_p (const HARD_REG_SET regs, machine_mode mode,
unsigned int regno)
{
unsigned int end_regno;
{
rtx *where; /* Location to store in */
int what; /* which reload this is for */
- enum machine_mode mode; /* mode it must have */
+ machine_mode mode; /* mode it must have */
};
static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
: (type)))
static int push_secondary_reload (int, rtx, int, int, enum reg_class,
- enum machine_mode, enum reload_type,
+ machine_mode, enum reload_type,
enum insn_code *, secondary_reload_info *);
-static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
+static enum reg_class find_valid_class (machine_mode, machine_mode,
int, unsigned int);
-static void push_replacement (rtx *, int, enum machine_mode);
+static void push_replacement (rtx *, int, machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
enum reload_type, int, int);
-static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
- enum machine_mode, reg_class_t, int, int);
+static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
+ machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
rtx_insn *, int *);
static rtx make_memloc (rtx, int);
-static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
+static int maybe_memory_address_addr_space_p (machine_mode, rtx,
addr_space_t, rtx *);
-static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
+static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
int, enum reload_type, int, rtx_insn *);
static rtx subst_reg_equivs (rtx, rtx_insn *);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx_insn *, int, int);
-static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
+static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
enum rtx_code, enum rtx_code, rtx *,
int, enum reload_type,int, rtx_insn *);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
- enum machine_mode, int,
+ machine_mode, int,
enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
int, rtx_insn *, int *);
static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
enum reg_class reload_class,
- enum machine_mode reload_mode, enum reload_type type,
+ machine_mode reload_mode, enum reload_type type,
enum insn_code *picode, secondary_reload_info *prev_sri)
{
enum reg_class rclass = NO_REGS;
enum reg_class scratch_class;
- enum machine_mode mode = reload_mode;
+ machine_mode mode = reload_mode;
enum insn_code icode = CODE_FOR_nothing;
enum insn_code t_icode = CODE_FOR_nothing;
enum reload_type secondary_type;
register and a scratch register is needed, we return the class of the
intermediate register. */
reg_class_t
-secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
+secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
rtx x)
{
enum insn_code icode;
call find_reloads_address on the location being returned. */
rtx
-get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
+get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
int opnum, enum reload_type type)
{
rtx loc;
into REGNO. Such a class must exist. */
static enum reg_class
-find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
- enum machine_mode inner ATTRIBUTE_UNUSED, int n,
+find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
+ machine_mode inner ATTRIBUTE_UNUSED, int n,
unsigned int dest_regno ATTRIBUTE_UNUSED)
{
int best_cost = -1;
which we would eventually like to obtain the object. */
static enum reg_class
-find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
enum reg_class dest_class ATTRIBUTE_UNUSED)
{
int best_cost = -1;
the function is invoked for the output part of an enclosing reload. */
static bool
-reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
+reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
{
rtx inner;
patterns by register elimination and substituting pseudos without a home
by their function-invariant equivalences. */
static int
-can_reload_into (rtx in, int regno, enum machine_mode mode)
+can_reload_into (rtx in, int regno, machine_mode mode)
{
rtx dst;
rtx_insn *test_insn;
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
- enum reg_class rclass, enum machine_mode inmode,
- enum machine_mode outmode, int strict_low, int optional,
+ enum reg_class rclass, machine_mode inmode,
+ machine_mode outmode, int strict_low, int optional,
int opnum, enum reload_type type)
{
int i;
is specified. */
if (this_insn_is_asm)
{
- enum machine_mode mode;
+ machine_mode mode;
if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
mode = inmode;
else
{
rtx note;
int regno;
- enum machine_mode rel_mode = inmode;
+ machine_mode rel_mode = inmode;
if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
rel_mode = outmode;
This is used in insn patterns that use match_dup. */
static void
-push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
+push_replacement (rtx *loc, int reloadnum, machine_mode mode)
{
if (replace_reloads)
{
static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
- enum machine_mode inmode, enum machine_mode outmode,
+ machine_mode inmode, machine_mode outmode,
reg_class_t rclass, int for_real, int earlyclobber)
{
rtx in = real_in;
proper kind of hard reg. */
int
-strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
rtx body = PATTERN (insn);
rtx set = single_set (insn);
int goal_earlyclobber = 0, this_earlyclobber;
- enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
+ machine_mode operand_mode[MAX_RECOG_OPERANDS];
int retval = 0;
this_insn = insn;
this_insn_is_asm = insn_code_number < 0;
memcpy (operand_mode, recog_data.operand_mode,
- noperands * sizeof (enum machine_mode));
+ noperands * sizeof (machine_mode));
memcpy (constraints, recog_data.constraints,
noperands * sizeof (const char *));
rtx op = recog_data.operand[i];
rtx subreg = NULL_RTX;
rtx plus = NULL_RTX;
- enum machine_mode mode = operand_mode[i];
+ machine_mode mode = operand_mode[i];
/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
push_reload so we have to let them pass here. */
use the default address mode as mode of the reload register,
as would have been done by find_reloads_address. */
addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
- enum machine_mode address_mode;
+ machine_mode address_mode;
address_mode = get_address_mode (recog_data.operand[i]);
operand_reloadnum[i]
by PART into a register. */
static int
-maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
+maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
addr_space_t as, rtx *part)
{
int retv;
to a hard register, and frame pointer elimination. */
static int
-find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
+find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
rtx *loc, int opnum, enum reload_type type,
int ind_levels, rtx_insn *insn)
{
into a register. */
if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
{
- enum machine_mode address_mode = GET_MODE (ad);
+ machine_mode address_mode = GET_MODE (ad);
if (address_mode == VOIDmode)
address_mode = targetm.addr_space.address_mode (as);
This routine assumes both inputs are already in canonical form. */
rtx
-form_sum (enum machine_mode mode, rtx x, rtx y)
+form_sum (machine_mode mode, rtx x, rtx y)
{
rtx tem;
handles those cases gracefully. */
static int
-find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
+find_reloads_address_1 (machine_mode mode, addr_space_t as,
rtx x, int context,
enum rtx_code outer_code, enum rtx_code index_code,
rtx *loc, int opnum, enum reload_type type,
static void
find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
- enum machine_mode mode, int opnum,
+ machine_mode mode, int opnum,
enum reload_type type, int ind_levels)
{
if (CONSTANT_P (x)
int ind_levels, rtx_insn *insn,
int *address_reloaded)
{
- enum machine_mode outer_mode = GET_MODE (x);
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
+ machine_mode outer_mode = GET_MODE (x);
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
int regno = REGNO (SUBREG_REG (x));
int reloaded = 0;
rtx tem, orig;
rtx
find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
- short *reload_reg_p, int goalreg, enum machine_mode mode)
+ short *reload_reg_p, int goalreg, machine_mode mode)
{
rtx_insn *p = insn;
rtx goaltry, valtry, value;
REG_INC. REGNO must refer to a hard register. */
int
-regno_clobbered_p (unsigned int regno, rtx_insn *insn, enum machine_mode mode,
+regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
int sets)
{
unsigned int nregs, endregno;
/* Find the low part, with mode MODE, of a hard regno RELOADREG. */
rtx
-reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
+reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
{
int regno;
SECONDARY_RELOAD_CLASS (CLASS, MODE, X)
#endif
-extern int register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
-extern int memory_move_cost (enum machine_mode, reg_class_t, bool);
-extern int memory_move_secondary_cost (enum machine_mode, reg_class_t, bool);
+extern int register_move_cost (machine_mode, reg_class_t, reg_class_t);
+extern int memory_move_cost (machine_mode, reg_class_t, bool);
+extern int memory_move_secondary_cost (machine_mode, reg_class_t, bool);
/* Maximum number of reloads we can need. */
#define MAX_RELOADS (2 * MAX_RECOG_OPERANDS * (MAX_REGS_PER_ADDRESS + 1))
enum reg_class rclass;
/* The mode this operand should have when reloaded, on input. */
- enum machine_mode inmode;
+ machine_mode inmode;
/* The mode this operand should have when reloaded, on output. */
- enum machine_mode outmode;
+ machine_mode outmode;
/* The mode of the reload register. */
- enum machine_mode mode;
+ machine_mode mode;
/* the largest number of registers this reload will require. */
unsigned int nregs;
enough to save the entire contents of the register. When saving the
register because it is live we first try to save in multi-register modes.
If that is not possible the save is done one register at a time. */
- enum machine_mode (x_regno_save_mode
+ machine_mode (x_regno_save_mode
[FIRST_PSEUDO_REGISTER]
[MAX_MOVE_MAX / MIN_UNITS_PER_WORD + 1]);
/* Functions from reload.c: */
extern reg_class_t secondary_reload_class (bool, reg_class_t,
- enum machine_mode, rtx);
+ machine_mode, rtx);
#ifdef GCC_INSN_CODES_H
extern enum reg_class scratch_reload_class (enum insn_code);
/* Return a memory location that will be used to copy X in mode MODE.
If we haven't already made a location for this mode in this insn,
call find_reloads_address on the location being returned. */
-extern rtx get_secondary_mem (rtx, enum machine_mode, int, enum reload_type);
+extern rtx get_secondary_mem (rtx, machine_mode, int, enum reload_type);
/* Clear any secondary memory locations we've made. */
extern void clear_secondary_mem (void);
address, namely: sum constant integers, surround the sum of two
constants with a CONST, put the constant as the second operand, and
group the constant on the outermost sum. */
-extern rtx form_sum (enum machine_mode, rtx, rtx);
+extern rtx form_sum (machine_mode, rtx, rtx);
/* Substitute into the current INSN the registers into which we have reloaded
the things that need reloading. */
/* Check the insns before INSN to see if there is a suitable register
containing the same value as GOAL. */
extern rtx find_equiv_reg (rtx, rtx_insn *, enum reg_class, int, short *,
- int, enum machine_mode);
+ int, machine_mode);
/* Return 1 if register REGNO is the subject of a clobber in insn INSN. */
-extern int regno_clobbered_p (unsigned int, rtx_insn *, enum machine_mode, int);
+extern int regno_clobbered_p (unsigned int, rtx_insn *, machine_mode, int);
/* Return 1 if X is an operand of an insn that is being earlyclobbered. */
extern int earlyclobber_operand_p (rtx);
/* Record one reload that needs to be performed. */
extern int push_reload (rtx, rtx, rtx *, rtx *, enum reg_class,
- enum machine_mode, enum machine_mode,
+ machine_mode, machine_mode,
int, int, int, enum reload_type);
/* Functions in reload1.c: */
/* Scan X and replace any eliminable registers (such as fp) with a
replacement (such as sp), plus an offset. */
-extern rtx eliminate_regs (rtx, enum machine_mode, rtx);
+extern rtx eliminate_regs (rtx, machine_mode, rtx);
extern bool elimination_target_reg_p (rtx);
/* Called from the register allocator to estimate costs of eliminating
/* Compute the actual register we should reload to, in case we're
reloading to/from a register that is wider than a word. */
-extern rtx reload_adjust_reg_for_mode (rtx, enum machine_mode);
+extern rtx reload_adjust_reg_for_mode (rtx, machine_mode);
/* Allocate or grow the reg_equiv tables, initializing new entries to 0. */
extern void grow_reg_equivs (void);
static int num_labels;
\f
-static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
+static void replace_pseudos_in (rtx *, machine_mode, rtx);
static void maybe_fix_stack_asms (void);
static void copy_reloads (struct insn_chain *);
static void calculate_needs_all_insns (int);
static void alter_reg (int, int, bool);
static void set_label_offsets (rtx, rtx_insn *, int);
static void check_eliminable_occurrences (rtx);
-static void elimination_effects (rtx, enum machine_mode);
-static rtx eliminate_regs_1 (rtx, enum machine_mode, rtx, bool, bool);
+static void elimination_effects (rtx, machine_mode);
+static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
static int eliminate_regs_in_insn (rtx_insn *, int);
static void update_eliminable_offsets (void);
static void mark_not_eliminable (rtx, const_rtx, void *);
static void forget_marked_reloads (regset);
static int reload_reg_class_lower (const void *, const void *);
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
- enum machine_mode);
+ machine_mode);
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
- enum machine_mode);
+ machine_mode);
static int reload_reg_free_p (unsigned int, int, enum reload_type);
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
rtx, rtx, int, int);
-static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
+static int free_for_value_p (int, machine_mode, int, enum reload_type,
rtx, rtx, int, int);
static int allocate_reload_reg (struct insn_chain *, int, int);
static int conflicts_with_override (rtx);
equivalences. */
static void
-replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
+replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
{
rtx x = *loc;
enum rtx_code code;
{
#ifdef STACK_REGS
const char *constraints[MAX_RECOG_OPERANDS];
- enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
+ machine_mode operand_mode[MAX_RECOG_OPERANDS];
struct insn_chain *chain;
for (chain = reload_insn_chain; chain != 0; chain = chain->next)
&& reg_equiv_memory_loc (i) == 0)
{
rtx x = NULL_RTX;
- enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
+ machine_mode mode = GET_MODE (regno_reg_rtx[i]);
unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
pseudo-reg number REGNO, accessed in MODE. */
static void
-mark_home_live_1 (int regno, enum machine_mode mode)
+mark_home_live_1 (int regno, machine_mode mode)
{
int i, lim;
the proper thing. */
static rtx
-eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
+eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
bool may_use_invariant, bool for_costs)
{
enum rtx_code code = GET_CODE (x);
}
rtx
-eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
+eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
{
return eliminate_regs_1 (x, mem_mode, insn, false, false);
}
the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
static void
-elimination_effects (rtx x, enum machine_mode mem_mode)
+elimination_effects (rtx x, machine_mode mem_mode)
{
enum rtx_code code = GET_CODE (x);
struct elim_table *ep;
}
else if (function_invariant_p (x))
{
- enum machine_mode mode;
+ machine_mode mode;
mode = GET_MODE (SET_DEST (set));
if (GET_CODE (x) == PLUS)
{
rtx reload_reg = rld[i].reg_rtx;
- enum machine_mode mode = GET_MODE (reload_reg);
+ machine_mode mode = GET_MODE (reload_reg);
int n = 0;
rtx_insn *p;
static void
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
- enum machine_mode mode)
+ machine_mode mode)
{
switch (type)
{
static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
- enum reload_type type, enum machine_mode mode)
+ enum reload_type type, machine_mode mode)
{
unsigned int nregs = hard_regno_nregs[regno][mode];
unsigned int start_regno, end_regno, r;
register. */
static int
-free_for_value_p (int regno, enum machine_mode mode, int opnum,
+free_for_value_p (int regno, machine_mode mode, int opnum,
enum reload_type type, rtx value, rtx out, int reloadnum,
int ignore_address_reloads)
{
This used to be one `if', but Sequent compiler can't handle that. */
if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
{
- enum machine_mode test_mode = VOIDmode;
+ machine_mode test_mode = VOIDmode;
if (rld[r].in)
test_mode = GET_MODE (rld[r].in);
/* If rld[r].in has VOIDmode, it means we will load it
otherwise it is NULL. */
static int
-compute_reload_subreg_offset (enum machine_mode outermode,
+compute_reload_subreg_offset (machine_mode outermode,
rtx subreg,
- enum machine_mode innermode)
+ machine_mode innermode)
{
int outer_offset;
- enum machine_mode middlemode;
+ machine_mode middlemode;
if (!subreg)
return subreg_lowpart_offset (outermode, innermode);
{
int byte = 0;
int regno = -1;
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
rtx subreg = NULL_RTX;
if (rld[r].in == 0)
static bool
reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
enum reg_class new_class,
- enum machine_mode new_mode)
+ machine_mode new_mode)
{
rtx reg;
{
enum reg_class new_class = scratch_reload_class (icode);
- enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
+ machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
new_class, new_mode);
rtx oldequiv_reg = 0;
rtx oldequiv = 0;
int special = 0;
- enum machine_mode mode;
+ machine_mode mode;
rtx_insn **where;
/* delete_output_reload is only invoked properly if old contains
rtx_insn *insn = chain->insn;
int special = 0;
rtx old = rl->out;
- enum machine_mode mode;
+ machine_mode mode;
rtx_insn *p;
rtx rl_reg_rtx;
if (old && reg_rtx)
{
- enum machine_mode mode;
+ machine_mode mode;
/* Determine the mode to reload in.
This is very tricky because we have three to choose from.
if (rl->out && reg_rtx)
{
- enum machine_mode mode;
+ machine_mode mode;
/* Determine the mode to reload in.
See comments above (for input reloading). */
static bool
inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
int src ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CANNOT_CHANGE_MODE_CLASS
return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
reg = reload_reg_rtx_for_output[r];
if (reload_reg_rtx_reaches_end_p (reg, r))
{
- enum machine_mode mode = GET_MODE (reg);
+ machine_mode mode = GET_MODE (reg);
int regno = REGNO (reg);
int nregs = hard_regno_nregs[regno][mode];
rtx out = (REG_P (rld[r].out)
reg = reload_reg_rtx_for_input[r];
if (reload_reg_rtx_reaches_end_p (reg, r))
{
- enum machine_mode mode;
+ machine_mode mode;
int regno;
int nregs;
int in_regno;
rtx out = ((rld[r].out && REG_P (rld[r].out))
? rld[r].out : rld[r].out_reg);
int out_regno = REGNO (out);
- enum machine_mode mode = GET_MODE (out);
+ machine_mode mode = GET_MODE (out);
/* REG_RTX is now set or clobbered by the main instruction.
As the comment above explains, forget_old_reloads_1 only
}
void
-rtl_check_failed_code_mode (const_rtx r, enum rtx_code code, enum machine_mode mode,
+rtl_check_failed_code_mode (const_rtx r, enum rtx_code code, machine_mode mode,
bool not_mode, const char *file, int line,
const char *func)
{
const char *rt_str;
rtx rt_rtx;
rtvec rt_rtvec;
- enum machine_mode rt_type;
+ machine_mode rt_type;
addr_diff_vec_flags rt_addr_diff_vec_flags;
struct cselib_val *rt_cselib;
tree rt_tree;
#define GET_CODE(RTX) ((enum rtx_code) (RTX)->code)
#define PUT_CODE(RTX, CODE) ((RTX)->code = (CODE))
-#define GET_MODE(RTX) ((enum machine_mode) (RTX)->mode)
+#define GET_MODE(RTX) ((machine_mode) (RTX)->mode)
#define PUT_MODE(RTX, MODE) ((RTX)->mode = (MODE))
/* RTL vector. These appear inside RTX's when there is a need
extern void rtl_check_failed_code2 (const_rtx, enum rtx_code, enum rtx_code,
const char *, int, const char *)
ATTRIBUTE_NORETURN;
-extern void rtl_check_failed_code_mode (const_rtx, enum rtx_code, enum machine_mode,
+extern void rtl_check_failed_code_mode (const_rtx, enum rtx_code, machine_mode,
bool, const char *, int, const char *)
ATTRIBUTE_NORETURN;
extern void rtl_check_failed_block_symbol (const char *, int, const char *)
/* Define macros to extract and insert the reg-note kind in an EXPR_LIST. */
#define REG_NOTE_KIND(LINK) ((enum reg_note) GET_MODE (LINK))
#define PUT_REG_NOTE_KIND(LINK, KIND) \
- PUT_MODE (LINK, (enum machine_mode) (KIND))
+ PUT_MODE (LINK, (machine_mode) (KIND))
/* Names for REG_NOTE's in EXPR_LIST insn's. */
offset == the SUBREG_BYTE
outer_mode == the mode of the SUBREG itself. */
struct subreg_shape {
- subreg_shape (enum machine_mode, unsigned int, enum machine_mode);
+ subreg_shape (machine_mode, unsigned int, machine_mode);
bool operator == (const subreg_shape &) const;
bool operator != (const subreg_shape &) const;
unsigned int unique_id () const;
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
unsigned int offset;
- enum machine_mode outer_mode;
+ machine_mode outer_mode;
};
inline
-subreg_shape::subreg_shape (enum machine_mode inner_mode_in,
+subreg_shape::subreg_shape (machine_mode inner_mode_in,
unsigned int offset_in,
- enum machine_mode outer_mode_in)
+ machine_mode outer_mode_in)
: inner_mode (inner_mode_in), offset (offset_in), outer_mode (outer_mode_in)
{}
struct address_info {
/* The mode of the value being addressed, or VOIDmode if this is
a load-address operation with no known address mode. */
- enum machine_mode mode;
+ machine_mode mode;
/* The address space. */
addr_space_t as;
/* This is used to bundle an rtx and a mode together so that the pair
can be used with the wi:: routines. If we ever put modes into rtx
integer constants, this should go away and then just pass an rtx in. */
-typedef std::pair <rtx, enum machine_mode> rtx_mode_t;
+typedef std::pair <rtx, machine_mode> rtx_mode_t;
namespace wi
{
namespace wi
{
- hwi_with_prec shwi (HOST_WIDE_INT, enum machine_mode mode);
- wide_int min_value (enum machine_mode, signop);
- wide_int max_value (enum machine_mode, signop);
+ hwi_with_prec shwi (HOST_WIDE_INT, machine_mode mode);
+ wide_int min_value (machine_mode, signop);
+ wide_int max_value (machine_mode, signop);
}
inline wi::hwi_with_prec
-wi::shwi (HOST_WIDE_INT val, enum machine_mode mode)
+wi::shwi (HOST_WIDE_INT val, machine_mode mode)
{
return shwi (val, GET_MODE_PRECISION (mode));
}
/* Produce the smallest number that is represented in MODE. The precision
is taken from MODE and the sign from SGN. */
inline wide_int
-wi::min_value (enum machine_mode mode, signop sgn)
+wi::min_value (machine_mode mode, signop sgn)
{
return min_value (GET_MODE_PRECISION (mode), sgn);
}
/* Produce the largest number that is represented in MODE. The precision
is taken from MODE and the sign from SGN. */
inline wide_int
-wi::max_value (enum machine_mode mode, signop sgn)
+wi::max_value (machine_mode mode, signop sgn)
{
return max_value (GET_MODE_PRECISION (mode), sgn);
}
extern void init_rtlanal (void);
extern int rtx_cost (rtx, enum rtx_code, int, bool);
-extern int address_cost (rtx, enum machine_mode, addr_space_t, bool);
+extern int address_cost (rtx, machine_mode, addr_space_t, bool);
extern void get_full_rtx_cost (rtx, enum rtx_code, int,
struct full_rtx_costs *);
extern unsigned int subreg_lsb (const_rtx);
-extern unsigned int subreg_lsb_1 (enum machine_mode, enum machine_mode,
+extern unsigned int subreg_lsb_1 (machine_mode, machine_mode,
unsigned int);
-extern unsigned int subreg_regno_offset (unsigned int, enum machine_mode,
- unsigned int, enum machine_mode);
-extern bool subreg_offset_representable_p (unsigned int, enum machine_mode,
- unsigned int, enum machine_mode);
+extern unsigned int subreg_regno_offset (unsigned int, machine_mode,
+ unsigned int, machine_mode);
+extern bool subreg_offset_representable_p (unsigned int, machine_mode,
+ unsigned int, machine_mode);
extern unsigned int subreg_regno (const_rtx);
-extern int simplify_subreg_regno (unsigned int, enum machine_mode,
- unsigned int, enum machine_mode);
+extern int simplify_subreg_regno (unsigned int, machine_mode,
+ unsigned int, machine_mode);
extern unsigned int subreg_nregs (const_rtx);
extern unsigned int subreg_nregs_with_regno (unsigned int, const_rtx);
-extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, enum machine_mode);
-extern unsigned int num_sign_bit_copies (const_rtx, enum machine_mode);
+extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, machine_mode);
+extern unsigned int num_sign_bit_copies (const_rtx, machine_mode);
extern bool constant_pool_constant_p (rtx);
-extern bool truncated_to_mode (enum machine_mode, const_rtx);
-extern int low_bitmask_len (enum machine_mode, unsigned HOST_WIDE_INT);
+extern bool truncated_to_mode (machine_mode, const_rtx);
+extern int low_bitmask_len (machine_mode, unsigned HOST_WIDE_INT);
extern void split_double (rtx, rtx *, rtx *);
extern rtx *strip_address_mutations (rtx *, enum rtx_code * = 0);
extern void decompose_address (struct address_info *, rtx *,
- enum machine_mode, addr_space_t, enum rtx_code);
+ machine_mode, addr_space_t, enum rtx_code);
extern void decompose_lea_address (struct address_info *, rtx *);
extern void decompose_mem_address (struct address_info *, rtx);
extern void update_address (struct address_info *);
/* Generally useful functions. */
/* In explow.c */
-extern HOST_WIDE_INT trunc_int_for_mode (HOST_WIDE_INT, enum machine_mode);
-extern rtx plus_constant (enum machine_mode, rtx, HOST_WIDE_INT, bool = false);
+extern HOST_WIDE_INT trunc_int_for_mode (HOST_WIDE_INT, machine_mode);
+extern rtx plus_constant (machine_mode, rtx, HOST_WIDE_INT, bool = false);
/* In rtl.c */
extern rtx rtx_alloc_stat (RTX_CODE MEM_STAT_DECL);
/* In emit-rtl.c */
extern rtvec gen_rtvec_v (int, rtx *);
extern rtvec gen_rtvec_v (int, rtx_insn **);
-extern rtx gen_reg_rtx (enum machine_mode);
-extern rtx gen_rtx_REG_offset (rtx, enum machine_mode, unsigned int, int);
-extern rtx gen_reg_rtx_offset (rtx, enum machine_mode, int);
+extern rtx gen_reg_rtx (machine_mode);
+extern rtx gen_rtx_REG_offset (rtx, machine_mode, unsigned int, int);
+extern rtx gen_reg_rtx_offset (rtx, machine_mode, int);
extern rtx gen_reg_rtx_and_attrs (rtx);
extern rtx_code_label *gen_label_rtx (void);
-extern rtx gen_lowpart_common (enum machine_mode, rtx);
+extern rtx gen_lowpart_common (machine_mode, rtx);
/* In cse.c */
-extern rtx gen_lowpart_if_possible (enum machine_mode, rtx);
+extern rtx gen_lowpart_if_possible (machine_mode, rtx);
/* In emit-rtl.c */
-extern rtx gen_highpart (enum machine_mode, rtx);
-extern rtx gen_highpart_mode (enum machine_mode, enum machine_mode, rtx);
-extern rtx operand_subword (rtx, unsigned int, int, enum machine_mode);
+extern rtx gen_highpart (machine_mode, rtx);
+extern rtx gen_highpart_mode (machine_mode, machine_mode, rtx);
+extern rtx operand_subword (rtx, unsigned int, int, machine_mode);
/* In emit-rtl.c */
-extern rtx operand_subword_force (rtx, unsigned int, enum machine_mode);
+extern rtx operand_subword_force (rtx, unsigned int, machine_mode);
extern bool paradoxical_subreg_p (const_rtx);
extern int subreg_lowpart_p (const_rtx);
-extern unsigned int subreg_lowpart_offset (enum machine_mode,
- enum machine_mode);
-extern unsigned int subreg_highpart_offset (enum machine_mode,
- enum machine_mode);
-extern int byte_lowpart_offset (enum machine_mode, enum machine_mode);
+extern unsigned int subreg_lowpart_offset (machine_mode,
+ machine_mode);
+extern unsigned int subreg_highpart_offset (machine_mode,
+ machine_mode);
+extern int byte_lowpart_offset (machine_mode, machine_mode);
extern rtx make_safe_from (rtx, rtx);
-extern rtx convert_memory_address_addr_space (enum machine_mode, rtx,
+extern rtx convert_memory_address_addr_space (machine_mode, rtx,
addr_space_t);
#define convert_memory_address(to_mode,x) \
convert_memory_address_addr_space ((to_mode), (x), ADDR_SPACE_GENERIC)
#endif
extern void cwi_output_hex (FILE *, const_rtx);
#ifndef GENERATOR_FILE
-extern rtx immed_wide_int_const (const wide_int_ref &, enum machine_mode);
+extern rtx immed_wide_int_const (const wide_int_ref &, machine_mode);
#endif
#if TARGET_SUPPORTS_WIDE_INT == 0
extern rtx immed_double_const (HOST_WIDE_INT, HOST_WIDE_INT,
- enum machine_mode);
+ machine_mode);
#endif
/* In loop-iv.c */
-extern rtx lowpart_subreg (enum machine_mode, rtx, enum machine_mode);
+extern rtx lowpart_subreg (machine_mode, rtx, machine_mode);
/* In varasm.c */
-extern rtx force_const_mem (enum machine_mode, rtx);
+extern rtx force_const_mem (machine_mode, rtx);
/* In varasm.c */
struct function;
extern rtx get_pool_constant (const_rtx);
extern rtx get_pool_constant_mark (rtx, bool *);
-extern enum machine_mode get_pool_mode (const_rtx);
+extern machine_mode get_pool_mode (const_rtx);
extern rtx simplify_subtraction (rtx);
extern void decide_function_section (tree);
extern rtx split_insns (rtx, rtx);
/* In simplify-rtx.c */
-extern rtx simplify_const_unary_operation (enum rtx_code, enum machine_mode,
- rtx, enum machine_mode);
-extern rtx simplify_unary_operation (enum rtx_code, enum machine_mode, rtx,
- enum machine_mode);
-extern rtx simplify_const_binary_operation (enum rtx_code, enum machine_mode,
+extern rtx simplify_const_unary_operation (enum rtx_code, machine_mode,
+ rtx, machine_mode);
+extern rtx simplify_unary_operation (enum rtx_code, machine_mode, rtx,
+ machine_mode);
+extern rtx simplify_const_binary_operation (enum rtx_code, machine_mode,
rtx, rtx);
-extern rtx simplify_binary_operation (enum rtx_code, enum machine_mode, rtx,
+extern rtx simplify_binary_operation (enum rtx_code, machine_mode, rtx,
rtx);
-extern rtx simplify_ternary_operation (enum rtx_code, enum machine_mode,
- enum machine_mode, rtx, rtx, rtx);
+extern rtx simplify_ternary_operation (enum rtx_code, machine_mode,
+ machine_mode, rtx, rtx, rtx);
extern rtx simplify_const_relational_operation (enum rtx_code,
- enum machine_mode, rtx, rtx);
-extern rtx simplify_relational_operation (enum rtx_code, enum machine_mode,
- enum machine_mode, rtx, rtx);
-extern rtx simplify_gen_binary (enum rtx_code, enum machine_mode, rtx, rtx);
-extern rtx simplify_gen_unary (enum rtx_code, enum machine_mode, rtx,
- enum machine_mode);
-extern rtx simplify_gen_ternary (enum rtx_code, enum machine_mode,
- enum machine_mode, rtx, rtx, rtx);
-extern rtx simplify_gen_relational (enum rtx_code, enum machine_mode,
- enum machine_mode, rtx, rtx);
-extern rtx simplify_subreg (enum machine_mode, rtx, enum machine_mode,
+ machine_mode, rtx, rtx);
+extern rtx simplify_relational_operation (enum rtx_code, machine_mode,
+ machine_mode, rtx, rtx);
+extern rtx simplify_gen_binary (enum rtx_code, machine_mode, rtx, rtx);
+extern rtx simplify_gen_unary (enum rtx_code, machine_mode, rtx,
+ machine_mode);
+extern rtx simplify_gen_ternary (enum rtx_code, machine_mode,
+ machine_mode, rtx, rtx, rtx);
+extern rtx simplify_gen_relational (enum rtx_code, machine_mode,
+ machine_mode, rtx, rtx);
+extern rtx simplify_subreg (machine_mode, rtx, machine_mode,
unsigned int);
-extern rtx simplify_gen_subreg (enum machine_mode, rtx, enum machine_mode,
+extern rtx simplify_gen_subreg (machine_mode, rtx, machine_mode,
unsigned int);
extern rtx simplify_replace_fn_rtx (rtx, const_rtx,
rtx (*fn) (rtx, const_rtx, void *), void *);
extern rtx simplify_rtx (const_rtx);
extern rtx avoid_constant_pool_reference (rtx);
extern rtx delegitimize_mem_from_attrs (rtx);
-extern bool mode_signbit_p (enum machine_mode, const_rtx);
-extern bool val_signbit_p (enum machine_mode, unsigned HOST_WIDE_INT);
-extern bool val_signbit_known_set_p (enum machine_mode,
+extern bool mode_signbit_p (machine_mode, const_rtx);
+extern bool val_signbit_p (machine_mode, unsigned HOST_WIDE_INT);
+extern bool val_signbit_known_set_p (machine_mode,
unsigned HOST_WIDE_INT);
-extern bool val_signbit_known_clear_p (enum machine_mode,
+extern bool val_signbit_known_clear_p (machine_mode,
unsigned HOST_WIDE_INT);
/* In reginfo.c */
-extern enum machine_mode choose_hard_reg_mode (unsigned int, unsigned int,
+extern machine_mode choose_hard_reg_mode (unsigned int, unsigned int,
bool);
#ifdef HARD_CONST
extern const HARD_REG_SET &simplifiable_subregs (const subreg_shape &);
return single_set_2 (insn, PATTERN (insn));
}
-extern enum machine_mode get_address_mode (rtx mem);
+extern machine_mode get_address_mode (rtx mem);
extern int rtx_addr_can_trap_p (const_rtx);
extern bool nonzero_address_p (const_rtx);
extern int rtx_unstable_p (const_rtx);
extern int rtx_equal_p_cb (const_rtx, const_rtx,
rtx_equal_p_callback_function);
-typedef int (*hash_rtx_callback_function) (const_rtx, enum machine_mode, rtx *,
- enum machine_mode *);
-extern unsigned hash_rtx_cb (const_rtx, enum machine_mode, int *, int *,
+typedef int (*hash_rtx_callback_function) (const_rtx, machine_mode, rtx *,
+ machine_mode *);
+extern unsigned hash_rtx_cb (const_rtx, machine_mode, int *, int *,
bool, hash_rtx_callback_function);
extern rtx regno_use_in (unsigned int, rtx);
bool representable_p;
};
-extern void subreg_get_info (unsigned int, enum machine_mode,
- unsigned int, enum machine_mode,
+extern void subreg_get_info (unsigned int, machine_mode,
+ unsigned int, machine_mode,
struct subreg_info *);
/* lists.c */
extern rtx extract_asm_operands (rtx);
extern int asm_noperands (const_rtx);
extern const char *decode_asm_operands (rtx, rtx *, rtx **, const char **,
- enum machine_mode *, location_t *);
+ machine_mode *, location_t *);
extern void get_referenced_operands (const char *, bool *, unsigned int);
extern enum reg_class reg_preferred_class (int);
generation functions included above do the raw handling. If you
add to this list, modify special_rtx in gengenrtl.c as well. */
-extern rtx_expr_list *gen_rtx_EXPR_LIST (enum machine_mode, rtx, rtx);
-extern rtx_insn_list *gen_rtx_INSN_LIST (enum machine_mode, rtx, rtx);
+extern rtx_expr_list *gen_rtx_EXPR_LIST (machine_mode, rtx, rtx);
+extern rtx_insn_list *gen_rtx_INSN_LIST (machine_mode, rtx, rtx);
extern rtx_insn *
-gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
+gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
basic_block bb, rtx pattern, int location, int code,
rtx reg_notes);
-extern rtx gen_rtx_CONST_INT (enum machine_mode, HOST_WIDE_INT);
-extern rtx gen_rtx_CONST_VECTOR (enum machine_mode, rtvec);
-extern rtx gen_raw_REG (enum machine_mode, int);
-extern rtx gen_rtx_REG (enum machine_mode, unsigned);
-extern rtx gen_rtx_SUBREG (enum machine_mode, rtx, int);
-extern rtx gen_rtx_MEM (enum machine_mode, rtx);
-extern rtx gen_rtx_VAR_LOCATION (enum machine_mode, tree, rtx,
+extern rtx gen_rtx_CONST_INT (machine_mode, HOST_WIDE_INT);
+extern rtx gen_rtx_CONST_VECTOR (machine_mode, rtvec);
+extern rtx gen_raw_REG (machine_mode, int);
+extern rtx gen_rtx_REG (machine_mode, unsigned);
+extern rtx gen_rtx_SUBREG (machine_mode, rtx, int);
+extern rtx gen_rtx_MEM (machine_mode, rtx);
+extern rtx gen_rtx_VAR_LOCATION (machine_mode, tree, rtx,
enum var_init_status);
#define GEN_INT(N) gen_rtx_CONST_INT (VOIDmode, (N))
/* In cse.c */
extern int delete_trivially_dead_insns (rtx_insn *, int);
extern int exp_equiv_p (const_rtx, const_rtx, int, bool);
-extern unsigned hash_rtx (const_rtx x, enum machine_mode, int *, int *, bool);
+extern unsigned hash_rtx (const_rtx x, machine_mode, int *, int *, bool);
/* In dse.c */
extern bool check_for_inc_dec (rtx_insn *insn);
extern int redirect_jump (rtx, rtx, int);
extern void rebuild_jump_labels (rtx_insn *);
extern void rebuild_jump_labels_chain (rtx_insn *);
-extern rtx reversed_comparison (const_rtx, enum machine_mode);
+extern rtx reversed_comparison (const_rtx, machine_mode);
extern enum rtx_code reversed_comparison_code (const_rtx, const_rtx);
extern enum rtx_code reversed_comparison_code_parts (enum rtx_code, const_rtx,
const_rtx, const_rtx);
extern void remove_insn (rtx);
extern rtx_insn *emit (rtx);
extern void emit_insn_at_entry (rtx);
-extern rtx gen_lowpart_SUBREG (enum machine_mode, rtx);
-extern rtx gen_const_mem (enum machine_mode, rtx);
-extern rtx gen_frame_mem (enum machine_mode, rtx);
-extern rtx gen_tmp_stack_mem (enum machine_mode, rtx);
-extern bool validate_subreg (enum machine_mode, enum machine_mode,
+extern rtx gen_lowpart_SUBREG (machine_mode, rtx);
+extern rtx gen_const_mem (machine_mode, rtx);
+extern rtx gen_frame_mem (machine_mode, rtx);
+extern rtx gen_tmp_stack_mem (machine_mode, rtx);
+extern bool validate_subreg (machine_mode, machine_mode,
const_rtx, unsigned int);
/* In combine.c */
-extern unsigned int extended_count (const_rtx, enum machine_mode, int);
+extern unsigned int extended_count (const_rtx, machine_mode, int);
extern rtx remove_death (unsigned int, rtx_insn *);
extern void dump_combine_stats (FILE *);
extern void dump_combine_total_stats (FILE *);
extern void init_lower_subreg (void);
/* In gcse.c */
-extern bool can_copy_p (enum machine_mode);
+extern bool can_copy_p (machine_mode);
extern bool can_assign_to_reg_without_clobbers_p (rtx);
extern rtx fis_get_condition (rtx_insn *);
LCT_RETURNS_TWICE = 5
};
-extern void emit_library_call (rtx, enum libcall_type, enum machine_mode, int,
+extern void emit_library_call (rtx, enum libcall_type, machine_mode, int,
...);
extern rtx emit_library_call_value (rtx, rtx, enum libcall_type,
- enum machine_mode, int, ...);
+ machine_mode, int, ...);
/* In varasm.c */
extern void init_varasm_once (void);
/* In alias.c */
extern rtx canon_rtx (rtx);
-extern int true_dependence (const_rtx, enum machine_mode, const_rtx);
+extern int true_dependence (const_rtx, machine_mode, const_rtx);
extern rtx get_addr (rtx);
-extern int canon_true_dependence (const_rtx, enum machine_mode, rtx,
+extern int canon_true_dependence (const_rtx, machine_mode, rtx,
const_rtx, rtx);
extern int read_dependence (const_rtx, const_rtx);
extern int anti_dependence (const_rtx, const_rtx);
extern int canon_anti_dependence (const_rtx, bool,
- const_rtx, enum machine_mode, rtx);
+ const_rtx, machine_mode, rtx);
extern int output_dependence (const_rtx, const_rtx);
extern int may_alias_p (const_rtx, const_rtx);
extern void init_alias_target (void);
extern bool memory_modified_in_insn_p (const_rtx, const_rtx);
extern bool memory_must_be_modified_in_insn_p (const_rtx, const_rtx);
extern bool may_be_sp_based_p (rtx);
-extern rtx gen_hard_reg_clobber (enum machine_mode, unsigned int);
+extern rtx gen_hard_reg_clobber (machine_mode, unsigned int);
extern rtx get_reg_known_value (unsigned int);
extern bool get_reg_known_equiv_p (unsigned int);
extern rtx get_reg_base_value (unsigned int);
extern unsigned int variable_tracking_main (void);
/* In stor-layout.c. */
-extern void get_mode_bounds (enum machine_mode, int, enum machine_mode,
+extern void get_mode_bounds (machine_mode, int, machine_mode,
rtx *, rtx *);
/* In loop-iv.c */
\f
struct rtl_hooks
{
- rtx (*gen_lowpart) (enum machine_mode, rtx);
- rtx (*gen_lowpart_no_emit) (enum machine_mode, rtx);
- rtx (*reg_nonzero_bits) (const_rtx, enum machine_mode, const_rtx, enum machine_mode,
+ rtx (*gen_lowpart) (machine_mode, rtx);
+ rtx (*gen_lowpart_no_emit) (machine_mode, rtx);
+ rtx (*reg_nonzero_bits) (const_rtx, machine_mode, const_rtx, machine_mode,
unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT *);
- rtx (*reg_num_sign_bit_copies) (const_rtx, enum machine_mode, const_rtx, enum machine_mode,
+ rtx (*reg_num_sign_bit_copies) (const_rtx, machine_mode, const_rtx, machine_mode,
unsigned int, unsigned int *);
- bool (*reg_truncated_to_mode) (enum machine_mode, const_rtx);
+ bool (*reg_truncated_to_mode) (machine_mode, const_rtx);
/* Whenever you add entries here, make sure you adjust rtlhooks-def.h. */
};
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);
-static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
- const_rtx, enum machine_mode,
+static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode,
+ const_rtx, machine_mode,
unsigned HOST_WIDE_INT);
-static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
- const_rtx, enum machine_mode,
+static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode,
+ const_rtx, machine_mode,
unsigned HOST_WIDE_INT);
-static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
- enum machine_mode,
+static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode, const_rtx,
+ machine_mode,
unsigned int);
-static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
- enum machine_mode, unsigned int);
+static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx,
+ machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
-1 if a code has no such operand. */
static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
- enum machine_mode mode, bool unaligned_mems)
+ machine_mode mode, bool unaligned_mems)
{
enum rtx_code code = GET_CODE (x);
add_int_reg_note (rtx insn, enum reg_note kind, int datum)
{
gcc_checking_assert (int_reg_note_p (kind));
- REG_NOTES (insn) = gen_rtx_INT_LIST ((enum machine_mode) kind,
+ REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
datum, REG_NOTES (insn));
}
(counting from the least significant bit of the operand). */
unsigned int
-subreg_lsb_1 (enum machine_mode outer_mode,
- enum machine_mode inner_mode,
+subreg_lsb_1 (machine_mode outer_mode,
+ machine_mode inner_mode,
unsigned int subreg_byte)
{
unsigned int bitpos;
use simplify_subreg_regno. */
void
-subreg_get_info (unsigned int xregno, enum machine_mode xmode,
- unsigned int offset, enum machine_mode ymode,
+subreg_get_info (unsigned int xregno, machine_mode xmode,
+ unsigned int offset, machine_mode ymode,
struct subreg_info *info)
{
int nregs_xmode, nregs_ymode;
that it is made up of its units concatenated together. */
if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
{
- enum machine_mode xmode_unit;
+ machine_mode xmode_unit;
nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
if (GET_MODE_INNER (xmode) == VOIDmode)
ymode - The mode of a top level SUBREG (or what may become one).
RETURN - The regno offset which would be used. */
unsigned int
-subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
- unsigned int offset, enum machine_mode ymode)
+subreg_regno_offset (unsigned int xregno, machine_mode xmode,
+ unsigned int offset, machine_mode ymode)
{
struct subreg_info info;
subreg_get_info (xregno, xmode, offset, ymode, &info);
ymode - The mode of a top level SUBREG (or what may become one).
RETURN - Whether the offset is representable. */
bool
-subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
- unsigned int offset, enum machine_mode ymode)
+subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
+ unsigned int offset, machine_mode ymode)
{
struct subreg_info info;
subreg_get_info (xregno, xmode, offset, ymode, &info);
XREGNO is a hard register number. */
int
-simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
- unsigned int offset, enum machine_mode ymode)
+simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
+ unsigned int offset, machine_mode ymode)
{
struct subreg_info info;
unsigned int yregno;
be returned. */
int
-address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
+address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
{
/* We may be asked for cost of various unusual addresses, such as operands
of push instruction. It is not worthwhile to complicate writing
/* If the target doesn't override, compute the cost as with arithmetic. */
int
-default_address_cost (rtx x, enum machine_mode, addr_space_t, bool speed)
+default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
{
return rtx_cost (x, MEM, 0, speed);
}
\f
unsigned HOST_WIDE_INT
-nonzero_bits (const_rtx x, enum machine_mode mode)
+nonzero_bits (const_rtx x, machine_mode mode)
{
return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}
unsigned int
-num_sign_bit_copies (const_rtx x, enum machine_mode mode)
+num_sign_bit_copies (const_rtx x, machine_mode mode)
{
return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}
identical subexpressions on the first or the second level. */
static unsigned HOST_WIDE_INT
-cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
- enum machine_mode known_mode,
+cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x,
+ machine_mode known_mode,
unsigned HOST_WIDE_INT known_ret)
{
if (x == known_x && mode == known_mode)
an arithmetic operation, we can do better. */
static unsigned HOST_WIDE_INT
-nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
- enum machine_mode known_mode,
+nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x,
+ machine_mode known_mode,
unsigned HOST_WIDE_INT known_ret)
{
unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
unsigned HOST_WIDE_INT inner_nz;
enum rtx_code code;
- enum machine_mode inner_mode;
+ machine_mode inner_mode;
unsigned int mode_width = GET_MODE_PRECISION (mode);
/* For floating-point and vector values, assume all bits are needed. */
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
&& INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
{
- enum machine_mode inner_mode = GET_MODE (x);
+ machine_mode inner_mode = GET_MODE (x);
unsigned int width = GET_MODE_PRECISION (inner_mode);
int count = INTVAL (XEXP (x, 1));
unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
first or the second level. */
static unsigned int
-cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
- enum machine_mode known_mode,
+cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x,
+ machine_mode known_mode,
unsigned int known_ret)
{
if (x == known_x && mode == known_mode)
be between 1 and the number of bits in MODE. */
static unsigned int
-num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
- enum machine_mode known_mode,
+num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x,
+ machine_mode known_mode,
unsigned int known_ret)
{
enum rtx_code code = GET_CODE (x);
rtx tem;
rtx op0, op1;
int reverse_code = 0;
- enum machine_mode mode;
+ machine_mode mode;
basic_block bb = BLOCK_FOR_INSN (insn);
code = GET_CODE (cond);
relevant. */
if (set)
{
- enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
+ machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
REAL_VALUE_TYPE fsfv;
#endif
static void
init_num_sign_bit_copies_in_rep (void)
{
- enum machine_mode mode, in_mode;
+ machine_mode mode, in_mode;
for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
in_mode = GET_MODE_WIDER_MODE (mode))
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
mode = GET_MODE_WIDER_MODE (mode))
{
- enum machine_mode i;
+ machine_mode i;
/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
extends to the next widest mode. */
have to be copies of the sign-bit. */
for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
{
- enum machine_mode wider = GET_MODE_WIDER_MODE (i);
+ machine_mode wider = GET_MODE_WIDER_MODE (i);
if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
/* We can only check sign-bit copies starting from the
assume it already contains a truncated value of MODE. */
bool
-truncated_to_mode (enum machine_mode mode, const_rtx x)
+truncated_to_mode (machine_mode mode, const_rtx x)
{
/* This register has already been used in MODE without explicit
truncation. */
M is used in machine mode MODE. */
int
-low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
+low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
{
if (mode != VOIDmode)
{
/* Return the mode of MEM's address. */
-enum machine_mode
+machine_mode
get_address_mode (rtx mem)
{
- enum machine_mode mode;
+ machine_mode mode;
gcc_assert (MEM_P (mem));
mode = GET_MODE (XEXP (mem, 0));
{
if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
{
- enum machine_mode mode = GET_MODE (XEXP (x, 0));
+ machine_mode mode = GET_MODE (XEXP (x, 0));
HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
static int
-baseness (rtx x, enum machine_mode mode, addr_space_t as,
+baseness (rtx x, machine_mode mode, addr_space_t as,
enum rtx_code outer_code, enum rtx_code index_code)
{
/* Believe *_POINTER unless the address shape requires otherwise. */
OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
void
-decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode,
+decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
addr_space_t as, enum rtx_code outer_code)
{
memset (info, 0, sizeof (*info));
add_rtx (const_rtx x, hash &hstate)
{
enum rtx_code code;
- enum machine_mode mode;
+ machine_mode mode;
int i, j;
const char *fmt;
RTL_HOOKS_REG_TRUNCATED_TO_MODE \
}
-extern rtx gen_lowpart_general (enum machine_mode, rtx);
-extern rtx reg_nonzero_bits_general (const_rtx, enum machine_mode, const_rtx,
- enum machine_mode,
+extern rtx gen_lowpart_general (machine_mode, rtx);
+extern rtx reg_nonzero_bits_general (const_rtx, machine_mode, const_rtx,
+ machine_mode,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT *);
-extern rtx reg_num_sign_bit_copies_general (const_rtx, enum machine_mode, const_rtx,
- enum machine_mode,
+extern rtx reg_num_sign_bit_copies_general (const_rtx, machine_mode, const_rtx,
+ machine_mode,
unsigned int, unsigned int *);
-extern bool reg_truncated_to_mode_general (enum machine_mode, const_rtx);
+extern bool reg_truncated_to_mode_general (machine_mode, const_rtx);
#endif /* GCC_RTL_HOOKS_DEF_H */
struct rtl_hooks rtl_hooks = RTL_HOOKS_INITIALIZER;
rtx
-gen_lowpart_general (enum machine_mode mode, rtx x)
+gen_lowpart_general (machine_mode mode, rtx x)
{
rtx result = gen_lowpart_common (mode, x);
rtx
reg_num_sign_bit_copies_general (const_rtx x ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_rtx known_x ATTRIBUTE_UNUSED,
- enum machine_mode known_mode ATTRIBUTE_UNUSED,
+ machine_mode known_mode ATTRIBUTE_UNUSED,
unsigned int known_ret ATTRIBUTE_UNUSED,
unsigned int *result ATTRIBUTE_UNUSED)
{
rtx
reg_nonzero_bits_general (const_rtx x ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_rtx known_x ATTRIBUTE_UNUSED,
- enum machine_mode known_mode ATTRIBUTE_UNUSED,
+ machine_mode known_mode ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT *nonzero ATTRIBUTE_UNUSED)
{
}
bool
-reg_truncated_to_mode_general (enum machine_mode mode ATTRIBUTE_UNUSED,
+reg_truncated_to_mode_general (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx x ATTRIBUTE_UNUSED)
{
return false;
This is similar to gen_lowpart_general. */
rtx
-gen_lowpart_if_possible (enum machine_mode mode, rtx x)
+gen_lowpart_if_possible (machine_mode mode, rtx x)
{
rtx result = gen_lowpart_common (mode, x);
CLOBBER, PRE_DEC, POST_DEC, PRE_INC, POST_INC or USE. */
static void
-sched_analyze_reg (struct deps_desc *deps, int regno, enum machine_mode mode,
+sched_analyze_reg (struct deps_desc *deps, int regno, machine_mode mode,
enum rtx_code ref, rtx_insn *insn)
{
/* We could emit new pseudos in renaming. Extend the reg structures. */
if (REG_P (dest))
{
int regno = REGNO (dest);
- enum machine_mode mode = GET_MODE (dest);
+ machine_mode mode = GET_MODE (dest);
sched_analyze_reg (deps, regno, mode, code, insn);
if (sched_deps_info->use_cselib)
{
- enum machine_mode address_mode = get_address_mode (dest);
+ machine_mode address_mode = get_address_mode (dest);
t = shallow_copy_rtx (dest);
cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1,
case REG:
{
int regno = REGNO (x);
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
sched_analyze_reg (deps, regno, mode, USE, insn);
if (sched_deps_info->use_cselib)
{
- enum machine_mode address_mode = get_address_mode (t);
+ machine_mode address_mode = get_address_mode (t);
t = shallow_copy_rtx (t);
cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1,
debug_mem_addr_value (rtx x)
{
rtx t, addr;
- enum machine_mode address_mode;
+ machine_mode address_mode;
gcc_assert (MEM_P (x));
address_mode = get_address_mode (x);
to support ia64 speculation. When changes are needed, new rtx X and new mode
NMODE are written, and the callback returns true. */
static int
-hash_with_unspec_callback (const_rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
- rtx *nx, enum machine_mode* nmode)
+hash_with_unspec_callback (const_rtx x, machine_mode mode ATTRIBUTE_UNUSED,
+ rtx *nx, machine_mode* nmode)
{
if (GET_CODE (x) == UNSPEC
&& targetm.sched.skip_rtx_p
/* Extracts machine mode MODE and destination location DST_LOC
for given INSN. */
void
-get_dest_and_mode (rtx insn, rtx *dst_loc, enum machine_mode *mode)
+get_dest_and_mode (rtx insn, rtx *dst_loc, machine_mode *mode)
{
rtx pat = PATTERN (insn);
/* Functions to work with insns. */
extern bool lhs_of_insn_equals_to_dest_p (insn_t, rtx);
extern bool insn_eligible_for_subst_p (insn_t);
-extern void get_dest_and_mode (rtx, rtx *, enum machine_mode *);
+extern void get_dest_and_mode (rtx, rtx *, machine_mode *);
extern bool bookkeeping_can_be_created_if_moved_through_p (insn_t);
extern bool sel_remove_insn (insn_t, bool, bool);
replace_src_with_reg_ok_p (insn_t insn, rtx new_src_reg)
{
vinsn_t vi = INSN_VINSN (insn);
- enum machine_mode mode;
+ machine_mode mode;
rtx dst_loc;
bool res;
/* Calculate set of registers that are capable of holding MODE. */
static void
-init_regs_for_mode (enum machine_mode mode)
+init_regs_for_mode (machine_mode mode)
{
int cur_reg;
mark_unavailable_hard_regs (def_t def, struct reg_rename *reg_rename_p,
regset used_regs ATTRIBUTE_UNUSED)
{
- enum machine_mode mode;
+ machine_mode mode;
enum reg_class cl = NO_REGS;
rtx orig_dest;
unsigned cur_reg, regno;
{
int best_new_reg;
unsigned cur_reg;
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
unsigned regno, i, n;
hard_reg_set_iterator hrsi;
def_list_iterator di;
{
def_list_iterator i;
def_t def;
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
bool bad_hard_regs = false;
/* We should not use this after reload. */
struct reg_rename *reg_rename_p)
{
unsigned n, i, regno;
- enum machine_mode mode;
+ machine_mode mode;
bool target_available, live_available, hard_available;
if (!REG_P (EXPR_LHS (expr)) || EXPR_TARGET_AVAILABLE (expr) < 0)
#define HWI_SIGN_EXTEND(low) \
((((HOST_WIDE_INT) low) < 0) ? ((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0))
-static rtx neg_const_int (enum machine_mode, const_rtx);
+static rtx neg_const_int (machine_mode, const_rtx);
static bool plus_minus_operand_p (const_rtx);
static bool simplify_plus_minus_op_data_cmp (rtx, rtx);
-static rtx simplify_plus_minus (enum rtx_code, enum machine_mode, rtx, rtx);
-static rtx simplify_immed_subreg (enum machine_mode, rtx, enum machine_mode,
+static rtx simplify_plus_minus (enum rtx_code, machine_mode, rtx, rtx);
+static rtx simplify_immed_subreg (machine_mode, rtx, machine_mode,
unsigned int);
-static rtx simplify_associative_operation (enum rtx_code, enum machine_mode,
+static rtx simplify_associative_operation (enum rtx_code, machine_mode,
rtx, rtx);
-static rtx simplify_relational_operation_1 (enum rtx_code, enum machine_mode,
- enum machine_mode, rtx, rtx);
-static rtx simplify_unary_operation_1 (enum rtx_code, enum machine_mode, rtx);
-static rtx simplify_binary_operation_1 (enum rtx_code, enum machine_mode,
+static rtx simplify_relational_operation_1 (enum rtx_code, machine_mode,
+ machine_mode, rtx, rtx);
+static rtx simplify_unary_operation_1 (enum rtx_code, machine_mode, rtx);
+static rtx simplify_binary_operation_1 (enum rtx_code, machine_mode,
rtx, rtx, rtx, rtx);
\f
/* Negate a CONST_INT rtx, truncating (because a conversion from a
maximally negative number can overflow). */
static rtx
-neg_const_int (enum machine_mode mode, const_rtx i)
+neg_const_int (machine_mode mode, const_rtx i)
{
return gen_int_mode (-(unsigned HOST_WIDE_INT) INTVAL (i), mode);
}
the most significant bit of machine mode MODE. */
bool
-mode_signbit_p (enum machine_mode mode, const_rtx x)
+mode_signbit_p (machine_mode mode, const_rtx x)
{
unsigned HOST_WIDE_INT val;
unsigned int width;
precision of MODE is too large to handle. */
bool
-val_signbit_p (enum machine_mode mode, unsigned HOST_WIDE_INT val)
+val_signbit_p (machine_mode mode, unsigned HOST_WIDE_INT val)
{
unsigned int width;
/* Test whether the most significant bit of mode MODE is set in VAL.
Returns false if the precision of MODE is too large to handle. */
bool
-val_signbit_known_set_p (enum machine_mode mode, unsigned HOST_WIDE_INT val)
+val_signbit_known_set_p (machine_mode mode, unsigned HOST_WIDE_INT val)
{
unsigned int width;
/* Test whether the most significant bit of mode MODE is clear in VAL.
Returns false if the precision of MODE is too large to handle. */
bool
-val_signbit_known_clear_p (enum machine_mode mode, unsigned HOST_WIDE_INT val)
+val_signbit_known_clear_p (machine_mode mode, unsigned HOST_WIDE_INT val)
{
unsigned int width;
seeing if the expression folds. */
rtx
-simplify_gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0,
+simplify_gen_binary (enum rtx_code code, machine_mode mode, rtx op0,
rtx op1)
{
rtx tem;
avoid_constant_pool_reference (rtx x)
{
rtx c, tmp, addr;
- enum machine_mode cmode;
+ machine_mode cmode;
HOST_WIDE_INT offset = 0;
switch (GET_CODE (x))
&& MEM_OFFSET_KNOWN_P (x))
{
tree decl = MEM_EXPR (x);
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
HOST_WIDE_INT offset = 0;
switch (TREE_CODE (decl))
the specified operation. */
rtx
-simplify_gen_unary (enum rtx_code code, enum machine_mode mode, rtx op,
- enum machine_mode op_mode)
+simplify_gen_unary (enum rtx_code code, machine_mode mode, rtx op,
+ machine_mode op_mode)
{
rtx tem;
/* Likewise for ternary operations. */
rtx
-simplify_gen_ternary (enum rtx_code code, enum machine_mode mode,
- enum machine_mode op0_mode, rtx op0, rtx op1, rtx op2)
+simplify_gen_ternary (enum rtx_code code, machine_mode mode,
+ machine_mode op0_mode, rtx op0, rtx op1, rtx op2)
{
rtx tem;
CMP_MODE specifies mode comparison is done in. */
rtx
-simplify_gen_relational (enum rtx_code code, enum machine_mode mode,
- enum machine_mode cmp_mode, rtx op0, rtx op1)
+simplify_gen_relational (enum rtx_code code, machine_mode mode,
+ machine_mode cmp_mode, rtx op0, rtx op1)
{
rtx tem;
rtx (*fn) (rtx, const_rtx, void *), void *data)
{
enum rtx_code code = GET_CODE (x);
- enum machine_mode mode = GET_MODE (x);
- enum machine_mode op_mode;
+ machine_mode mode = GET_MODE (x);
+ machine_mode op_mode;
const char *fmt;
rtx op0, op1, op2, newx, op;
rtvec vec, newvec;
assume that truncating it too is a no-op. */
static rtx
-simplify_truncation (enum machine_mode mode, rtx op,
- enum machine_mode op_mode)
+simplify_truncation (machine_mode mode, rtx op,
+ machine_mode op_mode)
{
unsigned int precision = GET_MODE_UNIT_PRECISION (mode);
unsigned int op_precision = GET_MODE_UNIT_PRECISION (op_mode);
truncation without the extension. Finally, if the outermode
is larger than the origmode, we can just extend to the appropriate
mode. */
- enum machine_mode origmode = GET_MODE (XEXP (op, 0));
+ machine_mode origmode = GET_MODE (XEXP (op, 0));
if (mode == origmode)
return XEXP (op, 0);
else if (precision <= GET_MODE_UNIT_PRECISION (origmode))
MODE with input operand OP whose mode was originally OP_MODE.
Return zero if no simplification can be made. */
rtx
-simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
- rtx op, enum machine_mode op_mode)
+simplify_unary_operation (enum rtx_code code, machine_mode mode,
+ rtx op, machine_mode op_mode)
{
rtx trueop, tem;
/* Perform some simplifications we can do even if the operands
aren't constant. */
static rtx
-simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
+simplify_unary_operation_1 (enum rtx_code code, machine_mode mode, rtx op)
{
enum rtx_code reversed;
rtx temp;
&& GET_CODE (SUBREG_REG (op)) == ASHIFT
&& XEXP (SUBREG_REG (op), 0) == const1_rtx)
{
- enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
+ machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
rtx x;
x = gen_rtx_ROTATE (inner_mode,
if (GET_CODE (op) == IOR || GET_CODE (op) == AND)
{
rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1);
- enum machine_mode op_mode;
+ machine_mode op_mode;
op_mode = GET_MODE (in1);
in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
&& XEXP (op, 1) == const0_rtx
&& SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0))))
{
- enum machine_mode inner = GET_MODE (XEXP (op, 0));
+ machine_mode inner = GET_MODE (XEXP (op, 0));
int isize = GET_MODE_PRECISION (inner);
if (STORE_FLAG_VALUE == 1)
{
&& (rcode == SIGN_EXTEND
|| (rcode == ASHIFTRT && CONST_INT_P (XEXP (rhs, 1)))))
{
- enum machine_mode lmode = GET_MODE (lhs);
- enum machine_mode rmode = GET_MODE (rhs);
+ machine_mode lmode = GET_MODE (lhs);
+ machine_mode rmode = GET_MODE (rhs);
int bits;
if (lcode == ASHIFTRT)
&& XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
&& GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
{
- enum machine_mode tmode
+ machine_mode tmode
= mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
- INTVAL (XEXP (op, 1)), MODE_INT, 1);
gcc_assert (GET_MODE_BITSIZE (mode)
&& (rcode == ZERO_EXTEND
|| (rcode == LSHIFTRT && CONST_INT_P (XEXP (rhs, 1)))))
{
- enum machine_mode lmode = GET_MODE (lhs);
- enum machine_mode rmode = GET_MODE (rhs);
+ machine_mode lmode = GET_MODE (lhs);
+ machine_mode rmode = GET_MODE (rhs);
int bits;
if (lcode == LSHIFTRT)
&& XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
&& GET_MODE_PRECISION (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
{
- enum machine_mode tmode
+ machine_mode tmode
= mode_for_size (GET_MODE_PRECISION (GET_MODE (op))
- INTVAL (XEXP (op, 1)), MODE_INT, 1);
if (tmode != BLKmode)
be MODE with input operand OP whose mode was originally OP_MODE.
Return zero if the value cannot be computed. */
rtx
-simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
- rtx op, enum machine_mode op_mode)
+simplify_const_unary_operation (enum rtx_code code, machine_mode mode,
+ rtx op, machine_mode op_mode)
{
unsigned int width = GET_MODE_PRECISION (mode);
RTVEC_ELT (v, i) = op;
else
{
- enum machine_mode inmode = GET_MODE (op);
+ machine_mode inmode = GET_MODE (op);
int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);
{
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
- enum machine_mode opmode = GET_MODE (op);
+ machine_mode opmode = GET_MODE (op);
int op_elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
unsigned op_n_elts = (GET_MODE_SIZE (opmode) / op_elt_size);
rtvec v = rtvec_alloc (n_elts);
if (CONST_SCALAR_INT_P (op) && width > 0)
{
wide_int result;
- enum machine_mode imode = op_mode == VOIDmode ? mode : op_mode;
+ machine_mode imode = op_mode == VOIDmode ? mode : op_mode;
rtx_mode_t op0 = std::make_pair (op, imode);
int int_value;
Return zero if no simplification or canonicalization is possible. */
static rtx
-simplify_byte_swapping_operation (enum rtx_code code, enum machine_mode mode,
+simplify_byte_swapping_operation (enum rtx_code code, machine_mode mode,
rtx op0, rtx op1)
{
rtx tem;
canonicalization is possible. */
static rtx
-simplify_associative_operation (enum rtx_code code, enum machine_mode mode,
+simplify_associative_operation (enum rtx_code code, machine_mode mode,
rtx op0, rtx op1)
{
rtx tem;
Don't use this for relational operations such as EQ or LT.
Use simplify_relational_operation instead. */
rtx
-simplify_binary_operation (enum rtx_code code, enum machine_mode mode,
+simplify_binary_operation (enum rtx_code code, machine_mode mode,
rtx op0, rtx op1)
{
rtx trueop0, trueop1;
actual constants. */
static rtx
-simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
+simplify_binary_operation_1 (enum rtx_code code, machine_mode mode,
rtx op0, rtx op1, rtx trueop0, rtx trueop1)
{
rtx tem, reversed, opleft, opright;
&& (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
& UINTVAL (trueop1)) == 0)
{
- enum machine_mode imode = GET_MODE (XEXP (op0, 0));
+ machine_mode imode = GET_MODE (XEXP (op0, 0));
tem = simplify_gen_binary (AND, imode, XEXP (op0, 0),
gen_int_mode (INTVAL (trueop1),
imode));
if (GET_CODE (op0) == TRUNCATE && CONST_INT_P (trueop1))
{
rtx x = XEXP (op0, 0);
- enum machine_mode xmode = GET_MODE (x);
+ machine_mode xmode = GET_MODE (x);
tem = simplify_gen_binary (AND, xmode, x,
gen_int_mode (INTVAL (trueop1), xmode));
return simplify_gen_unary (TRUNCATE, mode, tem, xmode);
&& STORE_FLAG_VALUE == 1
&& INTVAL (trueop1) < (HOST_WIDE_INT)width)
{
- enum machine_mode imode = GET_MODE (XEXP (op0, 0));
+ machine_mode imode = GET_MODE (XEXP (op0, 0));
unsigned HOST_WIDE_INT zero_val = 0;
if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
rtx op0 = XEXP (trueop0, 0);
rtx op1 = XEXP (trueop0, 1);
- enum machine_mode opmode = GET_MODE (op0);
+ machine_mode opmode = GET_MODE (op0);
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
int n_elts = GET_MODE_SIZE (opmode) / elt_size;
rtx op00 = XEXP (op0, 0);
rtx op01 = XEXP (op0, 1);
- enum machine_mode mode00, mode01;
+ machine_mode mode00, mode01;
int n_elts00, n_elts01;
mode00 = GET_MODE (op00);
{
rtx subop0 = XEXP (trueop0, 0);
rtx subop1 = XEXP (trueop0, 1);
- enum machine_mode mode0 = GET_MODE (subop0);
- enum machine_mode mode1 = GET_MODE (subop1);
+ machine_mode mode0 = GET_MODE (subop0);
+ machine_mode mode1 = GET_MODE (subop1);
int li = GET_MODE_SIZE (GET_MODE_INNER (mode0));
int l0 = GET_MODE_SIZE (mode0) / li;
int l1 = GET_MODE_SIZE (mode1) / li;
return 0;
case VEC_CONCAT:
{
- enum machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
+ machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
? GET_MODE (trueop0)
: GET_MODE_INNER (mode));
- enum machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
+ machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
? GET_MODE (trueop1)
: GET_MODE_INNER (mode));
}
rtx
-simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
+simplify_const_binary_operation (enum rtx_code code, machine_mode mode,
rtx op0, rtx op1)
{
unsigned int width = GET_MODE_PRECISION (mode);
&& GET_CODE (op1) == CONST_VECTOR)
{
unsigned n_elts = GET_MODE_NUNITS (mode);
- enum machine_mode op0mode = GET_MODE (op0);
+ machine_mode op0mode = GET_MODE (op0);
unsigned op0_n_elts = GET_MODE_NUNITS (op0mode);
- enum machine_mode op1mode = GET_MODE (op1);
+ machine_mode op1mode = GET_MODE (op1);
unsigned op1_n_elts = GET_MODE_NUNITS (op1mode);
rtvec v = rtvec_alloc (n_elts);
unsigned int i;
}
static rtx
-simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
+simplify_plus_minus (enum rtx_code code, machine_mode mode, rtx op0,
rtx op1)
{
struct simplify_plus_minus_op_data ops[16];
the operands or, if both are VOIDmode, the operands are compared in
"infinite precision". */
rtx
-simplify_relational_operation (enum rtx_code code, enum machine_mode mode,
- enum machine_mode cmp_mode, rtx op0, rtx op1)
+simplify_relational_operation (enum rtx_code code, machine_mode mode,
+ machine_mode cmp_mode, rtx op0, rtx op1)
{
rtx tem, trueop0, trueop1;
mode the comparison is done in, so it is the mode of the operands. */
static rtx
-simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode,
- enum machine_mode cmp_mode, rtx op0, rtx op1)
+simplify_relational_operation_1 (enum rtx_code code, machine_mode mode,
+ machine_mode cmp_mode, rtx op0, rtx op1)
{
enum rtx_code op0code = GET_CODE (op0);
rtx
simplify_const_relational_operation (enum rtx_code code,
- enum machine_mode mode,
+ machine_mode mode,
rtx op0, rtx op1)
{
rtx tem;
/* It would be nice if we really had a mode here. However, the
largest int representable on the target is as good as
infinite. */
- enum machine_mode cmode = (mode == VOIDmode) ? MAX_MODE_INT : mode;
+ machine_mode cmode = (mode == VOIDmode) ? MAX_MODE_INT : mode;
rtx_mode_t ptrueop0 = std::make_pair (trueop0, cmode);
rtx_mode_t ptrueop1 = std::make_pair (trueop1, cmode);
a constant. Return 0 if no simplifications is possible. */
rtx
-simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
- enum machine_mode op0_mode, rtx op0, rtx op1,
+simplify_ternary_operation (enum rtx_code code, machine_mode mode,
+ machine_mode op0_mode, rtx op0, rtx op1,
rtx op2)
{
unsigned int width = GET_MODE_PRECISION (mode);
if (COMPARISON_P (op0) && ! side_effects_p (op0))
{
- enum machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
+ machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
? GET_MODE (XEXP (op0, 1))
: GET_MODE (XEXP (op0, 0)));
rtx temp;
and then repacking them again for OUTERMODE. */
static rtx
-simplify_immed_subreg (enum machine_mode outermode, rtx op,
- enum machine_mode innermode, unsigned int byte)
+simplify_immed_subreg (machine_mode outermode, rtx op,
+ machine_mode innermode, unsigned int byte)
{
enum {
value_bit = 8,
rtx result_s;
rtvec result_v = NULL;
enum mode_class outer_class;
- enum machine_mode outer_submode;
+ machine_mode outer_submode;
int max_bitsize;
/* Some ports misuse CCmode. */
/* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE)
Return 0 if no simplifications are possible. */
rtx
-simplify_subreg (enum machine_mode outermode, rtx op,
- enum machine_mode innermode, unsigned int byte)
+simplify_subreg (machine_mode outermode, rtx op,
+ machine_mode innermode, unsigned int byte)
{
/* Little bit of sanity checking. */
gcc_assert (innermode != VOIDmode);
or not at all if changing back op starting mode. */
if (GET_CODE (op) == SUBREG)
{
- enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
+ machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
int final_offset = byte + SUBREG_BYTE (op);
rtx newx;
/* Make a SUBREG operation or equivalent if it folds. */
rtx
-simplify_gen_subreg (enum machine_mode outermode, rtx op,
- enum machine_mode innermode, unsigned int byte)
+simplify_gen_subreg (machine_mode outermode, rtx op,
+ machine_mode innermode, unsigned int byte)
{
rtx newx;
simplify_rtx (const_rtx x)
{
const enum rtx_code code = GET_CODE (x);
- const enum machine_mode mode = GET_MODE (x);
+ const machine_mode mode = GET_MODE (x);
switch (GET_RTX_CLASS (code))
{
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
is the probability of jumping to LABEL. */
static void
-do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
+do_jump_if_equal (machine_mode mode, rtx op0, rtx op1, rtx label,
int unsignedp, int prob)
{
gcc_assert (prob <= REG_BR_PROB_BASE);
&& ! have_insn_for (COMPARE, GET_MODE (index)))
{
int unsignedp = TYPE_UNSIGNED (index_type);
- enum machine_mode wider_mode;
+ machine_mode wider_mode;
for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
wider_mode = GET_MODE_WIDER_MODE (wider_mode))
if (have_insn_for (COMPARE, wider_mode))
vec<tree> dispatch_table)
{
tree index_type = integer_type_node;
- enum machine_mode index_mode = TYPE_MODE (index_type);
+ machine_mode index_mode = TYPE_MODE (index_type);
int ncases = dispatch_table.length ();
int unsignedp = TYPE_UNSIGNED (index_type);
int probability;
int prob = node->prob, subtree_prob = node->subtree_prob;
- enum machine_mode mode = GET_MODE (index);
- enum machine_mode imode = TYPE_MODE (index_type);
+ machine_mode mode = GET_MODE (index);
+ machine_mode imode = TYPE_MODE (index_type);
/* Handle indices detected as constant during RTL expansion. */
if (mode == VOIDmode)
it may have padding as well. If LIMIT is nonzero, modes of wider
than MAX_FIXED_MODE_SIZE will not be used. */
-enum machine_mode
+machine_mode
mode_for_size (unsigned int size, enum mode_class mclass, int limit)
{
- enum machine_mode mode;
+ machine_mode mode;
int i;
if (limit && size > MAX_FIXED_MODE_SIZE)
/* Similar, except passed a tree node. */
-enum machine_mode
+machine_mode
mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
{
unsigned HOST_WIDE_INT uhwi;
/* Similar, but never return BLKmode; return the narrowest mode that
contains at least the requested number of value bits. */
-enum machine_mode
+machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class mclass)
{
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
int i;
/* Get the first mode which has at least this size, in the
/* Find an integer mode of the exact same size, or BLKmode on failure. */
-enum machine_mode
-int_mode_for_mode (enum machine_mode mode)
+machine_mode
+int_mode_for_mode (machine_mode mode)
{
switch (GET_MODE_CLASS (mode))
{
/* Find a mode that can be used for efficient bitwise operations on MODE.
Return BLKmode if no such mode exists. */
-enum machine_mode
-bitwise_mode_for_mode (enum machine_mode mode)
+machine_mode
+bitwise_mode_for_mode (machine_mode mode)
{
/* Quick exit if we already have a suitable mode. */
unsigned int bitsize = GET_MODE_BITSIZE (mode);
care whether there is a register for the inner mode. */
if (COMPLEX_MODE_P (mode))
{
- enum machine_mode trial = mode;
+ machine_mode trial = mode;
if (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT)
trial = mode_for_size (bitsize, MODE_COMPLEX_INT, false);
if (trial != BLKmode
modes if an integer mode would be too big. */
if (VECTOR_MODE_P (mode) || bitsize > MAX_FIXED_MODE_SIZE)
{
- enum machine_mode trial = mode;
+ machine_mode trial = mode;
if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
trial = mode_for_size (bitsize, MODE_VECTOR_INT, 0);
if (trial != BLKmode
Return null if no such mode exists. */
tree
-bitwise_type_for_mode (enum machine_mode mode)
+bitwise_type_for_mode (machine_mode mode)
{
mode = bitwise_mode_for_mode (mode);
if (mode == BLKmode)
NUNITS elements of mode INNERMODE. Returns BLKmode if there
is no suitable mode. */
-enum machine_mode
-mode_for_vector (enum machine_mode innermode, unsigned nunits)
+machine_mode
+mode_for_vector (machine_mode innermode, unsigned nunits)
{
- enum machine_mode mode;
+ machine_mode mode;
/* First, look for a supported vector type. */
if (SCALAR_FLOAT_MODE_P (innermode))
BIGGEST_ALIGNMENT. */
unsigned int
-get_mode_alignment (enum machine_mode mode)
+get_mode_alignment (machine_mode mode)
{
return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
}
precision of the mode of its elements. */
unsigned int
-element_precision (enum machine_mode mode)
+element_precision (machine_mode mode)
{
if (COMPLEX_MODE_P (mode) || VECTOR_MODE_P (mode))
mode = GET_MODE_INNER (mode);
/* Return the natural mode of an array, given that it is SIZE bytes in
total and has elements of type ELEM_TYPE. */
-static enum machine_mode
+static machine_mode
mode_for_array (tree elem_type, tree size)
{
tree elem_size;
&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
&& GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
{
- enum machine_mode xmode
+ machine_mode xmode
= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
compute_record_mode (tree type)
{
tree field;
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
/* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
However, if possible, we use a mode that fits in a register
unsigned int align = TYPE_ALIGN (type);
unsigned int precision = TYPE_PRECISION (type);
unsigned int user_align = TYPE_USER_ALIGN (type);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
/* Copy it into all variants. */
for (variant = TYPE_MAIN_VARIANT (type);
finish_bitfield_representative (tree repr, tree field)
{
unsigned HOST_WIDE_INT bitsize, maxbitsize;
- enum machine_mode mode;
+ machine_mode mode;
tree nextf, size;
size = size_diffop (DECL_FIELD_OFFSET (field),
case POINTER_TYPE:
case REFERENCE_TYPE:
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal)
{
addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type));
referenced by a function and re-compute the TYPE_MODE once, rather
than make the TYPE_MODE macro call a function. */
-enum machine_mode
+machine_mode
vector_type_mode (const_tree t)
{
- enum machine_mode mode;
+ machine_mode mode;
gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
&& (!targetm.vector_mode_supported_p (mode)
|| !have_regs_of_mode[mode]))
{
- enum machine_mode innermode = TREE_TYPE (t)->type_common.mode;
+ machine_mode innermode = TREE_TYPE (t)->type_common.mode;
/* For integers, try mapping it to a same-sized scalar mode. */
if (GET_MODE_CLASS (innermode) == MODE_INT)
available, storing it in *OUT_MODE if so. */
bool
-bit_field_mode_iterator::next_mode (enum machine_mode *out_mode)
+bit_field_mode_iterator::next_mode (machine_mode *out_mode)
{
for (; m_mode != VOIDmode; m_mode = GET_MODE_WIDER_MODE (m_mode))
{
If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
decide which of the above modes should be used. */
-enum machine_mode
+machine_mode
get_best_mode (int bitsize, int bitpos,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
unsigned int align,
- enum machine_mode largest_mode, bool volatilep)
+ machine_mode largest_mode, bool volatilep)
{
bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
bitregion_end, align, volatilep);
- enum machine_mode widest_mode = VOIDmode;
- enum machine_mode mode;
+ machine_mode widest_mode = VOIDmode;
+ machine_mode mode;
while (iter.next_mode (&mode)
/* ??? For historical reasons, reject modes that would normally
receive greater alignment, even if unaligned accesses are
SIGN). The returned constants are made to be usable in TARGET_MODE. */
void
-get_mode_bounds (enum machine_mode mode, int sign,
- enum machine_mode target_mode,
+get_mode_bounds (machine_mode mode, int sign,
+ machine_mode target_mode,
rtx *mmin, rtx *mmax)
{
unsigned size = GET_MODE_PRECISION (mode);
If LIMIT is nonzero, then don't use modes bigger than MAX_FIXED_MODE_SIZE.
The value is BLKmode if no other mode is found. This is like
mode_for_size, but is passed a tree. */
-extern enum machine_mode mode_for_size_tree (const_tree, enum mode_class, int);
+extern machine_mode mode_for_size_tree (const_tree, enum mode_class, int);
-extern tree bitwise_type_for_mode (enum machine_mode);
+extern tree bitwise_type_for_mode (machine_mode);
/* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
a previous call to layout_decl and calls it again. */
extern tree variable_size (tree);
/* Vector types need to check target flags to determine type. */
-extern enum machine_mode vector_type_mode (const_tree);
+extern machine_mode vector_type_mode (const_tree);
#endif // GCC_STOR_LAYOUT_H
The default version of this function takes care of putting symbolic\n\
constants in @code{flag_pic} mode in @code{data_section} and everything\n\
else in @code{readonly_data_section}.",
- section *, (enum machine_mode mode, rtx x, unsigned HOST_WIDE_INT align),
+ section *, (machine_mode mode, rtx x, unsigned HOST_WIDE_INT align),
default_select_rtx_section)
/* Select a unique section name for DECL. RELOC is the same as
(reassociation_width,
"This hook is called by tree reassociator to determine a level of\n\
parallelism required in output calculations chain.",
-int, (unsigned int opc, enum machine_mode mode),
+int, (unsigned int opc, machine_mode mode),
hook_int_uint_mode_1)
HOOK_VECTOR_END (sched)
DEFHOOK
(vec_perm_const_ok,
"Return true if a vector created for @code{vec_perm_const} is valid.",
- bool, (enum machine_mode, const unsigned char *sel),
+ bool, (machine_mode, const unsigned char *sel),
NULL)
/* Return true if the target supports misaligned store/load of a
the elements in the vectors should be of type @var{type}. @var{is_packed}\n\
parameter is true if the memory access is defined in a packed struct.",
bool,
- (enum machine_mode mode, const_tree type, int misalignment, bool is_packed),
+ (machine_mode mode, const_tree type, int misalignment, bool is_packed),
default_builtin_support_vector_misalignment)
/* Return the builtin decl needed to load a vector of TYPE. */
mode @var{mode}. The default is\n\
equal to @code{word_mode}, because the vectorizer can do some\n\
transformations even in absence of specialized @acronym{SIMD} hardware.",
- enum machine_mode,
- (enum machine_mode mode),
+ machine_mode,
+ (machine_mode mode),
default_preferred_simd_mode)
/* Returns a mask of vector sizes to iterate over when auto-vectorizing
DEFHOOK_UNDOC
(eh_return_filter_mode,
"Return machine mode for filter value.",
- enum machine_mode, (void),
+ machine_mode, (void),
default_eh_return_filter_mode)
/* Return machine mode for libgcc expanded cmp instructions. */
of compare instructions expanded to libgcc calls. If not defined\n\
@code{word_mode} is returned which is the right choice for a majority of\n\
targets.",
- enum machine_mode, (void),
+ machine_mode, (void),
default_libgcc_cmp_return_mode)
/* Return machine mode for libgcc expanded shift instructions. */
of shift instructions expanded to libgcc calls. If not defined\n\
@code{word_mode} is returned which is the right choice for a majority of\n\
targets.",
- enum machine_mode, (void),
+ machine_mode, (void),
default_libgcc_shift_count_mode)
/* Return machine mode to be used for _Unwind_Word type. */
(unwind_word_mode,
"Return machine mode to be used for @code{_Unwind_Word} type.\n\
The default is to use @code{word_mode}.",
- enum machine_mode, (void),
+ machine_mode, (void),
default_unwind_word_mode)
/* Given two decls, merge their attributes and return the result. */
ignored. This function should return the result of the call to the\n\
built-in function.",
rtx,
- (tree exp, rtx target, rtx subtarget, enum machine_mode mode, int ignore),
+ (tree exp, rtx target, rtx subtarget, machine_mode mode, int ignore),
default_expand_builtin)
/* Select a replacement for a target-specific builtin. This is done
@var{x} satisfies @code{CONSTANT_P}, so you need not check this.\n\
\n\
The default definition returns true.",
- bool, (enum machine_mode mode, rtx x),
+ bool, (machine_mode mode, rtx x),
hook_bool_mode_rtx_true)
/* True if the constant X cannot be placed in the constant pool. */
from the constant pool instead of spilling and reloading a register\n\
holding the constant. This restriction is often true of addresses\n\
of TLS symbols for various targets.",
- bool, (enum machine_mode mode, rtx x),
+ bool, (machine_mode mode, rtx x),
hook_bool_mode_rtx_false)
DEFHOOK_UNDOC
is safe to omit this hook or make it return @var{x} if it cannot find\n\
a valid way to legitimize the address. But often a machine-dependent\n\
strategy can generate better code.",
- rtx, (rtx x, rtx oldx, enum machine_mode mode),
+ rtx, (rtx x, rtx oldx, machine_mode mode),
default_legitimize_address)
/* Given an address RTX, undo the effects of LEGITIMIZE_ADDRESS. */
\n\
Using the hook is usually simpler because it limits the number of\n\
files that are recompiled when changes are made.",
- bool, (enum machine_mode mode, rtx x, bool strict),
+ bool, (machine_mode mode, rtx x, bool strict),
default_legitimate_address_p)
/* True if the given constant can be put into an object_block. */
of @var{x}.\n\
\n\
The default version returns false for all constants.",
- bool, (enum machine_mode mode, const_rtx x),
+ bool, (machine_mode mode, const_rtx x),
hook_bool_mode_const_rtx_false)
/* True if the given decl can be put into an object_block. */
@code{SHIFT_COUNT_TRUNCATED} is false, and some shift patterns\n\
nevertheless truncate the shift count, you may get better code\n\
by overriding it.",
- unsigned HOST_WIDE_INT, (enum machine_mode mode),
+ unsigned HOST_WIDE_INT, (machine_mode mode),
default_shift_truncation_mask)
/* Return the number of divisions in the given MODE that should be present,
that should be there for GCC to perform the optimization for a variable\n\
of mode @var{mode}. The default implementation returns 3 if the machine\n\
has an instruction for the division, and 2 if it does not.",
- unsigned int, (enum machine_mode mode),
+ unsigned int, (machine_mode mode),
default_min_divisions_for_recip_mul)
/* If the representation of integral MODE is such that values are
In order to enforce the representation of @code{mode},\n\
@code{TRULY_NOOP_TRUNCATION} should return false when truncating to\n\
@code{mode}.",
- int, (enum machine_mode mode, enum machine_mode rep_mode),
+ int, (machine_mode mode, machine_mode rep_mode),
default_mode_rep_extended)
/* True if MODE is valid for a pointer in __attribute__((mode("MODE"))). */
"Define this to return nonzero if the port can handle pointers\n\
with machine mode @var{mode}. The default version of this\n\
hook returns true for both @code{ptr_mode} and @code{Pmode}.",
- bool, (enum machine_mode mode),
+ bool, (machine_mode mode),
default_valid_pointer_mode)
/* Disambiguate with errno. */
@var{address_space} if the target supports named address spaces.\n\
The default version of this hook returns @code{ptr_mode} for the\n\
generic address space only.",
- enum machine_mode, (addr_space_t address_space),
+ machine_mode, (addr_space_t address_space),
default_addr_space_pointer_mode)
/* MODE to use for an address in another address space. */
@var{address_space} if the target supports named address spaces.\n\
The default version of this hook returns @code{Pmode} for the\n\
generic address space only.",
- enum machine_mode, (addr_space_t address_space),
+ machine_mode, (addr_space_t address_space),
default_addr_space_address_mode)
/* True if MODE is valid for a pointer in __attribute__((mode("MODE")))
version of this hook returns true for the modes returned by either the\n\
@code{TARGET_ADDR_SPACE_POINTER_MODE} or @code{TARGET_ADDR_SPACE_ADDRESS_MODE}\n\
target hooks for the given address space.",
- bool, (enum machine_mode mode, addr_space_t as),
+ bool, (machine_mode mode, addr_space_t as),
default_addr_space_valid_pointer_mode)
/* True if an address is a valid memory address to a given named address
finished. This target hook is the same as the\n\
@code{TARGET_LEGITIMATE_ADDRESS_P} target hook, except that it includes\n\
explicit named address space support.",
- bool, (enum machine_mode mode, rtx exp, bool strict, addr_space_t as),
+ bool, (machine_mode mode, rtx exp, bool strict, addr_space_t as),
default_addr_space_legitimate_address_p)
/* Return an updated address to convert an invalid pointer to a named
with mode @var{mode} in the named address space @var{as}. This target\n\
hook is the same as the @code{TARGET_LEGITIMIZE_ADDRESS} target hook,\n\
except that it includes explicit named address space support.",
- rtx, (rtx x, rtx oldx, enum machine_mode mode, addr_space_t as),
+ rtx, (rtx x, rtx oldx, machine_mode mode, addr_space_t as),
default_addr_space_legitimize_address)
/* True if one named address space is a subset of another named address. */
required to handle the basic C types (as defined by the port).\n\
Included here are the double-word arithmetic supported by the\n\
code in @file{optabs.c}.",
- bool, (enum machine_mode mode),
+ bool, (machine_mode mode),
default_scalar_mode_supported_p)
/* Similarly for vector modes. "Supported" here is less strict. At
"Define this to return nonzero if the port is prepared to handle\n\
insns involving vector mode @var{mode}. At the very least, it\n\
must have move patterns for this mode.",
- bool, (enum machine_mode mode),
+ bool, (machine_mode mode),
hook_bool_mode_false)
DEFHOOK
If this hook allows @code{val} to have a scalar mode, then\n\
@code{int8x8x3_t} can have the same mode. GCC can then store\n\
@code{int8x8x3_t}s in registers rather than forcing them onto the stack.",
- bool, (enum machine_mode mode, unsigned HOST_WIDE_INT nelems),
+ bool, (machine_mode mode, unsigned HOST_WIDE_INT nelems),
hook_bool_mode_uhwi_false)
DEFHOOK
@code{TARGET_SCALAR_MODE_SUPPORTED_P}. The default version of this \n\
hook returns true for all of @code{SFmode}, @code{DFmode}, \n\
@code{XFmode} and @code{TFmode}, if such modes exist.",
- bool, (enum machine_mode mode),
+ bool, (machine_mode mode),
default_libgcc_floating_mode_supported_p)
/* Compute cost of moving data from a register of class FROM to one of
if the @samp{mov@var{m}} pattern's constraints do not allow such copying.\n\
\n\
The default version of this function returns 2.",
- int, (enum machine_mode mode, reg_class_t from, reg_class_t to),
+ int, (machine_mode mode, reg_class_t from, reg_class_t to),
default_register_move_cost)
/* Compute cost of moving registers to/from memory. */
4 is not correct for your machine, use this target hook to add some other\n\
value to the result of that function. The arguments to that function\n\
are the same as to this target hook.",
- int, (enum machine_mode mode, reg_class_t rclass, bool in),
+ int, (machine_mode mode, reg_class_t rclass, bool in),
default_memory_move_cost)
/* True for MODE if the target expects that registers in this mode will
that can be performed in some cases. If you do not define this hook\n\
to return a nonzero value when it is required, the compiler will run out\n\
of spill registers and print a fatal error message.",
- bool, (enum machine_mode mode),
+ bool, (machine_mode mode),
hook_bool_mode_false)
/* Register number for a flags register. Only needs to be defined if the
should be considered in the definition of this macro. Equivalent costs\n\
should probably only be given to addresses with different numbers of\n\
registers on machines with lots of registers.",
- int, (rtx address, enum machine_mode mode, addr_space_t as, bool speed),
+ int, (rtx address, machine_mode mode, addr_space_t as, bool speed),
default_address_cost)
/* Return where to allocate pseudo for a given hard register initial value. */
corresponding Dwarf frame register should have. This is normally\n\
used to return a smaller mode than the raw mode to prevent call\n\
clobbered parts of a register altering the frame register size",
- enum machine_mode, (int regno),
+ machine_mode, (int regno),
default_dwarf_frame_reg_mode)
/* If expand_builtin_init_dwarf_reg_sizes needs to fill in table
The default version of this hook checks whether the modes are the\n\
same. If they are, it returns that mode. If they are different, it\n\
returns @code{VOIDmode}.",
- enum machine_mode, (enum machine_mode m1, enum machine_mode m2),
+ machine_mode, (machine_mode m1, machine_mode m2),
default_cc_modes_compatible)
/* Do machine-dependent code transformations. Called just before
The default is to not promote arguments and return values. You can\n\
also define the hook to @code{default_promote_function_mode_always_promote}\n\
if you would like to apply the same rules given by @code{PROMOTE_MODE}.",
- enum machine_mode, (const_tree type, enum machine_mode mode, int *punsignedp,
+ machine_mode, (const_tree type, machine_mode mode, int *punsignedp,
const_tree funtype, int for_return),
default_promote_function_mode)
The pointer is passed in whatever way is appropriate for passing a pointer\n\
to that type.",
bool,
- (cumulative_args_t cum, enum machine_mode mode, const_tree type, bool named),
+ (cumulative_args_t cum, machine_mode mode, const_tree type, bool named),
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false)
DEFHOOK
happens for an inline function, which is not actually compiled until the\n\
end of the source file. The hook @code{TARGET_SETUP_INCOMING_VARARGS} should\n\
not generate any instructions in this case.",
- void, (cumulative_args_t args_so_far, enum machine_mode mode, tree type,
+ void, (cumulative_args_t args_so_far, machine_mode mode, tree type,
int *pretend_args_size, int second_time),
default_setup_incoming_varargs)
solely in registers. The file @file{expr.h} defines a\n\
definition that is usually appropriate, refer to @file{expr.h} for additional\n\
documentation.",
- bool, (enum machine_mode mode, const_tree type),
+ bool, (machine_mode mode, const_tree type),
must_pass_in_stack_var_size_or_pad)
/* Return true if type TYPE, mode MODE, which is passed by reference,
\n\
The default version of this hook always returns false.",
bool,
- (cumulative_args_t cum, enum machine_mode mode, const_tree type, bool named),
+ (cumulative_args_t cum, machine_mode mode, const_tree type, bool named),
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false)
/* Return zero for arguments passed entirely on the stack or entirely
@code{TARGET_FUNCTION_ARG} for these arguments should return the first\n\
register to be used by the caller for this argument; likewise\n\
@code{TARGET_FUNCTION_INCOMING_ARG}, for the called function.",
- int, (cumulative_args_t cum, enum machine_mode mode, tree type, bool named),
+ int, (cumulative_args_t cum, machine_mode mode, tree type, bool named),
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0)
/* Update the state in CA to advance past an argument in the
on the stack. The compiler knows how to track the amount of stack space\n\
used for arguments without any special help.",
void,
- (cumulative_args_t ca, enum machine_mode mode, const_tree type, bool named),
+ (cumulative_args_t ca, machine_mode mode, const_tree type, bool named),
default_function_arg_advance)
/* Return zero if the argument described by the state of CA should
argument, the compiler will abort. If @code{REG_PARM_STACK_SPACE} is\n\
defined, the argument will be computed in the stack and then loaded into\n\
a register.",
- rtx, (cumulative_args_t ca, enum machine_mode mode, const_tree type,
+ rtx, (cumulative_args_t ca, machine_mode mode, const_tree type,
bool named),
default_function_arg)
\n\
If @code{TARGET_FUNCTION_INCOMING_ARG} is not defined,\n\
@code{TARGET_FUNCTION_ARG} serves both purposes.",
- rtx, (cumulative_args_t ca, enum machine_mode mode, const_tree type,
+ rtx, (cumulative_args_t ca, machine_mode mode, const_tree type,
bool named),
default_function_incoming_arg)
"This hook returns the alignment boundary, in bits, of an argument\n\
with the specified mode and type. The default hook returns\n\
@code{PARM_BOUNDARY} for all arguments.",
- unsigned int, (enum machine_mode mode, const_tree type),
+ unsigned int, (machine_mode mode, const_tree type),
default_function_arg_boundary)
DEFHOOK
which is the default value for this hook. You can define this hook to\n\
return a different value if an argument size must be rounded to a larger\n\
value.",
- unsigned int, (enum machine_mode mode, const_tree type),
+ unsigned int, (machine_mode mode, const_tree type),
default_function_arg_round_boundary)
/* Return the diagnostic message string if function without a prototype
representing the place where the library function result will be returned.\n\
\n\
If this hook is not defined, then LIBCALL_VALUE will be used.",
- rtx, (enum machine_mode mode, const_rtx fun),
+ rtx, (machine_mode mode, const_rtx fun),
default_libcall_value)
/* Return true if REGNO is a possible register number for
"This target hook returns the mode to be used when accessing raw return\
registers in @code{__builtin_return}. Define this macro if the value\
in @var{reg_raw_mode} is not correct.",
- enum machine_mode, (int regno),
+ machine_mode, (int regno),
default_get_reg_raw_mode)
/* Return a mode wide enough to copy any argument value that might be
"This target hook returns the mode to be used when accessing raw argument\
registers in @code{__builtin_apply_args}. Define this macro if the value\
in @var{reg_raw_mode} is not correct.",
- enum machine_mode, (int regno),
+ machine_mode, (int regno),
default_get_reg_raw_mode)
HOOK_VECTOR_END (calls)
pseudos of the given mode and class, or @code{NO_REGS} if only memory\
should be used. Not defining this hook is equivalent to returning\
@code{NO_REGS} for all inputs.",
- reg_class_t, (reg_class_t, enum machine_mode),
+ reg_class_t, (reg_class_t, machine_mode),
NULL)
DEFHOOK
for the cstore being performed. Not definiting this hook is the same\
as accepting the mode encoded into operand 0 of the cstore expander\
patterns.",
- enum machine_mode, (enum insn_code icode),
+ machine_mode, (enum insn_code icode),
default_cstore_mode)
/* True if a structure, union or array with MODE containing FIELD should
retain the field's mode.\n\
\n\
Normally, this is not needed.",
- bool, (const_tree field, enum machine_mode mode),
+ bool, (const_tree field, machine_mode mode),
default_member_type_forces_blk)
/* Return the class for a secondary reload, and fill in extra information. */
of the individual moves due to expected fortuitous scheduling and/or special\n\
forwarding logic, you can set @code{sri->extra_cost} to a negative amount.",
reg_class_t,
- (bool in_p, rtx x, reg_class_t reload_class, enum machine_mode reload_mode,
+ (bool in_p, rtx x, reg_class_t reload_class, machine_mode reload_mode,
secondary_reload_info *sri),
default_secondary_reload)
\n\
The default version of this target hook returns the size of @var{mode}\n\
in words.",
- unsigned char, (reg_class_t rclass, enum machine_mode mode),
+ unsigned char, (reg_class_t rclass, machine_mode mode),
default_class_max_nregs)
DEFHOOK
(mode_for_suffix,
"Return machine mode for non-standard constant literal suffix @var{c},\
or VOIDmode if non-standard suffixes are unsupported.",
- enum machine_mode, (char c),
+ machine_mode, (char c),
default_mode_for_suffix)
HOOK_VECTOR_END (c)
"If defined, this function returns an appropriate alignment in bits for an\
atomic object of machine_mode @var{mode}. If 0 is returned then the\
default alignment for the specified mode is used. ",
- unsigned int, (enum machine_mode mode),
+ unsigned int, (machine_mode mode),
hook_uint_mode_0)
DEFHOOK
bool
-default_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx addr ATTRIBUTE_UNUSED,
bool strict ATTRIBUTE_UNUSED)
{
return 0;
}
-enum machine_mode
+machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree funtype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
return mode;
}
-enum machine_mode
+machine_mode
default_promote_function_mode_always_promote (const_tree type,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp,
const_tree funtype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
return promote_mode (type, mode, punsignedp);
}
-enum machine_mode
-default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
+machine_mode
+default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
if (m1 == m2)
return m1;
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
return x;
}
void
default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED)
!= default_setup_incoming_varargs);
}
-enum machine_mode
+machine_mode
default_eh_return_filter_mode (void)
{
return targetm.unwind_word_mode ();
}
-enum machine_mode
+machine_mode
default_libgcc_cmp_return_mode (void)
{
return word_mode;
}
-enum machine_mode
+machine_mode
default_libgcc_shift_count_mode (void)
{
return word_mode;
}
-enum machine_mode
+machine_mode
default_unwind_word_mode (void)
{
return word_mode;
/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
unsigned HOST_WIDE_INT
-default_shift_truncation_mask (enum machine_mode mode)
+default_shift_truncation_mask (machine_mode mode)
{
return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
unsigned int
-default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
+default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
return have_insn_for (DIV, mode) ? 3 : 2;
}
/* The default implementation of TARGET_MODE_REP_EXTENDED. */
int
-default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
- enum machine_mode mode_rep ATTRIBUTE_UNUSED)
+default_mode_rep_extended (machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode_rep ATTRIBUTE_UNUSED)
{
return UNKNOWN;
}
/* Return machine mode for non-standard suffix
or VOIDmode if non-standard suffixes are unsupported. */
-enum machine_mode
+machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
return VOIDmode;
bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
bool named_arg ATTRIBUTE_UNUSED)
{
return targetm.calls.must_pass_in_stack (mode, type);
bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named)
{
return named;
supported by optabs.c. */
bool
-default_scalar_mode_supported_p (enum machine_mode mode)
+default_scalar_mode_supported_p (machine_mode mode)
{
int precision = GET_MODE_PRECISION (mode);
be supported as a scalar mode). */
bool
-default_libgcc_floating_mode_supported_p (enum machine_mode mode)
+default_libgcc_floating_mode_supported_p (machine_mode mode)
{
switch (mode)
{
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return false;
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return true;
int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return 0;
void
default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
rtx
default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
rtx
default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
}
unsigned int
-default_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
return PARM_BOUNDARY;
}
unsigned int
-default_function_arg_round_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
return PARM_BOUNDARY;
}
rtx
-default_libcall_value (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
reg_class_t reload_class_i ATTRIBUTE_UNUSED,
- enum machine_mode reload_mode ATTRIBUTE_UNUSED,
+ machine_mode reload_mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri)
{
enum reg_class rclass = NO_REGS;
memory access if it supports movmisalign patten.
is_packed is true if the memory access is defined in a packed struct. */
bool
-default_builtin_support_vector_misalignment (enum machine_mode mode,
+default_builtin_support_vector_misalignment (machine_mode mode,
const_tree type
ATTRIBUTE_UNUSED,
int misalignment
/* By default, only attempt to parallelize bitwise operations, and
possibly adds/subtracts using bit-twiddling. */
-enum machine_mode
-default_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
+machine_mode
+default_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
return word_mode;
}
/* Determine whether or not a pointer mode is valid. Assume defaults
of ptr_mode or Pmode - can be overridden. */
bool
-default_valid_pointer_mode (enum machine_mode mode)
+default_valid_pointer_mode (machine_mode mode)
{
return (mode == ptr_mode || mode == Pmode);
}
/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
for the generic address space only. */
-enum machine_mode
+machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
for the generic address space only. */
-enum machine_mode
+machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
/* Named address space version of valid_pointer_mode. */
bool
-default_addr_space_valid_pointer_mode (enum machine_mode mode, addr_space_t as)
+default_addr_space_valid_pointer_mode (machine_mode mode, addr_space_t as)
{
if (!ADDR_SPACE_GENERIC_P (as))
return (mode == targetm.addr_space.pointer_mode (as)
/* Named address space version of legitimate_address_p. */
bool
-default_addr_space_legitimate_address_p (enum machine_mode mode, rtx mem,
+default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
bool strict, addr_space_t as)
{
if (!ADDR_SPACE_GENERIC_P (as))
rtx
default_addr_space_legitimize_address (rtx x, rtx oldx,
- enum machine_mode mode, addr_space_t as)
+ machine_mode mode, addr_space_t as)
{
if (!ADDR_SPACE_GENERIC_P (as))
return x;
/* Compute cost of moving registers to/from memory. */
int
-default_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
TO, using MODE. */
int
-default_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from ATTRIBUTE_UNUSED,
reg_class_t to ATTRIBUTE_UNUSED)
{
unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
/* Determine the correct mode for a Dwarf frame register that represents
register REGNO. */
-enum machine_mode
+machine_mode
default_dwarf_frame_reg_mode (int regno)
{
- enum machine_mode save_mode = reg_raw_mode[regno];
+ machine_mode save_mode = reg_raw_mode[regno];
if (HARD_REGNO_CALL_PART_CLOBBERED (regno, save_mode))
save_mode = choose_hard_reg_mode (regno, 1, true);
/* To be used by targets where reg_raw_mode doesn't return the right
mode for registers used in apply_builtin_return and apply_builtin_arg. */
-enum machine_mode
+machine_mode
default_get_reg_raw_mode (int regno)
{
return reg_raw_mode[regno];
/* Default version of cstore_mode. */
-enum machine_mode
+machine_mode
default_cstore_mode (enum insn_code icode)
{
return insn_data[(int) icode].operand[0].mode;
/* Default version of member_type_forces_blk. */
bool
-default_member_type_forces_blk (const_tree, enum machine_mode)
+default_member_type_forces_blk (const_tree, machine_mode)
{
return false;
}
#ifndef GCC_TARGHOOKS_H
#define GCC_TARGHOOKS_H
-extern bool default_legitimate_address_p (enum machine_mode, rtx, bool);
+extern bool default_legitimate_address_p (machine_mode, rtx, bool);
extern void default_external_libcall (rtx);
-extern rtx default_legitimize_address (rtx, rtx, enum machine_mode);
+extern rtx default_legitimize_address (rtx, rtx, machine_mode);
extern int default_unspec_may_trap_p (const_rtx, unsigned);
-extern enum machine_mode default_promote_function_mode (const_tree, enum machine_mode,
+extern machine_mode default_promote_function_mode (const_tree, machine_mode,
int *, const_tree, int);
-extern enum machine_mode default_promote_function_mode_always_promote
- (const_tree, enum machine_mode, int *, const_tree, int);
+extern machine_mode default_promote_function_mode_always_promote
+ (const_tree, machine_mode, int *, const_tree, int);
-extern enum machine_mode default_cc_modes_compatible (enum machine_mode,
- enum machine_mode);
+extern machine_mode default_cc_modes_compatible (machine_mode,
+ machine_mode);
extern bool default_return_in_memory (const_tree, const_tree);
extern rtx default_expand_builtin_saveregs (void);
-extern void default_setup_incoming_varargs (cumulative_args_t, enum machine_mode, tree, int *, int);
+extern void default_setup_incoming_varargs (cumulative_args_t, machine_mode, tree, int *, int);
extern rtx default_builtin_setjmp_frame_value (void);
extern bool default_pretend_outgoing_varargs_named (cumulative_args_t);
-extern enum machine_mode default_eh_return_filter_mode (void);
-extern enum machine_mode default_libgcc_cmp_return_mode (void);
-extern enum machine_mode default_libgcc_shift_count_mode (void);
-extern enum machine_mode default_unwind_word_mode (void);
+extern machine_mode default_eh_return_filter_mode (void);
+extern machine_mode default_libgcc_cmp_return_mode (void);
+extern machine_mode default_libgcc_shift_count_mode (void);
+extern machine_mode default_unwind_word_mode (void);
extern unsigned HOST_WIDE_INT default_shift_truncation_mask
- (enum machine_mode);
-extern unsigned int default_min_divisions_for_recip_mul (enum machine_mode);
-extern int default_mode_rep_extended (enum machine_mode, enum machine_mode);
+ (machine_mode);
+extern unsigned int default_min_divisions_for_recip_mul (machine_mode);
+extern int default_mode_rep_extended (machine_mode, machine_mode);
extern tree default_stack_protect_guard (void);
extern tree default_external_stack_protect_fail (void);
extern tree default_hidden_stack_protect_fail (void);
-extern enum machine_mode default_mode_for_suffix (char);
+extern machine_mode default_mode_for_suffix (char);
extern tree default_cxx_guard_type (void);
extern tree default_cxx_get_cookie_size (tree);
extern bool hook_pass_by_reference_must_pass_in_stack
- (cumulative_args_t, enum machine_mode mode, const_tree, bool);
+ (cumulative_args_t, machine_mode mode, const_tree, bool);
extern bool hook_callee_copies_named
- (cumulative_args_t ca, enum machine_mode, const_tree, bool);
+ (cumulative_args_t ca, machine_mode, const_tree, bool);
extern void default_print_operand (FILE *, rtx, int);
extern void default_print_operand_address (FILE *, rtx);
extern bool default_print_operand_punct_valid_p (unsigned char);
extern tree default_mangle_assembler_name (const char *);
-extern bool default_scalar_mode_supported_p (enum machine_mode);
-extern bool default_libgcc_floating_mode_supported_p (enum machine_mode);
+extern bool default_scalar_mode_supported_p (machine_mode);
+extern bool default_libgcc_floating_mode_supported_p (machine_mode);
extern bool targhook_words_big_endian (void);
extern bool targhook_float_words_big_endian (void);
extern bool default_float_exceptions_rounding_supported_p (void);
extern bool default_builtin_vector_alignment_reachable (const_tree, bool);
extern bool
-default_builtin_support_vector_misalignment (enum machine_mode mode,
+default_builtin_support_vector_misalignment (machine_mode mode,
const_tree,
int, bool);
-extern enum machine_mode default_preferred_simd_mode (enum machine_mode mode);
+extern machine_mode default_preferred_simd_mode (machine_mode mode);
extern unsigned int default_autovectorize_vector_sizes (void);
extern void *default_init_cost (struct loop *);
extern unsigned default_add_stmt_cost (void *, int, enum vect_cost_for_stmt,
extern bool hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t);
extern bool hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false
- (cumulative_args_t, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, machine_mode, const_tree, bool);
extern bool hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
- (cumulative_args_t, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, machine_mode, const_tree, bool);
extern int hook_int_CUMULATIVE_ARGS_mode_tree_bool_0
- (cumulative_args_t, enum machine_mode, tree, bool);
+ (cumulative_args_t, machine_mode, tree, bool);
extern const char *hook_invalid_arg_for_unprototyped_fn
(const_tree, const_tree, const_tree);
extern void default_function_arg_advance
- (cumulative_args_t, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, machine_mode, const_tree, bool);
extern rtx default_function_arg
- (cumulative_args_t, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, machine_mode, const_tree, bool);
extern rtx default_function_incoming_arg
- (cumulative_args_t, enum machine_mode, const_tree, bool);
-extern unsigned int default_function_arg_boundary (enum machine_mode,
+ (cumulative_args_t, machine_mode, const_tree, bool);
+extern unsigned int default_function_arg_boundary (machine_mode,
const_tree);
-extern unsigned int default_function_arg_round_boundary (enum machine_mode,
+extern unsigned int default_function_arg_round_boundary (machine_mode,
const_tree);
extern bool hook_bool_const_rtx_commutative_p (const_rtx, int);
extern rtx default_function_value (const_tree, const_tree, bool);
-extern rtx default_libcall_value (enum machine_mode, const_rtx);
+extern rtx default_libcall_value (machine_mode, const_rtx);
extern bool default_function_value_regno_p (const unsigned int);
extern rtx default_internal_arg_pointer (void);
extern rtx default_static_chain (const_tree, bool);
extern bool default_register_usage_leveling_p (void);
extern bool default_different_addr_displacement_p (void);
extern reg_class_t default_secondary_reload (bool, rtx, reg_class_t,
- enum machine_mode,
+ machine_mode,
secondary_reload_info *);
extern void default_target_option_override (void);
extern void hook_void_bitmap (bitmap);
extern bool default_target_option_valid_attribute_p (tree, tree, tree, int);
extern bool default_target_option_pragma_parse (tree, tree);
extern bool default_target_can_inline_p (tree, tree);
-extern bool default_valid_pointer_mode (enum machine_mode);
+extern bool default_valid_pointer_mode (machine_mode);
extern bool default_ref_may_alias_errno (struct ao_ref *);
-extern enum machine_mode default_addr_space_pointer_mode (addr_space_t);
-extern enum machine_mode default_addr_space_address_mode (addr_space_t);
-extern bool default_addr_space_valid_pointer_mode (enum machine_mode,
+extern machine_mode default_addr_space_pointer_mode (addr_space_t);
+extern machine_mode default_addr_space_address_mode (addr_space_t);
+extern bool default_addr_space_valid_pointer_mode (machine_mode,
addr_space_t);
-extern bool default_addr_space_legitimate_address_p (enum machine_mode, rtx,
+extern bool default_addr_space_legitimate_address_p (machine_mode, rtx,
bool, addr_space_t);
-extern rtx default_addr_space_legitimize_address (rtx, rtx, enum machine_mode,
+extern rtx default_addr_space_legitimize_address (rtx, rtx, machine_mode,
addr_space_t);
extern bool default_addr_space_subset_p (addr_space_t, addr_space_t);
extern rtx default_addr_space_convert (rtx, tree, tree);
extern tree default_builtin_tm_load_store (tree);
-extern int default_memory_move_cost (enum machine_mode, reg_class_t, bool);
-extern int default_register_move_cost (enum machine_mode, reg_class_t,
+extern int default_memory_move_cost (machine_mode, reg_class_t, bool);
+extern int default_register_move_cost (machine_mode, reg_class_t,
reg_class_t);
extern bool default_profile_before_prologue (void);
extern reg_class_t default_preferred_output_reload_class (rtx, reg_class_t);
extern reg_class_t default_preferred_rename_class (reg_class_t rclass);
extern bool default_class_likely_spilled_p (reg_class_t);
-extern unsigned char default_class_max_nregs (reg_class_t, enum machine_mode);
+extern unsigned char default_class_max_nregs (reg_class_t, machine_mode);
extern enum unwind_info_type default_debug_unwind_info (void);
extern int default_jump_align_max_skip (rtx_insn *);
extern section * default_function_section(tree decl, enum node_frequency freq,
bool startup, bool exit);
-extern enum machine_mode default_dwarf_frame_reg_mode (int);
-extern enum machine_mode default_get_reg_raw_mode (int);
+extern machine_mode default_dwarf_frame_reg_mode (int);
+extern machine_mode default_get_reg_raw_mode (int);
extern bool default_keep_leaf_when_profiled ();
extern void *default_get_pch_validity (size_t *);
extern void default_asm_output_ident_directive (const char*);
-extern enum machine_mode default_cstore_mode (enum insn_code);
-extern bool default_member_type_forces_blk (const_tree, enum machine_mode);
+extern machine_mode default_cstore_mode (enum insn_code);
+extern bool default_member_type_forces_blk (const_tree, machine_mode);
extern void default_atomic_assign_expand_fenv (tree *, tree *, tree *);
extern tree build_va_arg_indirect_ref (tree);
extern tree std_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
enum tree_code code;
tree cst, core, toffset;
HOST_WIDE_INT bitpos, bitsize;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
STRIP_NOPS (expr);
{
HOST_WIDE_INT bitsize, bitpos;
tree toff;
- enum machine_mode mode;
+ machine_mode mode;
int uns, vol;
aff_tree tmp;
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
check_target_format (tree arg)
{
tree type;
- enum machine_mode mode;
+ machine_mode mode;
const struct real_format *rfmt;
type = TREE_TYPE (arg);
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
tree br, tree bi, enum tree_code code)
{
- enum machine_mode mode;
+ machine_mode mode;
enum built_in_function bcode;
tree fn, type, lhs;
gimple old_stmt, stmt;
{
tree base, poffset;
HOST_WIDE_INT pbitsize, pbitpos;
- enum machine_mode pmode;
+ machine_mode pmode;
int punsignedp, pvolatilep;
op0 = TREE_OPERAND (op0, 0);
tree ref = DR_REF (dr);
HOST_WIDE_INT pbitsize, pbitpos;
tree base, poffset;
- enum machine_mode pmode;
+ machine_mode pmode;
int punsignedp, pvolatilep;
affine_iv base_iv, offset_iv;
tree init, dinit, step;
size_tree = TREE_OPERAND (exp, 1);
else if (!VOID_TYPE_P (TREE_TYPE (exp)))
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
if (mode == BLKmode)
size_tree = TYPE_SIZE (TREE_TYPE (exp));
else
ifcvt_can_use_mask_load_store (gimple stmt)
{
tree lhs, ref;
- enum machine_mode mode;
+ machine_mode mode;
basic_block bb = gimple_bb (stmt);
bool is_load;
if (TREE_CODE (type) == VECTOR_TYPE)
{
- enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
- enum machine_mode simd
+ machine_mode inner = TYPE_MODE (TREE_TYPE (type));
+ machine_mode simd
= targetm.vectorize.preferred_simd_mode (inner);
int simd_mode_size = GET_MODE_SIZE (simd);
return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
insert_value_copy_on_edge (edge e, int dest, tree src, source_location locus)
{
rtx dest_rtx, seq, x;
- enum machine_mode dest_mode, src_mode;
+ machine_mode dest_mode, src_mode;
int unsignedp;
tree var;
tree var = TREE_CODE (name) == SSA_NAME ? SSA_NAME_VAR (name) : name;
tree type = TREE_TYPE (var);
int unsignedp;
- enum machine_mode reg_mode = promote_decl_mode (var, &unsignedp);
+ machine_mode reg_mode = promote_decl_mode (var, &unsignedp);
rtx x = gen_reg_rtx (reg_mode);
if (POINTER_TYPE_P (type))
mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
if (TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
|| handled_component_p (TREE_OPERAND (rhs1, 0)))
{
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
int unsignedp;
int volatilep = 0;
to where step is placed to *STEP_P and offset to *OFFSET_P. */
static void
-gen_addr_rtx (enum machine_mode address_mode,
+gen_addr_rtx (machine_mode address_mode,
rtx symbol, rtx base, rtx index, rtx step, rtx offset,
rtx *addr, rtx **step_p, rtx **offset_p)
{
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
bool really_expand)
{
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
- enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
rtx address, sym, bse, idx, st, off;
struct mem_addr_template *templ;
ADDR is valid on the current target. */
static bool
-valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
+valid_mem_ref_p (machine_mode mode, addr_space_t as,
struct mem_address *addr)
{
rtx address;
aff_tree *addr, bool speed)
{
addr_space_t as = TYPE_ADDR_SPACE (type);
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
HOST_WIDE_INT coef;
unsigned best_mult_cost = 0, acost;
tree mult_elt = NULL_TREE, elt;
static void
canonicalize_value (ccp_prop_value_t *val)
{
- enum machine_mode mode;
+ machine_mode mode;
tree type;
REAL_VALUE_TYPE d;
bool important, struct iv_use *use)
{
basic_block use_bb = gimple_bb (use->stmt);
- enum machine_mode mem_mode;
+ machine_mode mem_mode;
unsigned HOST_WIDE_INT cstepi;
/* If we insert the increment in any position other than the standard
produce_memory_decl_rtl (tree obj, int *regno)
{
addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
rtx x;
gcc_assert (obj);
bool
-multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
+multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
addr_space_t as)
{
#define MAX_RATIO 128
valid_mult = valid_mult_list[data_index];
if (!valid_mult)
{
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
rtx addr, scaled;
static comp_cost
get_address_cost (bool symbol_present, bool var_present,
unsigned HOST_WIDE_INT offset, HOST_WIDE_INT ratio,
- HOST_WIDE_INT cstep, enum machine_mode mem_mode,
+ HOST_WIDE_INT cstep, machine_mode mem_mode,
addr_space_t as, bool speed,
bool stmt_after_inc, bool *may_autoinc)
{
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ machine_mode address_mode = targetm.addr_space.address_mode (as);
static vec<address_cost_data> address_cost_data_list;
unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mem_mode;
address_cost_data data;
the cost in COST. */
static bool
-get_shiftadd_cost (tree expr, enum machine_mode mode, comp_cost cost0,
+get_shiftadd_cost (tree expr, machine_mode mode, comp_cost cost0,
comp_cost cost1, tree mult, bool speed, comp_cost *cost)
{
comp_cost res;
static unsigned address_cost [2];
tree op0, op1;
comp_cost cost0, cost1, cost;
- enum machine_mode mode;
+ machine_mode mode;
if (!costs_initialized)
{
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
tree toffset;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
tree e1, tree e2, bool *symbol_present, bool *var_present,
unsigned HOST_WIDE_INT *offset, bitmap *depends_on)
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (e1));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (e1));
unsigned HOST_WIDE_INT off1, off2;
aff_tree aff_e1, aff_e2;
tree type;
comp_cost cost;
widest_int rat;
bool speed = optimize_bb_for_speed_p (gimple_bb (at));
- enum machine_mode mem_mode = (address_p
+ machine_mode mem_mode = (address_p
? TYPE_MODE (TREE_TYPE (*use->op_p))
: VOIDmode);
extern struct loop *outermost_invariant_loop_for_expr (struct loop *, tree);
extern bool expr_invariant_in_loop_p (struct loop *, tree);
bool may_be_nonaddressable_p (tree expr);
-bool multiplier_allowed_in_address_p (HOST_WIDE_INT, enum machine_mode,
+bool multiplier_allowed_in_address_p (HOST_WIDE_INT, machine_mode,
addr_space_t);
void tree_ssa_iv_optimize (void);
gimple stmt;
edge exit = single_dom_exit (loop);
gimple_seq stmts;
- enum machine_mode mode;
+ machine_mode mode;
bool unsigned_p = false;
for (psi = gsi_start_phis (loop->header);
static bool
nontemporal_store_p (struct mem_ref *ref)
{
- enum machine_mode mode;
+ machine_mode mode;
enum insn_code code;
/* REF must be a write that is not reused. We require it to be independent
REAL_VALUE_TYPE c2, dconst3;
HOST_WIDE_INT n;
tree type, sqrtfn, cbrtfn, sqrt_arg0, sqrt_sqrt, result, cbrt_x, powi_cbrt_x;
- enum machine_mode mode;
+ machine_mode mode;
bool hw_sqrt_exists, c_is_int, c2_is_int;
/* If the exponent isn't a constant, there's nothing of interest
tree real_part, imag_part, addend1, addend2, sum, result;
tree type = TREE_TYPE (TREE_TYPE (arg));
tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (!flag_unsafe_math_optimizations
|| !optimize_bb_for_speed_p (gimple_bb (gsi_stmt (*gsi)))
/* Leaf node is an array or component ref. Memorize its base and
offset from base to compare to other such leaf node. */
HOST_WIDE_INT bitsize, bitpos;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
tree offset, base_addr;
{
tree lhs, rhs1, rhs2, type, type1, type2;
enum insn_code handler;
- enum machine_mode to_mode, from_mode, actual_mode;
+ machine_mode to_mode, from_mode, actual_mode;
optab op;
int actual_precision;
location_t loc = gimple_location (stmt);
optab this_optab;
enum tree_code wmult_code;
enum insn_code handler;
- enum machine_mode to_mode, from_mode, actual_mode;
+ machine_mode to_mode, from_mode, actual_mode;
location_t loc = gimple_location (stmt);
int actual_precision;
bool from_unsigned1, from_unsigned2;
static int
get_reassociation_width (int ops_num, enum tree_code opc,
- enum machine_mode mode)
+ machine_mode mode)
{
int param_width = PARAM_VALUE (PARAM_TREE_REASSOC_WIDTH);
int width;
}
else
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
int ops_num = ops.length ();
int width = get_reassociation_width (ops_num, rhs_code, mode);
tree new_lhs = lhs;
size_tree = op->op0;
else
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
if (mode == BLKmode)
size_tree = TYPE_SIZE (type);
else
static void
unpack_ts_type_common_value_fields (struct bitpack_d *bp, tree expr)
{
- enum machine_mode mode;
+ machine_mode mode;
mode = bp_unpack_enum (bp, machine_mode, MAX_MACHINE_MODE);
SET_TYPE_MODE (expr, mode);
{
unsigned int i, len = vec_safe_length (info->constructors[num]);
constructor_elt *elt;
- enum machine_mode mode;
+ machine_mode mode;
int sign = 0;
tree smaller_type;
vect_lanes_optab_supported_p (const char *name, convert_optab optab,
tree vectype, unsigned HOST_WIDE_INT count)
{
- enum machine_mode mode, array_mode;
+ machine_mode mode, array_mode;
bool limit_p;
mode = TYPE_MODE (vectype);
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
tree offtype = NULL_TREE;
tree decl, base, off;
- enum machine_mode pmode;
+ machine_mode pmode;
int punsignedp, pvolatilep;
base = DR_REF (dr);
tree outer_step, outer_base, outer_init;
HOST_WIDE_INT pbitsize, pbitpos;
tree poffset;
- enum machine_mode pmode;
+ machine_mode pmode;
int punsignedp, pvolatilep;
affine_iv base_iv, offset_iv;
tree dinit;
bool
vect_grouped_store_supported (tree vectype, unsigned HOST_WIDE_INT count)
{
- enum machine_mode mode = TYPE_MODE (vectype);
+ machine_mode mode = TYPE_MODE (vectype);
/* vect_permute_store_chain requires the group size to be equal to 3 or
be a power of two. */
bool
vect_grouped_load_supported (tree vectype, unsigned HOST_WIDE_INT count)
{
- enum machine_mode mode = TYPE_MODE (vectype);
+ machine_mode mode = TYPE_MODE (vectype);
/* vect_permute_load_chain requires the group size to be equal to 3 or
be a power of two. */
vect_transform_grouped_load (gimple stmt, vec<tree> dr_chain, int size,
gimple_stmt_iterator *gsi)
{
- enum machine_mode mode;
+ machine_mode mode;
vec<tree> result_chain = vNULL;
/* DR_CHAIN contains input data-refs that are a part of the interleaving.
gimple stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
- enum machine_mode mode = TYPE_MODE (vectype);
+ machine_mode mode = TYPE_MODE (vectype);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *vect_loop = NULL;
bool nested_in_vect_loop = false;
enum tree_code code)
{
tree result, compute_type;
- enum machine_mode mode;
+ machine_mode mode;
int n_words = tree_to_uhwi (TYPE_SIZE_UNIT (type)) / UNITS_PER_WORD;
location_t loc = gimple_location (gsi_stmt (*gsi));
expand_vector_operation (gimple_stmt_iterator *gsi, tree type, tree compute_type,
gimple assign, enum tree_code code)
{
- enum machine_mode compute_mode = TYPE_MODE (compute_type);
+ machine_mode compute_mode = TYPE_MODE (compute_type);
/* If the compute mode is not a vector mode (hence we are not decomposing
a BLKmode vector to smaller, hardware-supported vectors), we may want
static tree
type_for_widest_vector_mode (tree type, optab op)
{
- enum machine_mode inner_mode = TYPE_MODE (type);
- enum machine_mode best_mode = VOIDmode, mode;
+ machine_mode inner_mode = TYPE_MODE (type);
+ machine_mode best_mode = VOIDmode, mode;
int best_nunits = 0;
if (SCALAR_FLOAT_MODE_P (inner_mode))
so skip these checks. */
if (compute_type == type)
{
- enum machine_mode compute_mode = TYPE_MODE (compute_type);
+ machine_mode compute_mode = TYPE_MODE (compute_type);
if (VECTOR_MODE_P (compute_mode))
{
if (op && optab_handler (op, compute_mode) != CODE_FOR_nothing)
tree vectype;
gimple stmt, orig_stmt;
tree reduction_op;
- enum machine_mode mode;
+ machine_mode mode;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
void *target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
stmt_vec_info prev_phi_info;
tree vectype;
- enum machine_mode mode;
+ machine_mode mode;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo), *outer_loop = NULL;
basic_block exit_bb;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
enum tree_code code, orig_code, epilog_reduc_code;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
int op_type;
optab optab, reduc_optab;
tree new_temp = NULL_TREE;
optab = optab_for_tree_code (rhs_code, vectype, optab_default);
if (optab != unknown_optab)
{
- enum machine_mode vec_mode = TYPE_MODE (vectype);
+ machine_mode vec_mode = TYPE_MODE (vectype);
int icode = (int) optab_handler (optab, vec_mode);
if (icode != CODE_FOR_nothing)
return NULL;
tree cond_expr, then_clause, else_clause;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
- enum machine_mode cmpmode;
+ machine_mode cmpmode;
gimple pattern_stmt, def_stmt;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
if (TREE_CODE (TREE_TYPE (rhs1)) != INTEGER_TYPE)
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
tree itype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
vecitype = get_vectype_for_scalar_type (itype);
|| (TYPE_PRECISION (TREE_TYPE (rhs1))
!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1)))))
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
itype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
}
}
else
{
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
enum insn_code icode;
optab optab;
tree vectype, scalar_type, first_op1 = NULL_TREE;
optab optab;
int icode;
- enum machine_mode optab_op2_mode;
- enum machine_mode vec_mode;
+ machine_mode optab_op2_mode;
+ machine_mode vec_mode;
struct data_reference *first_dr;
HOST_WIDE_INT dummy;
gimple first_load = NULL, prev_first_load = NULL, old_first_load = NULL;
int number_of_mask_fixes = 1;
bool mask_fixed = false;
bool needs_first_vector = false;
- enum machine_mode mode;
+ machine_mode mode;
mode = TYPE_MODE (vectype);
vec<tree> interm_types = vNULL;
tree last_oprnd, intermediate_type, cvt_type = NULL_TREE;
int op_type;
- enum machine_mode rhs_mode;
+ machine_mode rhs_mode;
unsigned short fltsz;
/* Is STMT a vectorizable conversion? */
vect_supportable_shift (enum tree_code code, tree scalar_type)
{
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
optab optab;
int icode;
tree vectype;
tree vectype;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
enum tree_code code;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
tree new_temp;
optab optab;
int icode;
- enum machine_mode optab_op2_mode;
+ machine_mode optab_op2_mode;
tree def;
gimple def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
tree vectype;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
enum tree_code code;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
tree new_temp;
int op_type;
optab optab;
tree elem_type;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = NULL;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
tree dummy;
enum dr_alignment_support alignment_support_scheme;
tree def;
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
tree elem_type;
tree new_temp;
- enum machine_mode mode;
+ machine_mode mode;
gimple new_stmt = NULL;
tree dummy;
enum dr_alignment_support alignment_support_scheme;
static tree
get_vectype_for_scalar_type_and_size (tree scalar_type, unsigned size)
{
- enum machine_mode inner_mode = TYPE_MODE (scalar_type);
- enum machine_mode simd_mode;
+ machine_mode inner_mode = TYPE_MODE (scalar_type);
+ machine_mode simd_mode;
unsigned int nbytes = GET_MODE_SIZE (inner_mode);
int nunits;
tree vectype;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *vect_loop = NULL;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
enum insn_code icode1, icode2;
optab optab1, optab2;
tree vectype = vectype_in;
enum tree_code c1, c2;
int i;
tree prev_type, intermediate_type;
- enum machine_mode intermediate_mode, prev_mode;
+ machine_mode intermediate_mode, prev_mode;
optab optab3, optab4;
*multi_step_cvt = 0;
enum tree_code *code1, int *multi_step_cvt,
vec<tree> *interm_types)
{
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
enum insn_code icode1;
optab optab1, interm_optab;
tree vectype = vectype_in;
tree narrow_vectype = vectype_out;
enum tree_code c1;
tree intermediate_type;
- enum machine_mode intermediate_mode, prev_mode;
+ machine_mode intermediate_mode, prev_mode;
int i;
bool uns;
{
tree rhs1 = gimple_assign_rhs1 (stmt);
value_range_t *vr = get_value_range (rhs1);
- enum machine_mode fltmode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt)));
- enum machine_mode mode;
+ machine_mode fltmode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt)));
+ machine_mode mode;
tree tem;
gimple conv;
constructed, reuse it. */
tree
-build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
+build_pointer_type_for_mode (tree to_type, machine_mode mode,
bool can_alias_all)
{
tree t;
{
addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
: TYPE_ADDR_SPACE (to_type);
- enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+ machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
return build_pointer_type_for_mode (to_type, pointer_mode, false);
}
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
tree
-build_reference_type_for_mode (tree to_type, enum machine_mode mode,
+build_reference_type_for_mode (tree to_type, machine_mode mode,
bool can_alias_all)
{
tree t;
{
addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
: TYPE_ADDR_SPACE (to_type);
- enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+ machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
return build_reference_type_for_mode (to_type, pointer_mode, false);
}
the information necessary for debugging output. */
static tree
-make_vector_type (tree innertype, int nunits, enum machine_mode mode)
+make_vector_type (tree innertype, int nunits, machine_mode mode)
{
tree t;
inchash::hash hstate;
if (targetm.libfunc_gnu_prefix)
prefix = "__gnu_";
- type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
+ type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
if (type == NULL)
continue;
inner_type = TREE_TYPE (type);
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
the inner type. */
tree
-build_vector_type_for_mode (tree innertype, enum machine_mode mode)
+build_vector_type_for_mode (tree innertype, machine_mode mode)
{
int nunits;
extern tree signed_type_for (tree);
extern tree unsigned_type_for (tree);
extern tree truth_type_for (tree);
-extern tree build_pointer_type_for_mode (tree, enum machine_mode, bool);
+extern tree build_pointer_type_for_mode (tree, machine_mode, bool);
extern tree build_pointer_type (tree);
-extern tree build_reference_type_for_mode (tree, enum machine_mode, bool);
+extern tree build_reference_type_for_mode (tree, machine_mode, bool);
extern tree build_reference_type (tree);
-extern tree build_vector_type_for_mode (tree, enum machine_mode);
+extern tree build_vector_type_for_mode (tree, machine_mode);
extern tree build_vector_type (tree innertype, int nunits);
extern tree build_opaque_vector_type (tree innertype, int nunits);
extern tree build_index_type (tree);
look for the ultimate containing object, which is returned and specify
the access position and size. */
extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
- tree *, enum machine_mode *, int *, int *,
+ tree *, machine_mode *, int *, int *,
bool);
/* Return a tree representing the lower bound of the array mentioned in
TODO: handle bit-fields as if touching the whole field. */
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep = 0, unsignedp = 0;
base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
&mode, &unsignedp, &volatilep, false);
int modebitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep = 0, unsignedp = 0;
tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &volatilep, false);
{
tree expr_type = TREE_TYPE (expr);
tree t, tt, fn, min, max;
- enum machine_mode mode = TYPE_MODE (expr_type);
+ machine_mode mode = TYPE_MODE (expr_type);
int prec = TYPE_PRECISION (type);
bool uns_p = TYPE_UNSIGNED (type);
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep = 0, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &volatilep, false);
instructions. */
static rtx
-gen_lowpart_for_debug (enum machine_mode mode, rtx x)
+gen_lowpart_for_debug (machine_mode mode, rtx x)
{
rtx result = gen_lowpart_if_possible (mode, x);
if (result)
the same addresses without modifying the corresponding registers. */
static rtx
-cleanup_auto_inc_dec (rtx src, enum machine_mode mem_mode ATTRIBUTE_UNUSED)
+cleanup_auto_inc_dec (rtx src, machine_mode mem_mode ATTRIBUTE_UNUSED)
{
rtx x = src;
#ifdef AUTO_INC_DEC
{
rtx_insn *next, *end = NEXT_INSN (BB_END (this_basic_block));
rtx loc;
- rtx (*saved_rtl_hook_no_emit) (enum machine_mode, rtx);
+ rtx (*saved_rtl_hook_no_emit) (machine_mode, rtx);
struct rtx_subst_pair p;
p.to = src;
struct adjust_mem_data
{
bool store;
- enum machine_mode mem_mode;
+ machine_mode mem_mode;
HOST_WIDE_INT stack_adjust;
rtx_expr_list *side_effects;
};
/* Transform X into narrower mode MODE from wider mode WMODE. */
static rtx
-use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
+use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
rtx op0, op1;
if (CONSTANT_P (x))
{
struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
rtx mem, addr = loc, tem;
- enum machine_mode mem_mode_save;
+ machine_mode mem_mode_save;
bool store_save;
switch (GET_CODE (loc))
{
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
HOST_WIDE_INT ofst = 0;
- enum machine_mode mode = GET_MODE (oloc);
+ machine_mode mode = GET_MODE (oloc);
rtx loc = oloc;
rtx x;
bool retry = true;
static bool
track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
- enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
+ machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
- enum machine_mode mode;
+ machine_mode mode;
if (expr == NULL || !track_expr_p (expr, true))
return false;
mode = GET_MODE (loc);
if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
{
- enum machine_mode pseudo_mode;
+ machine_mode pseudo_mode;
pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
on the returned value are updated. */
static rtx
-var_lowpart (enum machine_mode mode, rtx loc)
+var_lowpart (machine_mode mode, rtx loc)
{
unsigned int offset, reg_offset, regno;
/* Find a VALUE corresponding to X. */
static inline cselib_val *
-find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
+find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
{
int i;
MO_CLOBBER if no micro operation is to be generated. */
static enum micro_operation_type
-use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
+use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
{
tree expr;
static void
add_uses (rtx loc, struct count_use_info *cui)
{
- enum machine_mode mode = VOIDmode;
+ machine_mode mode = VOIDmode;
enum micro_operation_type type = use_type (loc, cui, &mode);
if (type != MO_CLOBBER)
&& !MEM_P (XEXP (vloc, 0)))
{
rtx mloc = vloc;
- enum machine_mode address_mode = get_address_mode (mloc);
+ machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
GET_MODE (mloc));
else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
&& (val = find_use_val (vloc, GET_MODE (oloc), cui)))
{
- enum machine_mode mode2;
+ machine_mode mode2;
enum micro_operation_type type2;
rtx nloc = NULL;
bool resolvable = REG_P (vloc) || MEM_P (vloc);
}
else if (type == MO_VAL_USE)
{
- enum machine_mode mode2 = VOIDmode;
+ machine_mode mode2 = VOIDmode;
enum micro_operation_type type2;
cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
rtx vloc, oloc = loc, nloc;
&& !MEM_P (XEXP (oloc, 0)))
{
rtx mloc = oloc;
- enum machine_mode address_mode = get_address_mode (mloc);
+ machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
GET_MODE (mloc));
static void
add_stores (rtx loc, const_rtx expr, void *cuip)
{
- enum machine_mode mode = VOIDmode, mode2;
+ machine_mode mode = VOIDmode, mode2;
struct count_use_info *cui = (struct count_use_info *)cuip;
basic_block bb = cui->bb;
micro_operation mo;
&& !MEM_P (XEXP (loc, 0)))
{
rtx mloc = loc;
- enum machine_mode address_mode = get_address_mode (mloc);
+ machine_mode address_mode = get_address_mode (mloc);
cselib_val *val = cselib_lookup (XEXP (mloc, 0),
address_mode, 0,
GET_MODE (mloc));
&& targetm.calls.struct_value_rtx (type, 0) == 0)
{
tree struct_addr = build_pointer_type (TREE_TYPE (type));
- enum machine_mode mode = TYPE_MODE (struct_addr);
+ machine_mode mode = TYPE_MODE (struct_addr);
rtx reg;
INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
nargs + 1);
nargs);
if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
{
- enum machine_mode mode;
+ machine_mode mode;
t = TYPE_ARG_TYPES (type);
mode = TYPE_MODE (TREE_VALUE (t));
this_arg = targetm.calls.function_arg (args_so_far, mode,
else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
|| GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
{
- enum machine_mode mode = GET_MODE (x);
+ machine_mode mode = GET_MODE (x);
while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
&& GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
{
/* For non-integer stack argument see also if they weren't
initialized by integers. */
- enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
+ machine_mode imode = int_mode_for_mode (GET_MODE (mem));
if (imode != GET_MODE (mem) && imode != BLKmode)
{
val = cselib_lookup (adjust_address_nv (mem, imode, 0),
if (t && t != void_list_node)
{
tree argtype = TREE_VALUE (t);
- enum machine_mode mode = TYPE_MODE (argtype);
+ machine_mode mode = TYPE_MODE (argtype);
rtx reg;
if (pass_by_reference (&args_so_far_v, mode, argtype, true))
{
&& GET_MODE (x) == mode
&& item)
{
- enum machine_mode indmode
+ machine_mode indmode
= TYPE_MODE (TREE_TYPE (argtype));
rtx mem = gen_rtx_MEM (indmode, x);
cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
{
rtx item;
tree dtemp = (**debug_args)[ix + 1];
- enum machine_mode mode = DECL_MODE (dtemp);
+ machine_mode mode = DECL_MODE (dtemp);
item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
}
if (this_arg)
{
- enum machine_mode mode
+ machine_mode mode
= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
rtx clobbered = gen_rtx_MEM (mode, this_arg);
HOST_WIDE_INT token
var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
for (i = 0; i < var->n_var_parts; i++)
{
- enum machine_mode mode, wider_mode;
+ machine_mode mode, wider_mode;
rtx loc2;
HOST_WIDE_INT offset;
rtx decl_rtl = DECL_RTL_IF_SET (parm);
rtx incoming = DECL_INCOMING_RTL (parm);
tree decl;
- enum machine_mode mode;
+ machine_mode mode;
HOST_WIDE_INT offset;
dataflow_set *out;
decl_or_value dv;
if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
{
- enum machine_mode indmode
+ machine_mode indmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
rtx mem = gen_rtx_MEM (indmode, incoming);
cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
&& (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
&& TREE_STRING_LENGTH (decl) >= len)
{
- enum machine_mode mode;
+ machine_mode mode;
unsigned int modesize;
const char *str;
HOST_WIDE_INT i;
/* Return the section to use for constant merging. */
section *
-mergeable_constant_section (enum machine_mode mode ATTRIBUTE_UNUSED,
+mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
unsigned int flags ATTRIBUTE_UNUSED)
{
else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
{
const char *asmspec = name+1;
- enum machine_mode mode = DECL_MODE (decl);
+ machine_mode mode = DECL_MODE (decl);
reg_number = decode_reg_name (asmspec);
/* First detect errors in declaring global registers. */
if (reg_number == -1)
x = create_block_symbol (name, get_block_for_decl (decl), -1);
else
{
- enum machine_mode address_mode = Pmode;
+ machine_mode address_mode = Pmode;
if (TREE_TYPE (decl) != error_mark_node)
{
addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
it into words it if is multi-word, otherwise split it into bytes. */
if (size > 1)
{
- enum machine_mode omode, imode;
+ machine_mode omode, imode;
unsigned int subalign;
unsigned int subsize, i;
enum mode_class mclass;
}
\f
void
-assemble_real (REAL_VALUE_TYPE d, enum machine_mode mode, unsigned int align)
+assemble_real (REAL_VALUE_TYPE d, machine_mode mode, unsigned int align)
{
long data[4] = {0, 0, 0, 0};
int i;
rtx constant;
HOST_WIDE_INT offset;
hashval_t hash;
- enum machine_mode mode;
+ machine_mode mode;
unsigned int align;
int labelno;
int mark;
const_rtx_hash_1 (const_rtx x)
{
unsigned HOST_WIDE_INT hwi;
- enum machine_mode mode;
+ machine_mode mode;
enum rtx_code code;
hashval_t h;
int i;
and return a MEM rtx to refer to it in memory. */
rtx
-force_const_mem (enum machine_mode mode, rtx x)
+force_const_mem (machine_mode mode, rtx x)
{
struct constant_descriptor_rtx *desc, tmp;
struct rtx_constant_pool *pool;
/* Similar, return the mode. */
-enum machine_mode
+machine_mode
get_pool_mode (const_rtx addr)
{
return SYMBOL_REF_CONSTANT (addr)->mode;
in MODE with known alignment ALIGN. */
static void
-output_constant_pool_2 (enum machine_mode mode, rtx x, unsigned int align)
+output_constant_pool_2 (machine_mode mode, rtx x, unsigned int align)
{
switch (GET_MODE_CLASS (mode))
{
case MODE_VECTOR_UACCUM:
{
int i, units;
- enum machine_mode submode = GET_MODE_INNER (mode);
+ machine_mode submode = GET_MODE_INNER (mode);
unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
gcc_assert (GET_CODE (x) == CONST_VECTOR);
break;
case VECTOR_CST:
{
- enum machine_mode inner = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
+ machine_mode inner = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
int elt_size = GET_MODE_SIZE (inner);
output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align);
}
section *
-default_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
rtx x,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
}
section *
-default_elf_select_rtx_section (enum machine_mode mode, rtx x,
+default_elf_select_rtx_section (machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
int reloc = compute_reloc_for_rtx (x);
make_debug_expr_from_rtl (const_rtx exp)
{
tree ddecl = make_node (DEBUG_EXPR_DECL), type;
- enum machine_mode mode = GET_MODE (exp);
+ machine_mode mode = GET_MODE (exp);
rtx dval;
DECL_ARTIFICIAL (ddecl) = 1;