this to be no less than the 1/p */
#define MAX_INSNS_SKIPPED 3
-/* The values of unspec's first field. */
-enum {
- ARC_UNSPEC_PLT = 3,
- ARC_UNSPEC_GOT,
- ARC_UNSPEC_GOTOFF
-} ;
-
-
-enum arc_builtins {
- ARC_BUILTIN_NOP = 2,
- ARC_BUILTIN_NORM = 3,
- ARC_BUILTIN_NORMW = 4,
- ARC_BUILTIN_SWAP = 5,
- ARC_BUILTIN_BRK = 6,
- ARC_BUILTIN_DIVAW = 7,
- ARC_BUILTIN_EX = 8,
- ARC_BUILTIN_MUL64 = 9,
- ARC_BUILTIN_MULU64 = 10,
- ARC_BUILTIN_RTIE = 11,
- ARC_BUILTIN_SYNC = 12,
- ARC_BUILTIN_CORE_READ = 13,
- ARC_BUILTIN_CORE_WRITE = 14,
- ARC_BUILTIN_FLAG = 15,
- ARC_BUILTIN_LR = 16,
- ARC_BUILTIN_SR = 17,
- ARC_BUILTIN_SLEEP = 18,
- ARC_BUILTIN_SWI = 19,
- ARC_BUILTIN_TRAP_S = 20,
- ARC_BUILTIN_UNIMP_S = 21,
- ARC_BUILTIN_ALIGNED = 22,
-
- /* Sentinel to mark start of simd builtins. */
- ARC_SIMD_BUILTIN_BEGIN = 1000,
-
- ARC_SIMD_BUILTIN_VADDAW = 1001,
- ARC_SIMD_BUILTIN_VADDW = 1002,
- ARC_SIMD_BUILTIN_VAVB = 1003,
- ARC_SIMD_BUILTIN_VAVRB = 1004,
- ARC_SIMD_BUILTIN_VDIFAW = 1005,
- ARC_SIMD_BUILTIN_VDIFW = 1006,
- ARC_SIMD_BUILTIN_VMAXAW = 1007,
- ARC_SIMD_BUILTIN_VMAXW = 1008,
- ARC_SIMD_BUILTIN_VMINAW = 1009,
- ARC_SIMD_BUILTIN_VMINW = 1010,
- ARC_SIMD_BUILTIN_VMULAW = 1011,
- ARC_SIMD_BUILTIN_VMULFAW = 1012,
- ARC_SIMD_BUILTIN_VMULFW = 1013,
- ARC_SIMD_BUILTIN_VMULW = 1014,
- ARC_SIMD_BUILTIN_VSUBAW = 1015,
- ARC_SIMD_BUILTIN_VSUBW = 1016,
- ARC_SIMD_BUILTIN_VSUMMW = 1017,
- ARC_SIMD_BUILTIN_VAND = 1018,
- ARC_SIMD_BUILTIN_VANDAW = 1019,
- ARC_SIMD_BUILTIN_VBIC = 1020,
- ARC_SIMD_BUILTIN_VBICAW = 1021,
- ARC_SIMD_BUILTIN_VOR = 1022,
- ARC_SIMD_BUILTIN_VXOR = 1023,
- ARC_SIMD_BUILTIN_VXORAW = 1024,
- ARC_SIMD_BUILTIN_VEQW = 1025,
- ARC_SIMD_BUILTIN_VLEW = 1026,
- ARC_SIMD_BUILTIN_VLTW = 1027,
- ARC_SIMD_BUILTIN_VNEW = 1028,
- ARC_SIMD_BUILTIN_VMR1AW = 1029,
- ARC_SIMD_BUILTIN_VMR1W = 1030,
- ARC_SIMD_BUILTIN_VMR2AW = 1031,
- ARC_SIMD_BUILTIN_VMR2W = 1032,
- ARC_SIMD_BUILTIN_VMR3AW = 1033,
- ARC_SIMD_BUILTIN_VMR3W = 1034,
- ARC_SIMD_BUILTIN_VMR4AW = 1035,
- ARC_SIMD_BUILTIN_VMR4W = 1036,
- ARC_SIMD_BUILTIN_VMR5AW = 1037,
- ARC_SIMD_BUILTIN_VMR5W = 1038,
- ARC_SIMD_BUILTIN_VMR6AW = 1039,
- ARC_SIMD_BUILTIN_VMR6W = 1040,
- ARC_SIMD_BUILTIN_VMR7AW = 1041,
- ARC_SIMD_BUILTIN_VMR7W = 1042,
- ARC_SIMD_BUILTIN_VMRB = 1043,
- ARC_SIMD_BUILTIN_VH264F = 1044,
- ARC_SIMD_BUILTIN_VH264FT = 1045,
- ARC_SIMD_BUILTIN_VH264FW = 1046,
- ARC_SIMD_BUILTIN_VVC1F = 1047,
- ARC_SIMD_BUILTIN_VVC1FT = 1048,
-
- /* Va, Vb, rlimm instructions. */
- ARC_SIMD_BUILTIN_VBADDW = 1050,
- ARC_SIMD_BUILTIN_VBMAXW = 1051,
- ARC_SIMD_BUILTIN_VBMINW = 1052,
- ARC_SIMD_BUILTIN_VBMULAW = 1053,
- ARC_SIMD_BUILTIN_VBMULFW = 1054,
- ARC_SIMD_BUILTIN_VBMULW = 1055,
- ARC_SIMD_BUILTIN_VBRSUBW = 1056,
- ARC_SIMD_BUILTIN_VBSUBW = 1057,
-
- /* Va, Vb, Ic instructions. */
- ARC_SIMD_BUILTIN_VASRW = 1060,
- ARC_SIMD_BUILTIN_VSR8 = 1061,
- ARC_SIMD_BUILTIN_VSR8AW = 1062,
-
- /* Va, Vb, u6 instructions. */
- ARC_SIMD_BUILTIN_VASRRWi = 1065,
- ARC_SIMD_BUILTIN_VASRSRWi = 1066,
- ARC_SIMD_BUILTIN_VASRWi = 1067,
- ARC_SIMD_BUILTIN_VASRPWBi = 1068,
- ARC_SIMD_BUILTIN_VASRRPWBi = 1069,
- ARC_SIMD_BUILTIN_VSR8AWi = 1070,
- ARC_SIMD_BUILTIN_VSR8i = 1071,
-
- /* Va, Vb, u8 (simm) instructions. */
- ARC_SIMD_BUILTIN_VMVAW = 1075,
- ARC_SIMD_BUILTIN_VMVW = 1076,
- ARC_SIMD_BUILTIN_VMVZW = 1077,
- ARC_SIMD_BUILTIN_VD6TAPF = 1078,
-
- /* Va, rlimm, u8 (simm) instructions. */
- ARC_SIMD_BUILTIN_VMOVAW = 1080,
- ARC_SIMD_BUILTIN_VMOVW = 1081,
- ARC_SIMD_BUILTIN_VMOVZW = 1082,
-
- /* Va, Vb instructions. */
- ARC_SIMD_BUILTIN_VABSAW = 1085,
- ARC_SIMD_BUILTIN_VABSW = 1086,
- ARC_SIMD_BUILTIN_VADDSUW = 1087,
- ARC_SIMD_BUILTIN_VSIGNW = 1088,
- ARC_SIMD_BUILTIN_VEXCH1 = 1089,
- ARC_SIMD_BUILTIN_VEXCH2 = 1090,
- ARC_SIMD_BUILTIN_VEXCH4 = 1091,
- ARC_SIMD_BUILTIN_VUPBAW = 1092,
- ARC_SIMD_BUILTIN_VUPBW = 1093,
- ARC_SIMD_BUILTIN_VUPSBAW = 1094,
- ARC_SIMD_BUILTIN_VUPSBW = 1095,
-
- ARC_SIMD_BUILTIN_VDIRUN = 1100,
- ARC_SIMD_BUILTIN_VDORUN = 1101,
- ARC_SIMD_BUILTIN_VDIWR = 1102,
- ARC_SIMD_BUILTIN_VDOWR = 1103,
-
- ARC_SIMD_BUILTIN_VREC = 1105,
- ARC_SIMD_BUILTIN_VRUN = 1106,
- ARC_SIMD_BUILTIN_VRECRUN = 1107,
- ARC_SIMD_BUILTIN_VENDREC = 1108,
-
- ARC_SIMD_BUILTIN_VLD32WH = 1110,
- ARC_SIMD_BUILTIN_VLD32WL = 1111,
- ARC_SIMD_BUILTIN_VLD64 = 1112,
- ARC_SIMD_BUILTIN_VLD32 = 1113,
- ARC_SIMD_BUILTIN_VLD64W = 1114,
- ARC_SIMD_BUILTIN_VLD128 = 1115,
- ARC_SIMD_BUILTIN_VST128 = 1116,
- ARC_SIMD_BUILTIN_VST64 = 1117,
-
- ARC_SIMD_BUILTIN_VST16_N = 1120,
- ARC_SIMD_BUILTIN_VST32_N = 1121,
-
- ARC_SIMD_BUILTIN_VINTI = 1201,
-
- ARC_SIMD_BUILTIN_END
-};
-
/* A nop is needed between a 4 byte insn that sets the condition codes and
a branch that uses them (the same isn't true for an 8 byte insn that sets
the condition codes). Set by arc_ccfsm_advance. Used by
static void arc_init_reg_tables (void);
static bool arc_return_in_memory (const_tree, const_tree);
-static void arc_init_simd_builtins (void);
static bool arc_vector_mode_supported_p (machine_mode);
static bool arc_can_use_doloop_p (const widest_int &, const widest_int &,
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arc_expand_builtin
+#undef TARGET_BUILTIN_DECL
+#define TARGET_BUILTIN_DECL arc_builtin_decl
+
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arc_output_mi_thunk
return !arc_legitimate_constant_p (mode, x);
}
+/* IDs for all the ARC builtins.  Generated from builtins.def: one
+   ARC_BUILTIN_<NAME> enumerator per DEF_BUILTIN entry, in file order.  */
+
+enum arc_builtin_id
+ {
+#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK) \
+ ARC_BUILTIN_ ## NAME,
+#include "builtins.def"
+#undef DEF_BUILTIN
+
+ ARC_BUILTIN_COUNT /* Number of builtins; also the out-of-range sentinel.  */
+ };
+
+/* Per-builtin expansion data, indexed by enum arc_builtin_id.  */
+struct GTY(()) arc_builtin_description
+{
+ enum insn_code icode; /* Insn code used to expand this builtin.  */
+ int n_args;           /* Number of call arguments it takes.  */
+ tree fndecl;          /* Declaration; NULL_TREE until registered.  */
+};
+
+/* One entry per builtin, filled from builtins.def; fndecl is set later
+   by arc_init_builtins for the builtins enabled by their MASK.  */
+static GTY(()) struct arc_builtin_description
+arc_bdesc[ARC_BUILTIN_COUNT] =
+{
+#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK) \
+ { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
+#include "builtins.def"
+#undef DEF_BUILTIN
+};
+
+/* Transform UP into lowercase and write the result to LO.
+   You must provide enough space for LO (strlen (UP) + 1 bytes).
+   Return LO.  */
+
+static char*
+arc_tolower (char *lo, const char *up)
+{
+ char *lo0 = lo; /* Remember the start of the output buffer.  */
+
+ for (; *up; up++, lo++)
+ *lo = TOLOWER (*up);
+
+ *lo = '\0';
+
+ return lo0;
+}
+
+/* Implement `TARGET_BUILTIN_DECL'.  Return the declaration recorded in
+   arc_bdesc for builtin ID, or error_mark_node when ID is out of
+   range.  */
-/* Generic function to define a builtin. */
-#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
- do \
- { \
- if (MASK) \
- add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE); \
- } \
- while (0)
+static tree
+arc_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
+{
+ if (id < ARC_BUILTIN_COUNT)
+ return arc_bdesc[id].fndecl; /* NULL_TREE if the builtin was masked off.  */
+ return error_mark_node;
+}
static void
arc_init_builtins (void)
{
- tree endlink = void_list_node;
-
- tree void_ftype_void
- = build_function_type (void_type_node,
- endlink);
-
- tree int_ftype_int
- = build_function_type (integer_type_node,
- tree_cons (NULL_TREE, integer_type_node, endlink));
-
- tree pcvoid_type_node
- = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
- tree int_ftype_pcvoid_int
- = build_function_type (integer_type_node,
- tree_cons (NULL_TREE, pcvoid_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- endlink)));
-
- tree int_ftype_short_int
- = build_function_type (integer_type_node,
- tree_cons (NULL_TREE, short_integer_type_node, endlink));
-
- tree void_ftype_int_int
- = build_function_type (void_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE, integer_type_node, endlink)));
- tree void_ftype_usint_usint
- = build_function_type (void_type_node,
- tree_cons (NULL_TREE, long_unsigned_type_node,
- tree_cons (NULL_TREE, long_unsigned_type_node, endlink)));
-
- tree int_ftype_int_int
- = build_function_type (integer_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE, integer_type_node, endlink)));
-
- tree usint_ftype_usint
- = build_function_type (long_unsigned_type_node,
- tree_cons (NULL_TREE, long_unsigned_type_node, endlink));
-
- tree void_ftype_usint
- = build_function_type (void_type_node,
- tree_cons (NULL_TREE, long_unsigned_type_node, endlink));
-
- /* Add the builtins. */
- def_mbuiltin (1,"__builtin_arc_nop", void_ftype_void, ARC_BUILTIN_NOP);
- def_mbuiltin (TARGET_NORM, "__builtin_arc_norm", int_ftype_int, ARC_BUILTIN_NORM);
- def_mbuiltin (TARGET_NORM, "__builtin_arc_normw", int_ftype_short_int, ARC_BUILTIN_NORMW);
- def_mbuiltin (TARGET_SWAP, "__builtin_arc_swap", int_ftype_int, ARC_BUILTIN_SWAP);
- def_mbuiltin (TARGET_MUL64_SET,"__builtin_arc_mul64", void_ftype_int_int, ARC_BUILTIN_MUL64);
- def_mbuiltin (TARGET_MUL64_SET,"__builtin_arc_mulu64", void_ftype_usint_usint, ARC_BUILTIN_MULU64);
- def_mbuiltin (1,"__builtin_arc_rtie", void_ftype_void, ARC_BUILTIN_RTIE);
- def_mbuiltin (TARGET_ARC700,"__builtin_arc_sync", void_ftype_void, ARC_BUILTIN_SYNC);
- def_mbuiltin ((TARGET_EA_SET),"__builtin_arc_divaw", int_ftype_int_int, ARC_BUILTIN_DIVAW);
- def_mbuiltin (1,"__builtin_arc_brk", void_ftype_void, ARC_BUILTIN_BRK);
- def_mbuiltin (1,"__builtin_arc_flag", void_ftype_usint, ARC_BUILTIN_FLAG);
- def_mbuiltin (1,"__builtin_arc_sleep", void_ftype_usint, ARC_BUILTIN_SLEEP);
- def_mbuiltin (1,"__builtin_arc_swi", void_ftype_void, ARC_BUILTIN_SWI);
- def_mbuiltin (1,"__builtin_arc_core_read", usint_ftype_usint, ARC_BUILTIN_CORE_READ);
- def_mbuiltin (1,"__builtin_arc_core_write", void_ftype_usint_usint, ARC_BUILTIN_CORE_WRITE);
- def_mbuiltin (1,"__builtin_arc_lr", usint_ftype_usint, ARC_BUILTIN_LR);
- def_mbuiltin (1,"__builtin_arc_sr", void_ftype_usint_usint, ARC_BUILTIN_SR);
- def_mbuiltin (TARGET_ARC700,"__builtin_arc_trap_s", void_ftype_usint, ARC_BUILTIN_TRAP_S);
- def_mbuiltin (TARGET_ARC700,"__builtin_arc_unimp_s", void_ftype_void, ARC_BUILTIN_UNIMP_S);
- def_mbuiltin (1,"__builtin_arc_aligned", int_ftype_pcvoid_int, ARC_BUILTIN_ALIGNED);
-
- if (TARGET_SIMD_SET)
- arc_init_simd_builtins ();
-}
-
-static rtx arc_expand_simd_builtin (tree, rtx, rtx, machine_mode, int);
+ tree pcvoid_type_node
+ = build_pointer_type (build_qualified_type (void_type_node,
+ TYPE_QUAL_CONST));
+ tree V8HI_type_node = build_vector_type_for_mode (intHI_type_node,
+ V8HImode);
+
+ tree void_ftype_void
+ = build_function_type_list (void_type_node, NULL_TREE);
+ tree int_ftype_int
+ = build_function_type_list (integer_type_node, integer_type_node,
+ NULL_TREE);
+ tree int_ftype_pcvoid_int
+ = build_function_type_list (integer_type_node, pcvoid_type_node,
+ integer_type_node, NULL_TREE);
+ tree void_ftype_usint_usint
+ = build_function_type_list (void_type_node, long_unsigned_type_node,
+ long_unsigned_type_node, NULL_TREE);
+ tree int_ftype_int_int
+ = build_function_type_list (integer_type_node, integer_type_node,
+ integer_type_node, NULL_TREE);
+ tree usint_ftype_usint
+ = build_function_type_list (long_unsigned_type_node,
+ long_unsigned_type_node, NULL_TREE);
+ tree void_ftype_usint
+ = build_function_type_list (void_type_node, long_unsigned_type_node,
+ NULL_TREE);
+ tree int_ftype_void
+ = build_function_type_list (integer_type_node, void_type_node,
+ NULL_TREE);
+ tree void_ftype_int
+ = build_function_type_list (void_type_node, integer_type_node,
+ NULL_TREE);
+ tree int_ftype_short
+ = build_function_type_list (integer_type_node, short_integer_type_node,
+ NULL_TREE);
+
+ /* Old ARC SIMD types. */
+ tree v8hi_ftype_v8hi_v8hi
+ = build_function_type_list (V8HI_type_node, V8HI_type_node,
+ V8HI_type_node, NULL_TREE);
+ tree v8hi_ftype_v8hi_int
+ = build_function_type_list (V8HI_type_node, V8HI_type_node,
+ integer_type_node, NULL_TREE);
+ tree v8hi_ftype_v8hi_int_int
+ = build_function_type_list (V8HI_type_node, V8HI_type_node,
+ integer_type_node, integer_type_node,
+ NULL_TREE);
+ tree void_ftype_v8hi_int_int
+ = build_function_type_list (void_type_node, V8HI_type_node,
+ integer_type_node, integer_type_node,
+ NULL_TREE);
+ tree void_ftype_v8hi_int_int_int
+ = build_function_type_list (void_type_node, V8HI_type_node,
+ integer_type_node, integer_type_node,
+ integer_type_node, NULL_TREE);
+ tree v8hi_ftype_int_int
+ = build_function_type_list (V8HI_type_node, integer_type_node,
+ integer_type_node, NULL_TREE);
+ tree void_ftype_int_int
+ = build_function_type_list (void_type_node, integer_type_node,
+ integer_type_node, NULL_TREE);
+ tree v8hi_ftype_v8hi
+ = build_function_type_list (V8HI_type_node, V8HI_type_node,
+ NULL_TREE);
+
+ /* Add the builtins. */
+#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK) \
+ { \
+ int id = ARC_BUILTIN_ ## NAME; \
+ const char *Name = "__builtin_arc_" #NAME; \
+ char *name = (char*) alloca (1 + strlen (Name)); \
+ \
+ gcc_assert (id < ARC_BUILTIN_COUNT); \
+ if (MASK) \
+ arc_bdesc[id].fndecl \
+ = add_builtin_function (arc_tolower(name, Name), TYPE, id, \
+ BUILT_IN_MD, NULL, NULL_TREE); \
+ }
+#include "builtins.def"
+#undef DEF_BUILTIN
+}
+
+/* Helper to expand __builtin_arc_aligned (void* val, int alignval).
+   Returns const1_rtx when VAL can be proven aligned to ALIGNVAL bytes,
+   const0_rtx otherwise, and NULL_RTX on an invalid alignment.  */
+
+static rtx
+arc_expand_builtin_aligned (tree exp)
+{
+ tree arg0 = CALL_EXPR_ARG (exp, 0);
+ tree arg1 = CALL_EXPR_ARG (exp, 1);
+ fold (arg1);
+ rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+
+ if (!CONST_INT_P (op1))
+ {
+ /* If we can't fold the alignment to a constant integer
+ whilst optimizing, this is probably a user error. */
+ if (optimize)
+ warning (0, "__builtin_arc_aligned with non-constant alignment");
+ }
+ else
+ {
+ HOST_WIDE_INT alignTest = INTVAL (op1);
+ /* Check alignTest is positive, and a power of two. */
+ if (alignTest <= 0 || alignTest != (alignTest & -alignTest))
+ {
+ error ("invalid alignment value for __builtin_arc_aligned");
+ return NULL_RTX;
+ }
+
+ /* Both operands constant: decide at compile time.  */
+ if (CONST_INT_P (op0))
+ {
+ HOST_WIDE_INT pnt = INTVAL (op0);
+
+ if ((pnt & (alignTest - 1)) == 0)
+ return const1_rtx;
+ }
+ else
+ {
+ unsigned align = get_pointer_alignment (arg0);
+ unsigned numBits = alignTest * BITS_PER_UNIT;
+
+ if (align && align >= numBits)
+ return const1_rtx;
+ /* Another attempt to ascertain alignment. Check the type
+ we are pointing to. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && TYPE_ALIGN (TREE_TYPE (TREE_TYPE (arg0))) >= numBits)
+ return const1_rtx;
+ }
+ }
+
+ /* Default to false. */
+ return const0_rtx;
+}
+
+/* Helper for arc_expand_builtin: invoke the generator function for
+   ICODE with the right number of operands taken from ARG, as given by
+   insn_data[icode].n_generator_args (at most 5).  */
+
+static rtx_insn *
+apply_GEN_FCN (enum insn_code icode, rtx *arg)
+{
+ switch (insn_data[icode].n_generator_args)
+ {
+ case 0:
+ return GEN_FCN (icode) ();
+ case 1:
+ return GEN_FCN (icode) (arg[0]);
+ case 2:
+ return GEN_FCN (icode) (arg[0], arg[1]);
+ case 3:
+ return GEN_FCN (icode) (arg[0], arg[1], arg[2]);
+ case 4:
+ return GEN_FCN (icode) (arg[0], arg[1], arg[2], arg[3]);
+ case 5:
+ return GEN_FCN (icode) (arg[0], arg[1], arg[2], arg[3], arg[4]);
+ default:
+ gcc_unreachable ();
+ }
+}
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
static rtx
arc_expand_builtin (tree exp,
rtx target,
- rtx subtarget,
- machine_mode mode,
- int ignore)
-{
- tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
- tree arg0;
- tree arg1;
- rtx op0;
- rtx op1;
- int fcode = DECL_FUNCTION_CODE (fndecl);
- int icode;
+ rtx subtarget ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
+ int ignore ATTRIBUTE_UNUSED)
+{
+ tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
+ unsigned int id = DECL_FUNCTION_CODE (fndecl);
+ const struct arc_builtin_description *d = &arc_bdesc[id];
+ int i, j, n_args = call_expr_nargs (exp);
+ rtx pat = NULL_RTX;
+ rtx xop[5];
+ enum insn_code icode = d->icode;
+ machine_mode tmode = insn_data[icode].operand[0].mode;
+ int nonvoid;
+ tree arg0;
+ tree arg1;
+ tree arg2;
+ tree arg3;
+ rtx op0;
+ rtx op1;
+ rtx op2;
+ rtx op3;
+ rtx op4;
machine_mode mode0;
machine_mode mode1;
+ machine_mode mode2;
+ machine_mode mode3;
+ machine_mode mode4;
- if (fcode > ARC_SIMD_BUILTIN_BEGIN && fcode < ARC_SIMD_BUILTIN_END)
- return arc_expand_simd_builtin (exp, target, subtarget, mode, ignore);
+ if (id >= ARC_BUILTIN_COUNT)
+ internal_error ("bad builtin fcode");
- switch (fcode)
+ /* 1st part: Expand special builtins. */
+ switch (id)
{
case ARC_BUILTIN_NOP:
- emit_insn (gen_nop ());
+ emit_insn (gen_nopv ());
return NULL_RTX;
- case ARC_BUILTIN_NORM:
- icode = CODE_FOR_clrsbsi2;
- arg0 = CALL_EXPR_ARG (exp, 0);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[1].mode;
- target = gen_reg_rtx (SImode);
+ case ARC_BUILTIN_RTIE:
+ case ARC_BUILTIN_SYNC:
+ case ARC_BUILTIN_BRK:
+ case ARC_BUILTIN_SWI:
+ case ARC_BUILTIN_UNIMP_S:
+ gcc_assert (icode != 0);
+ emit_insn (GEN_FCN (icode) (const1_rtx));
+ return NULL_RTX;
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
+ case ARC_BUILTIN_ALIGNED:
+ return arc_expand_builtin_aligned (exp);
- emit_insn (gen_clrsbsi2 (target, op0));
+ case ARC_BUILTIN_CLRI:
+ target = gen_reg_rtx (SImode);
+ emit_insn (gen_clri (target, const1_rtx));
return target;
- case ARC_BUILTIN_NORMW:
-
- /* FIXME : This should all be HImode, not SImode. */
- icode = CODE_FOR_normw;
+ case ARC_BUILTIN_TRAP_S:
+ case ARC_BUILTIN_SLEEP:
arg0 = CALL_EXPR_ARG (exp, 0);
+ fold (arg0);
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[1].mode;
- target = gen_reg_rtx (SImode);
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, convert_to_mode (mode0, op0,0));
- emit_insn (gen_normw (target, op0));
- return target;
+ if (!CONST_INT_P (op0) || !satisfies_constraint_L (op0))
+ {
+ error ("builtin operand should be an unsigned 6-bit value");
+ return NULL_RTX;
+ }
+ gcc_assert (icode != 0);
+ emit_insn (GEN_FCN (icode) (op0));
+ return NULL_RTX;
- case ARC_BUILTIN_MUL64:
- icode = CODE_FOR_mul64;
+ case ARC_BUILTIN_VDORUN:
+ case ARC_BUILTIN_VDIRUN:
arg0 = CALL_EXPR_ARG (exp, 0);
arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
+ op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[0].mode;
- mode1 = insn_data[icode].operand[1].mode;
+ target = gen_rtx_REG (SImode, (id == ARC_BUILTIN_VDIRUN) ? 131 : 139);
+
+ mode0 = insn_data[icode].operand[1].mode;
+ mode1 = insn_data[icode].operand[2].mode;
- if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
+ if (!insn_data[icode].operand[1].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
- if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
+ if (!insn_data[icode].operand[2].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
- emit_insn (gen_mul64 (op0,op1));
+ pat = GEN_FCN (icode) (target, op0, op1);
+ if (!pat)
+ return NULL_RTX;
+
+ emit_insn (pat);
return NULL_RTX;
- case ARC_BUILTIN_MULU64:
- icode = CODE_FOR_mulu64;
+ case ARC_BUILTIN_VDIWR:
+ case ARC_BUILTIN_VDOWR:
arg0 = CALL_EXPR_ARG (exp, 0);
arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
+ op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
+
+ if (!CONST_INT_P (op0)
+ || !(UNSIGNED_INT3 (INTVAL (op0))))
+ error ("operand 1 should be an unsigned 3-bit immediate");
- mode0 = insn_data[icode].operand[0].mode;
mode1 = insn_data[icode].operand[1].mode;
- if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
+ if (icode == CODE_FOR_vdiwr_insn)
+ target = gen_rtx_REG (SImode,
+ ARC_FIRST_SIMD_DMA_CONFIG_IN_REG + INTVAL (op0));
+ else if (icode == CODE_FOR_vdowr_insn)
+ target = gen_rtx_REG (SImode,
+ ARC_FIRST_SIMD_DMA_CONFIG_OUT_REG + INTVAL (op0));
+ else
+ gcc_unreachable ();
- if (! (*insn_data[icode].operand[0].predicate) (op1, mode1))
+ if (!insn_data[icode].operand[2].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
- emit_insn (gen_mulu64 (op0,op1));
- return NULL_RTX;
-
- case ARC_BUILTIN_RTIE:
- icode = CODE_FOR_rtie;
- emit_insn (gen_rtie (const1_rtx));
- return NULL_RTX;
+ pat = GEN_FCN (icode) (target, op1);
+ if (!pat)
+ return NULL_RTX;
- case ARC_BUILTIN_SYNC:
- icode = CODE_FOR_sync;
- emit_insn (gen_sync (const1_rtx));
+ emit_insn (pat);
return NULL_RTX;
- case ARC_BUILTIN_SWAP:
- icode = CODE_FOR_swap;
+ case ARC_BUILTIN_VASRW:
+ case ARC_BUILTIN_VSR8:
+ case ARC_BUILTIN_VSR8AW:
arg0 = CALL_EXPR_ARG (exp, 0);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ arg1 = CALL_EXPR_ARG (exp, 1);
+ op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
+ op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
+ op2 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
+
+ target = gen_reg_rtx (V8HImode);
mode0 = insn_data[icode].operand[1].mode;
- target = gen_reg_rtx (SImode);
+ mode1 = insn_data[icode].operand[2].mode;
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
+ if (!insn_data[icode].operand[1].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
- emit_insn (gen_swap (target, op0));
- return target;
+ if ((!insn_data[icode].operand[2].predicate (op1, mode1))
+ || !(UNSIGNED_INT3 (INTVAL (op1))))
+ error ("operand 2 should be an unsigned 3-bit value (I0-I7)");
- case ARC_BUILTIN_DIVAW:
- icode = CODE_FOR_divaw;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
+ pat = GEN_FCN (icode) (target, op0, op1, op2);
+ if (!pat)
+ return NULL_RTX;
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- target = gen_reg_rtx (SImode);
+ emit_insn (pat);
+ return target;
- mode0 = insn_data[icode].operand[0].mode;
- mode1 = insn_data[icode].operand[1].mode;
+ case ARC_BUILTIN_VLD32WH:
+ case ARC_BUILTIN_VLD32WL:
+ case ARC_BUILTIN_VLD64:
+ case ARC_BUILTIN_VLD32:
+ rtx src_vreg;
+ icode = d->icode;
+ arg0 = CALL_EXPR_ARG (exp, 0); /* source vreg. */
+ arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7. */
+ arg2 = CALL_EXPR_ARG (exp, 2); /* u8. */
- if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
+ src_vreg = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
+ op0 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
+ op1 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL);
+ op2 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
- if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
- op1 = copy_to_mode_reg (mode1, op1);
+ /* target <- src vreg. */
+ emit_insn (gen_move_insn (target, src_vreg));
- emit_insn (gen_divaw (target, op0, op1));
- return target;
+ /* target <- vec_concat: target, mem (Ib, u8). */
+ mode0 = insn_data[icode].operand[3].mode;
+ mode1 = insn_data[icode].operand[1].mode;
- case ARC_BUILTIN_BRK:
- icode = CODE_FOR_brk;
- emit_insn (gen_brk (const1_rtx));
- return NULL_RTX;
+ if ((!insn_data[icode].operand[3].predicate (op0, mode0))
+ || !(UNSIGNED_INT3 (INTVAL (op0))))
+ error ("operand 1 should be an unsigned 3-bit value (I0-I7)");
- case ARC_BUILTIN_SLEEP:
- icode = CODE_FOR_sleep;
- arg0 = CALL_EXPR_ARG (exp, 0);
+ if ((!insn_data[icode].operand[1].predicate (op1, mode1))
+ || !(UNSIGNED_INT8 (INTVAL (op1))))
+ error ("operand 2 should be an unsigned 8-bit value");
- fold (arg0);
+ pat = GEN_FCN (icode) (target, op1, op2, op0);
+ if (!pat)
+ return NULL_RTX;
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[1].mode;
+ emit_insn (pat);
+ return target;
- emit_insn (gen_sleep (op0));
- return NULL_RTX;
+ case ARC_BUILTIN_VLD64W:
+ case ARC_BUILTIN_VLD128:
+ arg0 = CALL_EXPR_ARG (exp, 0); /* dest vreg. */
+ arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7. */
- case ARC_BUILTIN_SWI:
- icode = CODE_FOR_swi;
- emit_insn (gen_swi (const1_rtx));
- return NULL_RTX;
+ op0 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
+ op1 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
+ op2 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
- case ARC_BUILTIN_FLAG:
- icode = CODE_FOR_flag;
- arg0 = CALL_EXPR_ARG (exp, 0);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[0].mode;
+ /* target <- src vreg. */
+ target = gen_reg_rtx (V8HImode);
- if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
+ /* target <- vec_concat: target, mem (Ib, u8). */
+ mode0 = insn_data[icode].operand[1].mode;
+ mode1 = insn_data[icode].operand[2].mode;
+ mode2 = insn_data[icode].operand[3].mode;
- emit_insn (gen_flag (op0));
- return NULL_RTX;
+ if ((!insn_data[icode].operand[2].predicate (op1, mode1))
+ || !(UNSIGNED_INT3 (INTVAL (op1))))
+ error ("operand 1 should be an unsigned 3-bit value (I0-I7)");
- case ARC_BUILTIN_CORE_READ:
- icode = CODE_FOR_core_read;
- arg0 = CALL_EXPR_ARG (exp, 0);
- target = gen_reg_rtx (SImode);
+ if ((!insn_data[icode].operand[3].predicate (op2, mode2))
+ || !(UNSIGNED_INT8 (INTVAL (op2))))
+ error ("operand 2 should be an unsigned 8-bit value");
- fold (arg0);
+ pat = GEN_FCN (icode) (target, op0, op1, op2);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[1].mode;
+ if (!pat)
+ return NULL_RTX;
- emit_insn (gen_core_read (target, op0));
+ emit_insn (pat);
return target;
- case ARC_BUILTIN_CORE_WRITE:
- icode = CODE_FOR_core_write;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
-
- fold (arg1);
+ case ARC_BUILTIN_VST128:
+ case ARC_BUILTIN_VST64:
+ arg0 = CALL_EXPR_ARG (exp, 0); /* src vreg. */
+ arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7. */
+ arg2 = CALL_EXPR_ARG (exp, 2); /* u8. */
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ op0 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
+ op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
+ op2 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL);
+ op3 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
mode0 = insn_data[icode].operand[0].mode;
mode1 = insn_data[icode].operand[1].mode;
+ mode2 = insn_data[icode].operand[2].mode;
+ mode3 = insn_data[icode].operand[3].mode;
- emit_insn (gen_core_write (op0, op1));
- return NULL_RTX;
-
- case ARC_BUILTIN_LR:
- icode = CODE_FOR_lr;
- arg0 = CALL_EXPR_ARG (exp, 0);
- target = gen_reg_rtx (SImode);
+ if ((!insn_data[icode].operand[1].predicate (op1, mode1))
+ || !(UNSIGNED_INT3 (INTVAL (op1))))
+ error ("operand 2 should be an unsigned 3-bit value (I0-I7)");
- fold (arg0);
+ if ((!insn_data[icode].operand[2].predicate (op2, mode2))
+ || !(UNSIGNED_INT8 (INTVAL (op2))))
+ error ("operand 3 should be an unsigned 8-bit value");
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[1].mode;
+ if (!insn_data[icode].operand[3].predicate (op3, mode3))
+ op3 = copy_to_mode_reg (mode3, op3);
- emit_insn (gen_lr (target, op0));
- return target;
+ pat = GEN_FCN (icode) (op0, op1, op2, op3);
+ if (!pat)
+ return NULL_RTX;
- case ARC_BUILTIN_SR:
- icode = CODE_FOR_sr;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
+ emit_insn (pat);
+ return NULL_RTX;
- fold (arg1);
+ case ARC_BUILTIN_VST16_N:
+ case ARC_BUILTIN_VST32_N:
+ arg0 = CALL_EXPR_ARG (exp, 0); /* source vreg. */
+ arg1 = CALL_EXPR_ARG (exp, 1); /* u3. */
+ arg2 = CALL_EXPR_ARG (exp, 2); /* [I]0-7. */
+ arg3 = CALL_EXPR_ARG (exp, 3); /* u8. */
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg3, NULL_RTX, SImode, EXPAND_NORMAL);
+ op1 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
+ op2 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL);
+ op3 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
+ op4 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
mode0 = insn_data[icode].operand[0].mode;
- mode1 = insn_data[icode].operand[1].mode;
+ mode2 = insn_data[icode].operand[2].mode;
+ mode3 = insn_data[icode].operand[3].mode;
+ mode4 = insn_data[icode].operand[4].mode;
- emit_insn (gen_sr (op0, op1));
- return NULL_RTX;
+ /* Do some correctness checks for the operands. */
+ if ((!insn_data[icode].operand[0].predicate (op0, mode0))
+ || !(UNSIGNED_INT8 (INTVAL (op0))))
+ error ("operand 4 should be an unsigned 8-bit value (0-255)");
- case ARC_BUILTIN_TRAP_S:
- icode = CODE_FOR_trap_s;
- arg0 = CALL_EXPR_ARG (exp, 0);
+ if ((!insn_data[icode].operand[2].predicate (op2, mode2))
+ || !(UNSIGNED_INT3 (INTVAL (op2))))
+ error ("operand 3 should be an unsigned 3-bit value (I0-I7)");
- fold (arg0);
+ if (!insn_data[icode].operand[3].predicate (op3, mode3))
+ op3 = copy_to_mode_reg (mode3, op3);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[1].mode;
+ if ((!insn_data[icode].operand[4].predicate (op4, mode4))
+ || !(UNSIGNED_INT3 (INTVAL (op4))))
+ error ("operand 2 should be an unsigned 3-bit value (subreg 0-7)");
+ else if (icode == CODE_FOR_vst32_n_insn
+ && ((INTVAL (op4) % 2) != 0))
+ error ("operand 2 should be an even 3-bit value (subreg 0,2,4,6)");
- /* We don't give an error for non-cost values here because
- we still want to allow things to be fixed up by later inlining /
- constant folding / dead code elimination. */
- if (CONST_INT_P (op0) && !satisfies_constraint_L (op0))
- {
- /* Keep this message in sync with the one in arc.md:trap_s,
- because *.md files don't get scanned by exgettext. */
- error ("operand to trap_s should be an unsigned 6-bit value");
- }
- emit_insn (gen_trap_s (op0));
- return NULL_RTX;
+ pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
+ if (!pat)
+ return NULL_RTX;
- case ARC_BUILTIN_UNIMP_S:
- icode = CODE_FOR_unimp_s;
- emit_insn (gen_unimp_s (const1_rtx));
+ emit_insn (pat);
return NULL_RTX;
- case ARC_BUILTIN_ALIGNED:
- /* __builtin_arc_aligned (void* val, int alignval) */
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- fold (arg1);
- op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- target = gen_reg_rtx (SImode);
+ default:
+ break;
+ }
+
+ /* 2nd part: Expand regular builtins. */
+ if (icode == 0)
+ internal_error ("bad builtin fcode");
+
+ nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
+ j = 0;
- if (!CONST_INT_P (op1))
+ if (nonvoid)
+ {
+ if (target == NULL_RTX
+ || GET_MODE (target) != tmode
+ || !insn_data[icode].operand[0].predicate (target, tmode))
{
- /* If we can't fold the alignment to a constant integer
- whilst optimizing, this is probably a user error. */
- if (optimize)
- warning (0, "__builtin_arc_aligned with non-constant alignment");
+ target = gen_reg_rtx (tmode);
}
- else
+ xop[j++] = target;
+ }
+
+ gcc_assert (n_args <= 4);
+ for (i = 0; i < n_args; i++, j++)
+ {
+ tree arg = CALL_EXPR_ARG (exp, i);
+ machine_mode mode = insn_data[icode].operand[j].mode;
+ rtx op = expand_expr (arg, NULL_RTX, mode, EXPAND_NORMAL);
+ machine_mode opmode = GET_MODE (op);
+ char c = insn_data[icode].operand[j].constraint[0];
+
+ /* SIMD extension requires exact immediate operand match. */
+ if ((id > ARC_BUILTIN_SIMD_BEGIN)
+ && (id < ARC_BUILTIN_SIMD_END)
+ && (c != 'v')
+ && (c != 'r'))
{
- HOST_WIDE_INT alignTest = INTVAL (op1);
- /* Check alignTest is positive, and a power of two. */
- if (alignTest <= 0 || alignTest != (alignTest & -alignTest))
+ if (!CONST_INT_P (op))
+ error ("builtin requires an immediate for operand %d", j);
+ switch (c)
{
- error ("invalid alignment value for __builtin_arc_aligned");
- return NULL_RTX;
+ case 'L':
+ if (!satisfies_constraint_L (op))
+ error ("operand %d should be a 6 bit unsigned immediate", j);
+ break;
+ case 'P':
+ if (!satisfies_constraint_P (op))
+ error ("operand %d should be a 8 bit unsigned immediate", j);
+ break;
+ case 'K':
+ if (!satisfies_constraint_K (op))
+ error ("operand %d should be a 3 bit unsigned immediate", j);
+ break;
+ default:
+ error ("unknown builtin immediate operand type for operand %d",
+ j);
}
+ }
- if (CONST_INT_P (op0))
- {
- HOST_WIDE_INT pnt = INTVAL (op0);
+ if (CONST_INT_P (op))
+ opmode = mode;
- if ((pnt & (alignTest - 1)) == 0)
- return const1_rtx;
- }
- else
- {
- unsigned align = get_pointer_alignment (arg0);
- unsigned numBits = alignTest * BITS_PER_UNIT;
-
- if (align && align >= numBits)
- return const1_rtx;
- /* Another attempt to ascertain alignment. Check the type
- we are pointing to. */
- if (POINTER_TYPE_P (TREE_TYPE (arg0))
- && TYPE_ALIGN (TREE_TYPE (TREE_TYPE (arg0))) >= numBits)
- return const1_rtx;
- }
+ if ((opmode == SImode) && (mode == HImode))
+ {
+ opmode = HImode;
+ op = gen_lowpart (HImode, op);
}
- /* Default to false. */
- return const0_rtx;
+ /* In case the insn wants input operands in modes different from
+ the result, abort. */
+ gcc_assert (opmode == mode || opmode == VOIDmode);
- default:
- break;
+ if (!insn_data[icode].operand[i + nonvoid].predicate (op, mode))
+ op = copy_to_mode_reg (mode, op);
+
+ xop[j] = op;
}
- /* @@@ Should really do something sensible here. */
- return NULL_RTX;
+ pat = apply_GEN_FCN (icode, xop);
+ if (pat == NULL_RTX)
+ return NULL_RTX;
+
+ emit_insn (pat);
+
+ if (nonvoid)
+ return target;
+ else
+ return const0_rtx;
}
/* Returns true if the operands[opno] is a valid compile-time constant to be
ASM_OUTPUT_SKIP (stream, size);
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-/* SIMD builtins support. */
-enum simd_insn_args_type {
- Va_Vb_Vc,
- Va_Vb_rlimm,
- Va_Vb_Ic,
- Va_Vb_u6,
- Va_Vb_u8,
- Va_rlimm_u8,
-
- Va_Vb,
-
- void_rlimm,
- void_u6,
-
- Da_u3_rlimm,
- Da_rlimm_rlimm,
-
- Va_Ib_u8,
- void_Va_Ib_u8,
-
- Va_Vb_Ic_u8,
- void_Va_u3_Ib_u8
-};
-
-struct builtin_description
-{
- enum simd_insn_args_type args_type;
- const enum insn_code icode;
- const char * const name;
- const enum arc_builtins code;
-};
-
-static const struct builtin_description arc_simd_builtin_desc_list[] =
-{
- /* VVV builtins go first. */
-#define SIMD_BUILTIN(type, code, string, builtin) \
- { type,CODE_FOR_##code, "__builtin_arc_" string, \
- ARC_SIMD_BUILTIN_##builtin },
-
- SIMD_BUILTIN (Va_Vb_Vc, vaddaw_insn, "vaddaw", VADDAW)
- SIMD_BUILTIN (Va_Vb_Vc, vaddw_insn, "vaddw", VADDW)
- SIMD_BUILTIN (Va_Vb_Vc, vavb_insn, "vavb", VAVB)
- SIMD_BUILTIN (Va_Vb_Vc, vavrb_insn, "vavrb", VAVRB)
- SIMD_BUILTIN (Va_Vb_Vc, vdifaw_insn, "vdifaw", VDIFAW)
- SIMD_BUILTIN (Va_Vb_Vc, vdifw_insn, "vdifw", VDIFW)
- SIMD_BUILTIN (Va_Vb_Vc, vmaxaw_insn, "vmaxaw", VMAXAW)
- SIMD_BUILTIN (Va_Vb_Vc, vmaxw_insn, "vmaxw", VMAXW)
- SIMD_BUILTIN (Va_Vb_Vc, vminaw_insn, "vminaw", VMINAW)
- SIMD_BUILTIN (Va_Vb_Vc, vminw_insn, "vminw", VMINW)
- SIMD_BUILTIN (Va_Vb_Vc, vmulaw_insn, "vmulaw", VMULAW)
- SIMD_BUILTIN (Va_Vb_Vc, vmulfaw_insn, "vmulfaw", VMULFAW)
- SIMD_BUILTIN (Va_Vb_Vc, vmulfw_insn, "vmulfw", VMULFW)
- SIMD_BUILTIN (Va_Vb_Vc, vmulw_insn, "vmulw", VMULW)
- SIMD_BUILTIN (Va_Vb_Vc, vsubaw_insn, "vsubaw", VSUBAW)
- SIMD_BUILTIN (Va_Vb_Vc, vsubw_insn, "vsubw", VSUBW)
- SIMD_BUILTIN (Va_Vb_Vc, vsummw_insn, "vsummw", VSUMMW)
- SIMD_BUILTIN (Va_Vb_Vc, vand_insn, "vand", VAND)
- SIMD_BUILTIN (Va_Vb_Vc, vandaw_insn, "vandaw", VANDAW)
- SIMD_BUILTIN (Va_Vb_Vc, vbic_insn, "vbic", VBIC)
- SIMD_BUILTIN (Va_Vb_Vc, vbicaw_insn, "vbicaw", VBICAW)
- SIMD_BUILTIN (Va_Vb_Vc, vor_insn, "vor", VOR)
- SIMD_BUILTIN (Va_Vb_Vc, vxor_insn, "vxor", VXOR)
- SIMD_BUILTIN (Va_Vb_Vc, vxoraw_insn, "vxoraw", VXORAW)
- SIMD_BUILTIN (Va_Vb_Vc, veqw_insn, "veqw", VEQW)
- SIMD_BUILTIN (Va_Vb_Vc, vlew_insn, "vlew", VLEW)
- SIMD_BUILTIN (Va_Vb_Vc, vltw_insn, "vltw", VLTW)
- SIMD_BUILTIN (Va_Vb_Vc, vnew_insn, "vnew", VNEW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr1aw_insn, "vmr1aw", VMR1AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr1w_insn, "vmr1w", VMR1W)
- SIMD_BUILTIN (Va_Vb_Vc, vmr2aw_insn, "vmr2aw", VMR2AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr2w_insn, "vmr2w", VMR2W)
- SIMD_BUILTIN (Va_Vb_Vc, vmr3aw_insn, "vmr3aw", VMR3AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr3w_insn, "vmr3w", VMR3W)
- SIMD_BUILTIN (Va_Vb_Vc, vmr4aw_insn, "vmr4aw", VMR4AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr4w_insn, "vmr4w", VMR4W)
- SIMD_BUILTIN (Va_Vb_Vc, vmr5aw_insn, "vmr5aw", VMR5AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr5w_insn, "vmr5w", VMR5W)
- SIMD_BUILTIN (Va_Vb_Vc, vmr6aw_insn, "vmr6aw", VMR6AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr6w_insn, "vmr6w", VMR6W)
- SIMD_BUILTIN (Va_Vb_Vc, vmr7aw_insn, "vmr7aw", VMR7AW)
- SIMD_BUILTIN (Va_Vb_Vc, vmr7w_insn, "vmr7w", VMR7W)
- SIMD_BUILTIN (Va_Vb_Vc, vmrb_insn, "vmrb", VMRB)
- SIMD_BUILTIN (Va_Vb_Vc, vh264f_insn, "vh264f", VH264F)
- SIMD_BUILTIN (Va_Vb_Vc, vh264ft_insn, "vh264ft", VH264FT)
- SIMD_BUILTIN (Va_Vb_Vc, vh264fw_insn, "vh264fw", VH264FW)
- SIMD_BUILTIN (Va_Vb_Vc, vvc1f_insn, "vvc1f", VVC1F)
- SIMD_BUILTIN (Va_Vb_Vc, vvc1ft_insn, "vvc1ft", VVC1FT)
-
- SIMD_BUILTIN (Va_Vb_rlimm, vbaddw_insn, "vbaddw", VBADDW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbmaxw_insn, "vbmaxw", VBMAXW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbminw_insn, "vbminw", VBMINW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbmulaw_insn, "vbmulaw", VBMULAW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbmulfw_insn, "vbmulfw", VBMULFW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbmulw_insn, "vbmulw", VBMULW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbrsubw_insn, "vbrsubw", VBRSUBW)
- SIMD_BUILTIN (Va_Vb_rlimm, vbsubw_insn, "vbsubw", VBSUBW)
-
- /* Va, Vb, Ic instructions. */
- SIMD_BUILTIN (Va_Vb_Ic, vasrw_insn, "vasrw", VASRW)
- SIMD_BUILTIN (Va_Vb_Ic, vsr8_insn, "vsr8", VSR8)
- SIMD_BUILTIN (Va_Vb_Ic, vsr8aw_insn, "vsr8aw", VSR8AW)
-
- /* Va, Vb, u6 instructions. */
- SIMD_BUILTIN (Va_Vb_u6, vasrrwi_insn, "vasrrwi", VASRRWi)
- SIMD_BUILTIN (Va_Vb_u6, vasrsrwi_insn, "vasrsrwi", VASRSRWi)
- SIMD_BUILTIN (Va_Vb_u6, vasrwi_insn, "vasrwi", VASRWi)
- SIMD_BUILTIN (Va_Vb_u6, vasrpwbi_insn, "vasrpwbi", VASRPWBi)
- SIMD_BUILTIN (Va_Vb_u6, vasrrpwbi_insn,"vasrrpwbi", VASRRPWBi)
- SIMD_BUILTIN (Va_Vb_u6, vsr8awi_insn, "vsr8awi", VSR8AWi)
- SIMD_BUILTIN (Va_Vb_u6, vsr8i_insn, "vsr8i", VSR8i)
-
- /* Va, Vb, u8 (simm) instructions. */
- SIMD_BUILTIN (Va_Vb_u8, vmvaw_insn, "vmvaw", VMVAW)
- SIMD_BUILTIN (Va_Vb_u8, vmvw_insn, "vmvw", VMVW)
- SIMD_BUILTIN (Va_Vb_u8, vmvzw_insn, "vmvzw", VMVZW)
- SIMD_BUILTIN (Va_Vb_u8, vd6tapf_insn, "vd6tapf", VD6TAPF)
-
- /* Va, rlimm, u8 (simm) instructions. */
- SIMD_BUILTIN (Va_rlimm_u8, vmovaw_insn, "vmovaw", VMOVAW)
- SIMD_BUILTIN (Va_rlimm_u8, vmovw_insn, "vmovw", VMOVW)
- SIMD_BUILTIN (Va_rlimm_u8, vmovzw_insn, "vmovzw", VMOVZW)
-
- /* Va, Vb instructions. */
- SIMD_BUILTIN (Va_Vb, vabsaw_insn, "vabsaw", VABSAW)
- SIMD_BUILTIN (Va_Vb, vabsw_insn, "vabsw", VABSW)
- SIMD_BUILTIN (Va_Vb, vaddsuw_insn, "vaddsuw", VADDSUW)
- SIMD_BUILTIN (Va_Vb, vsignw_insn, "vsignw", VSIGNW)
- SIMD_BUILTIN (Va_Vb, vexch1_insn, "vexch1", VEXCH1)
- SIMD_BUILTIN (Va_Vb, vexch2_insn, "vexch2", VEXCH2)
- SIMD_BUILTIN (Va_Vb, vexch4_insn, "vexch4", VEXCH4)
- SIMD_BUILTIN (Va_Vb, vupbaw_insn, "vupbaw", VUPBAW)
- SIMD_BUILTIN (Va_Vb, vupbw_insn, "vupbw", VUPBW)
- SIMD_BUILTIN (Va_Vb, vupsbaw_insn, "vupsbaw", VUPSBAW)
- SIMD_BUILTIN (Va_Vb, vupsbw_insn, "vupsbw", VUPSBW)
-
- /* DIb, rlimm, rlimm instructions. */
- SIMD_BUILTIN (Da_rlimm_rlimm, vdirun_insn, "vdirun", VDIRUN)
- SIMD_BUILTIN (Da_rlimm_rlimm, vdorun_insn, "vdorun", VDORUN)
-
- /* DIb, limm, rlimm instructions. */
- SIMD_BUILTIN (Da_u3_rlimm, vdiwr_insn, "vdiwr", VDIWR)
- SIMD_BUILTIN (Da_u3_rlimm, vdowr_insn, "vdowr", VDOWR)
-
- /* rlimm instructions. */
- SIMD_BUILTIN (void_rlimm, vrec_insn, "vrec", VREC)
- SIMD_BUILTIN (void_rlimm, vrun_insn, "vrun", VRUN)
- SIMD_BUILTIN (void_rlimm, vrecrun_insn, "vrecrun", VRECRUN)
- SIMD_BUILTIN (void_rlimm, vendrec_insn, "vendrec", VENDREC)
-
- /* Va, [Ib,u8] instructions. */
- SIMD_BUILTIN (Va_Vb_Ic_u8, vld32wh_insn, "vld32wh", VLD32WH)
- SIMD_BUILTIN (Va_Vb_Ic_u8, vld32wl_insn, "vld32wl", VLD32WL)
- SIMD_BUILTIN (Va_Vb_Ic_u8, vld64_insn, "vld64", VLD64)
- SIMD_BUILTIN (Va_Vb_Ic_u8, vld32_insn, "vld32", VLD32)
-
- SIMD_BUILTIN (Va_Ib_u8, vld64w_insn, "vld64w", VLD64W)
- SIMD_BUILTIN (Va_Ib_u8, vld128_insn, "vld128", VLD128)
- SIMD_BUILTIN (void_Va_Ib_u8, vst128_insn, "vst128", VST128)
- SIMD_BUILTIN (void_Va_Ib_u8, vst64_insn, "vst64", VST64)
-
- /* Va, [Ib, u8] instructions. */
- SIMD_BUILTIN (void_Va_u3_Ib_u8, vst16_n_insn, "vst16_n", VST16_N)
- SIMD_BUILTIN (void_Va_u3_Ib_u8, vst32_n_insn, "vst32_n", VST32_N)
-
- SIMD_BUILTIN (void_u6, vinti_insn, "vinti", VINTI)
-};
-
-static void
-arc_init_simd_builtins (void)
-{
- int i;
- tree endlink = void_list_node;
- tree V8HI_type_node = build_vector_type_for_mode (intHI_type_node, V8HImode);
-
- tree v8hi_ftype_v8hi_v8hi
- = build_function_type (V8HI_type_node,
- tree_cons (NULL_TREE, V8HI_type_node,
- tree_cons (NULL_TREE, V8HI_type_node,
- endlink)));
- tree v8hi_ftype_v8hi_int
- = build_function_type (V8HI_type_node,
- tree_cons (NULL_TREE, V8HI_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- endlink)));
-
- tree v8hi_ftype_v8hi_int_int
- = build_function_type (V8HI_type_node,
- tree_cons (NULL_TREE, V8HI_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE,
- integer_type_node,
- endlink))));
-
- tree void_ftype_v8hi_int_int
- = build_function_type (void_type_node,
- tree_cons (NULL_TREE, V8HI_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE,
- integer_type_node,
- endlink))));
-
- tree void_ftype_v8hi_int_int_int
- = (build_function_type
- (void_type_node,
- tree_cons (NULL_TREE, V8HI_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE,
- integer_type_node,
- endlink))))));
-
- tree v8hi_ftype_int_int
- = build_function_type (V8HI_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- endlink)));
-
- tree void_ftype_int_int
- = build_function_type (void_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- tree_cons (NULL_TREE, integer_type_node,
- endlink)));
-
- tree void_ftype_int
- = build_function_type (void_type_node,
- tree_cons (NULL_TREE, integer_type_node, endlink));
-
- tree v8hi_ftype_v8hi
- = build_function_type (V8HI_type_node, tree_cons (NULL_TREE, V8HI_type_node,
- endlink));
-
- /* These asserts have been introduced to ensure that the order of builtins
- does not get messed up, else the initialization goes wrong. */
- gcc_assert (arc_simd_builtin_desc_list [0].args_type == Va_Vb_Vc);
- for (i=0; arc_simd_builtin_desc_list [i].args_type == Va_Vb_Vc; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi_v8hi, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Vb_rlimm);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Vb_rlimm; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Vb_Ic);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Vb_Ic; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Vb_u6);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Vb_u6; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Vb_u8);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Vb_u8; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_rlimm_u8);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_rlimm_u8; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_int_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Vb);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Vb; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Da_rlimm_rlimm);
- for (; arc_simd_builtin_desc_list [i].args_type == Da_rlimm_rlimm; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list [i].name,
- void_ftype_int_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Da_u3_rlimm);
- for (; arc_simd_builtin_desc_list [i].args_type == Da_u3_rlimm; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- void_ftype_int_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == void_rlimm);
- for (; arc_simd_builtin_desc_list [i].args_type == void_rlimm; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- void_ftype_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Vb_Ic_u8);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Vb_Ic_u8; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_v8hi_int_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == Va_Ib_u8);
- for (; arc_simd_builtin_desc_list [i].args_type == Va_Ib_u8; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- v8hi_ftype_int_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == void_Va_Ib_u8);
- for (; arc_simd_builtin_desc_list [i].args_type == void_Va_Ib_u8; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list [i].name,
- void_ftype_v8hi_int_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == void_Va_u3_Ib_u8);
- for (; arc_simd_builtin_desc_list [i].args_type == void_Va_u3_Ib_u8; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- void_ftype_v8hi_int_int_int,
- arc_simd_builtin_desc_list[i].code);
-
- gcc_assert (arc_simd_builtin_desc_list [i].args_type == void_u6);
- for (; arc_simd_builtin_desc_list [i].args_type == void_u6; i++)
- def_mbuiltin (TARGET_SIMD_SET, arc_simd_builtin_desc_list[i].name,
- void_ftype_int, arc_simd_builtin_desc_list[i].code);
-
- gcc_assert(i == ARRAY_SIZE (arc_simd_builtin_desc_list));
-}
-
-/* Helper function of arc_expand_builtin; has the same parameters,
- except that EXP is now known to be a call to a simd builtin. */
-
-static rtx
-arc_expand_simd_builtin (tree exp,
- rtx target,
- rtx subtarget ATTRIBUTE_UNUSED,
- machine_mode mode ATTRIBUTE_UNUSED,
- int ignore ATTRIBUTE_UNUSED)
-{
- tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
- tree arg0;
- tree arg1;
- tree arg2;
- tree arg3;
- rtx op0;
- rtx op1;
- rtx op2;
- rtx op3;
- rtx op4;
- rtx pat;
- unsigned int i;
- int fcode = DECL_FUNCTION_CODE (fndecl);
- int icode;
- machine_mode mode0;
- machine_mode mode1;
- machine_mode mode2;
- machine_mode mode3;
- machine_mode mode4;
- const struct builtin_description * d;
-
- for (i = 0, d = arc_simd_builtin_desc_list;
- i < ARRAY_SIZE (arc_simd_builtin_desc_list); i++, d++)
- if (d->code == (const enum arc_builtins) fcode)
- break;
-
- /* We must get an entry here. */
- gcc_assert (i < ARRAY_SIZE (arc_simd_builtin_desc_list));
-
- switch (d->args_type)
- {
- case Va_Vb_rlimm:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
-
- target = gen_reg_rtx (V8HImode);
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
- op1 = copy_to_mode_reg (mode1, op1);
-
- pat = GEN_FCN (icode) (target, op0, op1);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case Va_Vb_u6:
- case Va_Vb_u8:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
-
- target = gen_reg_rtx (V8HImode);
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode1)
- || (d->args_type == Va_Vb_u6 && !UNSIGNED_INT6 (INTVAL (op1)))
- || (d->args_type == Va_Vb_u8 && !UNSIGNED_INT8 (INTVAL (op1))))
- error ("operand 2 of %s instruction should be an unsigned %d-bit value",
- d->name,
- (d->args_type == Va_Vb_u6)? 6: 8);
-
- pat = GEN_FCN (icode) (target, op0, op1);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case Va_rlimm_u8:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
-
- target = gen_reg_rtx (V8HImode);
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- if ( (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
- || !(UNSIGNED_INT8 (INTVAL (op1))))
- error ("operand 2 of %s instruction should be an unsigned 8-bit value",
- d->name);
-
- pat = GEN_FCN (icode) (target, op0, op1);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case Va_Vb_Ic:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
- op2 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
-
- target = gen_reg_rtx (V8HImode);
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- if ( (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
- || !(UNSIGNED_INT3 (INTVAL (op1))))
- error ("operand 2 of %s instruction should be an unsigned 3-bit value (I0-I7)",
- d->name);
-
- pat = GEN_FCN (icode) (target, op0, op1, op2);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case Va_Vb_Vc:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, V8HImode, EXPAND_NORMAL);
-
- target = gen_reg_rtx (V8HImode);
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
- op1 = copy_to_mode_reg (mode1, op1);
-
- pat = GEN_FCN (icode) (target, op0, op1);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case Va_Vb:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
-
- target = gen_reg_rtx (V8HImode);
- mode0 = insn_data[icode].operand[1].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- pat = GEN_FCN (icode) (target, op0);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case Da_rlimm_rlimm:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
-
-
- if (icode == CODE_FOR_vdirun_insn)
- target = gen_rtx_REG (SImode, 131);
- else if (icode == CODE_FOR_vdorun_insn)
- target = gen_rtx_REG (SImode, 139);
- else
- gcc_unreachable ();
-
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
-
- if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
- op1 = copy_to_mode_reg (mode1, op1);
-
-
- pat = GEN_FCN (icode) (target, op0, op1);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return NULL_RTX;
-
- case Da_u3_rlimm:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
- arg1 = CALL_EXPR_ARG (exp, 1);
- op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
-
-
- if (! (GET_CODE (op0) == CONST_INT)
- || !(UNSIGNED_INT3 (INTVAL (op0))))
- error ("operand 1 of %s instruction should be an unsigned 3-bit value (DR0-DR7)",
- d->name);
-
- mode1 = insn_data[icode].operand[1].mode;
-
- if (icode == CODE_FOR_vdiwr_insn)
- target = gen_rtx_REG (SImode,
- ARC_FIRST_SIMD_DMA_CONFIG_IN_REG + INTVAL (op0));
- else if (icode == CODE_FOR_vdowr_insn)
- target = gen_rtx_REG (SImode,
- ARC_FIRST_SIMD_DMA_CONFIG_OUT_REG + INTVAL (op0));
- else
- gcc_unreachable ();
-
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
- op1 = copy_to_mode_reg (mode1, op1);
-
- pat = GEN_FCN (icode) (target, op1);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return NULL_RTX;
-
- case void_u6:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
-
- fold (arg0);
-
- op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[0].mode;
-
- /* op0 should be u6. */
- if (! (*insn_data[icode].operand[0].predicate) (op0, mode0)
- || !(UNSIGNED_INT6 (INTVAL (op0))))
- error ("operand of %s instruction should be an unsigned 6-bit value",
- d->name);
-
- pat = GEN_FCN (icode) (op0);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return NULL_RTX;
-
- case void_rlimm:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0);
-
- fold (arg0);
-
- op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
- mode0 = insn_data[icode].operand[0].mode;
-
- if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
- op0 = copy_to_mode_reg (mode0, op0);
-
- pat = GEN_FCN (icode) (op0);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return NULL_RTX;
-
- case Va_Vb_Ic_u8:
- {
- rtx src_vreg;
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0); /* source vreg */
- arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7 */
- arg2 = CALL_EXPR_ARG (exp, 2); /* u8 */
-
- src_vreg = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
- op0 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL); /* [I]0-7 */
- op1 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL); /* u8 */
- op2 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG); /* VR0 */
-
- /* target <- src vreg */
- emit_insn (gen_move_insn (target, src_vreg));
-
- /* target <- vec_concat: target, mem(Ib, u8) */
- mode0 = insn_data[icode].operand[3].mode;
- mode1 = insn_data[icode].operand[1].mode;
-
- if ( (!(*insn_data[icode].operand[3].predicate) (op0, mode0))
- || !(UNSIGNED_INT3 (INTVAL (op0))))
- error ("operand 1 of %s instruction should be an unsigned 3-bit value (I0-I7)",
- d->name);
-
- if ( (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
- || !(UNSIGNED_INT8 (INTVAL (op1))))
- error ("operand 2 of %s instruction should be an unsigned 8-bit value",
- d->name);
-
- pat = GEN_FCN (icode) (target, op1, op2, op0);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
- }
-
- case void_Va_Ib_u8:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0); /* src vreg */
- arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7 */
- arg2 = CALL_EXPR_ARG (exp, 2); /* u8 */
-
- op0 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG); /* VR0 */
- op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL); /* I[0-7] */
- op2 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL); /* u8 */
- op3 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL); /* Vdest */
-
- mode0 = insn_data[icode].operand[0].mode;
- mode1 = insn_data[icode].operand[1].mode;
- mode2 = insn_data[icode].operand[2].mode;
- mode3 = insn_data[icode].operand[3].mode;
-
- if ( (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
- || !(UNSIGNED_INT3 (INTVAL (op1))))
- error ("operand 2 of %s instruction should be an unsigned 3-bit value (I0-I7)",
- d->name);
-
- if ( (!(*insn_data[icode].operand[2].predicate) (op2, mode2))
- || !(UNSIGNED_INT8 (INTVAL (op2))))
- error ("operand 3 of %s instruction should be an unsigned 8-bit value",
- d->name);
-
- if (!(*insn_data[icode].operand[3].predicate) (op3, mode3))
- op3 = copy_to_mode_reg (mode3, op3);
-
- pat = GEN_FCN (icode) (op0, op1, op2, op3);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return NULL_RTX;
-
- case Va_Ib_u8:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0); /* dest vreg */
- arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7 */
-
- op0 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG); /* VR0 */
- op1 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL); /* I[0-7] */
- op2 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL); /* u8 */
-
- /* target <- src vreg */
- target = gen_reg_rtx (V8HImode);
-
- /* target <- vec_concat: target, mem(Ib, u8) */
- mode0 = insn_data[icode].operand[1].mode;
- mode1 = insn_data[icode].operand[2].mode;
- mode2 = insn_data[icode].operand[3].mode;
-
- if ( (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
- || !(UNSIGNED_INT3 (INTVAL (op1))))
- error ("operand 1 of %s instruction should be an unsigned 3-bit value (I0-I7)",
- d->name);
-
- if ( (!(*insn_data[icode].operand[3].predicate) (op2, mode2))
- || !(UNSIGNED_INT8 (INTVAL (op2))))
- error ("operand 2 of %s instruction should be an unsigned 8-bit value",
- d->name);
-
- pat = GEN_FCN (icode) (target, op0, op1, op2);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return target;
-
- case void_Va_u3_Ib_u8:
- icode = d->icode;
- arg0 = CALL_EXPR_ARG (exp, 0); /* source vreg */
- arg1 = CALL_EXPR_ARG (exp, 1); /* u3 */
- arg2 = CALL_EXPR_ARG (exp, 2); /* [I]0-7 */
- arg3 = CALL_EXPR_ARG (exp, 3); /* u8 */
-
- op0 = expand_expr (arg3, NULL_RTX, SImode, EXPAND_NORMAL); /* u8 */
- op1 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG); /* VR */
- op2 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL); /* [I]0-7 */
- op3 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);/* vreg to be stored */
- op4 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL); /* vreg 0-7 subreg no. */
-
- mode0 = insn_data[icode].operand[0].mode;
- mode2 = insn_data[icode].operand[2].mode;
- mode3 = insn_data[icode].operand[3].mode;
- mode4 = insn_data[icode].operand[4].mode;
-
- /* Do some correctness checks for the operands. */
- if ( (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
- || !(UNSIGNED_INT8 (INTVAL (op0))))
- error ("operand 4 of %s instruction should be an unsigned 8-bit value (0-255)",
- d->name);
-
- if ( (!(*insn_data[icode].operand[2].predicate) (op2, mode2))
- || !(UNSIGNED_INT3 (INTVAL (op2))))
- error ("operand 3 of %s instruction should be an unsigned 3-bit value (I0-I7)",
- d->name);
-
- if (!(*insn_data[icode].operand[3].predicate) (op3, mode3))
- op3 = copy_to_mode_reg (mode3, op3);
-
- if ( (!(*insn_data[icode].operand[4].predicate) (op4, mode4))
- || !(UNSIGNED_INT3 (INTVAL (op4))))
- error ("operand 2 of %s instruction should be an unsigned 3-bit value (subreg 0-7)",
- d->name);
- else if (icode == CODE_FOR_vst32_n_insn
- && ((INTVAL(op4) % 2 ) != 0))
- error ("operand 2 of %s instruction should be an even 3-bit value (subreg 0,2,4,6)",
- d->name);
-
- pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
- if (! pat)
- return 0;
-
- emit_insn (pat);
- return NULL_RTX;
-
- default:
- gcc_unreachable ();
- }
- return NULL_RTX;
-}
-
static bool
arc_preserve_reload_p (rtx in)
{
/* Produce the two LR insns to get the high and low parts. */
emit_insn (gen_rtx_SET (destHigh,
- gen_rtx_UNSPEC_VOLATILE (Pmode, gen_rtvec (1, src),
- VUNSPEC_LR_HIGH)));
+ gen_rtx_UNSPEC_VOLATILE (Pmode,
+ gen_rtvec (1, src),
+ VUNSPEC_ARC_LR_HIGH)));
emit_insn (gen_rtx_SET (destLow,
- gen_rtx_UNSPEC_VOLATILE (Pmode, gen_rtvec (1, src),
- VUNSPEC_LR)));
+ gen_rtx_UNSPEC_VOLATILE (Pmode,
+ gen_rtvec (1, src),
+ VUNSPEC_ARC_LR)));
}
}
else if (state == destDx)
emit_insn (gen_rtx_UNSPEC_VOLATILE (Pmode,
gen_rtvec (3, dest, srcHigh, srcLow),
- VUNSPEC_DEXCL_NORES));
+ VUNSPEC_ARC_DEXCL_NORES));
}
else
--- /dev/null
+/* Copyright (C) 2015 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+/* This file contains the definitions and documentation for the
+ builtins defined in the ARC part of the GNU compiler. Before
+ including this file, define a macro
+
+ DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK)
+
+ NAME: `__builtin_arc_name' will be the user-level name of the builtin.
+ `ARC_BUILTIN_NAME' will be the internal builtin's id.
+ N_ARGS: Number of input arguments. If special treatment is needed,
+ set to -1 and handle it by hand, see arc.c:arc_expand_builtin().
+ TYPE: A tree node describing the prototype of the built-in.
+ ICODE: Name of attached insn or expander. If special treatment in arc.c
+ is needed to expand the built-in, use `nothing'.
+ MASK: CPU selector mask. */
+
+/* Special builtins. */
+DEF_BUILTIN (NOP, 0, void_ftype_void, nothing, 1)
+DEF_BUILTIN (RTIE, 0, void_ftype_void, rtie, 1)
+DEF_BUILTIN (SYNC, 0, void_ftype_void, sync, TARGET_ARC700)
+DEF_BUILTIN (BRK, 0, void_ftype_void, brk, 1)
+DEF_BUILTIN (SWI, 0, void_ftype_void, swi, 1)
+DEF_BUILTIN (UNIMP_S, 0, void_ftype_void, unimp_s, !TARGET_ARC600_FAMILY)
+DEF_BUILTIN (TRAP_S, 1, void_ftype_usint, trap_s, !TARGET_ARC600_FAMILY)
+DEF_BUILTIN (ALIGNED, 2, int_ftype_pcvoid_int, nothing, 1)
+DEF_BUILTIN (CLRI, 0, int_ftype_void, clri, TARGET_V2)
+DEF_BUILTIN (SLEEP, 1, void_ftype_usint, sleep, 1)
+
+DEF_BUILTIN (FLAG, 1, void_ftype_usint, flag, 1)
+DEF_BUILTIN (SR, 2, void_ftype_usint_usint, sr, 1)
+DEF_BUILTIN (KFLAG, 1, void_ftype_usint, kflag, TARGET_V2)
+DEF_BUILTIN (CORE_WRITE, 2, void_ftype_usint_usint, core_write, 1)
+DEF_BUILTIN (SETI, 1, void_ftype_int, seti, TARGET_V2)
+
+/* Regular builtins. */
+DEF_BUILTIN (NORM, 1, int_ftype_int, clrsbsi2, TARGET_NORM)
+DEF_BUILTIN (NORMW, 1, int_ftype_short, normw, TARGET_NORM)
+DEF_BUILTIN (SWAP, 1, int_ftype_int, swap, TARGET_SWAP)
+DEF_BUILTIN (DIVAW, 2, int_ftype_int_int, divaw, TARGET_EA_SET)
+DEF_BUILTIN (CORE_READ, 1, usint_ftype_usint, core_read, 1)
+DEF_BUILTIN (LR, 1, usint_ftype_usint, lr, 1)
+DEF_BUILTIN (FFS, 1, int_ftype_int, ffs, (TARGET_EM && TARGET_NORM) || TARGET_HS)
+DEF_BUILTIN (FLS, 1, int_ftype_int, fls, (TARGET_EM && TARGET_NORM) || TARGET_HS)
+
+/* ARC SIMD extension. */
+/* BEGIN SIMD marker. */
+DEF_BUILTIN (SIMD_BEGIN, 0, void_ftype_void, nothing, 0)
+
+DEF_BUILTIN ( VADDAW, 2, v8hi_ftype_v8hi_v8hi, vaddaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VADDW, 2, v8hi_ftype_v8hi_v8hi, vaddw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VAVB, 2, v8hi_ftype_v8hi_v8hi, vavb_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VAVRB, 2, v8hi_ftype_v8hi_v8hi, vavrb_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VDIFAW, 2, v8hi_ftype_v8hi_v8hi, vdifaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VDIFW, 2, v8hi_ftype_v8hi_v8hi, vdifw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMAXAW, 2, v8hi_ftype_v8hi_v8hi, vmaxaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMAXW, 2, v8hi_ftype_v8hi_v8hi, vmaxw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMINAW, 2, v8hi_ftype_v8hi_v8hi, vminaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMINW, 2, v8hi_ftype_v8hi_v8hi, vminw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMULAW, 2, v8hi_ftype_v8hi_v8hi, vmulaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VMULFAW, 2, v8hi_ftype_v8hi_v8hi, vmulfaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMULFW, 2, v8hi_ftype_v8hi_v8hi, vmulfw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMULW, 2, v8hi_ftype_v8hi_v8hi, vmulw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSUBAW, 2, v8hi_ftype_v8hi_v8hi, vsubaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSUBW, 2, v8hi_ftype_v8hi_v8hi, vsubw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSUMMW, 2, v8hi_ftype_v8hi_v8hi, vsummw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VAND, 2, v8hi_ftype_v8hi_v8hi, vand_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VANDAW, 2, v8hi_ftype_v8hi_v8hi, vandaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VBIC, 2, v8hi_ftype_v8hi_v8hi, vbic_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VBICAW, 2, v8hi_ftype_v8hi_v8hi, vbicaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VOR, 2, v8hi_ftype_v8hi_v8hi, vor_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VXOR, 2, v8hi_ftype_v8hi_v8hi, vxor_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VXORAW, 2, v8hi_ftype_v8hi_v8hi, vxoraw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VEQW, 2, v8hi_ftype_v8hi_v8hi, veqw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VLEW, 2, v8hi_ftype_v8hi_v8hi, vlew_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VLTW, 2, v8hi_ftype_v8hi_v8hi, vltw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VNEW, 2, v8hi_ftype_v8hi_v8hi, vnew_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR1AW, 2, v8hi_ftype_v8hi_v8hi, vmr1aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR1W, 2, v8hi_ftype_v8hi_v8hi, vmr1w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR2AW, 2, v8hi_ftype_v8hi_v8hi, vmr2aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR2W, 2, v8hi_ftype_v8hi_v8hi, vmr2w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR3AW, 2, v8hi_ftype_v8hi_v8hi, vmr3aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR3W, 2, v8hi_ftype_v8hi_v8hi, vmr3w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR4AW, 2, v8hi_ftype_v8hi_v8hi, vmr4aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR4W, 2, v8hi_ftype_v8hi_v8hi, vmr4w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR5AW, 2, v8hi_ftype_v8hi_v8hi, vmr5aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR5W, 2, v8hi_ftype_v8hi_v8hi, vmr5w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR6AW, 2, v8hi_ftype_v8hi_v8hi, vmr6aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR6W, 2, v8hi_ftype_v8hi_v8hi, vmr6w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR7AW, 2, v8hi_ftype_v8hi_v8hi, vmr7aw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMR7W, 2, v8hi_ftype_v8hi_v8hi, vmr7w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMRB, 2, v8hi_ftype_v8hi_v8hi, vmrb_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VH264F, 2, v8hi_ftype_v8hi_v8hi, vh264f_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VH264FT, 2, v8hi_ftype_v8hi_v8hi, vh264ft_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VH264FW, 2, v8hi_ftype_v8hi_v8hi, vh264fw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VVC1F, 2, v8hi_ftype_v8hi_v8hi, vvc1f_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VVC1FT, 2, v8hi_ftype_v8hi_v8hi, vvc1ft_insn, TARGET_SIMD_SET)
+
+DEF_BUILTIN ( VBADDW, 2, v8hi_ftype_v8hi_int, vbaddw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VBMAXW, 2, v8hi_ftype_v8hi_int, vbmaxw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VBMINW, 2, v8hi_ftype_v8hi_int, vbminw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VBMULAW, 2, v8hi_ftype_v8hi_int, vbmulaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VBMULFW, 2, v8hi_ftype_v8hi_int, vbmulfw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VBMULW, 2, v8hi_ftype_v8hi_int, vbmulw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VBRSUBW, 2, v8hi_ftype_v8hi_int, vbrsubw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VBSUBW, 2, v8hi_ftype_v8hi_int, vbsubw_insn, TARGET_SIMD_SET)
+
+/* Va, Vb, Ic instructions. */
+DEF_BUILTIN ( VASRW, 2, v8hi_ftype_v8hi_int, vasrw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSR8, 2, v8hi_ftype_v8hi_int, vsr8_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VSR8AW, 2, v8hi_ftype_v8hi_int, vsr8aw_insn, TARGET_SIMD_SET)
+
+/* Va, Vb, u6 instructions. */
+DEF_BUILTIN ( VASRRWi, 2, v8hi_ftype_v8hi_int, vasrrwi_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VASRSRWi, 2, v8hi_ftype_v8hi_int, vasrsrwi_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VASRWi, 2, v8hi_ftype_v8hi_int, vasrwi_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VASRPWBi, 2, v8hi_ftype_v8hi_int, vasrpwbi_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VASRRPWBi, 2, v8hi_ftype_v8hi_int, vasrrpwbi_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSR8AWi, 2, v8hi_ftype_v8hi_int, vsr8awi_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSR8i, 2, v8hi_ftype_v8hi_int, vsr8i_insn, TARGET_SIMD_SET)
+
+/* Va, Vb, u8 (simm) instructions. */
+DEF_BUILTIN ( VMVAW, 2, v8hi_ftype_v8hi_int, vmvaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMVW, 2, v8hi_ftype_v8hi_int, vmvw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMVZW, 2, v8hi_ftype_v8hi_int, vmvzw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VD6TAPF, 2, v8hi_ftype_v8hi_int, vd6tapf_insn, TARGET_SIMD_SET)
+
+/* Va, rlimm, u8 (simm) instructions. */
+DEF_BUILTIN (VMOVAW, 2, v8hi_ftype_int_int, vmovaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VMOVW, 2, v8hi_ftype_int_int, vmovw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VMOVZW, 2, v8hi_ftype_int_int, vmovzw_insn, TARGET_SIMD_SET)
+
+/* Va, Vb instructions. */
+DEF_BUILTIN ( VABSAW, 1, v8hi_ftype_v8hi, vabsaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VABSW, 1, v8hi_ftype_v8hi, vabsw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VADDSUW, 1, v8hi_ftype_v8hi, vaddsuw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VSIGNW, 1, v8hi_ftype_v8hi, vsignw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VEXCH1, 1, v8hi_ftype_v8hi, vexch1_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VEXCH2, 1, v8hi_ftype_v8hi, vexch2_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VEXCH4, 1, v8hi_ftype_v8hi, vexch4_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VUPBAW, 1, v8hi_ftype_v8hi, vupbaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VUPBW, 1, v8hi_ftype_v8hi, vupbw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VUPSBAW, 1, v8hi_ftype_v8hi, vupsbaw_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VUPSBW, 1, v8hi_ftype_v8hi, vupsbw_insn, TARGET_SIMD_SET)
+
+/* SIMD special DIb, rlimm, rlimm instructions. */
+DEF_BUILTIN (VDIRUN, 2, void_ftype_int_int, vdirun_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VDORUN, 2, void_ftype_int_int, vdorun_insn, TARGET_SIMD_SET)
+
+/* SIMD special DIb, limm, rlimm instructions. */
+DEF_BUILTIN (VDIWR, 2, void_ftype_int_int, vdiwr_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VDOWR, 2, void_ftype_int_int, vdowr_insn, TARGET_SIMD_SET)
+
+/* rlimm instructions. */
+DEF_BUILTIN ( VREC, 1, void_ftype_int, vrec_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VRUN, 1, void_ftype_int, vrun_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VRECRUN, 1, void_ftype_int, vrecrun_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VENDREC, 1, void_ftype_int, vendrec_insn, TARGET_SIMD_SET)
+
+/* Va, [Ib,u8] instructions. */
+DEF_BUILTIN (VLD32WH, 3, v8hi_ftype_v8hi_int_int, vld32wh_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VLD32WL, 3, v8hi_ftype_v8hi_int_int, vld32wl_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VLD64, 3, v8hi_ftype_v8hi_int_int, vld64_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VLD32, 3, v8hi_ftype_v8hi_int_int, vld32_insn, TARGET_SIMD_SET)
+
+DEF_BUILTIN (VLD64W, 2, v8hi_ftype_int_int, vld64w_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VLD128, 2, v8hi_ftype_int_int, vld128_insn, TARGET_SIMD_SET)
+
+DEF_BUILTIN (VST128, 3, void_ftype_v8hi_int_int, vst128_insn, TARGET_SIMD_SET)
+DEF_BUILTIN ( VST64, 3, void_ftype_v8hi_int_int, vst64_insn, TARGET_SIMD_SET)
+
+/* Va, [Ib, u8] instructions. */
+DEF_BUILTIN (VST16_N, 4, void_ftype_v8hi_int_int_int, vst16_n_insn, TARGET_SIMD_SET)
+DEF_BUILTIN (VST32_N, 4, void_ftype_v8hi_int_int_int, vst32_n_insn, TARGET_SIMD_SET)
+
+DEF_BUILTIN (VINTI, 1, void_ftype_int, vinti_insn, TARGET_SIMD_SET)
+
+/* END SIMD marker. */
+DEF_BUILTIN (SIMD_END, 0, void_ftype_void, nothing, 0)