V4SF_FTYPE_V4SF_V4SF_INT,
V2DI_FTYPE_V2DI_V2DI_INT,
V2DI2TI_FTYPE_V2DI_V2DI_INT,
+ V1DI2DI_FTYPE_V1DI_V1DI_INT,
V2DF_FTYPE_V2DF_V2DF_INT,
V2DI_FTYPE_V2DI_UINT_UINT,
- V2DI_FTYPE_V2DI_V2DI_UINT_UINT,
- DI_FTYPE_DI_DI_INT
+ V2DI_FTYPE_V2DI_V2DI_UINT_UINT
};
/* Builtins with variable number of arguments. */
/* SSSE3. */
{ OPTION_MASK_ISA_SSSE3, CODE_FOR_ssse3_palignrti, "__builtin_ia32_palignr128", IX86_BUILTIN_PALIGNR128, UNKNOWN, (int) V2DI2TI_FTYPE_V2DI_V2DI_INT },
- { OPTION_MASK_ISA_SSSE3, CODE_FOR_ssse3_palignrdi, "__builtin_ia32_palignr", IX86_BUILTIN_PALIGNR, UNKNOWN, (int) DI_FTYPE_DI_DI_INT },
+ { OPTION_MASK_ISA_SSSE3, CODE_FOR_ssse3_palignrdi, "__builtin_ia32_palignr", IX86_BUILTIN_PALIGNR, UNKNOWN, (int) V1DI2DI_FTYPE_V1DI_V1DI_INT },
/* SSE4.1 */
{ OPTION_MASK_ISA_SSE4_1, CODE_FOR_sse4_1_blendpd, "__builtin_ia32_blendpd", IX86_BUILTIN_BLENDPD, UNKNOWN, (int) V2DF_FTYPE_V2DF_V2DF_INT },
tree v1di_ftype_v1di_v1di
= build_function_type_list (V1DI_type_node,
V1DI_type_node, V1DI_type_node, NULL_TREE);
-
- tree di_ftype_di_di_int
- = build_function_type_list (long_long_unsigned_type_node,
- long_long_unsigned_type_node,
- long_long_unsigned_type_node,
+ tree v1di_ftype_v1di_v1di_int
+ = build_function_type_list (V1DI_type_node,
+ V1DI_type_node, V1DI_type_node,
integer_type_node, NULL_TREE);
-
tree v2si_ftype_v2sf
= build_function_type_list (V2SI_type_node, V2SF_type_node, NULL_TREE);
tree v2sf_ftype_v2si
case V2DI_FTYPE_V2DI_V2DI_UINT_UINT:
type = v2di_ftype_v2di_v2di_unsigned_unsigned;
break;
- case DI_FTYPE_DI_DI_INT:
- type = di_ftype_di_di_int;
+ case V1DI2DI_FTYPE_V1DI_V1DI_INT:
+ type = v1di_ftype_v1di_v1di_int;
break;
default:
gcc_unreachable ();
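
Note: the new function type corresponds to this C-level prototype; a sketch
for reference only, assuming the __v1di typedef the SSE headers use for the
8-byte single-element vector:

    typedef long long __v1di __attribute__ ((__vector_size__ (8)));

    /* v1di_ftype_v1di_v1di_int describes this signature.  */
    __v1di __builtin_ia32_palignr (__v1di, __v1di, int);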
rmode = V2DImode;
nargs_constant = 1;
break;
+ case V1DI2DI_FTYPE_V1DI_V1DI_INT:
+ nargs = 3;
+ rmode = DImode;
+ nargs_constant = 1;
+ break;
case V2DI_FTYPE_V2DI_UINT_UINT:
nargs = 3;
nargs_constant = 2;
nargs = 4;
nargs_constant = 2;
break;
- case DI_FTYPE_DI_DI_INT:
- nargs = 3;
- nargs_constant = 1;
- break;
default:
gcc_unreachable ();
}
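
Note: nargs_constant = 1 requires the last operand to fold to a const_int at
expand time, so the shift count given to the intrinsic must be a compile-time
constant.  A minimal sketch of what that means for users (not part of the
patch):

    #include <tmmintrin.h>

    /* Accepted: the selector is an immediate.  */
    __m64 ok (__m64 a, __m64 b)
    {
      return _mm_alignr_pi8 (a, b, 3);
    }

    /* Rejected: with a runtime selector, expansion cannot form the
       required immediate operand and GCC reports an error.
    __m64 bad (__m64 a, __m64 b, int n)
    {
      return _mm_alignr_pi8 (a, b, n);
    }
    */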
extern __inline __m64 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_alignr_pi8(__m64 __X, __m64 __Y, const int __N)
{
- return (__m64) __builtin_ia32_palignr ((long long)__X,
- (long long)__Y, __N * 8);
+ return (__m64) __builtin_ia32_palignr ((__v1di)__X,
+ (__v1di)__Y, __N * 8);
}
#else
#define _mm_alignr_epi8(X, Y, N) \
(__v2di)(__m128i)(Y), \
(int)(N) * 8))
#define _mm_alignr_pi8(X, Y, N) \
- ((__m64) __builtin_ia32_palignr ((long long)(__m64)(X), \
- (long long)(__m64)(Y), \
+ ((__m64) __builtin_ia32_palignr ((__v1di)(__m64)(X), \
+ (__v1di)(__m64)(Y), \
(int)(N) * 8))
#endif
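
The retyping does not change the intrinsic's behaviour; a usage sketch
(compile with -mssse3):

    #include <tmmintrin.h>

    /* Concatenates hi:lo into a 16-byte value, shifts right by 3 bytes,
       and returns the low 8 bytes.  */
    __m64
    extract_window (__m64 hi, __m64 lo)
    {
      return _mm_alignr_pi8 (hi, lo, 3);
    }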
v8qi __builtin_ia32_psignb (v8qi, v8qi)
v2si __builtin_ia32_psignd (v2si, v2si)
v4hi __builtin_ia32_psignw (v4hi, v4hi)
-long long __builtin_ia32_palignr (long long, long long, int)
+v1di __builtin_ia32_palignr (v1di, v1di, int)
v8qi __builtin_ia32_pabsb (v8qi)
v2si __builtin_ia32_pabsd (v2si)
v4hi __builtin_ia32_pabsw (v4hi)
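
Code that called the builtin directly must now pass vector operands to match
the documented signature; a before/after sketch, assuming the __v1di typedef
from mmintrin.h is visible (the third argument remains a bit count, so a
3-byte shift is 24):

    #include <tmmintrin.h>

    /* Rotates the eight bytes of x right by three.  */
    __m64
    rotate_bytes_right3 (__m64 x)
    {
      /* Was: (__m64) __builtin_ia32_palignr ((long long) x,
                                              (long long) x, 24);  */
      return (__m64) __builtin_ia32_palignr ((__v1di) x, (__v1di) x, 24);
    }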