From: Aldy Hernandez
Date: Mon, 4 Feb 2002 22:50:56 +0000 (+0000)
Subject: extend.texi: Warn about unsupported usage of altivec builtins.
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=ae4b4a02e878b8ec473d5cabd3b043fe8dfd8940;p=gcc.git

extend.texi: Warn about unsupported usage of altivec builtins.

2002-01-31  Aldy Hernandez

	* doc/extend.texi: Warn about unsupported usage of altivec
	builtins.

	* config/rs6000/rs6000.md (altivec_vcmp*_p): Remove.
	(altivec_predicate_*): New.

	* config/rs6000/altivec.h: Rewrite predicates to use new builtins.
	Add C++ version of vec_*() functions.

	* config/rs6000/rs6000.c (bdesc_altivec_preds): New.
	(bdesc_2arg): Remove altivec predicates.
	(altivec_expand_builtin): Handle predicates.
	(altivec_init_builtins): Handle predicates.
	(altivec_expand_predicate_builtin): New.

From-SVN: r49500
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 85d8be72747..bf46b29b504 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,20 @@
+2002-02-05  Aldy Hernandez
+
+	* doc/extend.texi: Warn about unsupported usage of altivec
+	builtins.
+
+	* config/rs6000/rs6000.md (altivec_vcmp*_p): Remove.
+	(altivec_predicate_*): New.
+
+	* config/rs6000/altivec.h: Rewrite predicates to use new builtins.
+	Add C++ version of vec_*() functions.
+
+	* config/rs6000/rs6000.c (bdesc_altivec_preds): New.
+	(bdesc_2arg): Remove altivec predicates.
+	(altivec_expand_builtin): Handle predicates.
+	(altivec_init_builtins): Handle predicates.
+	(altivec_expand_predicate_builtin): New.
+
 2002-02-04  John David Anglin

 	* pa.c (DO_FRAME_NOTES): Move forward.
diff --git a/gcc/config/rs6000/altivec.h b/gcc/config/rs6000/altivec.h
index 4d5b0a35149..4e8cdf27b09 100644
--- a/gcc/config/rs6000/altivec.h
+++ b/gcc/config/rs6000/altivec.h
@@ -39,14 +39,21 @@ Boston, MA 02111-1307, USA. */
 
 #define __vector __attribute__((vector_size(16)))
 
-/* Dummy prototype. */
-extern void __altivec_link_error_invalid_argument ();
-
 /* You are allowed to undef this for C++ compatability. */
 #define vector __vector
 
+#define pixel short
+
+/* Dummy prototype. */
+extern int __altivec_link_error_invalid_argument ();
+
 /* Helper macros. */
 
+#define __CR6_EQ	0
+#define __CR6_EQ_REV	1
+#define __CR6_LT	2
+#define __CR6_LT_REV	3
+
 #define __bin_args_eq(xtype, x, ytype, y) \
 	(__builtin_types_compatible_p (xtype, typeof (x)) \
 	 && __builtin_types_compatible_p (ytype, typeof (y)))
@@ -63,12 +70,3847 @@ extern void __altivec_link_error_invalid_argument ();
 
 #ifdef __cplusplus
 
-/* C++ stuff here.
*/ - +/* vec_add */ + +inline vector signed char +vec_add (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_add (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_add (vector unsigned char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_add (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_add (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_add (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_add (vector unsigned short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_add (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_add (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_add (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_add (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_add (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_add (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vaddfp ((vector float) a1, (vector float) a2); +} + +/* vec_addc */ + +inline vector unsigned int +vec_addc (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vaddcuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_adds */ + +inline vector unsigned char +vec_adds (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_adds (vector unsigned char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_adds (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_adds (vector signed char a1, vector signed char a2) +{ + 
return (vector signed char) __builtin_altivec_vaddsbs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_adds (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_adds (vector unsigned short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_adds (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_adds (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vaddshs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_adds (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_adds (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_adds (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_adds (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vaddsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_and */ + +inline vector float +vec_and (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_and (vector float a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_and (vector signed int a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_and (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_and (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_and (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_and (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_and (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_and (vector signed short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_and (vector unsigned short a1, vector signed short a2) +{ 
+ return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_and (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_and (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_and (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_and (vector unsigned char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_and (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_andc */ + +inline vector float +vec_andc (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_andc (vector float a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_andc (vector signed int a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_andc (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_andc (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_andc (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_andc (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_andc (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_andc (vector signed short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_andc (vector unsigned short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_andc (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_andc (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_andc (vector signed char a1, vector unsigned char a2) +{ + return (vector signed 
int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_andc (vector unsigned char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_andc (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_avg */ + +inline vector unsigned char +vec_avg (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vavgub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_avg (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vavgsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_avg (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vavguh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_avg (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vavgsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_avg (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vavguw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_avg (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vavgsw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_ceil */ + +inline vector float +vec_ceil (vector float a1) +{ + return (vector float) __builtin_altivec_vrfip ((vector float) a1); +} + +/* vec_cmpb */ + +inline vector signed int +vec_cmpb (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpbfp ((vector float) a1, (vector float) a2); +} + +/* vec_cmpeq */ + +inline vector signed char +vec_cmpeq (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_cmpeq (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_cmpeq (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_cmpeq (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_cmpeq (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpeq (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpeq (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpeqfp ((vector float) a1, (vector float) a2); +} + +/* vec_cmpge */ + +inline vector signed int +vec_cmpge (vector float a1, vector float a2) 
+{ + return (vector signed int) __builtin_altivec_vcmpgefp ((vector float) a1, (vector float) a2); +} + +/* vec_cmpgt */ + +inline vector signed char +vec_cmpgt (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_cmpgt (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_cmpgt (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_cmpgt (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_cmpgt (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpgt (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpgt (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2); +} + +/* vec_cmple */ + +inline vector signed int +vec_cmple (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgefp ((vector float) a1, (vector float) a2); +} + +/* vec_cmplt */ + +inline vector signed char +vec_cmplt (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_cmplt (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_cmplt (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_cmplt (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_cmplt (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmplt (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmplt (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2); +} + +/* vec_ctf */ + +inline vector float +vec_ctf (vector unsigned int a1, int a2) +{ + return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, (const char) a2); +} + +inline vector float +vec_ctf (vector signed int a1, int a2) +{ + return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, (const char) a2); +} + +/* vec_cts */ + +inline vector signed int +vec_cts (vector float a1, int a2) 
+{ + return (vector signed int) __builtin_altivec_vctsxs ((vector float) a1, (const char) a2); +} + +/* vec_ctu */ + +inline vector unsigned int +vec_ctu (vector float a1, int a2) +{ + return (vector signed int) __builtin_altivec_vctuxs ((vector float) a1, (const char) a2); +} + +/* vec_dss */ + +inline void +vec_dss (int a1) +{ + __builtin_altivec_dss ((const char) a1); +} + +/* vec_dssall */ + +inline void +vec_dssall () +{ + __builtin_altivec_dssall (); +} + +/* vec_dst */ + +inline void +vec_dst (void *a1, int a2, int a3) +{ + __builtin_altivec_dst ((void *) a1, a2, (const char) a3); +} + +/* vec_dstst */ + +inline void +vec_dstst (void *a1, int a2, int a3) +{ + __builtin_altivec_dstst ((void *) a1, a2, (const char) a3); +} + +/* vec_dststt */ + +inline void +vec_dststt (void *a1, int a2, int a3) +{ + __builtin_altivec_dststt ((void *) a1, a2, (const char) a3); +} + +/* vec_dstt */ + +inline void +vec_dstt (void *a1, int a2, int a3) +{ + __builtin_altivec_dstt ((void *) a1, a2, (const char) a3); +} + +/* vec_expte */ + +inline vector float +vec_expte (vector float a1) +{ + return (vector float) __builtin_altivec_vexptefp ((vector float) a1); +} + +/* vec_floor */ + +inline vector float +vec_floor (vector float a1) +{ + return (vector float) __builtin_altivec_vrfim ((vector float) a1); +} + +/* vec_ld */ + +inline vector float +vec_ld (int a1, vector float *a2) +{ + return (vector float) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed int +vec_ld (int a1, vector signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned int +vec_ld (int a1, vector unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed short +vec_ld (int a1, vector signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned short +vec_ld (int a1, vector unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed char +vec_ld (int a1, vector signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned char +vec_ld (int a1, vector unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvx (a1, (void *) a2); +} + +/* vec_lde */ + +inline vector signed char +vec_lde (int a1, vector signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvebx (a1, (void *) a2); +} + +inline vector unsigned char +vec_lde (int a1, vector unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvebx (a1, (void *) a2); +} + +inline vector signed short +vec_lde (int a1, vector signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvehx (a1, (void *) a2); +} + +inline vector unsigned short +vec_lde (int a1, vector unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvehx (a1, (void *) a2); +} + +inline vector float +vec_lde (int a1, vector float *a2) +{ + return (vector float) __builtin_altivec_lvewx (a1, (void *) a2); +} + +inline vector signed int +vec_lde (int a1, vector signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvewx (a1, (void *) a2); +} + +inline vector unsigned int +vec_lde (int a1, vector unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvewx (a1, (void *) a2); +} + +/* vec_ldl */ + +inline vector float +vec_ldl (int a1, vector float *a2) +{ + return (vector float) __builtin_altivec_lvxl 
(a1, (void *) a2); +} + +inline vector signed int +vec_ldl (int a1, vector signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned int +vec_ldl (int a1, vector unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed short +vec_ldl (int a1, vector signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned short +vec_ldl (int a1, vector unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed char +vec_ldl (int a1, vector signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned char +vec_ldl (int a1, vector unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvxl (a1, (void *) a2); +} + +/* vec_loge */ + +inline vector float +vec_loge (vector float a1) +{ + return (vector float) __builtin_altivec_vlogefp ((vector float) a1); +} + +/* vec_lvsl */ + +inline vector unsigned char +vec_lvsl (int a1, vector unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +/* vec_lvsr */ + +inline vector unsigned char +vec_lvsr (int a1, vector unsigned char *a2) +{ + return (vector signed char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +/* vec_madd */ + +inline vector float +vec_madd (vector float a1, vector float a2, vector float a3) +{ + return (vector float) __builtin_altivec_vmaddfp ((vector float) a1, (vector float) a2, (vector float) a3); +} + +/* vec_madds */ + +inline vector signed short +vec_madds (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmhaddshs ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +/* vec_max */ + +inline vector unsigned char +vec_max (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_max (vector unsigned char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_max (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_max (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmaxsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_max (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_max (vector unsigned short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_max (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_max (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmaxsh ((vector signed short) a1, (vector 
signed short) a2); +} + +inline vector unsigned int +vec_max (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_max (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_max (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_max (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmaxsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_max (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmaxfp ((vector float) a1, (vector float) a2); +} + +/* vec_mergeh */ + +inline vector signed char +vec_mergeh (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_mergeh (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mergeh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_mergeh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector float +vec_mergeh (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_mergeh (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_mergeh (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_mergel */ + +inline vector signed char +vec_mergel (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_mergel (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mergel (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_mergel (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector float +vec_mergel (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_mergel (vector signed int a1, vector signed int a2) +{ + return (vector signed int) 
__builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_mergel (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_mfvscr */ + +inline vector unsigned short +vec_mfvscr () +{ + return (vector signed short) __builtin_altivec_mfvscr (); +} + +/* vec_min */ + +inline vector unsigned char +vec_min (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_min (vector unsigned char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_min (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_min (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vminsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_min (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_min (vector unsigned short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_min (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_min (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vminsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_min (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_min (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_min (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_min (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vminsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_min (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vminfp ((vector float) a1, (vector float) a2); +} + +/* vec_mladd */ + +inline vector signed short +vec_mladd (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +inline vector signed short +vec_mladd (vector signed short a1, vector unsigned short a2, vector unsigned short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + 
+inline vector signed short +vec_mladd (vector unsigned short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +inline vector unsigned short +vec_mladd (vector unsigned short a1, vector unsigned short a2, vector unsigned short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +/* vec_mradds */ + +inline vector signed short +vec_mradds (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmhraddshs ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +/* vec_msum */ + +inline vector unsigned int +vec_msum (vector unsigned char a1, vector unsigned char a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vmsumubm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3); +} + +inline vector signed int +vec_msum (vector signed char a1, vector unsigned char a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsummbm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3); +} + +inline vector unsigned int +vec_msum (vector unsigned short a1, vector unsigned short a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vmsumuhm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +inline vector signed int +vec_msum (vector signed short a1, vector signed short a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsumshm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_msums */ + +inline vector unsigned int +vec_msums (vector unsigned short a1, vector unsigned short a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vmsumuhs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +inline vector signed int +vec_msums (vector signed short a1, vector signed short a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsumshs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_mtvscr */ + +inline void +vec_mtvscr (vector signed int a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector unsigned int a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector signed short a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector unsigned short a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector signed char a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector unsigned char a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +/* vec_mule */ + +inline vector unsigned short +vec_mule (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed short) __builtin_altivec_vmuleub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mule (vector signed char a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vmulesb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned int +vec_mule (vector unsigned short a1, vector unsigned 
short a2) +{ + return (vector signed int) __builtin_altivec_vmuleuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_mule (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vmulesh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_mulo */ + +inline vector unsigned short +vec_mulo (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed short) __builtin_altivec_vmuloub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mulo (vector signed char a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vmulosb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned int +vec_mulo (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vmulouh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_mulo (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vmulosh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_nmsub */ + +inline vector float +vec_nmsub (vector float a1, vector float a2, vector float a3) +{ + return (vector float) __builtin_altivec_vnmsubfp ((vector float) a1, (vector float) a2, (vector float) a3); +} + +/* vec_nor */ + +inline vector float +vec_nor (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_nor (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_nor (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_nor (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_nor (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_nor (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_nor (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_or */ + +inline vector float +vec_or (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_or (vector float a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_or (vector signed int a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_or (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_or (vector signed int a1, vector 
unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_or (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_or (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_or (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_or (vector signed short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_or (vector unsigned short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_or (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_or (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_or (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_or (vector unsigned char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_or (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_pack */ + +inline vector signed char +vec_pack (vector signed short a1, vector signed short a2) +{ + return (vector signed char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned char +vec_pack (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_pack (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_pack (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_packpx */ + +inline vector signed short +vec_packpx (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vpkpx ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_packs */ + +inline vector unsigned char +vec_packs (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed char +vec_packs (vector signed short a1, vector signed short a2) +{ + return (vector signed char) __builtin_altivec_vpkshss ((vector signed short) a1, (vector signed 
short) a2); +} + +inline vector unsigned short +vec_packs (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_packs (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkswss ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_packsu */ + +inline vector unsigned char +vec_packsu (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned char +vec_packsu (vector signed short a1, vector signed short a2) +{ + return (vector signed char) __builtin_altivec_vpkshus ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_packsu (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_packsu (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkswus ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_perm */ + +inline vector float +vec_perm (vector float a1, vector float a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector signed int +vec_perm (vector signed int a1, vector signed int a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector unsigned int +vec_perm (vector unsigned int a1, vector unsigned int a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector signed short +vec_perm (vector signed short a1, vector signed short a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector unsigned short +vec_perm (vector unsigned short a1, vector unsigned short a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector signed char +vec_perm (vector signed char a1, vector signed char a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector unsigned char +vec_perm (vector unsigned char a1, vector unsigned char a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +/* vec_re */ + +inline vector float +vec_re (vector float a1) +{ + return (vector float) __builtin_altivec_vrefp ((vector float) a1); +} + +/* vec_rl */ + +inline vector signed char +vec_rl (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_rl (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vrlb ((vector signed char) a1, 
(vector signed char) a2); +} + +inline vector signed short +vec_rl (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_rl (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_rl (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_rl (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_round */ + +inline vector float +vec_round (vector float a1) +{ + return (vector float) __builtin_altivec_vrfin ((vector float) a1); +} + +/* vec_rsqrte */ + +inline vector float +vec_rsqrte (vector float a1) +{ + return (vector float) __builtin_altivec_vrsqrtefp ((vector float) a1); +} + +/* vec_sel */ + +inline vector float +vec_sel (vector float a1, vector float a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector float +vec_sel (vector float a1, vector float a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed int +vec_sel (vector signed int a1, vector signed int a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed int +vec_sel (vector signed int a1, vector signed int a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned int +vec_sel (vector unsigned int a1, vector unsigned int a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned int +vec_sel (vector unsigned int a1, vector unsigned int a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed short +vec_sel (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed short +vec_sel (vector signed short a1, vector signed short a2, vector unsigned short a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned short +vec_sel (vector unsigned short a1, vector unsigned short a2, vector signed short a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned short +vec_sel (vector unsigned short a1, vector unsigned short a2, vector unsigned short a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, 
(vector signed int) a3); +} + +inline vector signed char +vec_sel (vector signed char a1, vector signed char a2, vector signed char a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed char +vec_sel (vector signed char a1, vector signed char a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned char +vec_sel (vector unsigned char a1, vector unsigned char a2, vector signed char a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned char +vec_sel (vector unsigned char a1, vector unsigned char a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +/* vec_sl */ + +inline vector signed char +vec_sl (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sl (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sl (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sl (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sl (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sl (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sld */ + +inline vector float +vec_sld (vector float a1, vector float a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); +} + +inline vector signed int +vec_sld (vector signed int a1, vector signed int a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); +} + +inline vector unsigned int +vec_sld (vector unsigned int a1, vector unsigned int a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); +} + +inline vector signed short +vec_sld (vector signed short a1, vector signed short a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); +} + +inline vector unsigned short +vec_sld (vector unsigned short a1, vector unsigned short a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); +} + +inline vector signed char +vec_sld (vector signed char a1, vector signed char a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed 
int) a2, (const char) a3); +} + +inline vector unsigned char +vec_sld (vector unsigned char a1, vector unsigned char a2, int a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); +} + +/* vec_sll */ + +inline vector signed int +vec_sll (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sll (vector signed int a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sll (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sll (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sll (vector unsigned int a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sll (vector unsigned int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sll (vector signed short a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sll (vector signed short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sll (vector signed short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sll (vector unsigned short a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sll (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sll (vector unsigned short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sll (vector signed char a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sll (vector signed char a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sll (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sll (vector unsigned char a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sll (vector unsigned char a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl 
((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sll (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_slo */ + +inline vector float +vec_slo (vector float a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_slo (vector float a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_slo (vector signed int a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_slo (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_slo (vector unsigned int a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_slo (vector unsigned int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_slo (vector signed short a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_slo (vector signed short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_slo (vector unsigned short a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_slo (vector unsigned short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_slo (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_slo (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_slo (vector unsigned char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_slo (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_splat */ + +inline vector signed char +vec_splat (vector signed char a1, int a2) +{ + return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2); +} + +inline vector unsigned char +vec_splat (vector unsigned char a1, int a2) +{ + return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2); +} + +inline vector signed short +vec_splat (vector signed short a1, int a2) +{ + return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2); +} + +inline 
vector unsigned short +vec_splat (vector unsigned short a1, int a2) +{ + return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2); +} + +inline vector float +vec_splat (vector float a1, int a2) +{ + return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2); +} + +inline vector signed int +vec_splat (vector signed int a1, int a2) +{ + return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2); +} + +inline vector unsigned int +vec_splat (vector unsigned int a1, int a2) +{ + return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2); +} + +/* vec_splat_s8 */ + +inline vector signed char +vec_splat_s8 (int a1) +{ + return (vector signed char) __builtin_altivec_vspltisb ((const char) a1); +} + +/* vec_splat_s16 */ + +inline vector signed short +vec_splat_s16 (int a1) +{ + return (vector signed short) __builtin_altivec_vspltish ((const char) a1); +} + +/* vec_splat_s32 */ + +inline vector signed int +vec_splat_s32 (int a1) +{ + return (vector signed int) __builtin_altivec_vspltisw ((const char) a1); +} + +/* vec_splat_u8 */ + +inline vector unsigned char +vec_splat_u8 (int a1) +{ + return (vector signed char) __builtin_altivec_vspltisb ((const char) a1); +} + +/* vec_splat_u16 */ + +inline vector unsigned short +vec_splat_u16 (int a1) +{ + return (vector signed short) __builtin_altivec_vspltish ((const char) a1); +} + +/* vec_splat_u32 */ + +inline vector unsigned int +vec_splat_u32 (int a1) +{ + return (vector signed int) __builtin_altivec_vspltisw ((const char) a1); +} + +/* vec_sr */ + +inline vector signed char +vec_sr (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sr (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sr (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sr (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sr (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sr (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sra */ + +inline vector signed char +vec_sra (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sra (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sra (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sra (vector unsigned short a1, vector 
unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sra (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sra (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_srl */ + +inline vector signed int +vec_srl (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_srl (vector signed int a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_srl (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_srl (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_srl (vector unsigned int a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_srl (vector unsigned int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_srl (vector signed short a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_srl (vector signed short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_srl (vector signed short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_srl (vector unsigned short a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_srl (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_srl (vector unsigned short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_srl (vector signed char a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_srl (vector signed char a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_srl (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_srl (vector unsigned char a1, vector 
unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_srl (vector unsigned char a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_srl (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sro */ + +inline vector float +vec_sro (vector float a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_sro (vector float a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sro (vector signed int a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sro (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sro (vector unsigned int a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sro (vector unsigned int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sro (vector signed short a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sro (vector signed short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sro (vector unsigned short a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sro (vector unsigned short a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sro (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sro (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sro (vector unsigned char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sro (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_st */ + +inline void +vec_st (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline 
void +vec_st (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +/* vec_ste */ + +inline void +vec_ste (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvebx ((vector signed char) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvebx ((vector signed char) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvehx ((vector signed short) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvehx ((vector signed short) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +/* vec_stl */ + +inline void +vec_stl (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +/* vec_sub */ + +inline vector signed char +vec_sub (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sub (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sub (vector unsigned char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline 
vector signed short +vec_sub (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sub (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sub (vector unsigned short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sub (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sub (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sub (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sub (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sub (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_sub (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vsubfp ((vector float) a1, (vector float) a2); +} + +/* vec_subc */ + +inline vector unsigned int +vec_subc (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsubcuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_subs */ + +inline vector unsigned char +vec_subs (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_subs (vector unsigned char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_subs (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_subs (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsubsbs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_subs (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_subs (vector unsigned short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_subs (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_subs (vector signed short a1, vector signed short a2) +{ + return 
(vector signed short) __builtin_altivec_vsubshs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_subs (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_subs (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_subs (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_subs (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sum4s */ + +inline vector unsigned int +vec_sum4s (vector unsigned char a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsum4ubs ((vector signed char) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sum4s (vector signed char a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum4sbs ((vector signed char) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sum4s (vector signed short a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum4shs ((vector signed short) a1, (vector signed int) a2); +} + +/* vec_sum2s */ + +inline vector signed int +vec_sum2s (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum2sws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sums */ + +inline vector signed int +vec_sums (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsumsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_trunc */ + +inline vector float +vec_trunc (vector float a1) +{ + return (vector float) __builtin_altivec_vrfiz ((vector float) a1); +} + +/* vec_unpackh */ + +inline vector signed short +vec_unpackh (vector signed char a1) +{ + return (vector signed short) __builtin_altivec_vupkhsb ((vector signed char) a1); +} + +inline vector signed int +vec_unpackh (vector signed short a1) +{ + return (vector signed int) __builtin_altivec_vupkhsh ((vector signed short) a1); +} + +/* vec_unpackl */ + +inline vector signed short +vec_unpackl (vector signed char a1) +{ + return (vector signed short) __builtin_altivec_vupklsb ((vector signed char) a1); +} + +inline vector signed int +vec_unpackl (vector signed short a1) +{ + return (vector signed int) __builtin_altivec_vupklsh ((vector signed short) a1); +} + +/* vec_xor */ + +inline vector float +vec_xor (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_xor (vector float a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_xor (vector signed int a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_xor (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_xor (vector signed int 
a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_xor (vector unsigned int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_xor (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_xor (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_xor (vector signed short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_xor (vector unsigned short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_xor (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_xor (vector signed char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_xor (vector signed char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_xor (vector unsigned char a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_xor (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_all_eq */ + +inline int +vec_all_eq (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, a1, a2); +} + +inline int +vec_all_eq (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, a1, a2); +} + +inline int +vec_all_eq (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_eq (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_eq (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector signed int 
a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_LT, a1, a2); +} + +/* vec_all_ge */ + +inline int +vec_all_ge (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_gt */ + +inline int +vec_all_gt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector unsigned char a1, vector unsigned 
char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT, a1, a2); +} + +/* vec_all_in */ + +inline int +vec_all_in (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpbfp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_le */ + +inline int +vec_all_le (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector signed int a1, vector 
unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT, a2, a1); +} + +/* vec_all_lt */ + +inline int +vec_all_lt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT, a2, a1); +} + +/* vec_all_nan */ + +inline int +vec_all_nan (vector float a1) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a1); +} + +/* vec_all_ne */ + +inline int +vec_all_ne (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, 
(vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_nge */ + +inline int +vec_all_nge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_ngt */ + +inline int +vec_all_ngt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_nle */ + +inline int +vec_all_nle (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ, a2, a1); +} + +/* vec_all_nlt */ + +inline int +vec_all_nlt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ, a2, a1); +} + +/* vec_all_numeric */ + +inline int +vec_all_numeric (vector float a1) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a1); +} + +/* vec_any_eq */ + +inline int +vec_any_eq (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector signed short a1, vector unsigned short a2) 
+{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_any_ge */ + +inline int +vec_any_ge (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector 
signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_any_gt */ + +inline int +vec_any_gt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_any_le */ + +inline int +vec_any_le (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector signed short a1, vector unsigned short 
a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a2, a1); +} + +/* vec_any_lt */ + +inline int +vec_any_lt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector 
signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, a2, a1); +} + +/* vec_any_nan */ + +inline int +vec_any_nan (vector float a1) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, a1, a1); +} + +/* vec_any_ne */ + +inline int +vec_any_ne (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, a1, a2); +} + +/* vec_any_nge */ + +inline int +vec_any_nge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a1, a2); +} + +/* vec_any_ngt */ + +inline int +vec_any_ngt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, a1, a2); +} + +/* vec_any_nle */ + +inline int +vec_any_nle (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a2, a1); +} + +/* vec_any_nlt */ + +inline int +vec_any_nlt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, a2, a1); +} + +/* vec_any_numeric */ + +inline int +vec_any_numeric (vector float a1) +{ + return 
__builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, a1, a1); +} + +/* vec_any_out */ + +inline int +vec_any_out (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpbfp_p (__CR6_EQ_REV, a1, a2); +} #else /* not C++ */ -/* Hairy macros that implement the AltiVec high-level programming - interface for C. */ +/* "... and so I think no man in a century will suffer as greatly as + you will." */ + +#define vec_step(t) \ + __ch (__builtin_types_compatible_p (t, vector signed int), 4, \ + __ch (__builtin_types_compatible_p (t, vector unsigned int), 4, \ + __ch (__builtin_types_compatible_p (t, vector signed short), 8, \ + __ch (__builtin_types_compatible_p (t, vector unsigned short), 8, \ + __ch (__builtin_types_compatible_p (t, vector signed char), 16, \ + __ch (__builtin_types_compatible_p (t, vector unsigned char), 16, \ + __ch (__builtin_types_compatible_p (t, vector float), 4, \ + __altivec_link_error_invalid_argument ()))))))) #define vec_add(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ @@ -99,7 +3941,7 @@ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ (vector float) __builtin_altivec_vaddfp ((vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_addc(a1, a2) __builtin_altivec_vaddcuw (a1, a2) +#define vec_addc(a1, a2) (vector unsigned int) __builtin_altivec_vaddcuw ((vector signed int) a1, (vector signed int) a2) #define vec_adds(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ @@ -269,9 +4111,9 @@ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ __altivec_link_error_invalid_argument ()))))))) #define vec_ctf(a1, a2) \ -__ch (__bin_args_eq (vector unsigned int, a1, const char, a2), \ +__ch (__bin_args_eq (vector unsigned int, a1, int, a2), \ (vector float) __builtin_altivec_vcfux ((vector signed int) a1, (const char) a2), \ -__ch (__bin_args_eq (vector signed int, a1, const char, a2), \ +__ch (__bin_args_eq (vector signed int, a1, int, a2), \ (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, (const char) a2), \ __altivec_link_error_invalid_argument ())) @@ -374,9 +4216,9 @@ __altivec_link_error_invalid_argument ())))))))))))))) #define vec_loge(a1) __builtin_altivec_vlogefp (a1) -#define vec_lvsl(a1, a2) __builtin_altivec_lvsl (a1, a2) +#define vec_lvsl(a1, a2) (vector unsigned char) __builtin_altivec_lvsl (a1, a2) -#define vec_lvsr(a1, a2) __builtin_altivec_lvsr (a1, a2) +#define vec_lvsr(a1, a2) (vector unsigned char) __builtin_altivec_lvsr (a1, a2) #define vec_madd(a1, a2, a3) __builtin_altivec_vmaddfp (a1, a2, a3) @@ -445,7 +4287,7 @@ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ (vector unsigned int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2), \ __altivec_link_error_invalid_argument ()))))))) -#define vec_mfvscr() __builtin_altivec_mfvscr () +#define vec_mfvscr (vector unsigned short) __builtin_altivec_mfvscr () #define vec_min(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ @@ -631,7 +4473,7 @@ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ (vector unsigned short) __builtin_altivec_vpkswus ((vector signed int) a1, (vector signed int) a2), \ __altivec_link_error_invalid_argument ())))) -#define vec_perm(a1, a2, a3, a4) \ +#define vec_perm(a1, a2, a3) \ __ch (__tern_args_eq (vector float, a1, vector float, a2, vector unsigned char, a3), \ (vector float) __builtin_altivec_vperm_4si ((vector signed int) 
a1, (vector signed int) a2, (vector signed char) a3), \ __ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, vector unsigned char, a3), \ @@ -716,19 +4558,19 @@ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ __altivec_link_error_invalid_argument ())))))) #define vec_sld(a1, a2, a3) \ -__ch (__tern_args_eq (vector float, a1, vector float, a2, const char, a3), \ +__ch (__tern_args_eq (vector float, a1, vector float, a2, int, a3), \ (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, const char, a3), \ +__ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, int, a3), \ (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector unsigned int, a1, vector unsigned int, a2, const char, a3), \ +__ch (__tern_args_eq (vector unsigned int, a1, vector unsigned int, a2, int, a3), \ (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, const char, a3), \ +__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, int, a3), \ (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, const char, a3), \ +__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, int, a3), \ (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector signed char, a1, vector signed char, a2, const char, a3), \ +__ch (__tern_args_eq (vector signed char, a1, vector signed char, a2, int, a3), \ (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, const char, a3), \ +__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, int, a3), \ (vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ __altivec_link_error_invalid_argument ()))))))) @@ -803,19 +4645,19 @@ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ __altivec_link_error_invalid_argument ())))))))))))))) #define vec_splat(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, const char, a2), \ +__ch (__bin_args_eq (vector signed char, a1, int, a2), \ (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, const char, a2), \ +__ch (__bin_args_eq (vector unsigned char, a1, int, a2), \ (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, const char, a2), \ +__ch (__bin_args_eq (vector signed short, a1, int, a2), \ (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, const char, a2), \ +__ch (__bin_args_eq (vector unsigned short, a1, int, a2), \ (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2), \ -__ch (__bin_args_eq (vector float, a1, const char, a2), \ +__ch 
(__bin_args_eq (vector float, a1, int, a2), \ (vector float) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2), \ -__ch (__bin_args_eq (vector signed int, a1, const char, a2), \ +__ch (__bin_args_eq (vector signed int, a1, int, a2), \ (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, const char, a2), \ +__ch (__bin_args_eq (vector unsigned int, a1, int, a2), \ (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2), \ __altivec_link_error_invalid_argument ()))))))) @@ -825,11 +4667,11 @@ __ch (__bin_args_eq (vector unsigned int, a1, const char, a2), \ #define vec_splat_s32(a1) __builtin_altivec_vspltisw (a1) -#define vec_splat_u8(a1) __builtin_altivec_vspltisb (a1) +#define vec_splat_u8(a1) (vector unsigned char) __builtin_altivec_vspltisb (a1) -#define vec_splat_u16(a1) __builtin_altivec_vspltish (a1) +#define vec_splat_u16(a1) (vector unsigned short) __builtin_altivec_vspltish (a1) -#define vec_splat_u32(a1) __builtin_altivec_vspltisw (a1) +#define vec_splat_u32(a1) (vector unsigned int) __builtin_altivec_vspltisw (a1) #define vec_sr(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ @@ -983,7 +4825,7 @@ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ (vector float) __builtin_altivec_vsubfp ((vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_subc(a1, a2) __builtin_altivec_vsubcuw (a1, a2) +#define vec_subc(a1, a2) (vector unsigned int) __builtin_altivec_vsubcuw ((vector unsigned int) a1, (vector unsigned int) a2) #define vec_subs(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ @@ -1078,381 +4920,383 @@ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ __altivec_link_error_invalid_argument ()))))))))))))))) +/* Predicates. 
*/ + #define vec_all_eq(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpeqfp_p (__CR6_LT, (vector float) a1, (vector float) 
a2), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_all_ge(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ + 
__builtin_altivec_vcmpgefp_p (__CR6_EQ, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_all_gt(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) 
__builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgtfp_p (__CR6_LT, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_all_in(a1, a2) __builtin_altivec_vcmpbfp_p (a1, a2) +#define vec_all_in(a1, a2) __builtin_altivec_vcmpbfp_p (__CR6_EQ, a1, a2) #define vec_all_le(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector 
signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgefp_p (__CR6_LT, (vector float) a2, (vector float) a1), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_all_lt(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) 
__builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgtfp_p (__CR6_LT, (vector float) a2, (vector float) a1), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_all_nan(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_all_nan(a1) __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a1) #define vec_all_ne(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + 
__builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpeqfp_p (__CR6_EQ, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_all_nge(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_all_nge(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_EQ, a1, a2) -#define vec_all_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_all_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_EQ, a1, a2) -#define vec_all_nle(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_all_nle(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_EQ, a2, a1) -#define vec_all_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_all_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_EQ, a2, a1) -#define vec_all_numeric(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_all_numeric(a1) __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a1) #define vec_any_eq(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + 
__builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_ge(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - 
(vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_gt(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, 
(vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_le(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) 
a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, (vector float) a2, (vector float) a1), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_lt(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector unsigned short, a1, 
vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, (vector float) a2, (vector float) a1), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_any_nan(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_any_nan(a1) __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, a1, a1) #define vec_any_ne(a1, a2) \ __ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed 
int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2), \ __ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ + __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, (vector float) a1, (vector float) a2), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_any_nge(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_any_nge(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a1, a2) -#define vec_any_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_any_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, a1, a2) -#define vec_any_nle(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_any_nle(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a2, a1) -#define vec_any_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_any_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, a2, a1) -#define vec_any_numeric(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_any_numeric(a1) __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, a1, a1) -#define vec_any_out(a1, a2) __builtin_altivec_vcmpbfp_p (a1, a2) +#define vec_any_out(a1, a2) __builtin_altivec_vcmpbfp_p (__CR6_EQ_REV, a1, a2) #endif /* __cplusplus */ diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c index f8375a43a36..7a33dc21820 100644 --- a/gcc/config/rs6000/rs6000.c +++ b/gcc/config/rs6000/rs6000.c @@ -163,6 +163,7 @@ static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int static rtx altivec_expand_builtin PARAMS ((tree, rtx)); static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx)); +static rtx 
altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx)); static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree)); static void rs6000_parse_abi_options PARAMS ((void)); @@ -3352,19 +3353,34 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS }, { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS }, { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp_p, "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp_p, "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb_p, "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh_p, "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw_p, "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp_p, "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp_p, "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb_p, "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh_p, "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw_p, "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub_p, "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh_p, "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw_p, "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P }, +}; + +/* AltiVec predicates. 
*/ + +struct builtin_description_predicates +{ + const unsigned int mask; + const enum insn_code icode; + const char *opcode; + const char *const name; + const enum rs6000_builtins code; +}; + +static const struct builtin_description_predicates bdesc_altivec_preds[] = +{ + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P } }; /* Simple unary operations: VECb = foo (unsigned literal) or VECb = @@ -3422,6 +3438,7 @@ altivec_expand_unop_builtin (icode, arglist, target) return target; } + static rtx altivec_expand_binop_builtin (icode, arglist, target) enum insn_code icode; @@ -3459,6 +3476,87 @@ altivec_expand_binop_builtin (icode, arglist, target) return target; } +static rtx +altivec_expand_predicate_builtin (icode, opcode, arglist, target) + enum insn_code icode; + const char *opcode; + tree arglist; + rtx target; +{ + rtx pat, scratch; + tree cr6_form = TREE_VALUE (arglist); + tree arg0 = TREE_VALUE (TREE_CHAIN (arglist)); + tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); + enum machine_mode tmode = SImode; + enum machine_mode mode0 = insn_data[icode].operand[1].mode; + enum machine_mode mode1 = insn_data[icode].operand[2].mode; + int cr6_form_int; + + if (TREE_CODE (cr6_form) != INTEGER_CST) + { + error ("argument 1 of __builtin_altivec_predicate must be a constant"); + return NULL_RTX; + } + else + cr6_form_int = TREE_INT_CST_LOW (cr6_form); + + if (mode0 != mode1) + abort (); + + /* If we have invalid arguments, bail out before generating bad rtl. */ + if (arg0 == error_mark_node || arg1 == error_mark_node) + return NULL_RTX; + + if (target == 0 + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! (*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = copy_to_mode_reg (mode0, op0); + if (! 
(*insn_data[icode].operand[2].predicate) (op1, mode1)) + op1 = copy_to_mode_reg (mode1, op1); + + scratch = gen_reg_rtx (mode0); + + pat = GEN_FCN (icode) (scratch, op0, op1, + gen_rtx (SYMBOL_REF, Pmode, opcode)); + if (! pat) + return 0; + emit_insn (pat); + + /* The vec_any* and vec_all* predicates use the same opcodes for two + different operations, but the bits in CR6 will be different + depending on what information we want. So we have to play tricks + with CR6 to get the right bits out. + + If you think this is disgusting, look at the specs for the + AltiVec predicates. */ + + switch (cr6_form_int) + { + case 0: + emit_insn (gen_cr6_test_for_zero (target)); + break; + case 1: + emit_insn (gen_cr6_test_for_zero_reverse (target)); + break; + case 2: + emit_insn (gen_cr6_test_for_lt (target)); + break; + case 3: + emit_insn (gen_cr6_test_for_lt_reverse (target)); + break; + default: + error ("argument 1 of __builtin_altivec_predicate is out of range"); + break; + } + + return target; +} + static rtx altivec_expand_stv_builtin (icode, arglist) enum insn_code icode; @@ -3543,6 +3641,7 @@ altivec_expand_builtin (exp, target) rtx target; { struct builtin_description *d; + struct builtin_description_predicates *dp; size_t i; enum insn_code icode; tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); @@ -3830,6 +3929,12 @@ altivec_expand_builtin (exp, target) if (d->code == fcode) return altivec_expand_binop_builtin (d->icode, arglist, target); + /* Expand the AltiVec predicates. */ + dp = (struct builtin_description_predicates *) bdesc_altivec_preds; + for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++) + if (dp->code == fcode) + return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target); + /* LV* are funky. We initialized them differently. */ switch (fcode) { @@ -3899,7 +4004,8 @@ rs6000_init_builtins () static void altivec_init_builtins (void) { - struct builtin_description * d; + struct builtin_description *d; + struct builtin_description_predicates *dp; size_t i; tree endlink = void_list_node; @@ -4265,6 +4371,38 @@ altivec_init_builtins (void) tree_cons (NULL_TREE, V16QI_type_node, endlink))); + tree int_ftype_int_v4si_v4si + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V4SI_type_node, + tree_cons (NULL_TREE, V4SI_type_node, + endlink)))); + + tree int_ftype_int_v4sf_v4sf + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V4SF_type_node, + tree_cons (NULL_TREE, V4SF_type_node, + endlink)))); + + tree int_ftype_int_v8hi_v8hi + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V8HI_type_node, + tree_cons (NULL_TREE, V8HI_type_node, + endlink)))); + + tree int_ftype_int_v16qi_v16qi + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V16QI_type_node, + tree_cons (NULL_TREE, V16QI_type_node, + endlink)))); + tree v16qi_ftype_int_pvoid = build_function_type (V16QI_type_node, tree_cons (NULL_TREE, integer_type_node, @@ -4412,6 +4550,36 @@ altivec_init_builtins (void) for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++) def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code); + /* Initialize the predicates. 
*/ + dp = (struct builtin_description_predicates *) bdesc_altivec_preds; + for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++) + { + enum machine_mode mode1; + tree type; + + mode1 = insn_data[dp->icode].operand[1].mode; + + switch (mode1) + { + case V4SImode: + type = int_ftype_int_v4si_v4si; + break; + case V8HImode: + type = int_ftype_int_v8hi_v8hi; + break; + case V16QImode: + type = int_ftype_int_v16qi_v16qi; + break; + case V4SFmode: + type = int_ftype_int_v4sf_v4sf; + break; + default: + abort (); + } + + def_builtin (dp->mask, dp->name, type, dp->code); + } + /* Add the simple binary operators. */ d = (struct builtin_description *) bdesc_2arg; for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++) diff --git a/gcc/config/rs6000/rs6000.h b/gcc/config/rs6000/rs6000.h index 2deaf75bf4b..b0d1a1209b1 100644 --- a/gcc/config/rs6000/rs6000.h +++ b/gcc/config/rs6000/rs6000.h @@ -2938,19 +2938,6 @@ enum rs6000_builtins ALTIVEC_BUILTIN_VUPKLSB, ALTIVEC_BUILTIN_VUPKLPX, ALTIVEC_BUILTIN_VUPKLSH, - ALTIVEC_BUILTIN_VCMPBFP_P, - ALTIVEC_BUILTIN_VCMPEQFP_P, - ALTIVEC_BUILTIN_VCMPEQUB_P, - ALTIVEC_BUILTIN_VCMPEQUH_P, - ALTIVEC_BUILTIN_VCMPEQUW_P, - ALTIVEC_BUILTIN_VCMPGEFP_P, - ALTIVEC_BUILTIN_VCMPGTFP_P, - ALTIVEC_BUILTIN_VCMPGTSB_P, - ALTIVEC_BUILTIN_VCMPGTSH_P, - ALTIVEC_BUILTIN_VCMPGTSW_P, - ALTIVEC_BUILTIN_VCMPGTUB_P, - ALTIVEC_BUILTIN_VCMPGTUH_P, - ALTIVEC_BUILTIN_VCMPGTUW_P, ALTIVEC_BUILTIN_MTVSCR, ALTIVEC_BUILTIN_MFVSCR, ALTIVEC_BUILTIN_DSSALL, @@ -2970,5 +2957,18 @@ enum rs6000_builtins ALTIVEC_BUILTIN_STVEBX, ALTIVEC_BUILTIN_STVEHX, ALTIVEC_BUILTIN_STVEWX, - ALTIVEC_BUILTIN_STVXL + ALTIVEC_BUILTIN_STVXL, + ALTIVEC_BUILTIN_VCMPBFP_P, + ALTIVEC_BUILTIN_VCMPEQFP_P, + ALTIVEC_BUILTIN_VCMPEQUB_P, + ALTIVEC_BUILTIN_VCMPEQUH_P, + ALTIVEC_BUILTIN_VCMPEQUW_P, + ALTIVEC_BUILTIN_VCMPGEFP_P, + ALTIVEC_BUILTIN_VCMPGTFP_P, + ALTIVEC_BUILTIN_VCMPGTSB_P, + ALTIVEC_BUILTIN_VCMPGTSH_P, + ALTIVEC_BUILTIN_VCMPGTSW_P, + ALTIVEC_BUILTIN_VCMPGTUB_P, + ALTIVEC_BUILTIN_VCMPGTUH_P, + ALTIVEC_BUILTIN_VCMPGTUW_P }; diff --git a/gcc/config/rs6000/rs6000.md b/gcc/config/rs6000/rs6000.md index 8fb45ff0ac9..1d1533137c9 100644 --- a/gcc/config/rs6000/rs6000.md +++ b/gcc/config/rs6000/rs6000.md @@ -15318,110 +15318,79 @@ ;; AltiVec predicates. -(define_insn "altivec_vcmpequb_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 173))] - "TARGET_ALTIVEC" - "vcmpequb. %0,%1,%2" -[(set_attr "type" "veccmp")]) - -(define_insn "altivec_vcmpequh_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 174))] - "TARGET_ALTIVEC" - "vcmpequh. %0,%1,%2" -[(set_attr "type" "veccmp")]) - -(define_insn "altivec_vcmpequw_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 175))] - "TARGET_ALTIVEC" - "vcmpequw. %0,%1,%2" -[(set_attr "type" "veccmp")]) - -(define_insn "altivec_vcmpeqfp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 176))] +(define_expand "cr6_test_for_zero" + [(set (match_operand:SI 0 "register_operand" "=r") + (eq:SI (reg:CC 74) + (const_int 0)))] "TARGET_ALTIVEC" - "vcmpeqfp. 
%0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpgtub_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 177))] +(define_expand "cr6_test_for_zero_reverse" + [(set (match_operand:SI 0 "register_operand" "=r") + (eq:SI (reg:CC 74) + (const_int 0))) + (set (match_dup 0) (minus:SI (const_int 1) (match_dup 0)))] "TARGET_ALTIVEC" - "vcmpgtub. %0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpgtsb_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 178))] +(define_expand "cr6_test_for_lt" + [(set (match_operand:SI 0 "register_operand" "=r") + (lt:SI (reg:CC 74) + (const_int 0)))] "TARGET_ALTIVEC" - "vcmpgtsb. %0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpgtuw_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 179))] +(define_expand "cr6_test_for_lt_reverse" + [(set (match_operand:SI 0 "register_operand" "=r") + (lt:SI (reg:CC 74) + (const_int 0))) + (set (match_dup 0) (minus:SI (const_int 1) (match_dup 0)))] "TARGET_ALTIVEC" - "vcmpgtuw. %0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpgtsw_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 180))] - "TARGET_ALTIVEC" - "vcmpgtsw. %0,%1,%2" -[(set_attr "type" "veccmp")]) +;; We can get away with generating the opcode on the fly (%3 below) +;; because all the predicates have the same scheduling parameters. -(define_insn "altivec_vcmpgefp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 181))] +(define_insn "altivec_predicate_v4si" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 173)) + (clobber (match_scratch:V4SI 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgefp. %0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpgtfp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 182))] +(define_insn "altivec_predicate_v4sf" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V4SF 1 "register_operand" "v") + (match_operand:V4SF 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 174)) + (clobber (match_scratch:V4SF 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgtfp. %0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpbfp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 183))] +(define_insn "altivec_predicate_v8hi" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 175)) + (clobber (match_scratch:V8HI 0 "=v"))] "TARGET_ALTIVEC" - "vcmpbfp. 
%0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpgtuh_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 184))] +(define_insn "altivec_predicate_v16qi" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 175)) + (clobber (match_scratch:V16QI 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgtuh. %0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpgtsh_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 185))] - "TARGET_ALTIVEC" - "vcmpgtsh. %0,%1,%2" - [(set_attr "type" "veccmp")]) - (define_insn "altivec_mtvscr" [(unspec [(match_operand:V4SI 0 "register_operand" "v")] 186)] "TARGET_ALTIVEC" diff --git a/gcc/doc/extend.texi b/gcc/doc/extend.texi index ad88a2a83af..1663367bf0c 100644 --- a/gcc/doc/extend.texi +++ b/gcc/doc/extend.texi @@ -4889,6 +4889,11 @@ The following functions are made available by including @option{-mabi=altivec}. The functions implement the functionality described in Motorola's AltiVec Programming Interface Manual. +@emph{Note:} Only the @code{} interface is supported. +Internally, GCC uses built-in functions to achieve the functionality in +the aforementioned header file, but they are not supported and are +subject to change without notice. + @smallexample vector signed char vec_abs (vector signed char, vector signed char); vector signed short vec_abs (vector signed short, vector signed short);